From 04d11bfb2bf430226af75c302f69e2d284ba395b Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Thu, 6 Oct 2022 18:40:27 -0500 Subject: [PATCH 001/108] 1.12 Fix some Java warnings (#2146) * Fix some Java warnings * Fix accidental change --- java/src/hdf/hdf5lib/HDF5Constants.java | 2 +- .../exceptions/HDF5LibraryException.java | 2 +- java/src/jni/h5aImp.c | 30 +- java/src/jni/h5dImp.c | 28 +- java/src/jni/h5util.c | 296 +++++++++--------- java/test/TestH5Fparams.java | 1 + java/test/TestH5Ocreate.java | 4 + java/test/TestH5Oparams.java | 3 + 8 files changed, 183 insertions(+), 183 deletions(-) diff --git a/java/src/hdf/hdf5lib/HDF5Constants.java b/java/src/hdf/hdf5lib/HDF5Constants.java index 67b94492ee8..e3973976b3d 100644 --- a/java/src/hdf/hdf5lib/HDF5Constants.java +++ b/java/src/hdf/hdf5lib/HDF5Constants.java @@ -19,7 +19,7 @@ * @page HDF5CONST Constants and Enumerated Types * This class contains C constants and enumerated types of HDF5 library. The * values of these constants are obtained from the library by calling - * the JNI function jconstant, where jconstant is any of the private constants + * the JNI function jconstant, where jconstant is used for any of the private constants * which start their name with "H5" need to be converted. *

* Do not edit this file! diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5LibraryException.java b/java/src/hdf/hdf5lib/exceptions/HDF5LibraryException.java index 3de2a281869..d8f9346d232 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5LibraryException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5LibraryException.java @@ -108,7 +108,7 @@ public HDF5LibraryException(String s) /** * @ingroup JERRLIB * - * Return a error message for the minor error number. + * Return an error message for the minor error number. * * These messages come from @ref H5E. * diff --git a/java/src/jni/h5aImp.c b/java/src/jni/h5aImp.c index e5e8462e5e9..086fea87450 100644 --- a/java/src/jni/h5aImp.c +++ b/java/src/jni/h5aImp.c @@ -1084,7 +1084,7 @@ Java_hdf_hdf5lib_H5_H5AreadVL(JNIEnv *env, jclass clss, jlong attr_id, jlong mem /* Get size of data array */ if ((n = ENVPTR->GetArrayLength(ENVONLY, buf)) < 0) { CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); - H5_BAD_ARGUMENT_ERROR(ENVONLY, "H5Aread: readBuf length < 0"); + H5_BAD_ARGUMENT_ERROR(ENVONLY, "H5AreadVL: readBuf length < 0"); } dims[0] = (hsize_t)n; @@ -1095,12 +1095,12 @@ Java_hdf_hdf5lib_H5_H5AreadVL(JNIEnv *env, jclass clss, jlong attr_id, jlong mem if ((type_class = H5Tget_class((hid_t)mem_type_id)) < 0) H5_LIBRARY_ERROR(ENVONLY); if (type_class == H5T_VLEN) { - size_t typeSize; - hid_t memb = H5I_INVALID_HID; - H5T_class_t vlClass; - size_t vlSize; - void *rawBuf = NULL; - jobject *jList = NULL; + size_t typeSize; + hid_t memb = H5I_INVALID_HID; + H5T_class_t vlClass; + size_t vlSize; + void *rawBuf = NULL; + jobjectArray jList = NULL; size_t i, j, x; @@ -1234,7 +1234,7 @@ Java_hdf_hdf5lib_H5_H5AreadVL(JNIEnv *env, jclass clss, jlong attr_id, jlong mem } jobj = ENVPTR->CallStaticObjectMethod(ENVONLY, cFloat, floatValueMid, - floatValue); + (double)floatValue); CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); break; } @@ -1321,7 +1321,7 @@ Java_hdf_hdf5lib_H5_H5AwriteVL(JNIEnv *env, jclass clss, jlong attr_id, jlong me htri_t vl_data_class; 
herr_t status = FAIL; jboolean writeBufIsCopy; - jbyteArray *writeBuf = NULL; + jbyteArray writeBuf = NULL; UNUSED(clss); @@ -1344,12 +1344,12 @@ Java_hdf_hdf5lib_H5_H5AwriteVL(JNIEnv *env, jclass clss, jlong attr_id, jlong me if ((type_class = H5Tget_class((hid_t)mem_type_id)) < 0) H5_LIBRARY_ERROR(ENVONLY); if (type_class == H5T_VLEN) { - size_t typeSize; - hid_t memb = H5I_INVALID_HID; - H5T_class_t vlClass; - size_t vlSize; - void *rawBuf = NULL; - jobject *jList = NULL; + size_t typeSize; + hid_t memb = H5I_INVALID_HID; + H5T_class_t vlClass; + size_t vlSize; + void *rawBuf = NULL; + jobjectArray jList = NULL; size_t i, j, x; diff --git a/java/src/jni/h5dImp.c b/java/src/jni/h5dImp.c index e6e9a76927b..59775e3b838 100644 --- a/java/src/jni/h5dImp.c +++ b/java/src/jni/h5dImp.c @@ -1110,12 +1110,12 @@ Java_hdf_hdf5lib_H5_H5DreadVL(JNIEnv *env, jclass clss, jlong dataset_id, jlong if ((type_class = H5Tget_class((hid_t)mem_type_id)) < 0) H5_LIBRARY_ERROR(ENVONLY); if (type_class == H5T_VLEN) { - size_t typeSize; - hid_t memb = H5I_INVALID_HID; - H5T_class_t vlClass; - size_t vlSize; - void *rawBuf = NULL; - jobject *jList = NULL; + size_t typeSize; + hid_t memb = H5I_INVALID_HID; + H5T_class_t vlClass; + size_t vlSize; + void *rawBuf = NULL; + jobjectArray jList = NULL; size_t i, j, x; @@ -1250,7 +1250,7 @@ Java_hdf_hdf5lib_H5_H5DreadVL(JNIEnv *env, jclass clss, jlong dataset_id, jlong } jobj = ENVPTR->CallStaticObjectMethod(ENVONLY, cFloat, floatValueMid, - floatValue); + (double)floatValue); CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); break; } @@ -1334,7 +1334,7 @@ Java_hdf_hdf5lib_H5_H5DwriteVL(JNIEnv *env, jclass clss, jlong dataset_id, jlong htri_t vl_data_class; herr_t status = FAIL; jboolean writeBufIsCopy; - jbyteArray *writeBuf = NULL; + jbyteArray writeBuf = NULL; UNUSED(clss); @@ -1353,12 +1353,12 @@ Java_hdf_hdf5lib_H5_H5DwriteVL(JNIEnv *env, jclass clss, jlong dataset_id, jlong if ((type_class = H5Tget_class((hid_t)mem_type_id)) < 0) 
H5_LIBRARY_ERROR(ENVONLY); if (type_class == H5T_VLEN) { - size_t typeSize; - hid_t memb = H5I_INVALID_HID; - H5T_class_t vlClass; - size_t vlSize; - void *rawBuf = NULL; - jobject *jList = NULL; + size_t typeSize; + hid_t memb = H5I_INVALID_HID; + H5T_class_t vlClass; + size_t vlSize; + void *rawBuf = NULL; + jobjectArray jList = NULL; size_t i, j, x; diff --git a/java/src/jni/h5util.c b/java/src/jni/h5util.c index 4140a9ec527..76a050a40ef 100644 --- a/java/src/jni/h5util.c +++ b/java/src/jni/h5util.c @@ -436,7 +436,7 @@ h5str_convert(JNIEnv *env, char **in_str, hid_t container, hid_t tid, void *out_ break; case H5T_ENUM: { - void *value; + void *value = NULL; token = HDstrtok(this_str, delimiter); @@ -865,7 +865,7 @@ h5str_sprintf(JNIEnv *env, h5str_t *out_str, hid_t container, hid_t tid, void *i if (NULL == (this_str = (char *)HDmalloc(this_len))) H5_OUT_OF_MEMORY_ERROR(ENVONLY, "h5str_sprintf: failed to allocate string buffer"); - if (HDsnprintf(this_str, this_len, "%g", tmp_float) < 0) + if (HDsnprintf(this_str, this_len, "%g", (double)tmp_float) < 0) H5_JNI_FATAL_ERROR(ENVONLY, "h5str_sprintf: HDsnprintf failure"); break; @@ -2273,10 +2273,10 @@ h5str_render_bin_output(FILE *stream, hid_t container, hid_t tid, void *_mem, hs } case H5T_STRING: { - unsigned char tempuchar; unsigned int i; H5T_str_t pad; - char *s; + char *s = NULL; + unsigned char tempuchar; if ((pad = H5Tget_strpad(tid)) < 0) { ret_value = FAIL; @@ -2287,7 +2287,7 @@ h5str_render_bin_output(FILE *stream, hid_t container, hid_t tid, void *_mem, hs mem = ((unsigned char *)_mem) + block_index * size; if (H5Tis_variable_str(tid)) { - s = *(char **)mem; + s = *(char **)((void *)mem); if (s != NULL) size = HDstrlen(s); } @@ -2919,24 +2919,20 @@ h5str_dump_region_attribute(JNIEnv *env, h5str_t *str, hid_t region_id) int h5str_dump_simple_dset(JNIEnv *env, FILE *stream, hid_t dset, int binary_order) { + hid_t f_space = H5I_INVALID_HID; /* file data space */ hsize_t elmtno; /* counter */ + size_t i = 
0; /* counter */ + int sndims; /* rank of dataspace */ + int carry; /* counter carry value */ hsize_t zero[8]; /* vector of zeros */ hsize_t total_size[H5S_MAX_RANK]; /* total size of dataset*/ - size_t i; /* counter */ - hid_t f_space = H5I_INVALID_HID; /* file data space */ - int ndims; - int carry; /* counter carry value */ - - /* Print info */ - hssize_t p_nelmts; /* total selected elmts */ - size_t p_type_nbytes; /* size of memory type */ /* Stripmine info */ - void *sm_buf = NULL; /* buffer for raw data */ - hsize_t sm_size[H5S_MAX_RANK]; /* stripmine size */ - hsize_t sm_nbytes; /* bytes per stripmine */ - hsize_t sm_nelmts; /* elements per stripmine */ - hid_t sm_space = H5I_INVALID_HID; /* stripmine data space */ + hsize_t sm_size[H5S_MAX_RANK]; /* stripmine size */ + hsize_t sm_nbytes; /* bytes per stripmine */ + hsize_t sm_nelmts; /* elements per stripmine */ + unsigned char *sm_buf = NULL; /* buffer for raw data */ + hid_t sm_space = H5I_INVALID_HID; /* stripmine data space */ /* Hyperslab info */ hsize_t hs_offset[H5S_MAX_RANK]; /* starting offset */ @@ -2944,11 +2940,11 @@ h5str_dump_simple_dset(JNIEnv *env, FILE *stream, hid_t dset, int binary_order) hsize_t hs_nelmts; /* elements in request */ /* VL data special information */ - unsigned int vl_data = 0; /* contains VL datatypes */ - hid_t p_type = H5I_INVALID_HID; - hid_t f_type = H5I_INVALID_HID; + unsigned int vl_data = 0; /* contains VL datatypes */ + int ret_value = FAIL; - int ret_value = FAIL; + hid_t p_type = H5I_INVALID_HID; + hid_t f_type = H5I_INVALID_HID; if (dset < 0) H5_BAD_ARGUMENT_ERROR(ENVONLY, "h5str_dump_simple_dset: dset ID < 0"); @@ -2959,18 +2955,21 @@ h5str_dump_simple_dset(JNIEnv *env, FILE *stream, hid_t dset, int binary_order) if ((f_space = H5Dget_space(dset)) < 0) H5_LIBRARY_ERROR(ENVONLY); - if ((ndims = H5Sget_simple_extent_ndims(f_space)) < 0) + if ((sndims = H5Sget_simple_extent_ndims(f_space)) < 0) H5_LIBRARY_ERROR(ENVONLY); + /* Assume entire data space to be 
printed */ if (H5Sget_simple_extent_dims(f_space, total_size, NULL) < 0) H5_LIBRARY_ERROR(ENVONLY); if (H5Tequal(f_type, H5T_STD_REF_DSETREG)) { + hssize_t p_nelmts; /* total selected elmts */ + if ((p_nelmts = H5Sget_simple_extent_npoints(f_space)) < 0) H5_LIBRARY_ERROR(ENVONLY); if (NULL == - (sm_buf = (H5R_ref_t *)HDcalloc(MAX(sizeof(unsigned), sizeof(H5R_ref_t)), (size_t)p_nelmts))) + (sm_buf = (unsigned char *)HDcalloc(MAX(sizeof(unsigned), sizeof(H5R_ref_t)), (size_t)p_nelmts))) H5_OUT_OF_MEMORY_ERROR(ENVONLY, "h5str_dump_simple_mem: failed to allocate sm_buf"); /* Read the data */ @@ -2987,6 +2986,10 @@ h5str_dump_simple_dset(JNIEnv *env, FILE *stream, hid_t dset, int binary_order) } } else { + /* Print info */ + size_t p_type_nbytes; /* size of memory type */ + hsize_t p_nelmts; /* total selected elmts */ + switch (binary_order) { case 1: { if ((p_type = h5str_get_native_type(f_type)) < 0) @@ -3017,109 +3020,102 @@ h5str_dump_simple_dset(JNIEnv *env, FILE *stream, hid_t dset, int binary_order) } } - if ((size_t)ndims <= (sizeof(sm_size) / sizeof(sm_size[0]))) { - if (H5Sget_simple_extent_dims(f_space, total_size, NULL) < 0) - H5_LIBRARY_ERROR(ENVONLY); + /* Calculate the number of elements we're going to print */ + p_nelmts = 1; - /* Calculate the number of elements we're going to print */ - p_nelmts = 1; - - if (ndims > 0) { - for (i = 0; i < (size_t)ndims; i++) - p_nelmts *= total_size[i]; - } /* end if */ + if (sndims > 0) { + for (i = 0; i < (size_t)sndims; i++) + p_nelmts *= total_size[i]; + } /* end if */ - if (p_nelmts > 0) { - /* Check if we have VL data in the dataset's datatype */ - if (h5str_detect_vlen(p_type) != 0) - vl_data = 1; + if (p_nelmts > 0) { + /* Check if we have VL data in the dataset's datatype */ + if (h5str_detect_vlen(p_type) != 0) + vl_data = 1; - /* - * Determine the strip mine size and allocate a buffer. The strip mine is - * a hyperslab whose size is manageable. 
- */ - if (!(sm_nbytes = p_type_nbytes = H5Tget_size(p_type))) - H5_LIBRARY_ERROR(ENVONLY); + /* + * Determine the strip mine size and allocate a buffer. The strip mine is + * a hyperslab whose size is manageable. + */ + if (!(sm_nbytes = p_type_nbytes = H5Tget_size(p_type))) + H5_LIBRARY_ERROR(ENVONLY); - if (ndims > 0) { - for (i = (size_t)ndims; i > 0; --i) { - hsize_t size = H5TOOLS_BUFSIZE / sm_nbytes; - if (size == 0) /* datum size > H5TOOLS_BUFSIZE */ - size = 1; - sm_size[i - 1] = (((total_size[i - 1]) < (size)) ? (total_size[i - 1]) : (size)); - sm_nbytes *= sm_size[i - 1]; - } + if (sndims > 0) { + for (i = (size_t)sndims; i > 0; --i) { + hsize_t size = H5TOOLS_BUFSIZE / sm_nbytes; + if (size == 0) /* datum size > H5TOOLS_BUFSIZE */ + size = 1; + sm_size[i - 1] = (((total_size[i - 1]) < (size)) ? (total_size[i - 1]) : (size)); + sm_nbytes *= sm_size[i - 1]; } + } - if (sm_nbytes > 0) { - if (NULL == (sm_buf = (unsigned char *)HDmalloc((size_t)sm_nbytes))) - H5_OUT_OF_MEMORY_ERROR(ENVONLY, "h5str_dump_simple_dset: failed to allocate sm_buf"); + if (sm_nbytes > 0) { + if (NULL == (sm_buf = (unsigned char *)HDmalloc((size_t)sm_nbytes))) + H5_OUT_OF_MEMORY_ERROR(ENVONLY, "h5str_dump_simple_dset: failed to allocate sm_buf"); - sm_nelmts = sm_nbytes / p_type_nbytes; + sm_nelmts = sm_nbytes / p_type_nbytes; - if ((sm_space = H5Screate_simple(1, &sm_nelmts, NULL)) < 0) - H5_LIBRARY_ERROR(ENVONLY); - - /* The stripmine loop */ - HDmemset(hs_offset, 0, sizeof hs_offset); - HDmemset(zero, 0, sizeof zero); - - for (elmtno = 0; elmtno < (hsize_t)p_nelmts; elmtno += hs_nelmts) { - /* Calculate the hyperslab size */ - if (ndims > 0) { - for (i = 0, hs_nelmts = 1; i < (size_t)ndims; i++) { - hs_size[i] = (((total_size[i] - hs_offset[i]) < (sm_size[i])) - ? 
(total_size[i] - hs_offset[i]) - : (sm_size[i])); - hs_nelmts *= hs_size[i]; - } - - if (H5Sselect_hyperslab(f_space, H5S_SELECT_SET, hs_offset, NULL, hs_size, NULL) < - 0) - H5_LIBRARY_ERROR(ENVONLY); + if ((sm_space = H5Screate_simple(1, &sm_nelmts, NULL)) < 0) + H5_LIBRARY_ERROR(ENVONLY); - if (H5Sselect_hyperslab(sm_space, H5S_SELECT_SET, zero, NULL, &hs_nelmts, NULL) < - 0) - H5_LIBRARY_ERROR(ENVONLY); + /* The stripmine loop */ + HDmemset(hs_offset, 0, sizeof hs_offset); + HDmemset(zero, 0, sizeof zero); + + for (elmtno = 0; elmtno < (hsize_t)p_nelmts; elmtno += hs_nelmts) { + /* Calculate the hyperslab size */ + if (sndims > 0) { + for (i = 0, hs_nelmts = 1; i < (size_t)sndims; i++) { + hs_size[i] = (((total_size[i] - hs_offset[i]) < (sm_size[i])) + ? (total_size[i] - hs_offset[i]) + : (sm_size[i])); + hs_nelmts *= hs_size[i]; } - else { - if (H5Sselect_all(f_space) < 0) - H5_LIBRARY_ERROR(ENVONLY); - if (H5Sselect_all(sm_space) < 0) - H5_LIBRARY_ERROR(ENVONLY); + if (H5Sselect_hyperslab(f_space, H5S_SELECT_SET, hs_offset, NULL, hs_size, NULL) < 0) + H5_LIBRARY_ERROR(ENVONLY); - hs_nelmts = 1; - } + if (H5Sselect_hyperslab(sm_space, H5S_SELECT_SET, zero, NULL, &hs_nelmts, NULL) < 0) + H5_LIBRARY_ERROR(ENVONLY); + } + else { + if (H5Sselect_all(f_space) < 0) + H5_LIBRARY_ERROR(ENVONLY); - /* Read the data */ - if (H5Dread(dset, p_type, sm_space, f_space, H5P_DEFAULT, sm_buf) < 0) + if (H5Sselect_all(sm_space) < 0) H5_LIBRARY_ERROR(ENVONLY); - if (binary_order == 99) { - if (h5str_dump_simple_data(ENVONLY, stream, dset, p_type, sm_buf, hs_nelmts) < 0) - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - } - else { - if (h5str_render_bin_output(stream, dset, p_type, sm_buf, hs_nelmts) < 0) - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - } + hs_nelmts = 1; + } - /* Reclaim any VL memory, if necessary */ - if (vl_data) { - if (H5Treclaim(p_type, sm_space, H5P_DEFAULT, sm_buf) < 0) - H5_LIBRARY_ERROR(ENVONLY); - } + /* Read the data */ + if (H5Dread(dset, p_type, sm_space, 
f_space, H5P_DEFAULT, sm_buf) < 0) + H5_LIBRARY_ERROR(ENVONLY); + + if (binary_order == 99) { + if (h5str_dump_simple_data(ENVONLY, stream, dset, p_type, sm_buf, hs_nelmts) < 0) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + } + else { + if (h5str_render_bin_output(stream, dset, p_type, sm_buf, hs_nelmts) < 0) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + } - /* Calculate the next hyperslab offset */ - for (i = (size_t)ndims, carry = 1; i > 0 && carry; --i) { - hs_offset[i - 1] += hs_size[i - 1]; + /* Reclaim any VL memory, if necessary */ + if (vl_data) { + if (H5Treclaim(p_type, sm_space, H5P_DEFAULT, sm_buf) < 0) + H5_LIBRARY_ERROR(ENVONLY); + } - if (hs_offset[i - 1] == total_size[i - 1]) - hs_offset[i - 1] = 0; - else - carry = 0; - } + /* Calculate the next hyperslab offset */ + for (i = (size_t)sndims, carry = 1; i > 0 && carry; --i) { + hs_offset[i - 1] += hs_size[i - 1]; + + if (hs_offset[i - 1] == total_size[i - 1]) + hs_offset[i - 1] = 0; + else + carry = 0; } } } @@ -3148,15 +3144,12 @@ h5str_dump_simple_mem(JNIEnv *env, FILE *stream, hid_t attr_id, int binary_order { hid_t f_space = H5I_INVALID_HID; /* file data space */ hsize_t alloc_size; - int ndims; /* rank of dataspace */ - unsigned i; /* counters */ - hsize_t total_size[H5S_MAX_RANK]; /* total size of dataset*/ - hssize_t p_nelmts; /* total selected elmts */ + int sndims; /* rank of dataspace */ + unsigned i; /* counters */ + hsize_t total_size[H5S_MAX_RANK]; /* total size of dataset */ + int ret_value = 0; - void *sm_buf = NULL; /* buffer for raw data */ - hsize_t sm_size[H5S_MAX_RANK]; /* stripmine size */ - - int ret_value = 0; + unsigned char *sm_buf = NULL; /* buffer for raw data */ /* VL data special information */ unsigned int vl_data = 0; /* contains VL datatypes */ @@ -3172,18 +3165,20 @@ h5str_dump_simple_mem(JNIEnv *env, FILE *stream, hid_t attr_id, int binary_order if (H5I_INVALID_HID == (f_space = H5Aget_space(attr_id))) H5_LIBRARY_ERROR(ENVONLY); - if ((ndims = 
H5Sget_simple_extent_ndims(f_space)) < 0) + if ((sndims = H5Sget_simple_extent_ndims(f_space)) < 0) H5_LIBRARY_ERROR(ENVONLY); if (H5Sget_simple_extent_dims(f_space, total_size, NULL) < 0) H5_LIBRARY_ERROR(ENVONLY); if (H5Tequal(f_type, H5T_STD_REF_DSETREG)) { + hssize_t p_nelmts; /* total selected elmts */ + if ((p_nelmts = H5Sget_simple_extent_npoints(f_space)) < 0) H5_LIBRARY_ERROR(ENVONLY); if (NULL == - (sm_buf = (H5R_ref_t *)HDcalloc(MAX(sizeof(unsigned), sizeof(H5R_ref_t)), (size_t)p_nelmts))) + (sm_buf = (unsigned char *)HDcalloc(MAX(sizeof(unsigned), sizeof(H5R_ref_t)), (size_t)p_nelmts))) H5_OUT_OF_MEMORY_ERROR(ENVONLY, "h5str_dump_simple_mem: failed to allocate sm_buf"); /* Read the data */ @@ -3200,6 +3195,8 @@ h5str_dump_simple_mem(JNIEnv *env, FILE *stream, hid_t attr_id, int binary_order } } else { + hsize_t p_nelmts; /* total selected elmts */ + switch (binary_order) { case 1: { if ((p_type = h5str_get_native_type(f_type)) < 0) @@ -3230,45 +3227,40 @@ h5str_dump_simple_mem(JNIEnv *env, FILE *stream, hid_t attr_id, int binary_order } } - if ((size_t)ndims <= (sizeof(sm_size) / sizeof(sm_size[0]))) { - if (H5Sget_simple_extent_dims(f_space, total_size, NULL) < 0) - H5_LIBRARY_ERROR(ENVONLY); - /* Calculate the number of elements we're going to print */ - p_nelmts = 1; + /* Calculate the number of elements we're going to print */ + p_nelmts = 1; - if (ndims > 0) { - for (i = 0; i < (size_t)ndims; i++) - p_nelmts *= total_size[i]; - } /* end if */ + if (sndims > 0) { + for (i = 0; i < (size_t)sndims; i++) + p_nelmts *= total_size[i]; + } /* end if */ - if (p_nelmts > 0) { - /* Check if we have VL data in the dataset's datatype */ - if (h5str_detect_vlen(p_type) != 0) - vl_data = 1; + if (p_nelmts > 0) { + /* Check if we have VL data in the dataset's datatype */ + if (h5str_detect_vlen(p_type) != 0) + vl_data = 1; - alloc_size = (size_t)p_nelmts * H5Tget_size(p_type); - if (NULL == (sm_buf = (unsigned char *)HDmalloc((size_t)alloc_size))) - 
H5_OUT_OF_MEMORY_ERROR(ENVONLY, "h5str_dump_simple_mem: failed to allocate sm_buf"); + alloc_size = (size_t)p_nelmts * H5Tget_size(p_type); + if (NULL == (sm_buf = (unsigned char *)HDmalloc((size_t)alloc_size))) + H5_OUT_OF_MEMORY_ERROR(ENVONLY, "h5str_dump_simple_mem: failed to allocate sm_buf"); - /* Read the data */ - if (H5Aread(attr_id, p_type, sm_buf) < 0) - H5_LIBRARY_ERROR(ENVONLY); + /* Read the data */ + if (H5Aread(attr_id, p_type, sm_buf) < 0) + H5_LIBRARY_ERROR(ENVONLY); - if (binary_order == 99) { - if (h5str_dump_simple_data(ENVONLY, stream, attr_id, p_type, sm_buf, (size_t)p_nelmts) < - 0) - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - } - else { - if (h5str_render_bin_output(stream, attr_id, p_type, sm_buf, (size_t)p_nelmts) < 0) - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - } + if (binary_order == 99) { + if (h5str_dump_simple_data(ENVONLY, stream, attr_id, p_type, sm_buf, (size_t)p_nelmts) < 0) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + } + else { + if (h5str_render_bin_output(stream, attr_id, p_type, sm_buf, (size_t)p_nelmts) < 0) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + } - /* Reclaim any VL memory, if necessary */ - if (vl_data) { - if (H5Treclaim(p_type, f_space, H5P_DEFAULT, sm_buf) < 0) - H5_LIBRARY_ERROR(ENVONLY); - } + /* Reclaim any VL memory, if necessary */ + if (vl_data) { + if (H5Treclaim(p_type, f_space, H5P_DEFAULT, sm_buf) < 0) + H5_LIBRARY_ERROR(ENVONLY); } } } diff --git a/java/test/TestH5Fparams.java b/java/test/TestH5Fparams.java index 6accc3b3dc3..3fcfe3bf57c 100644 --- a/java/test/TestH5Fparams.java +++ b/java/test/TestH5Fparams.java @@ -77,6 +77,7 @@ public void testH5Funmount_null() throws Throwable H5.H5Funmount(-1, null); } + @SuppressWarnings("deprecation") @Ignore public void testH5Fis_hdf5_text() { diff --git a/java/test/TestH5Ocreate.java b/java/test/TestH5Ocreate.java index 096abcd2183..eb57938d299 100644 --- a/java/test/TestH5Ocreate.java +++ b/java/test/TestH5Ocreate.java @@ -449,6 +449,7 @@ public int 
callback(long group, String name, H5O_info_t info, H5O_iterate_opdata ((idata)((H5O_iter_data)iter_data).iterdata.get(3)).link_name.compareToIgnoreCase("G1/DS2") == 0); } + @SuppressWarnings("deprecation") @Test public void testH5Ocomment() { @@ -479,6 +480,7 @@ public void testH5Ocomment() assertTrue("H5Oget_comment: ", obj_comment.compareTo("Test Comment") == 0); } + @SuppressWarnings("deprecation") @Test public void testH5Ocomment_clear() { @@ -525,6 +527,7 @@ public void testH5Ocomment_clear() assertTrue("H5Oget_comment: ", obj_comment == null); } + @SuppressWarnings("deprecation") @Test public void testH5Ocomment_by_name() { @@ -548,6 +551,7 @@ public void testH5Ocomment_by_name() assertTrue("H5Oget_comment_by_name: ", obj_comment.compareTo("Test Comment") == 0); } + @SuppressWarnings("deprecation") @Test public void testH5Ocomment_by_name_clear() { diff --git a/java/test/TestH5Oparams.java b/java/test/TestH5Oparams.java index 280e9fd8497..296babceb68 100644 --- a/java/test/TestH5Oparams.java +++ b/java/test/TestH5Oparams.java @@ -168,6 +168,7 @@ public void testH5Ovisit_by_name_null() throws Throwable H5.H5Ovisit_by_name(-1, "Bogus", -1, -1, null, null, 0, -1); } + @SuppressWarnings("deprecation") @Test(expected = HDF5LibraryException.class) public void testH5Oset_comment_invalid() throws Throwable { @@ -180,12 +181,14 @@ public void testH5Oget_comment_invalid() throws Throwable H5.H5Oget_comment(-1); } + @SuppressWarnings("deprecation") @Test(expected = HDF5LibraryException.class) public void testH5Oset_comment_by_name_invalid() throws Throwable { H5.H5Oset_comment_by_name(-1, "Bogus", null, -1); } + @SuppressWarnings("deprecation") @Test(expected = NullPointerException.class) public void testH5Oset_comment_by_name_null() throws Throwable { From 1a81091b35bd4452338cc908c2c1fdb97dc1773d Mon Sep 17 00:00:00 2001 From: Scot Breitenfeld Date: Thu, 6 Oct 2022 18:41:07 -0500 Subject: [PATCH 002/108] fixed fortran parallel testing with cmake (#2148) --- 
CMakeLists.txt | 1 + fortran/testpar/CMakeLists.txt | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 6ef148e16fb..73bf6bf4b53 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1083,6 +1083,7 @@ if (EXISTS "${HDF5_SOURCE_DIR}/fortran" AND IS_DIRECTORY "${HDF5_SOURCE_DIR}/for # Parallel IO usage requires MPI to be Linked and Included if (H5_HAVE_PARALLEL) + find_package(MPI REQUIRED COMPONENTS Fortran) set (LINK_Fortran_LIBS ${LINK_Fortran_LIBS} ${MPI_Fortran_LIBRARIES}) if (MPI_Fortran_LINK_FLAGS) set (CMAKE_Fortran_EXE_LINKER_FLAGS "${MPI_Fortran_LINK_FLAGS} ${CMAKE_EXE_LINKER_FLAGS}") diff --git a/fortran/testpar/CMakeLists.txt b/fortran/testpar/CMakeLists.txt index 12489528ff9..d41343c36ed 100644 --- a/fortran/testpar/CMakeLists.txt +++ b/fortran/testpar/CMakeLists.txt @@ -4,7 +4,7 @@ project (HDF5_FORTRAN_TESTPAR C Fortran) #----------------------------------------------------------------------------- # Setup include Directories #----------------------------------------------------------------------------- -set (TESTPAR_INCLUDES ${MPI_Fortran_INCLUDE_DIRS} ${HDF5_F90_BINARY_DIR} ${HDF5_F90_SRC_DIR}/src)) +set (TESTPAR_INCLUDES ${MPI_Fortran_INCLUDE_DIRS} ${HDF5_F90_BINARY_DIR} ${HDF5_F90_SRC_DIR}/src) if (NOT BUILD_SHARED_LIBS) set (TESTPAR_INCLUDES ${TESTPAR_INCLUDES} ${CMAKE_Fortran_MODULE_DIRECTORY}/static) else () From 66c8a487221e354d5e8e820176e9a08887caa052 Mon Sep 17 00:00:00 2001 From: Neil Fortner Date: Fri, 4 Nov 2022 07:51:21 -0500 Subject: [PATCH 003/108] Fix problem with variable length attributes being accessed through multiple file handles (#2181) (#2207) --- release_docs/RELEASE.txt | 10 +++++ src/H5Aint.c | 4 ++ test/tattr.c | 79 +++++++++++++++++++++++++++++++++++++++- 3 files changed, 92 insertions(+), 1 deletion(-) diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 26c62b432a4..23641e2da2f 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ 
-170,6 +170,16 @@ Bug Fixes since HDF5-1.12.1 release =================================== Library ------- + - Fixed an issue with variable length attributes + + Previously, if a variable length attribute was held open while its file + was opened through another handle, the same attribute was opened through + the second file handle, and the second file and attribute handles were + closed, attempting to write to the attribute through the first handle + would cause an error. + + (NAF - 2022/10/24) + - Fixed an issue with hyperslab selections Previously, when combining hyperslab selections, it was possible for the diff --git a/src/H5Aint.c b/src/H5Aint.c index 0bb95766253..99e81fa6537 100644 --- a/src/H5Aint.c +++ b/src/H5Aint.c @@ -723,6 +723,10 @@ H5A__write(H5A_t *attr, const H5T_t *mem_type, const void *buf) HDassert(mem_type); HDassert(buf); + /* Patch the top level file pointer in attr->shared->dt->shared->u.vlen.f if needed */ + if (H5T_patch_vlen_file(attr->shared->dt, H5F_VOL_OBJ(attr->oloc.file)) < 0) + HGOTO_ERROR(H5E_ATTR, H5E_CANTINIT, FAIL, "can't patch VL datatype file pointer") + /* Get # of elements for attribute's dataspace */ if ((snelmts = H5S_GET_EXTENT_NPOINTS(attr->shared->ds)) < 0) HGOTO_ERROR(H5E_ATTR, H5E_CANTCOUNT, FAIL, "dataspace is invalid") diff --git a/test/tattr.c b/test/tattr.c index 5c630920264..61e528614a9 100644 --- a/test/tattr.c +++ b/test/tattr.c @@ -11073,6 +11073,81 @@ test_attr_bug9(hid_t fcpl, hid_t fapl) CHECK(ret, FAIL, "H5Sclose"); } /* test_attr_bug9() */ +/**************************************************************** +** +** test_attr_bug10(): Test basic H5A (attribute) code. +** Attempts to trigger a bug which would result in a +** segfault. Create a vlen attribute through a file +** handle, then open the same file through a different +** handle, open the same attribute through the second file +** handle, then close the second file and attribute +** handles, then write to the attribute through the first +** handle. 
+** +****************************************************************/ +static void +test_attr_bug10(hid_t fcpl, hid_t fapl) +{ + hid_t fid1, fid2; /* File IDs */ + hid_t aid1, aid2; /* Attribute IDs */ + hid_t sid; /* Dataspace ID */ + hid_t tid; /* Datatype ID */ + hsize_t dims[1] = {1}; /* Attribute dimensions */ + const char *wbuf[1] = {"foo"}; /* Write buffer */ + herr_t ret; /* Generic return status */ + + /* Output message about test being performed */ + MESSAGE(5, ("Testing that vlen attributes can be written to after a second file handle is closed\n")); + + /* Create dataspace */ + sid = H5Screate_simple(1, dims, NULL); + CHECK(sid, FAIL, "H5Screate_simple"); + + /* Create VL string datatype */ + tid = H5Tcopy(H5T_C_S1); + CHECK(tid, FAIL, "H5Tcreate"); + ret = H5Tset_size(tid, H5T_VARIABLE); + CHECK(ret, FAIL, "H5Tset_size"); + + /* Create file */ + fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl); + CHECK(fid1, FAIL, "H5Fcreate"); + + /* Create attribute on root group */ + aid1 = H5Acreate2(fid1, "attr", tid, sid, H5P_DEFAULT, H5P_DEFAULT); + CHECK(aid1, FAIL, "H5Acreate2"); + + /* Open the same file again */ + fid2 = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl); + CHECK(fid2, FAIL, "H5Fcreate"); + + /* Open the same attribute through the second file handle */ + aid2 = H5Aopen(fid2, "attr", H5P_DEFAULT); + CHECK(aid2, FAIL, "H5Aopen"); + + /* Close the second attribute and file handles */ + ret = H5Aclose(aid2); + CHECK(ret, FAIL, "H5Aclose"); + ret = H5Fclose(fid2); + CHECK(ret, FAIL, "H5Fclose"); + + /* Write to the attribute through the first handle */ + ret = H5Awrite(aid1, tid, wbuf); + + /* Close IDs */ + ret = H5Aclose(aid1); + CHECK(ret, FAIL, "H5Aclose"); + + ret = H5Fclose(fid1); + CHECK(ret, FAIL, "H5Fclose"); + + ret = H5Tclose(tid); + CHECK(ret, FAIL, "H5Tclose"); + + ret = H5Sclose(sid); + CHECK(ret, FAIL, "H5Sclose"); +} /* test_attr_bug10() */ + /**************************************************************** ** ** 
test_attr_delete_dense(): @@ -11301,7 +11376,9 @@ test_attr(void) * attributes being larger than 64K */ test_attr_bug8(my_fcpl, my_fapl); /* Test attribute expanding object header with undecoded messages */ - test_attr_bug9(my_fcpl, my_fapl); /* Test large attributes converting to dense storage */ + test_attr_bug9(my_fcpl, my_fapl); /* Test large attributes converting to dense storage */ + test_attr_bug10(my_fcpl, my_fapl); /* Test writing an attribute after opening and closing + through a different file handle */ /* tests specific to the "new format" */ if (new_format == TRUE) { From 7a1885451213244da0b244338435830fc4dd6ea1 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Fri, 4 Nov 2022 08:13:08 -0500 Subject: [PATCH 004/108] Merge #2198 correct CMake MPI linking (#2215) --- config/cmake/scripts/CTestScript.cmake | 4 ++-- examples/CMakeLists.txt | 4 ++-- release_docs/RELEASE.txt | 8 +++++++- src/CMakeLists.txt | 12 ++++++------ testpar/CMakeLists.txt | 8 ++++---- tools/lib/CMakeLists.txt | 4 ++-- tools/src/h5diff/CMakeLists.txt | 4 ++-- tools/src/h5perf/CMakeLists.txt | 4 ++-- 8 files changed, 27 insertions(+), 21 deletions(-) diff --git a/config/cmake/scripts/CTestScript.cmake b/config/cmake/scripts/CTestScript.cmake index b142ade8b4e..fa149c3fa92 100644 --- a/config/cmake/scripts/CTestScript.cmake +++ b/config/cmake/scripts/CTestScript.cmake @@ -51,7 +51,7 @@ endif () set (BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DSITE:STRING=${CTEST_SITE} -DBUILDNAME:STRING=${CTEST_BUILD_NAME}") # Launchers work only with Makefile and Ninja generators. 
-if(NOT "${CTEST_CMAKE_GENERATOR}" MATCHES "Make|Ninja") +if(NOT "${CTEST_CMAKE_GENERATOR}" MATCHES "Make|Ninja" OR LOCAL_SKIP_TEST) set(CTEST_USE_LAUNCHERS 0) set(ENV{CTEST_USE_LAUNCHERS_DEFAULT} 0) set(BUILD_OPTIONS "${BUILD_OPTIONS} -DCTEST_USE_LAUNCHERS:BOOL=OFF") @@ -269,7 +269,7 @@ endif () ## -- LOCAL_MEMCHECK_TEST executes the Valgrind testing ## -- LOCAL_COVERAGE_TEST executes code coverage process ## -------------------------- - ctest_start (${MODEL} TRACK ${MODEL}) + ctest_start (${MODEL} GROUP ${MODEL}) if (LOCAL_UPDATE) ctest_update (SOURCE "${CTEST_SOURCE_DIRECTORY}") endif () diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 3f329c1aca7..f453467f10d 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -76,10 +76,10 @@ if (H5_HAVE_PARALLEL) target_include_directories (${parallel_example} PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (${parallel_example} STATIC) - target_link_libraries (${parallel_example} PRIVATE ${HDF5_LIB_TARGET} ${MPI_C_LIBRARIES}) + target_link_libraries (${parallel_example} PRIVATE ${HDF5_LIB_TARGET} MPI::MPI_C) else () TARGET_C_PROPERTIES (${parallel_example} SHARED) - target_link_libraries (${parallel_example} PRIVATE ${HDF5_LIBSH_TARGET} ${MPI_C_LIBRARIES}) + target_link_libraries (${parallel_example} PRIVATE ${HDF5_LIBSH_TARGET} MPI::MPI_C) endif () set_target_properties (${parallel_example} PROPERTIES FOLDER examples) diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 23641e2da2f..ce18ed90920 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -225,7 +225,13 @@ Bug Fixes since HDF5-1.12.1 release Configuration ------------- - - + - Move MPI libraries link from PRIVATE to PUBLIC + + The install dependencies were not including the need for MPI libraries when + an application or library was built with the C library. 
Also updated the + CMake target link command to use the newer style MPI::MPI_C link variable. + + (ADB - 2022/20/27) Tools diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index df3eadfa07e..a9802c162b9 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -1014,7 +1014,7 @@ target_include_directories (H5detect PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_ target_compile_definitions(H5detect PUBLIC ${HDF_EXTRA_C_FLAGS} ${HDF_EXTRA_FLAGS}) TARGET_C_PROPERTIES (H5detect STATIC) target_link_libraries (H5detect - PRIVATE "$<$:${MPI_C_LIBRARIES}>" $<$,$>:ws2_32.lib> + PRIVATE "$<$:MPI::MPI_C>" $<$,$>:ws2_32.lib> ) target_compile_options(H5detect PRIVATE "$<$:-O0>" @@ -1108,7 +1108,7 @@ target_include_directories (H5make_libsettings PRIVATE "${HDF5_SRC_DIR};${HDF5_S target_compile_definitions(H5make_libsettings PUBLIC ${HDF_EXTRA_C_FLAGS} ${HDF_EXTRA_FLAGS}) TARGET_C_PROPERTIES (H5make_libsettings STATIC) target_link_libraries (H5make_libsettings - PRIVATE "$<$:${MPI_C_LIBRARIES}>" $<$,$>:ws2_32.lib> + PRIVATE "$<$:MPI::MPI_C>" $<$,$>:ws2_32.lib> ) target_compile_options(H5make_libsettings PRIVATE "$<$:-O0>" @@ -1179,8 +1179,8 @@ if (NOT ONLY_SHARED_LIBS) ) TARGET_C_PROPERTIES (${HDF5_LIB_TARGET} STATIC) target_link_libraries (${HDF5_LIB_TARGET} - PRIVATE ${LINK_LIBS} ${LINK_COMP_LIBS} "$<$:${MPI_C_LIBRARIES}>" - PUBLIC $<$>:${CMAKE_DL_LIBS}> + PRIVATE ${LINK_LIBS} ${LINK_COMP_LIBS} + PUBLIC $<$>:${CMAKE_DL_LIBS}> "$<$:MPI::MPI_C>" ) if (NOT WIN32) target_link_libraries (${HDF5_LIB_TARGET} @@ -1221,8 +1221,8 @@ if (BUILD_SHARED_LIBS) ) TARGET_C_PROPERTIES (${HDF5_LIBSH_TARGET} SHARED) target_link_libraries (${HDF5_LIBSH_TARGET} - PRIVATE ${LINK_LIBS} ${LINK_COMP_LIBS} "$<$:${MPI_C_LIBRARIES}>" $<$:Threads::Threads> - PUBLIC $<$>:${CMAKE_DL_LIBS}> + PRIVATE ${LINK_LIBS} ${LINK_COMP_LIBS} $<$:Threads::Threads> + PUBLIC $<$>:${CMAKE_DL_LIBS}> "$<$:MPI::MPI_C>" ) set_global_variable (HDF5_LIBRARIES_TO_EXPORT "${HDF5_LIBRARIES_TO_EXPORT};${HDF5_LIBSH_TARGET}") 
H5_SET_LIB_OPTIONS (${HDF5_LIBSH_TARGET} ${HDF5_LIB_NAME} SHARED "LIB") diff --git a/testpar/CMakeLists.txt b/testpar/CMakeLists.txt index ff4446ce974..4d9e65bd49e 100644 --- a/testpar/CMakeLists.txt +++ b/testpar/CMakeLists.txt @@ -29,12 +29,12 @@ target_include_directories (testphdf5 if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (testphdf5 STATIC) target_link_libraries (testphdf5 - PRIVATE ${HDF5_TEST_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:${MPI_C_LIBRARIES}>" + PRIVATE ${HDF5_TEST_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:MPI::MPI_C>" ) else () TARGET_C_PROPERTIES (testphdf5 SHARED) target_link_libraries (testphdf5 - PRIVATE ${HDF5_TEST_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:${MPI_C_LIBRARIES}>" + PRIVATE ${HDF5_TEST_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:MPI::MPI_C>" ) endif () set_target_properties (testphdf5 PROPERTIES FOLDER test/par) @@ -55,13 +55,13 @@ macro (ADD_H5P_EXE file) if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (${file} STATIC) target_link_libraries (${file} - PRIVATE ${HDF5_TEST_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:${MPI_C_LIBRARIES}>" + PRIVATE ${HDF5_TEST_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:MPI::MPI_C>" $<$,$>:ws2_32.lib> ) else () TARGET_C_PROPERTIES (${file} SHARED) target_link_libraries (${file} - PRIVATE ${HDF5_TEST_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:${MPI_C_LIBRARIES}>" + PRIVATE ${HDF5_TEST_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:MPI::MPI_C>" $<$,$>:ws2_32.lib> ) endif () diff --git a/tools/lib/CMakeLists.txt b/tools/lib/CMakeLists.txt index 6e4d3e63755..a49b53bbfe8 100644 --- a/tools/lib/CMakeLists.txt +++ b/tools/lib/CMakeLists.txt @@ -46,7 +46,7 @@ if (NOT ONLY_SHARED_LIBS) TARGET_C_PROPERTIES (${HDF5_TOOLS_LIB_TARGET} STATIC) target_link_libraries (${HDF5_TOOLS_LIB_TARGET} PUBLIC ${HDF5_LIB_TARGET} - PRIVATE "$<$:${MPI_C_LIBRARIES}>" + PRIVATE "$<$:MPI::MPI_C>" ) set_global_variable (HDF5_LIBRARIES_TO_EXPORT "${HDF5_LIBRARIES_TO_EXPORT};${HDF5_TOOLS_LIB_TARGET}") H5_SET_LIB_OPTIONS (${HDF5_TOOLS_LIB_TARGET} ${HDF5_TOOLS_LIB_NAME} 
STATIC 0) @@ -69,7 +69,7 @@ if (BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (${HDF5_TOOLS_LIBSH_TARGET} SHARED) target_link_libraries (${HDF5_TOOLS_LIBSH_TARGET} PUBLIC ${HDF5_LIBSH_TARGET} - PRIVATE "$<$:${MPI_C_LIBRARIES}>" + PRIVATE "$<$:MPI::MPI_C>" ) set_global_variable (HDF5_LIBRARIES_TO_EXPORT "${HDF5_LIBRARIES_TO_EXPORT};${HDF5_TOOLS_LIBSH_TARGET}") H5_SET_LIB_OPTIONS (${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_TOOLS_LIB_NAME} SHARED "TOOLS") diff --git a/tools/src/h5diff/CMakeLists.txt b/tools/src/h5diff/CMakeLists.txt index e54b2da2806..b3402a898f7 100644 --- a/tools/src/h5diff/CMakeLists.txt +++ b/tools/src/h5diff/CMakeLists.txt @@ -57,7 +57,7 @@ if (H5_HAVE_PARALLEL) target_include_directories (ph5diff PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(ph5diff PRIVATE "${HDF5_CMAKE_C_FLAGS}") TARGET_C_PROPERTIES (ph5diff STATIC) - target_link_libraries (ph5diff PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:${MPI_C_LIBRARIES}>") + target_link_libraries (ph5diff PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:MPI::MPI_C>") set_target_properties (ph5diff PROPERTIES FOLDER tools) set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};ph5diff") set (H5_DEP_EXECUTABLES ${H5_DEP_EXECUTABLES} ph5diff) @@ -70,7 +70,7 @@ if (H5_HAVE_PARALLEL) target_include_directories (ph5diff-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(ph5diff-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}") TARGET_C_PROPERTIES (ph5diff-shared SHARED) - target_link_libraries (ph5diff-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:${MPI_C_LIBRARIES}>") + target_link_libraries (ph5diff-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:MPI::MPI_C>") set_target_properties (ph5diff-shared PROPERTIES FOLDER tools) set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};ph5diff-shared") set 
(H5_DEP_EXECUTABLES ${H5_DEP_EXECUTABLES} ph5diff-shared) diff --git a/tools/src/h5perf/CMakeLists.txt b/tools/src/h5perf/CMakeLists.txt index 36b0b2f37d5..4caef5978f1 100644 --- a/tools/src/h5perf/CMakeLists.txt +++ b/tools/src/h5perf/CMakeLists.txt @@ -67,10 +67,10 @@ if (H5_HAVE_PARALLEL) target_include_directories (h5perf PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT ONLY_SHARED_LIBS) TARGET_C_PROPERTIES (h5perf STATIC) - target_link_libraries (h5perf PRIVATE ${LINK_LIBS} ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:${MPI_C_LIBRARIES}>") + target_link_libraries (h5perf PRIVATE ${LINK_LIBS} ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:MPI::MPI_C>") else () TARGET_C_PROPERTIES (h5perf SHARED) - target_link_libraries (h5perf PRIVATE ${LINK_LIBS} ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:${MPI_C_LIBRARIES}>") + target_link_libraries (h5perf PRIVATE ${LINK_LIBS} ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:MPI::MPI_C>") endif () set_target_properties (h5perf PROPERTIES FOLDER perform) set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};h5perf") From 52854566ecad891ee60458777932539d3edab6e0 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Fri, 4 Nov 2022 10:37:17 -0500 Subject: [PATCH 005/108] 1.12 Merge HDFGroup#2175 Implement option for using CMake GNUInstallDirs (#2221) --- CMakeInstallation.cmake | 2 +- c++/src/CMakeLists.txt | 4 +- config/cmake/CTestScript.cmake | 2 +- config/cmake/HDF5PluginCache.cmake | 32 ++++----- config/cmake/HDF5PluginMacros.cmake | 5 ++ config/cmake/cacheinit.cmake | 11 ++++ config/cmake/scripts/HDF5options.cmake | 3 +- config/cmake_ext_mod/HDFLibMacros.cmake | 22 ++++++- config/cmake_ext_mod/HDFMacros.cmake | 86 +++++++++++++++++-------- doxygen/CMakeLists.txt | 2 +- fortran/src/CMakeLists.txt | 6 +- hl/c++/src/CMakeLists.txt | 4 +- hl/fortran/src/CMakeLists.txt | 6 +- hl/src/CMakeLists.txt | 4 +- 
java/src/hdf/hdf5lib/CMakeLists.txt | 2 +- release_docs/INSTALL_CMake.txt | 19 +++++- release_docs/RELEASE.txt | 10 +++ src/CMakeLists.txt | 4 +- src/H5Dmodule.h | 8 +-- 19 files changed, 166 insertions(+), 66 deletions(-) diff --git a/CMakeInstallation.cmake b/CMakeInstallation.cmake index 1fe516c039a..2c4c304964c 100644 --- a/CMakeInstallation.cmake +++ b/CMakeInstallation.cmake @@ -238,7 +238,7 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED) endif () install ( FILES ${release_files} - DESTINATION ${HDF5_INSTALL_DATA_DIR} + DESTINATION ${HDF5_INSTALL_DOC_DIR} COMPONENT hdfdocuments ) endif () diff --git a/c++/src/CMakeLists.txt b/c++/src/CMakeLists.txt index 2a37dea8bf3..c8fc82df70b 100644 --- a/c++/src/CMakeLists.txt +++ b/c++/src/CMakeLists.txt @@ -173,8 +173,8 @@ endif () #----------------------------------------------------------------------------- set (_PKG_CONFIG_PREFIX ${CMAKE_INSTALL_PREFIX}) set (_PKG_CONFIG_EXEC_PREFIX \${prefix}) -set (_PKG_CONFIG_LIBDIR \${exec_prefix}/lib) -set (_PKG_CONFIG_INCLUDEDIR \${prefix}/include) +set (_PKG_CONFIG_LIBDIR \${exec_prefix}/${HDF5_INSTALL_LIB_DIR}) +set (_PKG_CONFIG_INCLUDEDIR \${prefix}/${HDF5_INSTALL_INCLUDE_DIR}) set (_PKG_CONFIG_LIBNAME "${HDF5_CPP_LIB_CORENAME}") set (_PKG_CONFIG_VERSION "${HDF5_PACKAGE_VERSION}") diff --git a/config/cmake/CTestScript.cmake b/config/cmake/CTestScript.cmake index 0ed90635087..dcf26851278 100644 --- a/config/cmake/CTestScript.cmake +++ b/config/cmake/CTestScript.cmake @@ -54,7 +54,7 @@ else () endif () #----------------------------------------------------------------------------- -# MAC machines need special option +# MacOS machines need special options #----------------------------------------------------------------------------- if (APPLE) # Compiler choice diff --git a/config/cmake/HDF5PluginCache.cmake b/config/cmake/HDF5PluginCache.cmake index 3b085ddf54b..f5334bba740 100644 --- a/config/cmake/HDF5PluginCache.cmake +++ b/config/cmake/HDF5PluginCache.cmake @@ -1,29 +1,31 @@ -# This 
is the CMakeCache file. +# CMake cache file for external HDF5 filter plugins -######################## +######################### # EXTERNAL cache entries -######################## +######################### # examples are the tests for plugins -set (H5PL_BUILD_TESTING ON CACHE BOOL "Enable h5pl testing" FORCE) -set (BUILD_EXAMPLES ON CACHE BOOL "Build h5pl Examples" FORCE) +set (H5PL_BUILD_TESTING ON CACHE BOOL "Enable H5PL testing" FORCE) +set (BUILD_EXAMPLES ON CACHE BOOL "Build H5PL Examples" FORCE) -set (HDF5_HDF5_HEADER "h5pubconf.h" CACHE STRING "Name of HDF5 header" FORCE) -set (HDF5_LINK_LIBS ${HDF5_LIBSH_TARGET} CACHE STRING "hdf5 target" FORCE) -#set (HDF5_INCLUDE_DIR $ CACHE PATH "hdf5 include dirs" FORCE) -set (HDF5_INCLUDE_DIR "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR}" CACHE PATH "hdf5 include dirs" FORCE) -set (HDF5_INCLUDE_DIRS "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR}" CACHE PATH "hdf5 include dirs" FORCE) -set (HDF5_DIR ${CMAKE_CURRENT_BINARY_DIR} CACHE STRING "hdf5 build folder" FORCE) +set (HDF5_HDF5_HEADER "H5pubconf.h" CACHE STRING "Name of HDF5 header" FORCE) +set (HDF5_LINK_LIBS ${HDF5_LIBSH_TARGET} CACHE STRING "HDF5 target" FORCE) +#set (HDF5_INCLUDE_DIR $ CACHE PATH "HDF5 include dirs" FORCE) +set (HDF5_INCLUDE_DIR "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR}" CACHE PATH "HDF5 include dirs" FORCE) +set (HDF5_INCLUDE_DIRS "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR}" CACHE PATH "HDF5 include dirs" FORCE) +set (HDF5_DIR ${CMAKE_CURRENT_BINARY_DIR} CACHE STRING "HDF5 build folder" FORCE) -set (HDF5_DUMP_EXECUTABLE $ CACHE STRING "hdf5 h5dump target" FORCE) -set (HDF5_REPACK_EXECUTABLE $ CACHE STRING "hdf5 h5repack target" FORCE) +set (HDF5_DUMP_EXECUTABLE $ CACHE STRING "HDF5 h5dump target" FORCE) +set (HDF5_REPACK_EXECUTABLE $ CACHE STRING "HDF5 h5repack target" FORCE) set (H5PL_ALLOW_EXTERNAL_SUPPORT "${HDF5_ALLOW_EXTERNAL_SUPPORT}" CACHE STRING "Allow External Library Building (NO GIT TGZ)" FORCE) -set (H5PL_GIT_URL 
"https://github.com/HDFGroup/hdf5_plugins.git" CACHE STRING "Use plugins from HDF repository" FORCE) +set (H5PL_GIT_URL "https://github.com/HDFGroup/hdf5_plugins.git" CACHE STRING "Use plugins from HDF Group repository" FORCE) set (H5PL_GIT_BRANCH "master" CACHE STRING "" FORCE) set (H5PL_TGZ_NAME "${PLUGIN_TGZ_NAME}" CACHE STRING "Use plugins from compressed file" FORCE) set (PL_PACKAGE_NAME "${PLUGIN_PACKAGE_NAME}" CACHE STRING "Name of plugins package" FORCE) -set (H5PL_CPACK_ENABLE OFF CACHE BOOL "Enable the CPACK include and components" FORCE) +set (H5PL_CPACK_ENABLE OFF CACHE BOOL "Enable CPack include and components" FORCE) + +set (H5PL_USE_GNU_DIRS ${HDF5_USE_GNU_DIRS} CACHE BOOL "TRUE to use GNU Coding Standard install directory variables" FORCE) diff --git a/config/cmake/HDF5PluginMacros.cmake b/config/cmake/HDF5PluginMacros.cmake index dbed15c6ea2..5cb5f99b057 100644 --- a/config/cmake/HDF5PluginMacros.cmake +++ b/config/cmake/HDF5PluginMacros.cmake @@ -67,6 +67,11 @@ macro (EXTERNAL_PLUGIN_LIBRARY compress_type) add_dependencies (h5ex_d_zfp ${HDF5_LIBSH_TARGET}) target_include_directories (h5ex_d_zfp PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR}") endif () + if (ENABLE_ZSTD) + add_dependencies (h5zstd ${HDF5_LIBSH_TARGET}) + add_dependencies (h5ex_d_zstd ${HDF5_LIBSH_TARGET}) + target_include_directories (h5ex_d_zstd PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR}") + endif () endif () if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") message (VERBOSE "HDF5_INCLUDE_DIR=${HDF5_INCLUDE_DIR}") diff --git a/config/cmake/cacheinit.cmake b/config/cmake/cacheinit.cmake index de6f92f89f1..faa05339260 100644 --- a/config/cmake/cacheinit.cmake +++ b/config/cmake/cacheinit.cmake @@ -181,3 +181,14 @@ set (ZFP_TGZ_NAME "zfp.tar.gz" CACHE STRING "Use ZFP from compressed file" FORCE set (ZFP_PACKAGE_NAME "zfp" CACHE STRING "Name of ZFP package" FORCE) +###### +# zstd +###### + +set (ZSTD_GIT_URL "https://github.com/facebook/zstd" CACHE STRING "Use ZSTD from 
repository" FORCE) +set (ZSTD_GIT_BRANCH "dev" CACHE STRING "" FORCE) + +set (ZSTD_TGZ_NAME "zstd.tar.gz" CACHE STRING "Use ZSTD from compressed file" FORCE) + +set (ZSTD_PACKAGE_NAME "zstd" CACHE STRING "Name of ZSTD package" FORCE) + diff --git a/config/cmake/scripts/HDF5options.cmake b/config/cmake/scripts/HDF5options.cmake index bab60de617e..e14dfba94a1 100644 --- a/config/cmake/scripts/HDF5options.cmake +++ b/config/cmake/scripts/HDF5options.cmake @@ -74,7 +74,7 @@ set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ALLOW_EXTERNAL_SUPPORT:STRIN #set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_ENCODING:BOOL=OFF") #### package examples #### -#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_PACK_EXAMPLES:BOOL=ON -DHDF5_EXAMPLES_COMPRESSED:STRING=HDF5Examples-2.0.1-Source.tar.gz -DHDF5_EXAMPLES_COMPRESSED_DIR:PATH=${CTEST_SCRIPT_DIRECTORY}") +#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_PACK_EXAMPLES:BOOL=ON -DHDF5_EXAMPLES_COMPRESSED:STRING=HDF5Examples-2.0.2-Source.tar.gz -DHDF5_EXAMPLES_COMPRESSED_DIR:PATH=${CTEST_SCRIPT_DIRECTORY}") ############################################################################################# ### enable parallel builds @@ -96,6 +96,7 @@ endif() ############################################################################################# ### disable test program builds +#If using CTestScript.cmake file be sure to uncomment set (LOCAL_SKIP_TEST "TRUE") #set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DBUILD_TESTING:BOOL=OFF") ############################################################################################# diff --git a/config/cmake_ext_mod/HDFLibMacros.cmake b/config/cmake_ext_mod/HDFLibMacros.cmake index 2c5a9bf4b50..740ddae7eaa 100644 --- a/config/cmake_ext_mod/HDFLibMacros.cmake +++ b/config/cmake_ext_mod/HDFLibMacros.cmake @@ -24,12 +24,15 @@ macro (EXTERNAL_JPEG_LIBRARY compress_type jpeg_pic) -DJPEG_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT} -DJPEG_EXTERNALLY_CONFIGURED:BOOL=OFF 
-DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE} + -DCMAKE_DEBUG_POSTFIX:STRING=${CMAKE_DEBUG_POSTFIX} -DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX} -DCMAKE_RUNTIME_OUTPUT_DIRECTORY:PATH=${CMAKE_RUNTIME_OUTPUT_DIRECTORY} -DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY} -DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:PATH=${CMAKE_ARCHIVE_OUTPUT_DIRECTORY} -DCMAKE_PDB_OUTPUT_DIRECTORY:PATH=${CMAKE_PDB_OUTPUT_DIRECTORY} -DCMAKE_ANSI_CFLAGS:STRING=${jpeg_pic} + -DHDF_USE_GNU_DIRS:STRING=${HDF5_USE_GNU_DIRS} + -DCMAKE_OSX_ARCHITECTURES:STRING=${CMAKE_OSX_ARCHITECTURES} -DCMAKE_TOOLCHAIN_FILE:STRING=${CMAKE_TOOLCHAIN_FILE} -DPACKAGE_NAMESPACE=${HDF_PACKAGE_NAMESPACE} ) @@ -43,12 +46,15 @@ macro (EXTERNAL_JPEG_LIBRARY compress_type jpeg_pic) -DJPEG_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT} -DJPEG_EXTERNALLY_CONFIGURED:BOOL=OFF -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE} + -DCMAKE_DEBUG_POSTFIX:STRING=${CMAKE_DEBUG_POSTFIX} -DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX} -DCMAKE_RUNTIME_OUTPUT_DIRECTORY:PATH=${CMAKE_RUNTIME_OUTPUT_DIRECTORY} -DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY} -DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:PATH=${CMAKE_ARCHIVE_OUTPUT_DIRECTORY} -DCMAKE_PDB_OUTPUT_DIRECTORY:PATH=${CMAKE_PDB_OUTPUT_DIRECTORY} -DCMAKE_ANSI_CFLAGS:STRING=${jpeg_pic} + -DHDF_USE_GNU_DIRS:STRING=${HDF5_USE_GNU_DIRS} + -DCMAKE_OSX_ARCHITECTURES:STRING=${CMAKE_OSX_ARCHITECTURES} -DCMAKE_TOOLCHAIN_FILE:STRING=${CMAKE_TOOLCHAIN_FILE} -DPACKAGE_NAMESPACE=${HDF_PACKAGE_NAMESPACE} ) @@ -93,6 +99,7 @@ macro (EXTERNAL_SZIP_LIBRARY compress_type encoding) -DSZIP_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT} -DSZIP_EXTERNALLY_CONFIGURED:BOOL=OFF -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE} + -DCMAKE_DEBUG_POSTFIX:STRING=${CMAKE_DEBUG_POSTFIX} -DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX} -DCMAKE_RUNTIME_OUTPUT_DIRECTORY:PATH=${CMAKE_RUNTIME_OUTPUT_DIRECTORY} -DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY} @@ -100,6 +107,8 @@ macro 
(EXTERNAL_SZIP_LIBRARY compress_type encoding) -DCMAKE_PDB_OUTPUT_DIRECTORY:PATH=${CMAKE_PDB_OUTPUT_DIRECTORY} -DCMAKE_ANSI_CFLAGS:STRING=${CMAKE_ANSI_CFLAGS} -DSZIP_ENABLE_ENCODING:BOOL=${encoding} + -DHDF_USE_GNU_DIRS:STRING=${HDF5_USE_GNU_DIRS} + -DCMAKE_OSX_ARCHITECTURES:STRING=${CMAKE_OSX_ARCHITECTURES} -DCMAKE_TOOLCHAIN_FILE:STRING=${CMAKE_TOOLCHAIN_FILE} -DPACKAGE_NAMESPACE=${HDF_PACKAGE_NAMESPACE} ) @@ -113,6 +122,7 @@ macro (EXTERNAL_SZIP_LIBRARY compress_type encoding) -DSZIP_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT} -DSZIP_EXTERNALLY_CONFIGURED:BOOL=OFF -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE} + -DCMAKE_DEBUG_POSTFIX:STRING=${CMAKE_DEBUG_POSTFIX} -DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX} -DCMAKE_RUNTIME_OUTPUT_DIRECTORY:PATH=${CMAKE_RUNTIME_OUTPUT_DIRECTORY} -DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY} @@ -120,6 +130,8 @@ macro (EXTERNAL_SZIP_LIBRARY compress_type encoding) -DCMAKE_PDB_OUTPUT_DIRECTORY:PATH=${CMAKE_PDB_OUTPUT_DIRECTORY} -DCMAKE_ANSI_CFLAGS:STRING=${CMAKE_ANSI_CFLAGS} -DSZIP_ENABLE_ENCODING:BOOL=${encoding} + -DHDF_USE_GNU_DIRS:STRING=${HDF5_USE_GNU_DIRS} + -DCMAKE_OSX_ARCHITECTURES:STRING=${CMAKE_OSX_ARCHITECTURES} -DCMAKE_TOOLCHAIN_FILE:STRING=${CMAKE_TOOLCHAIN_FILE} -DPACKAGE_NAMESPACE=${HDF_PACKAGE_NAMESPACE} ) @@ -184,12 +196,15 @@ macro (EXTERNAL_ZLIB_LIBRARY compress_type) -DZLIB_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT} -DZLIB_EXTERNALLY_CONFIGURED:BOOL=OFF -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE} + -DCMAKE_DEBUG_POSTFIX:STRING=${CMAKE_DEBUG_POSTFIX} -DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX} -DCMAKE_RUNTIME_OUTPUT_DIRECTORY:PATH=${CMAKE_RUNTIME_OUTPUT_DIRECTORY} -DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY} -DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:PATH=${CMAKE_ARCHIVE_OUTPUT_DIRECTORY} -DCMAKE_PDB_OUTPUT_DIRECTORY:PATH=${CMAKE_PDB_OUTPUT_DIRECTORY} -DCMAKE_ANSI_CFLAGS:STRING=${CMAKE_ANSI_CFLAGS} + -DHDF_USE_GNU_DIRS:STRING=${HDF5_USE_GNU_DIRS} + 
-DCMAKE_OSX_ARCHITECTURES:STRING=${CMAKE_OSX_ARCHITECTURES} -DCMAKE_TOOLCHAIN_FILE:STRING=${CMAKE_TOOLCHAIN_FILE} -DPACKAGE_NAMESPACE=${HDF_PACKAGE_NAMESPACE} ) @@ -203,19 +218,24 @@ macro (EXTERNAL_ZLIB_LIBRARY compress_type) -DZLIB_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT} -DZLIB_EXTERNALLY_CONFIGURED:BOOL=OFF -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE} + -DCMAKE_DEBUG_POSTFIX:STRING=${CMAKE_DEBUG_POSTFIX} -DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX} -DCMAKE_RUNTIME_OUTPUT_DIRECTORY:PATH=${CMAKE_RUNTIME_OUTPUT_DIRECTORY} -DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY} -DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:PATH=${CMAKE_ARCHIVE_OUTPUT_DIRECTORY} -DCMAKE_PDB_OUTPUT_DIRECTORY:PATH=${CMAKE_PDB_OUTPUT_DIRECTORY} -DCMAKE_ANSI_CFLAGS:STRING=${CMAKE_ANSI_CFLAGS} + -DHDF_USE_GNU_DIRS:STRING=${HDF5_USE_GNU_DIRS} + -DCMAKE_OSX_ARCHITECTURES:STRING=${CMAKE_OSX_ARCHITECTURES} -DCMAKE_TOOLCHAIN_FILE:STRING=${CMAKE_TOOLCHAIN_FILE} -DPACKAGE_NAMESPACE=${HDF_PACKAGE_NAMESPACE} ) endif () externalproject_get_property (HDF5_ZLIB BINARY_DIR SOURCE_DIR) - set (ZLIB_LIB_NAME "z") + if (NOT ZLIB_LIB_NAME) + set (ZLIB_LIB_NAME "z") + endif () ##include (${BINARY_DIR}/${ZLIB_PACKAGE_NAME}${HDF_PACKAGE_EXT}-targets.cmake) # Create imported target zlib-static add_library(${HDF_PACKAGE_NAMESPACE}zlib-static STATIC IMPORTED) diff --git a/config/cmake_ext_mod/HDFMacros.cmake b/config/cmake_ext_mod/HDFMacros.cmake index 6f517bd7b5e..863b7ba2a68 100644 --- a/config/cmake_ext_mod/HDFMacros.cmake +++ b/config/cmake_ext_mod/HDFMacros.cmake @@ -12,30 +12,30 @@ #------------------------------------------------------------------------------- macro (SET_HDF_BUILD_TYPE) - get_property(_isMultiConfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG) - if(_isMultiConfig) - set(HDF_CFG_NAME ${CMAKE_BUILD_TYPE}) - set(HDF_BUILD_TYPE ${CMAKE_CFG_INTDIR}) - set(HDF_CFG_BUILD_TYPE \${CMAKE_INSTALL_CONFIG_NAME}) - else() - set(HDF_CFG_BUILD_TYPE ".") - if(CMAKE_BUILD_TYPE) - set(HDF_CFG_NAME 
${CMAKE_BUILD_TYPE}) - set(HDF_BUILD_TYPE ${CMAKE_BUILD_TYPE}) - else() - set(HDF_CFG_NAME "Release") - set(HDF_BUILD_TYPE "Release") - endif() - endif() - if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) + get_property (_isMultiConfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG) + if (_isMultiConfig) + set (HDF_CFG_NAME ${CMAKE_BUILD_TYPE}) + set (HDF_BUILD_TYPE ${CMAKE_CFG_INTDIR}) + set (HDF_CFG_BUILD_TYPE \${CMAKE_INSTALL_CONFIG_NAME}) + else () + set (HDF_CFG_BUILD_TYPE ".") + if (CMAKE_BUILD_TYPE) + set (HDF_CFG_NAME ${CMAKE_BUILD_TYPE}) + set (HDF_BUILD_TYPE ${CMAKE_BUILD_TYPE}) + else () + set (HDF_CFG_NAME "Release") + set (HDF_BUILD_TYPE "Release") + endif () + endif () + if (NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") message (VERBOSE "Setting build type to 'RelWithDebInfo' as none was specified.") endif() - set(CMAKE_BUILD_TYPE RelWithDebInfo CACHE STRING "Choose the type of build." FORCE) + set (CMAKE_BUILD_TYPE RelWithDebInfo CACHE STRING "Choose the type of build." 
FORCE) # Set the possible values of build type for cmake-gui - set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Debug" "Release" + set_property (CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Debug" "Release" "MinSizeRel" "RelWithDebInfo") - endif() + endif () endmacro () #------------------------------------------------------------------------------- @@ -45,7 +45,7 @@ endmacro () #------------------------------------------------------------------------------- macro (IDE_GENERATED_PROPERTIES SOURCE_PATH HEADERS SOURCES) - #set(source_group_path "Source/AIM/${NAME}") + #set (source_group_path "Source/AIM/${NAME}") string (REPLACE "/" "\\\\" source_group_path ${SOURCE_PATH}) source_group (${source_group_path} FILES ${HEADERS} ${SOURCES}) @@ -321,6 +321,36 @@ macro (HDFTEST_COPY_FILE src dest target) endmacro () macro (HDF_DIR_PATHS package_prefix) + option (HDF5_USE_GNU_DIRS "TRUE to use GNU Coding Standard install directory variables, FALSE to use historical settings" FALSE) + if (HDF5_USE_GNU_DIRS) + include(GNUInstallDirs) + if (NOT ${package_prefix}_INSTALL_BIN_DIR) + set (${package_prefix}_INSTALL_BIN_DIR ${CMAKE_INSTALL_BINDIR}) + endif () + if (NOT ${package_prefix}_INSTALL_LIB_DIR) + set (${package_prefix}_INSTALL_LIB_DIR ${CMAKE_INSTALL_LIBDIR}) + endif () + if (NOT ${package_prefix}_INSTALL_JAR_DIR) + set (${package_prefix}_INSTALL_JAR_DIR ${CMAKE_INSTALL_LIBDIR}) + endif () + if (NOT ${package_prefix}_INSTALL_INCLUDE_DIR) + set (${package_prefix}_INSTALL_INCLUDE_DIR ${CMAKE_INSTALL_INCLUDEDIR}) + endif () + if (NOT ${package_prefix}_INSTALL_MODULE_DIR) + set (${package_prefix}_INSTALL_MODULE_DIR ${CMAKE_INSTALL_INCLUDEDIR}/mod) + endif () + if (NOT ${package_prefix}_INSTALL_DATA_DIR) + set (${package_prefix}_INSTALL_DATA_DIR ${CMAKE_INSTALL_DATADIR}) + endif () + if (NOT ${package_prefix}_INSTALL_CMAKE_DIR) + set (${package_prefix}_INSTALL_CMAKE_DIR ${CMAKE_INSTALL_LIBDIR}/cmake) + endif () + if (NOT ${package_prefix}_INSTALL_DOC_DIR) + set 
(${package_prefix}_INSTALL_DOC_DIR ${CMAKE_INSTALL_DOCDIR}) + endif () + message(STATUS "GNU: ${${package_prefix}_INSTALL_DOC_DIR}") + endif () + if (APPLE) option (${package_prefix}_BUILD_FRAMEWORKS "TRUE to build as frameworks libraries, FALSE to build according to BUILD_SHARED_LIBS" FALSE) endif () @@ -365,6 +395,10 @@ macro (HDF_DIR_PATHS package_prefix) if (NOT ${package_prefix}_INSTALL_CMAKE_DIR) set (${package_prefix}_INSTALL_CMAKE_DIR cmake) endif () + if (NOT ${package_prefix}_INSTALL_DOC_DIR) + set (${package_prefix}_INSTALL_DOC_DIR ${${package_prefix}_INSTALL_DATA_DIR}) + endif () + message(STATUS "Final: ${${package_prefix}_INSTALL_DOC_DIR}") # Always use full RPATH, i.e. don't skip the full RPATH for the build tree set (CMAKE_SKIP_BUILD_RPATH FALSE) @@ -391,12 +425,12 @@ macro (HDF_DIR_PATHS package_prefix) endif () #set the default debug suffix for all library targets - if(NOT CMAKE_DEBUG_POSTFIX) - if (WIN32) - set (CMAKE_DEBUG_POSTFIX "_D") - else () - set (CMAKE_DEBUG_POSTFIX "_debug") - endif () + if(NOT CMAKE_DEBUG_POSTFIX) + if (WIN32) + set (CMAKE_DEBUG_POSTFIX "_D") + else () + set (CMAKE_DEBUG_POSTFIX "_debug") + endif () endif () SET_HDF_BUILD_TYPE() diff --git a/doxygen/CMakeLists.txt b/doxygen/CMakeLists.txt index 472c4dc9039..fda7e7d6e03 100644 --- a/doxygen/CMakeLists.txt +++ b/doxygen/CMakeLists.txt @@ -37,7 +37,7 @@ if (DOXYGEN_FOUND) install ( DIRECTORY ${HDF5_BINARY_DIR}/hdf5lib_docs/html - DESTINATION ${HDF5_INSTALL_DATA_DIR} + DESTINATION ${HDF5_INSTALL_DOC_DIR} COMPONENT Documents ) diff --git a/fortran/src/CMakeLists.txt b/fortran/src/CMakeLists.txt index 0c84aa6d259..77ce82f8067 100644 --- a/fortran/src/CMakeLists.txt +++ b/fortran/src/CMakeLists.txt @@ -537,9 +537,9 @@ endif () #----------------------------------------------------------------------------- set (_PKG_CONFIG_PREFIX ${CMAKE_INSTALL_PREFIX}) set (_PKG_CONFIG_EXEC_PREFIX \${prefix}) -set (_PKG_CONFIG_LIBDIR \${exec_prefix}/lib) -set (_PKG_CONFIG_INCLUDEDIR 
\${prefix}/include) -set (_PKG_CONFIG_MODULEDIR \${prefix}/mod) +set (_PKG_CONFIG_LIBDIR \${exec_prefix}/${HDF5_INSTALL_LIB_DIR}) +set (_PKG_CONFIG_INCLUDEDIR \${prefix}/${HDF5_INSTALL_INCLUDE_DIR}) +set (_PKG_CONFIG_MODULEDIR \${prefix}/${HDF5_INSTALL_MODULE_DIR}) set (_PKG_CONFIG_LIBNAME "${HDF5_F90_LIB_CORENAME}") set (_PKG_CONFIG_VERSION "${HDF5_PACKAGE_VERSION}") diff --git a/hl/c++/src/CMakeLists.txt b/hl/c++/src/CMakeLists.txt index c516df15e7e..e56d6279e55 100644 --- a/hl/c++/src/CMakeLists.txt +++ b/hl/c++/src/CMakeLists.txt @@ -91,8 +91,8 @@ endif () #----------------------------------------------------------------------------- set (_PKG_CONFIG_PREFIX ${CMAKE_INSTALL_PREFIX}) set (_PKG_CONFIG_EXEC_PREFIX \${prefix}) -set (_PKG_CONFIG_LIBDIR \${exec_prefix}/lib) -set (_PKG_CONFIG_INCLUDEDIR \${prefix}/include) +set (_PKG_CONFIG_LIBDIR \${exec_prefix}/${HDF5_INSTALL_LIB_DIR}) +set (_PKG_CONFIG_INCLUDEDIR \${prefix}/${HDF5_INSTALL_INCLUDE_DIR}) set (_PKG_CONFIG_LIBNAME "${HDF5_HL_CPP_LIB_CORENAME}") set (_PKG_CONFIG_VERSION "${HDF5_PACKAGE_VERSION}") diff --git a/hl/fortran/src/CMakeLists.txt b/hl/fortran/src/CMakeLists.txt index 4413424a9b2..7a484dcf150 100644 --- a/hl/fortran/src/CMakeLists.txt +++ b/hl/fortran/src/CMakeLists.txt @@ -324,9 +324,9 @@ endif () #----------------------------------------------------------------------------- set (_PKG_CONFIG_PREFIX ${CMAKE_INSTALL_PREFIX}) set (_PKG_CONFIG_EXEC_PREFIX \${prefix}) -set (_PKG_CONFIG_LIBDIR \${exec_prefix}/lib) -set (_PKG_CONFIG_INCLUDEDIR \${prefix}/include) -set (_PKG_CONFIG_MODULEDIR \${prefix}/mod) +set (_PKG_CONFIG_LIBDIR \${exec_prefix}/${HDF5_INSTALL_LIB_DIR}) +set (_PKG_CONFIG_INCLUDEDIR \${prefix}/${HDF5_INSTALL_INCLUDE_DIR}) +set (_PKG_CONFIG_MODULEDIR \${prefix}/${HDF5_INSTALL_MODULE_DIR}) set (_PKG_CONFIG_LIBNAME "${HDF5_HL_F90_LIB_CORENAME}") set (_PKG_CONFIG_VERSION "${HDF5_PACKAGE_VERSION}") diff --git a/hl/src/CMakeLists.txt b/hl/src/CMakeLists.txt index a97d6fa3829..93a62bc7356 
100644 --- a/hl/src/CMakeLists.txt +++ b/hl/src/CMakeLists.txt @@ -123,8 +123,8 @@ endif () #----------------------------------------------------------------------------- set (_PKG_CONFIG_PREFIX ${CMAKE_INSTALL_PREFIX}) set (_PKG_CONFIG_EXEC_PREFIX \${prefix}) -set (_PKG_CONFIG_LIBDIR \${exec_prefix}/lib) -set (_PKG_CONFIG_INCLUDEDIR \${prefix}/include) +set (_PKG_CONFIG_LIBDIR \${exec_prefix}/${HDF5_INSTALL_LIB_DIR}) +set (_PKG_CONFIG_INCLUDEDIR \${prefix}/${HDF5_INSTALL_INCLUDE_DIR}) set (_PKG_CONFIG_LIBNAME "${HDF5_HL_LIB_CORENAME}") set (_PKG_CONFIG_VERSION "${HDF5_PACKAGE_VERSION}") diff --git a/java/src/hdf/hdf5lib/CMakeLists.txt b/java/src/hdf/hdf5lib/CMakeLists.txt index 92471974cff..16745fa22c5 100644 --- a/java/src/hdf/hdf5lib/CMakeLists.txt +++ b/java/src/hdf/hdf5lib/CMakeLists.txt @@ -141,7 +141,7 @@ create_javadoc(hdf5_java_doc CLASSPATH ${CMAKE_JAVA_INCLUDE_PATH} WINDOWTITLE "HDF5 Java" DOCTITLE "

HDF5 Java Wrapper

" - INSTALLPATH ${HDF5_INSTALL_DATA_DIR} + INSTALLPATH ${HDF5_INSTALL_DOC_DIR} AUTHOR TRUE USE TRUE VERSION TRUE diff --git a/release_docs/INSTALL_CMake.txt b/release_docs/INSTALL_CMake.txt index 95576e181f0..44d32038413 100644 --- a/release_docs/INSTALL_CMake.txt +++ b/release_docs/INSTALL_CMake.txt @@ -32,7 +32,8 @@ CMake version 1. We suggest you obtain the latest CMake from the Kitware web site. The HDF5 1.12."X" product requires a minimum CMake version 3.12, where "X" is the current HDF5 release version. If you are using - VS2019, the minimum version is 3.15. + VS2019, the minimum version is 3.15. For VS2022, the minimum + version is 3.21. Note: To change the install prefix from the platform defaults initialize @@ -753,8 +754,20 @@ if (MSVC) HDF5_INSTALL_DATA_DIR "." else () HDF5_INSTALL_DATA_DIR "share" +HDF5_INSTALL_DOC_DIR "HDF5_INSTALL_DATA_DIR" + +Defaults as defined by the `GNU Coding Standards` +HDF5_INSTALL_BIN_DIR "bin" +HDF5_INSTALL_LIB_DIR "lib" +HDF5_INSTALL_INCLUDE_DIR "include" +HDF5_INSTALL_MODULE_DIR "HDF5_INSTALL_INCLUDE_DIR/mod" +HDF5_INSTALL_CMAKE_DIR "HDF5_INSTALL_LIB_DIR/cmake" +HDF5_INSTALL_DATA_DIR "share" +HDF5_INSTALL_DOC_DIR "HDF5_INSTALL_DATA_DIR/doc/hdf5" ---------------- HDF5 Advanced Options --------------------- +HDF5_USE_GNU_DIRS "TRUE to use GNU Coding Standard install directory variables, + FALSE to use historical settings" FALSE ONLY_SHARED_LIBS "Only Build Shared Libraries" OFF ALLOW_UNSUPPORTED "Allow unsupported combinations of configure options" OFF HDF5_EXTERNAL_LIB_PREFIX "Use prefix for custom library naming." "" @@ -832,6 +845,10 @@ NOTE: flag is not available on windows and some modern linux systems will ignore the flag. +NOTE: + The HDF5_USE_GNU_DIRS option is usually recommended for linux platforms, but may + be useful on other platforms. See the CMake documentation for more details. 
+ ---------------- Unsupported Library Options --------------------- The threadsafe, C++ and Java interfaces are not compatible with the HDF5_ENABLE_PARALLEL option. diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index ce18ed90920..96e452a89d8 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -47,6 +47,16 @@ New Features Configuration: ------------- + - Add new CMake configuration variable HDF5_USE_GNU_DIRS + + HDF5_USE_GNU_DIRS (default OFF) selects the use of GNU Coding Standard install + directory variables by including the CMake module, GNUInstallDirs(see CMake + documentation for details). The HDF_DIR_PATHS macro in the HDFMacros.cmake file + sets various PATH variables for use during the build, test and install processes. + By default, the historical settings for these variables will be used. + + (ADB - 2022/10/21, GH-2175, GH-1716) + - Correct the usage of CMAKE_Fortran_MODULE_DIRECTORY and where to install Fortran mod files. diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index a9802c162b9..c6294c41af1 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -1288,8 +1288,8 @@ endif () #----------------------------------------------------------------------------- set (_PKG_CONFIG_PREFIX ${CMAKE_INSTALL_PREFIX}) set (_PKG_CONFIG_EXEC_PREFIX \${prefix}) -set (_PKG_CONFIG_LIBDIR \${exec_prefix}/lib) -set (_PKG_CONFIG_INCLUDEDIR \${prefix}/include) +set (_PKG_CONFIG_LIBDIR \${exec_prefix}/${HDF5_INSTALL_LIB_DIR}) +set (_PKG_CONFIG_INCLUDEDIR \${prefix}/${HDF5_INSTALL_INCLUDE_DIR}) set (_PKG_CONFIG_LIBNAME "${HDF5_LIB_CORENAME}") set (_PKG_CONFIG_VERSION "${HDF5_PACKAGE_VERSION}") diff --git a/src/H5Dmodule.h b/src/H5Dmodule.h index 4ad370924da..00751a91b3f 100644 --- a/src/H5Dmodule.h +++ b/src/H5Dmodule.h @@ -1897,10 +1897,10 @@ allocated if necessary. * byte 0 * * - * ???????? - * ????SPPP - * PPPPPPPP - * PPPP???? + * ???????? + * ????SPPP + * PPPPPPPP + * PPPP???? 
* * * From 1da36c90b636f6cd1889338a61602582e5187f00 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Mon, 12 Dec 2022 09:32:01 -0600 Subject: [PATCH 006/108] 1.12 Correct requires setting for pkgconfig files #2280 (#2283) * Correct requires setting for pkgconfig files #2280 * Change macos for CI --- .github/workflows/main.yml | 6 +++--- .github/workflows/pr-check.yml | 6 +++--- c++/src/CMakeLists.txt | 4 ++-- fortran/src/CMakeLists.txt | 4 ++-- hl/c++/src/CMakeLists.txt | 4 ++-- hl/fortran/src/CMakeLists.txt | 4 ++-- hl/src/CMakeLists.txt | 4 ++-- release_docs/RELEASE.txt | 8 ++++++++ 8 files changed, 24 insertions(+), 16 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 2282a097b14..5d3e14d68dd 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -48,7 +48,7 @@ jobs: generator: "-G Ninja" - name: "macOS Latest Clang" artifact: "macOS.tar.xz" - os: macos-latest + os: macos-11 build_type: "Release" cpp: ON fortran: OFF @@ -109,7 +109,7 @@ jobs: generator: "-G Ninja" - name: "macOS TS Clang" artifact: "macOSTS.tar.xz" - os: macos-latest + os: macos-11 build_type: "Release" cpp: OFF fortran: OFF @@ -171,7 +171,7 @@ jobs: if: matrix.os == 'windows-latest' - name: Install Dependencies (macOS) run: brew install ninja - if: matrix.os == 'macos-latest' + if: matrix.os == 'macos-11' - name: Set environment for MSVC (Windows) if: matrix.os == 'windows-latest' run: | diff --git a/.github/workflows/pr-check.yml b/.github/workflows/pr-check.yml index 5dd923ff665..b48f835dbe1 100644 --- a/.github/workflows/pr-check.yml +++ b/.github/workflows/pr-check.yml @@ -47,7 +47,7 @@ jobs: generator: "-G Ninja" - name: "macOS Latest Clang" artifact: "macOS.tar.xz" - os: macos-latest + os: macos-11 build_type: "Release" cpp: ON fortran: OFF @@ -108,7 +108,7 @@ jobs: generator: "-G Ninja" - name: "macOS TS Clang" artifact: "macOSTS.tar.xz" - os: macos-latest + os: macos-11 build_type: 
"Release" cpp: OFF fortran: OFF @@ -170,7 +170,7 @@ jobs: if: matrix.os == 'windows-latest' - name: Install Dependencies (macOS) run: brew install ninja - if: matrix.os == 'macos-latest' + if: matrix.os == 'macos-11' - name: Set environment for MSVC (Windows) if: matrix.os == 'windows-latest' run: | diff --git a/c++/src/CMakeLists.txt b/c++/src/CMakeLists.txt index c8fc82df70b..afb1c9fdbb3 100644 --- a/c++/src/CMakeLists.txt +++ b/c++/src/CMakeLists.txt @@ -187,8 +187,8 @@ if (BUILD_SHARED_LIBS) set (_PKG_CONFIG_SH_LIBS "${_PKG_CONFIG_SH_LIBS} -l${HDF5_CPP_LIB_CORENAME}") endif () -set (_PKG_CONFIG_REQUIRES "${HDF5_LIB_CORENAME}-${HDF5_PACKAGE_VERSION}") -set (_PKG_CONFIG_REQUIRES_PRIVATE "${HDF5_LIB_CORENAME}-${HDF5_PACKAGE_VERSION}") +set (_PKG_CONFIG_REQUIRES "${HDF5_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}") +set (_PKG_CONFIG_REQUIRES_PRIVATE "${HDF5_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}") configure_file ( ${HDF_CONFIG_DIR}/libhdf5.pc.in diff --git a/fortran/src/CMakeLists.txt b/fortran/src/CMakeLists.txt index 77ce82f8067..409af0e753d 100644 --- a/fortran/src/CMakeLists.txt +++ b/fortran/src/CMakeLists.txt @@ -552,8 +552,8 @@ if (BUILD_SHARED_LIBS) set (_PKG_CONFIG_SH_LIBS "${_PKG_CONFIG_SH_LIBS} -l${HDF5_F90_LIB_CORENAME}") endif () -set (_PKG_CONFIG_REQUIRES "${HDF5_LIB_CORENAME}-${HDF5_PACKAGE_VERSION}") -set (_PKG_CONFIG_REQUIRES_PRIVATE "${HDF5_LIB_CORENAME}-${HDF5_PACKAGE_VERSION}") +set (_PKG_CONFIG_REQUIRES "${HDF5_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}") +set (_PKG_CONFIG_REQUIRES_PRIVATE "${HDF5_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}") configure_file ( ${HDF_CONFIG_DIR}/libhdf5.fpc.in diff --git a/hl/c++/src/CMakeLists.txt b/hl/c++/src/CMakeLists.txt index e56d6279e55..ab09d743bee 100644 --- a/hl/c++/src/CMakeLists.txt +++ b/hl/c++/src/CMakeLists.txt @@ -105,8 +105,8 @@ if (BUILD_SHARED_LIBS) set (_PKG_CONFIG_SH_LIBS "${_PKG_CONFIG_SH_LIBS} -l${HDF5_HL_CPP_LIB_CORENAME}") endif () -set (_PKG_CONFIG_REQUIRES 
"${HDF5_HL_LIB_CORENAME}-${HDF5_PACKAGE_VERSION}") -set (_PKG_CONFIG_REQUIRES_PRIVATE "${HDF5_HL_LIB_CORENAME}-${HDF5_PACKAGE_VERSION}") +set (_PKG_CONFIG_REQUIRES "${HDF5_HL_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}") +set (_PKG_CONFIG_REQUIRES_PRIVATE "${HDF5_HL_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}") configure_file ( ${HDF_CONFIG_DIR}/libhdf5.pc.in diff --git a/hl/fortran/src/CMakeLists.txt b/hl/fortran/src/CMakeLists.txt index 7a484dcf150..ad462ea9d9f 100644 --- a/hl/fortran/src/CMakeLists.txt +++ b/hl/fortran/src/CMakeLists.txt @@ -339,8 +339,8 @@ if (BUILD_SHARED_LIBS) set (_PKG_CONFIG_SH_LIBS "${_PKG_CONFIG_SH_LIBS} -l${HDF5_HL_F90_LIB_CORENAME}") endif () -set (_PKG_CONFIG_REQUIRES "${HDF5_F90_LIB_CORENAME}-${HDF5_PACKAGE_VERSION}") -set (_PKG_CONFIG_REQUIRES_PRIVATE "${HDF5_F90_LIB_CORENAME}-${HDF5_PACKAGE_VERSION}") +set (_PKG_CONFIG_REQUIRES "${HDF5_F90_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}") +set (_PKG_CONFIG_REQUIRES_PRIVATE "${HDF5_F90_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}") configure_file ( ${HDF_CONFIG_DIR}/libhdf5.fpc.in diff --git a/hl/src/CMakeLists.txt b/hl/src/CMakeLists.txt index 93a62bc7356..6dd5be70dc6 100644 --- a/hl/src/CMakeLists.txt +++ b/hl/src/CMakeLists.txt @@ -137,8 +137,8 @@ if (BUILD_SHARED_LIBS) set (_PKG_CONFIG_SH_LIBS "${_PKG_CONFIG_SH_LIBS} -l${HDF5_HL_LIB_CORENAME}") endif () -set (_PKG_CONFIG_REQUIRES "${HDF5_LIB_CORENAME}-${HDF5_PACKAGE_VERSION}") -set (_PKG_CONFIG_REQUIRES_PRIVATE "${HDF5_LIB_CORENAME}-${HDF5_PACKAGE_VERSION}") +set (_PKG_CONFIG_REQUIRES "${HDF5_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}") +set (_PKG_CONFIG_REQUIRES_PRIVATE "${HDF5_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}") configure_file ( ${HDF_CONFIG_DIR}/libhdf5.pc.in diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 96e452a89d8..9042a06adf1 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -235,6 +235,14 @@ Bug Fixes since HDF5-1.12.1 release Configuration ------------- + - Change the settings of the *pc files 
to use the correct format
+
+      The pkg-config files generated by CMake use incorrect syntax for the 'Requires'
+      settings. Changing the set to use 'lib-name = version' instead of 'lib-name-version'
+      fixes the issue.
+
+      (ADB - 2022/12/06, HDFFV-11355)
+
     - Move MPI libraries link from PRIVATE to PUBLIC
 
       The install dependencies were not including the need for MPI libraries when

From 1dae8c805c61039b7f0c53021d0b828026077eba Mon Sep 17 00:00:00 2001
From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com>
Date: Mon, 12 Dec 2022 09:32:38 -0600
Subject: [PATCH 007/108] 1.12 h5repack should only print if verbose (#2274)

* h5repack should only print if verbose

Also change dump test to use long option

* Add note

* Update yaml workflows
---
 .github/workflows/clang-format-check.yml |  2 +-
 .github/workflows/clang-format-fix.yml   |  2 +-
 .github/workflows/codespell.yml          |  2 +-
 .github/workflows/main.yml               | 13 ++---
 release_docs/RELEASE.txt                 |  7 ++-
 tools/src/h5repack/h5repack_copy.c       | 62 ++++++++++++++----------
 tools/test/h5dump/CMakeTests.cmake       |  2 +-
 7 files changed, 53 insertions(+), 37 deletions(-)

diff --git a/.github/workflows/clang-format-check.yml b/.github/workflows/clang-format-check.yml
index 2d7943c1f68..e8251f2ce7a 100644
--- a/.github/workflows/clang-format-check.yml
+++ b/.github/workflows/clang-format-check.yml
@@ -7,7 +7,7 @@ jobs:
     runs-on: ubuntu-latest
     if: "!contains(github.event.head_commit.message, 'skip-ci')"
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v3
     - name: Run clang-format style check for C and Java programs.
uses: DoozyX/clang-format-lint-action@v0.13 with: diff --git a/.github/workflows/clang-format-fix.yml b/.github/workflows/clang-format-fix.yml index a390a8bd795..c1110cf2b98 100644 --- a/.github/workflows/clang-format-fix.yml +++ b/.github/workflows/clang-format-fix.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest if: "!contains(github.event.head_commit.message, 'skip-ci')" steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Run clang-format style check for C and Java programs. uses: DoozyX/clang-format-lint-action@v0.13 with: diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml index 6b7960a4eb5..a281fc7f1b4 100644 --- a/.github/workflows/codespell.yml +++ b/.github/workflows/codespell.yml @@ -8,7 +8,7 @@ jobs: name: Check for spelling errors runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - uses: codespell-project/actions-codespell@master with: skip: ./bin/trace,./hl/tools/h5watch/h5watch.c,./tools/test/h5jam/tellub.c,./config/sanitizer/LICENSE diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 5d3e14d68dd..c112a1d4b37 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -6,12 +6,13 @@ on: push: branches: [ develop, hdf5_1_12, hdf5_1_10, hdf5_1_8 ] paths-ignore: - - '.github/**' - - 'doc/**' - - 'release_docs/**' - - 'ACKNOWLEDGEMENTS' - - 'COPYING**' - - '**.md' + - '.github/CODEOWNERS' + - '.github/FUNDING.yml' + - 'doc/**' + - 'release_docs/**' + - 'ACKNOWLEDGEMENTS' + - 'COPYING**' + - '**.md' # A workflow run is made up of one or more jobs that can run sequentially or in parallel jobs: diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 9042a06adf1..0b6ca4eeefd 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -254,7 +254,12 @@ Bug Fixes since HDF5-1.12.1 release Tools ----- - - + - Fix h5repack to only print output when verbose option is selected + + When timing option was added to 
h5repack, the check for verbose was + incorrectly implemented. + + (ADB - 2022/12/02, GH #2270) Performance diff --git a/tools/src/h5repack/h5repack_copy.c b/tools/src/h5repack/h5repack_copy.c index d8a9e154e8a..f3ab22e31a3 100644 --- a/tools/src/h5repack/h5repack_copy.c +++ b/tools/src/h5repack/h5repack_copy.c @@ -666,15 +666,17 @@ do_copy_objects(hid_t fidin, hid_t fidout, trav_table_t *travt, pack_opt_t *opti *------------------------------------------------------------------------- */ - if (options->verbose == 2) { - HDprintf("-----------------------------------------------------------------\n"); - HDprintf(" Type Filter (Compression) Timing read/write Name\n"); - HDprintf("-----------------------------------------------------------------\n"); - } - else { - HDprintf("-----------------------------------------\n"); - HDprintf(" Type Filter (Compression) Name\n"); - HDprintf("-----------------------------------------\n"); + if (options->verbose > 0) { + if (options->verbose == 2) { + HDprintf("-----------------------------------------------------------------\n"); + HDprintf(" Type Filter (Compression) Timing read/write Name\n"); + HDprintf("-----------------------------------------------------------------\n"); + } + else { + HDprintf("-----------------------------------------\n"); + HDprintf(" Type Filter (Compression) Name\n"); + HDprintf("-----------------------------------------\n"); + } } if (travt->objs) { @@ -692,10 +694,12 @@ do_copy_objects(hid_t fidin, hid_t fidout, trav_table_t *travt, pack_opt_t *opti *------------------------------------------------------------------------- */ case H5TRAV_TYPE_GROUP: - if (options->verbose == 2) - HDprintf(FORMAT_OBJ_NOTIME, "group", travt->objs[i].name); - else - HDprintf(FORMAT_OBJ, "group", travt->objs[i].name); + if (options->verbose > 0) { + if (options->verbose == 2) + HDprintf(FORMAT_OBJ_NOTIME, "group", travt->objs[i].name); + else + HDprintf(FORMAT_OBJ, "group", travt->objs[i].name); + } /* open input group 
*/ if ((grp_in = H5Gopen2(fidin, travt->objs[i].name, H5P_DEFAULT)) < 0) @@ -1199,7 +1203,7 @@ do_copy_objects(hid_t fidin, hid_t fidout, trav_table_t *travt, pack_opt_t *opti if (options->verbose > 0) { double ratio = 0; - /* only print the compression ration if there was a filter request */ + /* only print the compression ratio if there was a filter request */ if (apply_s && apply_f && req_filter) { /* get the storage size of the output dataset */ dsize_out = H5Dget_storage_size(dset_out); @@ -1305,10 +1309,12 @@ do_copy_objects(hid_t fidin, hid_t fidout, trav_table_t *travt, pack_opt_t *opti if (H5Dclose(dset_out) < 0) H5TOOLS_GOTO_ERROR((-1), "H5Dclose failed"); - if (options->verbose == 2) - HDprintf(FORMAT_OBJ_TIME, "dset", 0.0, write_time, travt->objs[i].name); - else - HDprintf(FORMAT_OBJ, "dset", travt->objs[i].name); + if (options->verbose > 0) { + if (options->verbose == 2) + HDprintf(FORMAT_OBJ_TIME, "dset", 0.0, write_time, travt->objs[i].name); + else + HDprintf(FORMAT_OBJ, "dset", travt->objs[i].name); + } } /* end whether we have request for filter/chunking */ @@ -1320,10 +1326,12 @@ do_copy_objects(hid_t fidin, hid_t fidout, trav_table_t *travt, pack_opt_t *opti *------------------------------------------------------------------------- */ case H5TRAV_TYPE_NAMED_DATATYPE: - if (options->verbose == 2) - HDprintf(FORMAT_OBJ_NOTIME, "type", travt->objs[i].name); - else - HDprintf(FORMAT_OBJ, "type", travt->objs[i].name); + if (options->verbose > 0) { + if (options->verbose == 2) + HDprintf(FORMAT_OBJ_NOTIME, "type", travt->objs[i].name); + else + HDprintf(FORMAT_OBJ, "type", travt->objs[i].name); + } if ((type_in = H5Topen2(fidin, travt->objs[i].name, H5P_DEFAULT)) < 0) H5TOOLS_GOTO_ERROR((-1), "H5Topen2 failed"); @@ -1362,10 +1370,12 @@ do_copy_objects(hid_t fidin, hid_t fidout, trav_table_t *travt, pack_opt_t *opti */ case H5TRAV_TYPE_LINK: case H5TRAV_TYPE_UDLINK: - if (options->verbose == 2) - HDprintf(FORMAT_OBJ_NOTIME, "link", 
travt->objs[i].name); - else - HDprintf(FORMAT_OBJ, "link", travt->objs[i].name); + if (options->verbose > 0) { + if (options->verbose == 2) + HDprintf(FORMAT_OBJ_NOTIME, "link", travt->objs[i].name); + else + HDprintf(FORMAT_OBJ, "link", travt->objs[i].name); + } /* Check -X option. */ if (options->merge) { diff --git a/tools/test/h5dump/CMakeTests.cmake b/tools/test/h5dump/CMakeTests.cmake index 933aba98241..2505e847bc6 100644 --- a/tools/test/h5dump/CMakeTests.cmake +++ b/tools/test/h5dump/CMakeTests.cmake @@ -1159,7 +1159,7 @@ ADD_H5ERR_MASK_TEST (torderlinks2 0 "unable to open external file, external link file name = 'fname'" --enable-error-stack --sort_by=name --sort_order=descending tfcontents1.h5) # tests for floating point user defined printf format - ADD_H5_TEST (tfpformat 0 --enable-error-stack -m %.7f tfpformat.h5) + ADD_H5_TEST (tfpformat 0 --enable-error-stack --format=%.7f tfpformat.h5) # tests for traversal of external links ADD_H5ERR_MASK_TEST (textlinksrc 0 "Too many soft links in path" --enable-error-stack textlinksrc.h5) From 8ef713ae5dc454db66ea821635f6d6d6970858f0 Mon Sep 17 00:00:00 2001 From: vchoi-hdfgroup <55293060+vchoi-hdfgroup@users.noreply.github.com> Date: Mon, 12 Dec 2022 10:40:30 -0600 Subject: [PATCH 008/108] =?UTF-8?q?Fix=20for=20HDFFV-10840:=20Instead=20of?= =?UTF-8?q?=20using=20fill->buf=20for=20datatype=20conversi=E2=80=A6=20(#2?= =?UTF-8?q?277)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Fix for HDFFV-10840: Instead of using fill->buf for datatype conversion (#2153) * Fix for HDFFV-10840: Instead of using fill->buf for datatype conversion if it is large enough, a buffer is allocated regardless so that the element in fill->buf can later be reclaimed. Valgrind is run on test/set_extent.c and there is no memory leak. * Add information of this fix to release notes. * Change macos version for CI to macos-11 until accum test failure is fixed for macos 12. 
Co-authored-by: Larry Knox --- release_docs/RELEASE.txt | 13 +++++++++++++ src/H5Ofill.c | 25 +++++++++++-------------- 2 files changed, 24 insertions(+), 14 deletions(-) diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 0b6ca4eeefd..022010cfb8e 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -227,6 +227,19 @@ Bug Fixes since HDF5-1.12.1 release (JTH - 2022/07/08, HDFFV-11316, HDFFV-11317) + - Memory leak + + A memory leak was observed with variable-length fill value in + H5O_fill_convert() function in H5Ofill.c. The leak is + manifested by running valgrind on test/set_extent.c. + + Previously, fill->buf is used for datatype conversion + if it is large enough and the variable-length information + is therefore lost. A buffer is now allocated regardless + so that the element in fill->buf can later be reclaimed. + + (VC - 2022/10/10, HDFFV-10840) + Java Library ------------ diff --git a/src/H5Ofill.c b/src/H5Ofill.c index 4106056e420..2197c09f0f8 100644 --- a/src/H5Ofill.c +++ b/src/H5Ofill.c @@ -1006,6 +1006,8 @@ H5O_fill_convert(H5O_fill_t *fill, H5T_t *dset_type, hbool_t *fill_changed) /* Don't bother doing anything if there will be no actual conversion */ if (!H5T_path_noop(tpath)) { + size_t fill_type_size; + if ((src_id = H5I_register(H5I_DATATYPE, H5T_copy(fill->type, H5T_COPY_ALL), FALSE)) < 0 || (dst_id = H5I_register(H5I_DATATYPE, H5T_copy(dset_type, H5T_COPY_ALL), FALSE)) < 0) HGOTO_ERROR(H5E_OHDR, H5E_CANTINIT, FAIL, "unable to copy/register data type") @@ -1014,13 +1016,11 @@ H5O_fill_convert(H5O_fill_t *fill, H5T_t *dset_type, hbool_t *fill_changed) * Datatype conversions are always done in place, so we need a buffer * that is large enough for both source and destination. 
*/ - if (H5T_get_size(fill->type) >= H5T_get_size(dset_type)) - buf = fill->buf; - else { - if (NULL == (buf = H5MM_malloc(H5T_get_size(dset_type)))) - HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed for type conversion") - H5MM_memcpy(buf, fill->buf, H5T_get_size(fill->type)); - } /* end else */ + fill_type_size = H5T_get_size(fill->type); + + if (NULL == (buf = H5MM_malloc(MAX(fill_type_size, H5T_get_size(dset_type))))) + HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed for type conversion") + H5MM_memcpy(buf, fill->buf, fill_type_size); /* Use CALLOC here to clear the buffer in case later the library thinks there's * data in the background. */ @@ -1032,11 +1032,10 @@ H5O_fill_convert(H5O_fill_t *fill, H5T_t *dset_type, hbool_t *fill_changed) HGOTO_ERROR(H5E_OHDR, H5E_CANTINIT, FAIL, "datatype conversion failed") /* Update the fill message */ - if (buf != fill->buf) { - H5T_vlen_reclaim_elmt(fill->buf, fill->type); - H5MM_xfree(fill->buf); - fill->buf = buf; - } /* end if */ + H5T_vlen_reclaim_elmt(fill->buf, fill->type); + H5MM_xfree(fill->buf); + fill->buf = buf; + (void)H5T_close_real(fill->type); fill->type = NULL; H5_CHECKED_ASSIGN(fill->size, ssize_t, H5T_get_size(dset_type), size_t); @@ -1050,8 +1049,6 @@ H5O_fill_convert(H5O_fill_t *fill, H5T_t *dset_type, hbool_t *fill_changed) HDONE_ERROR(H5E_OHDR, H5E_CANTDEC, FAIL, "unable to decrement ref count for temp ID") if (dst_id >= 0 && H5I_dec_ref(dst_id) < 0) HDONE_ERROR(H5E_OHDR, H5E_CANTDEC, FAIL, "unable to decrement ref count for temp ID") - if (buf != fill->buf) - H5MM_xfree(buf); if (bkg) H5MM_xfree(bkg); From 91aa02817a957831ddd363af7e4e9232b7202f25 Mon Sep 17 00:00:00 2001 From: Dana Robinson <43805+derobins@users.noreply.github.com> Date: Fri, 16 Dec 2022 13:42:48 -0800 Subject: [PATCH 009/108] 1.12: Brings the updated CI in from develop (#2311) * Brings the updated CI in from develop Does not include the -Werror checks, changes 1.14 API to 1.12 where 
needed. * Fixes an API compat issue in the mirror VFD test * Committing clang-format changes * Removed pr-check.yml Co-authored-by: github-actions <41898282+github-actions[bot]@users.noreply.github.com> --- .github/workflows/main.yml | 508 ++++++++++++++++++++++----------- .github/workflows/pr-check.yml | 220 -------------- test/mirror_vfd.c | 4 +- 3 files changed, 347 insertions(+), 385 deletions(-) delete mode 100644 .github/workflows/pr-check.yml diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index c112a1d4b37..4c2159f9d09 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -4,7 +4,8 @@ name: hdf5 dev CI on: workflow_dispatch: push: - branches: [ develop, hdf5_1_12, hdf5_1_10, hdf5_1_8 ] + pull_request: + branches: [ develop, hdf5_1_14, hdf5_1_12, hdf5_1_10, hdf5_1_8 ] paths-ignore: - '.github/CODEOWNERS' - '.github/FUNDING.yml' @@ -14,209 +15,390 @@ on: - 'COPYING**' - '**.md' -# A workflow run is made up of one or more jobs that can run sequentially or in parallel +# A workflow run is made up of one or more jobs that can run sequentially or +# in parallel. We just have one job, but the matrix items defined below will +# run in parallel. jobs: - # This workflow contains a single job called "build" - build: + + # A workflow that builds the library and runs all the tests + build_and_test: + strategy: -# fail-fast: false + + # The current matrix has three dimensions: + # + # * config name + # * thread-safety on/off + # * release vs. debug build + # + # Most configuration information is added via the 'include' mechanism, + # which will append the key-value pairs in the configuration where the + # names match. 
+ matrix: - name: ["Windows Latest MSVC", "Ubuntu Latest GCC", "Ubuntu Debug GCC", "macOS Latest Clang", "Ubuntu Autotools GCC", "Windows TS MSVC", "Ubuntu TS GCC", "TS Debug GCC", "macOS TS Clang", "TS Autotools GCC"] + + name: + - "Windows MSVC CMake" + - "Ubuntu gcc CMake" + - "Ubuntu gcc Autotools" + - "Ubuntu gcc Autotools parallel (build only)" + - "MacOS Clang CMake" + + thread_safety: + - enabled: true + text: " TS" + - enabled: false + text: "" + + build_mode: + - text: " REL" + cmake: "Release" + autotools: "production" + - text: " DBG" + cmake: "Debug" + autotools: "debug" + + # This is where we list the bulk of the options for each configuration. + # The key-value pair values are usually appropriate for being CMake or + # Autotools configure values, so be aware of that. + include: - - name: "Windows Latest MSVC" - artifact: "Windows-MSVC.tar.xz" + + # Windows w/ MSVC + CMake + # + # No Fortran, parallel, or VFDs that rely on POSIX things + - name: "Windows MSVC CMake" os: windows-2022 - build_type: "Release" toolchain: "" cpp: ON fortran: OFF java: ON - ts: OFF - hl: ON parallel: OFF + mirror_vfd: OFF + direct_vfd: OFF generator: "-G \"Visual Studio 17 2022\" -A x64" - - name: "Ubuntu Latest GCC" - artifact: "Linux.tar.xz" + run_tests: true + + # Linux (Ubuntu) w/ gcc + CMake + # + # We might think about adding Clang, but MacOS already tests that + # so it's not critical + - name: "Ubuntu gcc CMake" os: ubuntu-latest - build_type: "Release" cpp: ON fortran: ON java: ON - ts: OFF - hl: ON parallel: OFF + mirror_vfd: ON + direct_vfd: ON toolchain: "config/toolchain/GCC.cmake" generator: "-G Ninja" - - name: "macOS Latest Clang" - artifact: "macOS.tar.xz" + run_tests: true + + # Linux (Ubuntu) w/ gcc + Autotools + # + # Keep this identical to the CMake configs. Note the difference in + # the values. 
+ - name: "Ubuntu gcc Autotools" + os: ubuntu-latest + cpp: enable + fortran: enable + java: enable + parallel: disable + mirror_vfd: enable + direct_vfd: enable + deprec_sym: enable + default_api: v112 + toolchain: "" + generator: "autogen" + flags: "" + run_tests: true + + # Parallel Linux (Ubuntu) w/ gcc + Autotools + # + # The GitHub runners are inadequate for running parallel HDF5 tests, + # so we catch most issues in daily testing. What we have here is just + # a compile check to make sure nothing obvious is broken. + - name: "Ubuntu gcc Autotools parallel (build only)" + os: ubuntu-latest + cpp: disable + fortran: enable + java: disable + parallel: enable + mirror_vfd: disable + direct_vfd: disable + deprec_sym: enable + default_api: v112 + toolchain: "" + generator: "autogen" + flags: "CC=mpicc" + run_tests: false + + # MacOS w/ Clang + CMake + # + # We could also build with the Autotools via brew installing them, + # but that seems unnecessary + - name: "MacOS Clang CMake" os: macos-11 - build_type: "Release" cpp: ON fortran: OFF java: ON - ts: OFF - hl: ON parallel: OFF + mirror_vfd: ON + direct_vfd: OFF toolchain: "config/toolchain/clang.cmake" generator: "-G Ninja" - - name: "Ubuntu Debug GCC" - artifact: "LinuxDBG.tar.xz" - os: ubuntu-latest - build_type: "Debug" - cpp: ON - fortran: OFF - java: OFF - ts: OFF - hl: ON - parallel: OFF - toolchain: "config/toolchain/GCC.cmake" - generator: "-G Ninja" - - name: "Ubuntu Autotools GCC" - artifact: "LinuxA.tar.xz" + run_tests: true + + + # + # SPECIAL AUTOTOOLS BUILDS + # + # These do not run tests and are not built into the matrix and instead + # become NEW configs as their name would clobber one of the matrix + # names (so make sure the names are UNIQUE). 
+ # + + - name: "Ubuntu gcc Autotools v1.6 default API (build only)" os: ubuntu-latest - build_type: "Release" cpp: enable fortran: enable java: enable - ts: disable - hl: enable parallel: disable + mirror_vfd: enable + direct_vfd: enable + deprec_sym: enable + default_api: v16 toolchain: "" generator: "autogen" -# Threadsafe runs - - name: "Windows TS MSVC" - artifact: "Windows-MSVCTS.tar.xz" - os: windows-2019 - build_type: "Release" + flags: "" + run_tests: false + thread_safety: + enabled: false + text: "" + build_mode: + text: " DBG" + cmake: "Debug" + autotools: "debug" + + - name: "Ubuntu gcc Autotools v1.8 default API (build only)" + os: ubuntu-latest + cpp: enable + fortran: enable + java: enable + parallel: disable + mirror_vfd: enable + direct_vfd: enable + deprec_sym: enable + default_api: v18 toolchain: "" - cpp: OFF - fortran: OFF - java: OFF - ts: ON - hl: OFF - parallel: OFF - generator: "-G \"Visual Studio 16 2019\" -A x64" - - name: "Ubuntu TS GCC" - artifact: "LinuxTS.tar.xz" + generator: "autogen" + flags: "" + run_tests: false + thread_safety: + enabled: false + text: "" + build_mode: + text: " DBG" + cmake: "Debug" + autotools: "debug" + + - name: "Ubuntu gcc Autotools v1.10 default API (build only)" os: ubuntu-latest - build_type: "Release" - cpp: OFF - fortran: OFF - java: OFF - ts: ON - hl: OFF - parallel: OFF - toolchain: "config/toolchain/GCC.cmake" - generator: "-G Ninja" - - name: "macOS TS Clang" - artifact: "macOSTS.tar.xz" - os: macos-11 - build_type: "Release" - cpp: OFF - fortran: OFF - java: OFF - ts: ON - hl: OFF - parallel: OFF - toolchain: "config/toolchain/clang.cmake" - generator: "-G Ninja" - - name: "TS Debug GCC" - artifact: "LinuxTSDBG.tar.xz" + cpp: enable + fortran: enable + java: enable + parallel: disable + mirror_vfd: enable + direct_vfd: enable + deprec_sym: enable + default_api: v110 + toolchain: "" + generator: "autogen" + flags: "" + run_tests: false + thread_safety: + enabled: false + text: "" + build_mode: + 
text: " DBG" + cmake: "Debug" + autotools: "debug" + + - name: "Ubuntu gcc Autotools v1.12 default API (build only)" os: ubuntu-latest - build_type: "Debug" - cpp: OFF - fortran: OFF - java: OFF - ts: ON - hl: OFF - parallel: OFF - toolchain: "config/toolchain/GCC.cmake" - generator: "-G Ninja" - - name: "TS Autotools GCC" - artifact: "LinuxATS.tar.xz" + cpp: enable + fortran: enable + java: enable + parallel: disable + mirror_vfd: enable + direct_vfd: enable + deprec_sym: enable + default_api: v112 + toolchain: "" + generator: "autogen" + flags: "" + run_tests: false + thread_safety: + enabled: false + text: "" + build_mode: + text: " DBG" + cmake: "Debug" + autotools: "debug" + + - name: "Ubuntu gcc Autotools no deprecated symbols (build only)" os: ubuntu-latest - build_type: "Release" - cpp: disable - fortran: disable - java: disable - ts: enable - hl: disable + cpp: enable + fortran: enable + java: enable parallel: disable + mirror_vfd: enable + direct_vfd: enable + deprec_sym: disable + default_api: v112 toolchain: "" generator: "autogen" -# - name: "Ubuntu Parallel GCC" -# artifact: "LinuxPar.tar.xz" -# os: ubuntu-latest -# build_type: "Release" -# cpp: OFF -# fortran: OFF -# parallel: ON -# toolchain: "config/toolchain/GCC.cmake" -# generator: "-G Ninja" - - name: ${{ matrix.name }} + flags: "" + run_tests: false + thread_safety: + enabled: false + text: "" + build_mode: + text: " DBG" + cmake: "Debug" + autotools: "debug" + + # Sets the job's name from the properties + name: "${{ matrix.name }}${{ matrix.build_mode.text }}${{ matrix.thread_safety.text }}" + + # Don't run the action if the commit message says to skip CI + if: "!contains(github.event.head_commit.message, 'skip-ci')" + # The type of runner that the job will run on runs-on: ${{ matrix.os }} - if: "!contains(github.event.head_commit.message, 'skip-ci')" - + # Steps represent a sequence of tasks that will be executed as part of the job steps: - - name: Install Dependencies (Linux) - run: sudo 
apt-get install ninja-build - if: matrix.os == 'ubuntu-latest' - - name: Install Autotools Dependencies (Linux) - run: sudo apt-get install automake autoconf libtool libtool-bin - if: matrix.generator == 'autogen' - - name: Install Dependencies (Windows) - run: choco install ninja - if: matrix.os == 'windows-latest' - - name: Install Dependencies (macOS) - run: brew install ninja - if: matrix.os == 'macos-11' - - name: Set environment for MSVC (Windows) - if: matrix.os == 'windows-latest' - run: | - # Set these env vars so cmake picks the correct compiler - echo "CXX=cl.exe" >> $GITHUB_ENV - echo "CC=cl.exe" >> $GITHUB_ENV + + # + # SETUP + # + + #Useful for debugging + - name: Dump matrix context + run: echo '${{ toJSON(matrix) }}' + + - name: Install CMake Dependencies (Linux) + run: sudo apt-get install ninja-build + if: matrix.os == 'ubuntu-latest' + + - name: Install Autotools Dependencies (Linux, serial) + run: | + sudo apt update + sudo apt install automake autoconf libtool libtool-bin + sudo apt install gcc-11 g++-11 gfortran-11 + echo "CC=gcc-11" >> $GITHUB_ENV + echo "CXX=g++-11" >> $GITHUB_ENV + echo "FC=gfortran-11" >> $GITHUB_ENV + if: (matrix.generator == 'autogen') && (matrix.parallel != 'enable') + + - name: Install Autotools Dependencies (Linux, parallel) + run: | + sudo apt update + sudo apt install automake autoconf libtool libtool-bin + sudo apt install openmpi-bin openmpi-common mpi-default-dev + echo "CC=mpicc" >> $GITHUB_ENV + echo "FC=mpif90" >> $GITHUB_ENV + if: (matrix.generator == 'autogen') && (matrix.parallel == 'enable') + + - name: Install Dependencies (Windows) + run: choco install ninja + if: matrix.os == 'windows-latest' + + - name: Install Dependencies (macOS) + run: brew install ninja + if: matrix.os == 'macos-11' + + - name: Set environment for MSVC (Windows) + run: | + # Set these environment variables so CMake picks the correct compiler + echo "CXX=cl.exe" >> $GITHUB_ENV + echo "CC=cl.exe" >> $GITHUB_ENV + if: matrix.os == 
'windows-latest' - # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - name: Get Sources - uses: actions/checkout@v2 - - - name: Autotools Configure - if: matrix.generator == 'autogen' - run: | - sh ./autogen.sh - mkdir "${{ runner.workspace }}/build" - cd "${{ runner.workspace }}/build" - $GITHUB_WORKSPACE/configure --enable-shared --${{ matrix.ts }}-threadsafe --${{ matrix.hl }}-hl --${{ matrix.parallel }}-parallel --${{ matrix.cpp }}-cxx --${{ matrix.fortran }}-fortran --${{ matrix.java }}-java - shell: bash - - - name: Configure - if: matrix.generator != 'autogen' - run: | - mkdir "${{ runner.workspace }}/build" - cd "${{ runner.workspace }}/build" - cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_THREADSAFE:BOOL=${{ matrix.ts }} -DHDF5_BUILD_HL_LIB:BOOL=${{ matrix.hl }} -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=${{ matrix.cpp }} -DHDF5_BUILD_FORTRAN=${{ matrix.fortran }} -DHDF5_BUILD_JAVA=${{ matrix.java }} $GITHUB_WORKSPACE - shell: bash - - - name: Autotools Build - if: matrix.generator == 'autogen' - run: make - working-directory: ${{ runner.workspace }}/build - - - name: Build - if: matrix.generator != 'autogen' - run: cmake --build . --config ${{ matrix.build_type }} - working-directory: ${{ runner.workspace }}/build - - - name: Autotools Test - if: matrix.generator == 'autogen' - run: make check - working-directory: ${{ runner.workspace }}/build - - - name: Test - if: matrix.generator != 'autogen' - run: ctest --build . 
-C ${{ matrix.build_type }} -V - working-directory: ${{ runner.workspace }}/build + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - name: Get Sources + uses: actions/checkout@v3 + + # + # AUTOTOOLS CONFIGURE + # + + - name: Autotools Configure + run: | + sh ./autogen.sh + mkdir "${{ runner.workspace }}/build" + cd "${{ runner.workspace }}/build" + ${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --${{ matrix.deprec_sym }}-deprecated-symbols --with-default-api-version=${{ matrix.default_api }} --enable-shared --${{ matrix.parallel }}-parallel --${{ matrix.cpp }}-cxx --${{ matrix.fortran }}-fortran --${{ matrix.java }}-java --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd + shell: bash + if: (matrix.generator == 'autogen') && (! matrix.thread_safe.enabled) + + - name: Autotools Configure (Thread-Safe) + run: | + sh ./autogen.sh + mkdir "${{ runner.workspace }}/build" + cd "${{ runner.workspace }}/build" + ${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --enable-shared --enable-threadsafe --disable-hl --${{ matrix.parallel }}-parallel --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd + shell: bash + if: (matrix.generator == 'autogen') && (matrix.thread_safe.enabled) + + # + # CMAKE CONFIGURE + # + + - name: CMake Configure + run: | + mkdir "${{ runner.workspace }}/build" + cd "${{ runner.workspace }}/build" + cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=${{ matrix.cpp }} -DHDF5_BUILD_FORTRAN=${{ matrix.fortran }} -DHDF5_BUILD_JAVA=${{ matrix.java }} -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} 
$GITHUB_WORKSPACE + shell: bash + if: (matrix.generator != 'autogen') && (! matrix.thread_safe.enabled) + + + - name: CMake Configure (Thread-Safe) + run: | + mkdir "${{ runner.workspace }}/build" + cd "${{ runner.workspace }}/build" + cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_HL_LIB:BOOL=OFF -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} $GITHUB_WORKSPACE + shell: bash + if: (matrix.generator != 'autogen') && (matrix.thread_safe.enabled) + + # + # BUILD + # + + - name: Autotools Build + run: make + working-directory: ${{ runner.workspace }}/build + if: matrix.generator == 'autogen' + + - name: CMake Build + run: cmake --build . --config ${{ matrix.build_mode.cmake }} + working-directory: ${{ runner.workspace }}/build + if: matrix.generator != 'autogen' + + # + # RUN TESTS + # + + - name: Autotools Run Tests + run: make check + working-directory: ${{ runner.workspace }}/build + if: (matrix.generator == 'autogen') && (matrix.run_tests) + + - name: CMake Run Tests + run: ctest --build . -C ${{ matrix.build_mode.cmake }} -V + working-directory: ${{ runner.workspace }}/build + # Skip Debug MSVC while we investigate H5L Java test timeouts + if: (matrix.generator != 'autogen') && (matrix.run_tests) && ! ((matrix.name == 'Windows MSVC CMake') && (matrix.build_mode.cmake == 'Debug')) diff --git a/.github/workflows/pr-check.yml b/.github/workflows/pr-check.yml deleted file mode 100644 index b48f835dbe1..00000000000 --- a/.github/workflows/pr-check.yml +++ /dev/null @@ -1,220 +0,0 @@ -name: PR hdf5 dev CI - -# Controls when the action will run. 
Triggers the workflow on push or pull request -on: - pull_request: - branches: [ develop, hdf5_1_12, hdf5_1_10, hdf5_1_8 ] - paths-ignore: - - '.github/**' - - 'doc/**' - - 'release_docs/**' - - 'ACKNOWLEDGEMENTS' - - 'COPYING**' - - '**.md' - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - # This workflow contains a single job called "build" - build: - strategy: -# fail-fast: false - matrix: - name: ["Windows Latest MSVC", "Ubuntu Latest GCC", "Ubuntu Debug GCC", "macOS Latest Clang", "Ubuntu Autotools GCC", "Windows TS MSVC", "Ubuntu TS GCC", "TS Debug GCC", "macOS TS Clang", "TS Autotools GCC"] - include: - - name: "Windows Latest MSVC" - artifact: "Windows-MSVC.tar.xz" - os: windows-2022 - build_type: "Release" - toolchain: "" - cpp: ON - fortran: OFF - java: ON - ts: OFF - hl: ON - parallel: OFF - generator: "-G \"Visual Studio 17 2022\" -A x64" - - name: "Ubuntu Latest GCC" - artifact: "Linux.tar.xz" - os: ubuntu-latest - build_type: "Release" - cpp: ON - fortran: ON - java: ON - ts: OFF - hl: ON - parallel: OFF - toolchain: "config/toolchain/GCC.cmake" - generator: "-G Ninja" - - name: "macOS Latest Clang" - artifact: "macOS.tar.xz" - os: macos-11 - build_type: "Release" - cpp: ON - fortran: OFF - java: ON - ts: OFF - hl: ON - parallel: OFF - toolchain: "config/toolchain/clang.cmake" - generator: "-G Ninja" - - name: "Ubuntu Debug GCC" - artifact: "LinuxDBG.tar.xz" - os: ubuntu-latest - build_type: "Debug" - cpp: ON - fortran: OFF - java: OFF - ts: OFF - hl: ON - parallel: OFF - toolchain: "config/toolchain/GCC.cmake" - generator: "-G Ninja" - - name: "Ubuntu Autotools GCC" - artifact: "LinuxA.tar.xz" - os: ubuntu-latest - build_type: "Release" - cpp: enable - fortran: enable - java: enable - ts: disable - hl: enable - parallel: disable - toolchain: "" - generator: "autogen" -# Threadsafe runs - - name: "Windows TS MSVC" - artifact: "Windows-MSVCTS.tar.xz" - os: windows-2019 - build_type: "Release" - 
toolchain: "" - cpp: OFF - fortran: OFF - java: OFF - ts: ON - hl: OFF - parallel: OFF - generator: "-G \"Visual Studio 16 2019\" -A x64" - - name: "Ubuntu TS GCC" - artifact: "LinuxTS.tar.xz" - os: ubuntu-latest - build_type: "Release" - cpp: OFF - fortran: OFF - java: OFF - ts: ON - hl: OFF - parallel: OFF - toolchain: "config/toolchain/GCC.cmake" - generator: "-G Ninja" - - name: "macOS TS Clang" - artifact: "macOSTS.tar.xz" - os: macos-11 - build_type: "Release" - cpp: OFF - fortran: OFF - java: OFF - ts: ON - hl: OFF - parallel: OFF - toolchain: "config/toolchain/clang.cmake" - generator: "-G Ninja" - - name: "TS Debug GCC" - artifact: "LinuxTSDBG.tar.xz" - os: ubuntu-latest - build_type: "Debug" - cpp: OFF - fortran: OFF - java: OFF - ts: ON - hl: OFF - parallel: OFF - toolchain: "config/toolchain/GCC.cmake" - generator: "-G Ninja" - - name: "TS Autotools GCC" - artifact: "LinuxATS.tar.xz" - os: ubuntu-latest - build_type: "Release" - cpp: disable - fortran: disable - java: disable - ts: enable - hl: disable - parallel: disable - toolchain: "" - generator: "autogen" -# - name: "Ubuntu Parallel GCC" -# artifact: "LinuxPar.tar.xz" -# os: ubuntu-latest -# build_type: "Release" -# cpp: OFF -# fortran: OFF -# parallel: ON -# toolchain: "config/toolchain/GCC.cmake" -# generator: "-G Ninja" - - name: ${{ matrix.name }} - # The type of runner that the job will run on - runs-on: ${{ matrix.os }} - if: "!contains(github.event.head_commit.message, 'skip-ci')" - - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - - name: Install Dependencies (Linux) - run: sudo apt-get install ninja-build - if: matrix.os == 'ubuntu-latest' - - name: Install Autotools Dependencies (Linux) - run: sudo apt-get install automake autoconf libtool libtool-bin - if: matrix.generator == 'autogen' - - name: Install Dependencies (Windows) - run: choco install ninja - if: matrix.os == 'windows-latest' - - name: Install Dependencies (macOS) - run: brew install 
ninja - if: matrix.os == 'macos-11' - - name: Set environment for MSVC (Windows) - if: matrix.os == 'windows-latest' - run: | - # Set these env vars so cmake picks the correct compiler - echo "CXX=cl.exe" >> $GITHUB_ENV - echo "CC=cl.exe" >> $GITHUB_ENV - - # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - name: Get Sources - uses: actions/checkout@v2 - - - name: Autotools Configure - if: matrix.generator == 'autogen' - run: | - sh ./autogen.sh - mkdir "${{ runner.workspace }}/build" - cd "${{ runner.workspace }}/build" - $GITHUB_WORKSPACE/configure --enable-shared --${{ matrix.ts }}-threadsafe --${{ matrix.hl }}-hl --${{ matrix.parallel }}-parallel --${{ matrix.cpp }}-cxx --${{ matrix.fortran }}-fortran --${{ matrix.java }}-java - shell: bash - - - name: Configure - if: matrix.generator != 'autogen' - run: | - mkdir "${{ runner.workspace }}/build" - cd "${{ runner.workspace }}/build" - cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_THREADSAFE:BOOL=${{ matrix.ts }} -DHDF5_BUILD_HL_LIB:BOOL=${{ matrix.hl }} -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=${{ matrix.cpp }} -DHDF5_BUILD_FORTRAN=${{ matrix.fortran }} -DHDF5_BUILD_JAVA=${{ matrix.java }} $GITHUB_WORKSPACE - shell: bash - - - name: Autotools Build - if: matrix.generator == 'autogen' - run: make - working-directory: ${{ runner.workspace }}/build - - - name: Build - if: matrix.generator != 'autogen' - run: cmake --build . --config ${{ matrix.build_type }} - working-directory: ${{ runner.workspace }}/build - - - name: Autotools Test - if: matrix.generator == 'autogen' - run: make check - working-directory: ${{ runner.workspace }}/build - - - name: Test - if: matrix.generator != 'autogen' - run: ctest --build . 
-C ${{ matrix.build_type }} -V - working-directory: ${{ runner.workspace }}/build diff --git a/test/mirror_vfd.c b/test/mirror_vfd.c index 1a224c4488e..f8caea8a57c 100644 --- a/test/mirror_vfd.c +++ b/test/mirror_vfd.c @@ -1573,8 +1573,8 @@ _create_chunking_ids(hid_t file_id, unsigned min_dset, unsigned max_dset, hsize_ FAIL_PUTS_ERROR(mesg); } - dataset_ids[m] = - H5Dcreate(file_id, dset_name, H5T_STD_I32BE, dataspace_ids[m], H5P_DEFAULT, dcpl_id, H5P_DEFAULT); + dataset_ids[m] = H5Dcreate2(file_id, dset_name, H5T_STD_I32BE, dataspace_ids[m], H5P_DEFAULT, dcpl_id, + H5P_DEFAULT); if (dataset_ids[m] < 0) { HDsnprintf(mesg, MIRR_MESG_SIZE, "unable to create dset ID %d\n", m); FAIL_PUTS_ERROR(mesg); From 6101a0cc836a42d99ac4fb9097ad396c007d3a05 Mon Sep 17 00:00:00 2001 From: Dana Robinson <43805+derobins@users.noreply.github.com> Date: Wed, 4 Jan 2023 16:28:26 -0800 Subject: [PATCH 010/108] Brings README.md changes from develop (#2389) --- README.md | 38 +++++++++++++++--------- doc/img/release-schedule.plantuml | 47 ++++++++++++------------------ doc/img/release-schedule.png | Bin 16991 -> 13977 bytes 3 files changed, 43 insertions(+), 42 deletions(-) diff --git a/README.md b/README.md index 961c1373eac..38e48ed1c8d 100644 --- a/README.md +++ b/README.md @@ -1,18 +1,24 @@ -HDF5 version 1.12.3-1 currently under development +HDF5 version 1.15.0 currently under development ![HDF5 Logo](doxygen/img/HDF5.png) -*Please refer to the release_docs/INSTALL file for installation instructions.* +[![develop build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/main.yml?branch=develop&label=develop)](https://github.com/HDFGroup/hdf5/actions?query=branch%3Adevelop) +[![1.14 build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/main.yml?branch=hdf5_1_14&label=1.14)](https://github.com/HDFGroup/hdf5/actions?query=branch%3Ahdf5_1_14) +[![1.12 build 
status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/main.yml?branch=hdf5_1_12&label=1.12)](https://github.com/HDFGroup/hdf5/actions?query=branch%3Ahdf5_1_12) +[![1.10 build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/main.yml?branch=hdf5_1_10&label=1.10)](https://github.com/HDFGroup/hdf5/actions?query=branch%3Ahdf5_1_10) +[![1.8 build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/main.yml?branch=hdf5_1_8&label=1.8)](https://github.com/HDFGroup/hdf5/actions?query=branch%3Ahdf5_1_8) +[![BSD](https://img.shields.io/badge/License-BSD-blue.svg)](https://github.com/HDFGroup/hdf5/blob/develop/COPYING) -THE HDF GROUP ---------------- +*Please refer to the release_docs/INSTALL file for installation instructions.* -The HDF Group is the developer of HDF5®, a high-performance software library and -data format that has been adopted across multiple industries and has become a -de facto standard in scientific and research communities. +This repository contains a high-performance library's source code and a file format +specification that implement the HDF5® data model. The model has been adopted across +many industries and this implementation has become a de facto data management standard +in science, engineering, and research communities worldwide. -More information about The HDF Group, the HDF5 Community and the HDF5 software -project, tools and services can be found at the Group's website. +The HDF Group is the developer, maintainer, and steward of HDF5 software. Find more +information about The HDF Group, the HDF5 Community, and other HDF5 software projects, +tools, and services at The HDF Group's website. https://www.hdfgroup.org/ @@ -77,13 +83,19 @@ RELEASE SCHEDULE HDF5 does not release on a regular schedule. Instead, releases are driven by new features and bug fixes, though we try to have at least one release of each maintenance branch per year. 
Future HDF5 releases indicated on this schedule -are tentative. +are tentative. + +**NOTE**: HDF5 1.12 is being retired early due to its incomplete and incompatible VOL +layer. | Release | New Features | | ------- | ------------ | -| 1.13.2 | VFD SWMR, Onion VFD, Selection I/O | -| 1.13.3 | Multi-Dataset I/O | -| 1.13.4 | Subfiling | +| 1.8.23 | last HDF5 1.8 release | +| 1.10.10 | CVE fixes, performance improvements, H5Dchunk\_iter() | +| 1.12.3 | CVE fixes, performance improvements, H5Dchunk\_iter(), last HDF5 1.12 release | +| 1.14.1 | selection I/O with datatype conversion | +| 2.0.0 | TBD | +| TBD | VFD SWMR | This list of feature release versions is also tentative, and the specific release in which a feature is introduced may change. diff --git a/doc/img/release-schedule.plantuml b/doc/img/release-schedule.plantuml index f5aa62a2856..c724dc98802 100644 --- a/doc/img/release-schedule.plantuml +++ b/doc/img/release-schedule.plantuml @@ -2,44 +2,33 @@ The release timeline was generated on PlantUML (https://plantuml.com) The current script: -@startuml +@startgantt + title HDF5 Release Schedule projectscale monthly -Project starts 2021-01-01 +Project starts 2022-01-01 -[1.8] starts 2021-01-01 and lasts 114 weeks -[1.8.22] happens 2021-02-05 -[1.8.23] happens 2022-12-31 -[1.8.23] displays on same row as [1.8.22] -[1.8] is colored in #CC6677 +[1.8] starts 2022-01-01 and lasts 57 weeks +[1.8.23] happens 2023-01-31 +[1.8] is colored in #F76969 -[1.10] starts 2021-01-01 and lasts 114 weeks -[1.10.8] happens 2021-10-22 +[1.10] starts 2022-01-01 and lasts 104 weeks [1.10.9] happens 2022-05-31 -[1.10.9] displays on same row as [1.10.8] -[1.10] is colored in #DDCC77 +[1.10.10] happens 2023-02-28 +[1.10.10] displays on same row as [1.10.9] +[1.10] is colored in #F6DD60 -[1.12] starts 2021-01-01 and lasts 114 weeks -[1.12.1] happens 2021-07-01 +[1.12] starts 2022-01-01 and lasts 65 weeks [1.12.2] happens 2022-04-30 -[1.12.2] displays on same row as [1.12.1] +[1.12.3] happens 
2023-03-31 +[1.12.3] displays on same row as [1.12.2] [1.12] is colored in #88CCEE -[1.13] starts 2021-01-01 and lasts 104 weeks -[1.13.0] happens 2021-12-01 -[1.13.1] happens 2022-03-02 -[1.13.2] happens 2022-06-31 -[1.13.3] happens 2022-08-31 -[1.13.4] happens 2022-10-31 -[1.13.1] displays on same row as [1.13.0] -[1.13.2] displays on same row as [1.13.0] -[1.13.3] displays on same row as [1.13.0] -[1.13.4] displays on same row as [1.13.0] -[1.13] is colored in #44AA99 - -[1.14] starts at 2022-12-31 and lasts 10 weeks +[1.14] starts at 2023-01-01 and lasts 52 weeks [1.14.0] happens at 2022-12-31 -[1.14] is colored in #AA4499 -@enduml +[1.14.1] happens at 2023-04-30 +[1.14.1] displays on same row as [1.14.0] +[1.14] is colored in #B187CF +@endgantt diff --git a/doc/img/release-schedule.png b/doc/img/release-schedule.png index c2ed24142b06ff9b8753f0d8ab53ab2728b59233..b96f741a02953245d03f6420552621181d8aa608 100755 GIT binary patch literal 13977 zcmeHuWn5L=*6*T}5D+OrNrKC$*xebR*p#^N_UGi(jA+WymRAu z&U>D7-!J#x-}~kM4jeU1!ssCW!a?8K24-Psg{w6tK+x3QV=%?}4HM>yS2M}t_~iYVHRH&Z#w$PF{XE9I`<7kl*1QWceG99{ z-YL_$Hwvq4Oqh_vW#ktTQL#chwKq3Jzkgh!O=}z8p&*@N%@P?g&w4A=U{;2*@TD`D zWh==S#Ti}g+>)!W>pnJ@pbT? zH&vMu^^%)ed|CJAX%l;iLgJpT2iB~WuAa(hQu4yxBw-|PC=AJ8E)KsE*xV@^+{vu- z!P&ZT5%l2w_)w-$R!^gsLs{omw^f4=jrz-nf+_$UT&KbLB{b zNfz~Mrx&xLu!w*>qM*gta`PatewQX->?Q9kO7+KIldhOXdpUBP*|ojBAIBhPOBY+!D9S6n?c=58IL7@b&#Hac$h z3Qp-JcR>_E(ARg8+t+XMCxs~xfd#$r=0}7p z^a+BYB3@r3!O;<~Uy&E_5U<{79W;nn2rCjy9()l|MO?o!{dWW8^HZ=o&w5mCR_EuJ zHe{fa$vCa%m*zka_wEMQ4M>%5eOSr0+AcX?x5?AO!eXT_b*E+}?fZAp$2zyz7Ur}o z&0=?x9!FqcVVRGYEy;0DJFE`eho#`85mNP^3=IvDup4Q)uH{w`M@2;%#z=TD7b*mmj852tB;V)q^F)<@0ud38bUhX|u%aKdwl16b&xIL(9Eh6$3 zaTi+4Z^;@uuv~umo47cU#{31mI=JMeJv}`ixUOF1DWt34lzQscD@EgVbv8R}^z7NYyNb%nPdrW?E>%Ck z_Lr-sohF0u=_~BXB|PDs^i5~eJNf0F7pDaEVgWei#{Ddh9_jD&rSk2=;aS<)Vq#+W zckY}XtnJUZ?fed99IUq0f7x?5rpxNeu*N;>PQxX1b-s^BBcXV^>kct7x6PdJ_DVzb zLO9=c{eE9U!rh?^T%q$%2}||fXjnu8KBi0W8*CVyq;IyvhIz1{4F9+M$Qe$NY2p5? 
zuxG_DD}(ud=MR(`ANO3VA4^nI;MCuK6@HH1FTPm)_W24xsex&L@h#|^M@4Q|Kq<<0SOY+Wp>0!hmu`@9B4--hK+POcsG zYbj`u;y%b2MZVqr{_}E?k!AUPsGL8GUcg!KU_fjFUEsNp8*SQ!WCVqroZO*{7qz5I zuMHVH%8gqL4+ys)&HkT54Ci~fXDw3qu=wF#mn&ZR9@|qqGxaNJ zZnt!?0z*S{jx4LD+idf{>Da$0p@`;6Er%d83XuF}H$5*CNBgH68@1f_d%0`uf5Vq1 zker?;ada+RlyHpy5^rH-X7*Sw$aC5MkZh&LDpVugo*;f#gr?WtBfO`#cj?>KWYtcl ziHOX_W_f@3J??O=!}ZbZ%b5nZvryBU(kUQGKjh}Z>U9%s^AuoEqRh#6P4xRyb%P( zb6?+5>}0#{%rvz5?RW$p&w8cbjERi}x7lu|^om*Lji9xaD7|N0A2ngzm-2gh$FtoR zws@1$T{y{qGFvj@Nb9zJ9|+mNK-Ructl8Pt_V)b;UNuC-#8t-q6x-u9_VnCO{nV?U zvU)dZWjDWC_lqcB`7Y0CI`nRWMj|xhsOV#(*}u=43(mmESm1WN4W_Z&`}60|2*J}1 z7N5Dy019diJ-w{o#t~^=moQigF=O9yZ*tAbk5|@96qg0RA|oTi$@z>73~Yoi;qplv zY|*jL8e9*DhKG|m%_R!`R^ZRfo=Hj$n+*>QnebW~85y01GJZoc{b*WxvbH1r{=w(5 z^Xzegm9lOYcH`rp=)}#oNV0KEzrN*YG3q~ihH4FO`R03b|CzE8Q*6(hT)BXMS!`ZX zE_lbn1yw*!GYELkF>U8WulurLR8)*?dM$yVvPKbP+_aRJ4{sK_%5Ed!Fo|qGt0eGp zRRRUjez^zK1%E$3rHWGRI{9?Li<7;Db*+FXU5`Vxk2($Co12rlt>xD(yJ8smTCD9W-FK@K8>O&!mO+SdVb;vU#87 zHtc%k1^(V&;ReTn?Lu4ClwJ4L<<3=-bFfpUaeo>|h&pxS-4ZQOJg`CWK%89vZztUU z(Eu*+>oe9H1NVYn_5lRtV=IP3&|7E`Z-5=FAB%SF;_@<=$p95I^Ol0w>2Qtx%DdxK z=b27$2z7R@4P-Xg!}iY;m*$X#z3os{Q;H<%x(0WDHM2II!$Fa5YiSvnB7CGCs0pz} zN(w6&7%YOED%hK{?}ybtPZvzpPWL=Db#Xaq3BoV9m@xksW&X_vP0*O98oHf9hC~>! 
zu2f_9Y)e7#cgvh)Mms(!X%`LBZBV5I_E-8>+B)CU*OXOQj7Ph*g_4LRhY+)VI0BJU zKu$@itFFGx-~A2E8(Koof2tbM6-%>ct0XOrQTL-*vsnN!ov$}@b91!>C%#yA3m_7_ z7aX;I6X3Ws+5zF5V*Gmn8C?7{Lp|bQTz0N(yxkQzBx#BB7r^}~EvBlY9(8Alhg5@e zCxuJb>%!42ozFSEVW=*ix0{j}VvzibROzakOJ-!!UBbYl_E22q4T zG5&!0#BzdTkc5cn!MYL*#xs^WTBO0(4_`z63NX)+Ns**l7vAUB-;nKc^YZqOZ6S23 z2ybRpR*5ju*p!s>qe<)Jvsaa7OEtTuUrt6&%lifG)0Q@oy+wGCUv3sk=62)RpP$&K z!G8r%b_pt0fpgV0oQ!7(JRyifaz3X}w;Q+=Jz;~{l5WL9SR};6&dWU{E1Ebg5GsSD zaBHTkYAM;Y*VV<;Q`>pe?rg`6AGWyAEf5-FI|kCy(qqX|Qc|zSj!|(;`;%V~YMT48 z?wFR$FXa~)Rpr&>vo=D0lY3zqQoo$sn=c5g+PPWa1l!_7)v4dNPW;apr^2mK#WAZE z?rrDgzdfO-vcFrpjH%Ra&Vz?Pw|xT4DW`F=my?@X-oqpGIZw%Y*GIG&8zGHinQW(;p% zAmO%3S=Iewq3L@Ao5F4L@$d_;L_v?*7$xoV&-(2Qm!OE{c=C01cTWO1Q&&7q&gogX z6BCnFD_a!`Ztz!PSV88r`4XS2;wsO~d$=Lt; z?gzlgPO)bBHh^m&;=5?-2?z;;yx{U7NupY^>_$Cv6t&;iY}B@Qkc#$tx#2yD?DX;} z)pkoVrqBHpRnSvDGF&OCsLJwCgDg{NaAxUJqEmGKwi_$%#y~dk;q^mT-px<4Z|!Im z7c{JSx}4)ApS^kW#;^1q^8|tN;!<~fa>|NrU*8+k=L>CNDk>_t6#N|*_z#{3MnqKI zI=si3U3(&Q^}9b?BCK{P?p2QG&Av1N^HraFwI>peJ26S3b zDdAvRSs%8Z#JFH*vvm3wyEf2Q3Wmy{x;OXrvBEugtG?kQ;OifMC zt!D>~ICZ4_9o1CR;BdH(+ZN|WBxZ)({aqO9xkz%>`clWPMClBlMsGe#ML38i} zn`Gm~(PWL1q9U8!Vh4}?awrM=^2W!eXKTqaESj)=F zP9ijtl~e*_6&b7)S;9XGlnlWhi=U!#3NA$YXeU^R0kSEq_=yIh!ZC+aAZ*|6~ z@?oGGGu;)#60`Oj$n>=(iSzLX61-IFoUbW*Ocw6VG$oDaNaO49mjlwVJzaONtM&I+ z6x`hqCBN`Hlg}z)D~Fe!uZ&PJ)2z!Q2RNhMKS;0OR!2j$#0NPIGvYbb9*c*JX z6I09Bma&UfKTo{*4G+pd!ie8q%Y-LfZ>kjrF| zZqZOEa@Fa{yru2l#)EZyqQ$QtFY3mdqjgX^1u}A{@hp4-d@Fq#ddGo!LR4>tyluW?fQls|sczPDMRfcK{dRQl?SLME1`QUF`&)UT((x@|exlh-^AvVs&qp%4Zy* z3p&x;%aRmejeAjh5^7Im%aB6c|1)*aF!dxe-) z=e(0XJwY#}okvhCH-N}`H9Wdj^5FylvOP=Pt5+$`d=-p6QI9kb!HL(N&ZkkFs1y<5 z`Z$}cV5RkrEOEMRpO0Nj1{ntYB3|;~hM2WRg=|h!j;u_I3%GK`SJsS$j&7+Vie^fu z(H)j4jn;JdDyL;vqZ+zutbM*WapBqX0EFQC_L`_9;Bq$5R+|emoIee!`2&JIIH%m14=F4cw!L zXZ+p2AHWv-Qy*Jf>3~j`cilf zlFCenKNP6sMK>lt)^8Iyo??If)qA?mIa%-gj(Usc7mLKzJikHLGyqteje3`Lb*LP9 zJkMQ}V-ALd$Es>o^!5Opq>&8I`BpC-lF2@nr$8AjY?n+uAmajp*Y#tTobtB{zLAW0 z=51p#obYtMRuqOqK=JOkfjXO@@Mvvy(yDH2ubnc_ft# 
znHmnx1F4!@JM=v#w#BlFsp)VHW1NGY9DkJ-)hy+M3sp z0HD_DVD|CWWP7T^EnV0Wp_ObtIPae@Z!RZWdjgK_r_o@MBXG(&fHp#4gj_>LGE=6L z$w#~tXv;vbj>i_WuNRaX(*M*zgy)H`UCSb02hBFU2T);NjcbT(Dy!nff0sGw(TgjJ z{c7xa7KJ9La$?WOJICIr;65|RAju#(DvI55{Bw!w0?=OC z==|k)|E@RjS;5r=>feotJoo1)TZ&Kx7tY?v4_BBDS2VeGCa9<`m=fmK&3O@}hZv6# z5b6O$^qyYf!|@&0;oNB2x&~D`KcO*UwfVE4Nd%<(nYVit3=h%{E?m$tc-s#q7mRZf zx`UtY%+ZC{rBtbivL|4*!lpHe4W73;we{b@BzOkfs5~3ivaeeR&^e_5@Fxv}@--k8 z2a{h!LC`@6pF;D=HBB3_?CZ`R_ByAzb;EZQuRKl!f+FAY`+R%t*jH?L8-fg|B3`q- zNM$!C0X&ij z$m4(J{E#U^=JKRn-q!X}X=BoaeObA+$(a1V58{Xa;MqHWyR&_z?C9*Qk;iOzZxMAE zG5bOgmC75K1N_G;eQIjK`fXk_AwZ(6?8`k=b_nivxkRZ z9-{KfcieQzpR#d2_#kFs5&!O;C5=RW?#;hlhz@@8`T5^0ZwJlYTk0!4*Gsot|3{;~ z<|AOot~F_4?ab=x7u!I*CPz>cUJ*}EPgT{C># zOhYm+03)M-!X6wrBn<&#>EnaEPXGyfBlHAxyLViA6_^FZ#i7sofrwiP&j*?%6Vv9` zwGh;}-}0vF`y2QE+iEtZ=+K=t~ODNE*Kulo^+WFhDSp@*oJA42W&Q zY&51cH~pRw5D}Rz;RWTzF~5p-$vO&QZF~xpURVp7)7C_6Obo3f&<+l|9(|1AMS|X5 z(=?MoGa8Aa$jwE(&)`rq>P@ON8O#Fj0&pqgrit$gd7kG;zi%-Dq8L6tKA*#?4M0}I z43Kcf^<&5fhlEHEIBfn@QkNh$8Oe`2sIzc_=hxTs18Jh9Y9@;-s4$8~5>S?&WNubI zzE{Dpqs?)!TDbSPdZsWSbQ^>%AR-GkK~xl3nn;rbR*478-E)GkHeFp<1DUC+>GE_V zCWfHsikOJ`WlUlsZ1d;i@i%vYpep#e4 z#p|S2{O-6-n50Ye2z4Bi_(riVFL9RKueP@dPPN#?-OFrI)N zTacxtju-3KCCnaMAA0GAs?mSHzR(w~jHfdjUpKluIJAkw* zd~0_uhGqR3xC0pclQ0)OtmUXzvbhp2wIOiWd(sg&AjT&FkisLBI!AfMysv+asaDQ^ z>eL;kguP+!c6_rr`p&&Vl|O$2Risdtg4677n7-g^L9Lu>0*FS#WZb%&kd}{C>AQFDew4i8HXTCcCWS;$7L{ziPXz9|@T*om2lGVs9$nN~HiC@>K~eZ0ym8v~{mK z$2V-U)$}l-j4WXawEISuEhdD@nwsOMha1k@Qw1dxPBWQe0SIF$uMLbZ=yqgLz5#@+ zgM0=ChKwWNMvVblm)HQIl3ZUKJzcW6=~zAZ@G)=}l&B(nu+{E8e)*?x6mikO`cwIc zNk}wRRiABz-Qzx59b6d8^tT0}9~g*kwKHB$&&M~piAI&6Sc#XdCgpGUNLqDOLLV{O zoDf(daa-HR7&L`_%AH04=t*HppAsfxs%%+qwaTwzISIJ3zh;dV2GPYDM`1 z&f8j@Si%Fs+<2zhplWp;C&UAaH>kZZ6V|2*p8Km8Vv@@AhZ|$@SYL=;KWcy0^W$jD zQmX@w%hZp!*RH19U<)K9+8T~$lB1)kVMyqtKrd|`= z&VNiZQ2oAXWmG1CCPo16E*2&xnc!F5Iw#B7Mvo;O$OpYdbjXZGtR|NBq3!kBU(W$w z`u?~njTmFuokC~e{LHrtQNkAS%0r&I9xz;70nG*AXs^9iVmekx#6Kr;vmz?9F19CM zXXRdR{s;l&eFgT9S@b@SNp^8@F;MDNe_aF$a&wRM$&x*_q2lA?V`jDl 
z>wP{))zu%x_lR(7}nSs>Y_=Xsf)8Vo8nN~3iy}5D__9#H#lkRC8n~Dnh_a3^k;4b zMz9#?Gn5jR{v_>P-*o`I{yC&_DtvXlGa|>K!)5j2V%-Jtv}&!zx%Z$smeB_hrHN0t z3ofiF+?_x2-bKt@jpNVEbto!VM{;f5%fUhLpf|c!hdtVL+snA0F138Co_)+?Y0qUo z`qg_;YV*Xnf|<$d>Rj<>5fA7KK=$asqlRyMRs9|QRLz<;PDAiD7d&(7e>m+$^x%=)iBCG@r=A4=*<)yCW>Ze zW_j|dAmb|Cj;)4oQR+a#U%yW<@bHW;h!;gxaDE%#(88Dh3&rn$!u}YxMTxrY{UvVB6VbwijPW!7DfgFYv1y-K3RR8)0T{kOtc&8+$I`? zivH5FB_FfXrWnINd#vUG(?76`8xpz`m`eEkyxx#7un6DFrpAN%*xuTj=*82TnL-ji z1CDZqBo68A2JdoWphPP&0+E~wMynBw^-Nq`x~Y3v5&xFtM{T)PKrI1Z1tl1VoR{_t zx*j+N)E*#AbvyRS0;>d+7GsJ{#lBl^I{+z^m6vaM-uyX^1)-C{_xJB{Tan$lLrsho zLC?W)kQ=WP-8MOj+SUga5s0FE9;XMO;&~&*AqgLMwG8NDt)Mh$AFslcXWUvlwYHAy+{wD zMazaI{7WAsC^g`4{`x!I8O0UeMS-dEq5B!+g#kS=2{>t-kLlIW|2LLX_*WY?W-nJ(13;6*A zGVz4v83Gi26)jN*nWe7Z=7)|z-?j48d!~;%a`v@kk!8r5E`Q2C#|FegQbLF;UUb5 zPl`pO_QOAT(aA^a4BGrh)ZO!%4^e{> z6c$8L5pbSq#B7Plfp*6un@DGL>*e*>=a2vEIr>B;;X3c__1)ha_3A~>XXA^BistWq z_od7)2bJu(xmF+o`n~mgS@MuJ!u@(^U)%dW04t&rDHqfhi|74p+XHgxo?Mc4B2R*A zT(5i8FrD^h{r78fNf-Lh#~=hHZZzSqYt1S_Is8P*{$DfsmlXv&@V9myVPQorEhgu; zq^Rwb5IbTECg_EPgjiUtfJI)0GJ^i}5|TokBW5J7T)h@($YbjrLrUEd(LUdf9#)pxs&m!PjAE`h_~=- zfz%Cn6c9K@1m(!a7J0S#d1d4F5PR!-u=zw z8r=U^peGcx4QYUT+fTsM|^wnc9Cd3kFGK zPS3-*YdnX)OeNvEkNlC7|b0XX6Y;Q!}-RlkWXY)x$ zTMm3{*EPELSNwvwNnB{X!NI`*n@rX^KC)I{@H2R! 
zA-=@O%DPiIrc;hn00ALCUGghs1!JKOknk7R8|+6z*XJMEUSyL=|Er^zL6-PAL|1!> zp-B`TxOg{6@J`9q`HGO~dlqE7C{NHAk;vy5)=y7_!KzWss zhAVjDZRIFq3(AMb!Js5r7?-`hJuGjeRUwW4H#mA@d@9&1XFQIl9U2)2fuM;Houhx% z=S^%#hgqu{G_Yx_aBe%f5_uIX~q>&Jo}H!_aB$< zKQ7<@?=D}OdpsTZCx{b_E~z$Kkcr9u)f}Mf`Fjbt#i2n#nKkFuJje*hyc!f5I#uV)%9-Z2 z%@Yw1ypi$^aAIQP;-){frLNQkolbVlFFt);`FsCfe(%-L9BYIyLTHGQM5aQ_Y+3h2 zr3YZw#jYW6q;G@&dbnlDqEnwRaTAv!$I0E@T~jl0|L?=Jw2Ol?c=Khs5%w{J5XC7$ ztB>99+gAospqJ0|{Z&EfaGpZBGr+rj<>$6Ypr4dXu}G4zdvGXMw&n_Sh(=?L*n$p7 zTrSWyA8R<6B|e2uK_Q=fe*7M2$?ZC8)3T{c5m-QL#0?iuOO9ej9ley#^K;v(atE!GppQAC5a{9{MYauo;yuie|QmoxGrWc>QwLnMI zxd$19+(GjiYcvG#EjoI7zz-*vFF|K9jKw{PS+J7GD|Wytt{e-xv4n2&xric`MY0Eo zK(H?GuL2Bb!qbR_=doLqN*DBK3%lnGJf&?Ceit@wi3p0V&zDRFT3TdR_Z0&Ehubbk zg?{3!KLtIwxJ^olkFPLRE6vB8)`r0I&~tJ+g8l~}q6r#z;+P9VcR}YS^;r1B_-r5& zf$pN;!H-Gmozk2= z!1KQMbKd87{y3laocY|J+nHgmeeG-SwZH4T*7`0#IT;Bo3}Orj1cLSQg{T4qa$6g` zKSM_X|3?$`n+N~U*@>yx=~-Gin;96|K_m<;3~Y4m4D>19I#U?i*;(ZDx0dEb#i+B#7YiF|NcnGH zR9XjoVttCSqswfVJis%$$B3IvpF&3V`fdM%EQw5lP;Wx1&(vr7&lp(wABL_`S>0B) z6MKv$%J;L%_c{F$zIWPBr!#_>-vv0h8cSuqcMfUuT1DBuIfrTAs9fB~2uvmiM-cFcRf%##4?Vn!~GUGqA*(TSY6vlR)6fjVuLQ6b< zN^SM`+k=8bj%*2>qbU+E8mut;0ul?~g~OK-Em)p?YL?n_QIYS*UAQn>4NF!_iBWPS z8qMdcO{LR#?+%lS=cM zGa#`jJn-uKOi*MNVai3w!Gl2N-4_w+6;-Y*xeR0}G;Y7QH8KYr(H7M&4Y)jdzNmfj z;%nE)QKAh<(t1OH%WqoUQ7RBG(S6FyWG1>9^V{BQX^w#9!I~r$RI04{<3`sH%mjUUdi>3={D4@D7H zLDmm?fPVk2FS;i(>ib4>!e>z-vj_GIVv{&H1QD8emwX&^yN<|)qKnE(a-jyO5$z1< z^zFX1cWFD>cZ*DfA5$*BTS1eNdGjl4aJ82*asRw}Ghoe5IdM5FTQTEAaO&kYA+SL3 z(rBQn_Ly$VYQ-iKYxGI;oPw;_si2}pL+otO53WG zZ>=RICCOa38(-(C{iJ=Xr>9f1+t^NtqFMzk2p6&r&@TINEt7BBkk z42+C2($o8QF3<5g@^atD{i1l)0bR9;RlG&B^4h$uELZoJxY%l&MgJeUpT&q#r6V%70de9r^P zlZ}d*IF|ae-7ZQIZ#kUKfkNHpTenfIcths$t2e)&ZdT*s;&yln9ggZPsh(hWC&Hrn zUI`@s_7bsWD*iJiD zNJM0$Ur-IY^(fR~ee|bUe}$zsyv8mtsnGJ27dCLDK)38KqEjak2(>y;iQ;pbK9x z@yRD)d5(}RrMA{qZj*sexn%WcD`_E>uLnQ;2q3W4Bfg(VLO{^QlBc_|p0cg*83h6X z+l-DmN=jSAT^Lud?RM;i+3{ygjFn{2dcHLX=D2C5hQGF~`?_-Yq=y}9IawJ7zuXCN 
zmwPEH+EQ*lv9bu(yz0C&lA*`8>hnpI&pq;s)r{os^pB`mU$PB*;&!BL#P)4Q3w6)J z)i$T9N38tmSWN~V=NrUvoAX)EwIsV=#KKSaSLb)!xGsOa(YEebPIm7_o2(NH!2j{% z$GgMz+33LxS)K&GXEM>j9lc41!e|%bn)Qf$lh4-8yjJo`ZXGyUb~y7#By959FV7`<1qBiinXe0|z%92Ju` zm~V=t$Y2h+TSUvhqb`q=FJHD9^(8Gv|DAEKpB?%zI;z4~x7Q2f#fDntapewRWBmJ^ zm$n2mETd-L|J7vH_->a~`~f=O7V_80om@3{`)w;79i8*xn9=bfIJocf4d#Wpd;dJY zV`?HWQe@!fCdgwkQEu+ofuZ;(THDQep*>8=zJoei&bdIVmQ-*2Iri3|tWZyP_r6m- z%aX8!W9E*z19ZC$I{j{|*3R|M5FLC}(f|Mxz@jFkMg~He&Hl5zh`_+Wxw*GhA0ijK zqT{u(;*+7}CWEaNznniky^VTz+bOX#N#I=|rnbw2wUL5l)URK^=DaBov|9*&lb*SCVHo?Zq6M?BWVtDYnA7-Kn~3+_)Ons6*FoZPg@0}O zvuzCTAlNHe*{$I`^#FXPBLfA6fi`k?GlIqiu>D`N#e;}d-8Z)*-)P;RaK1lhHeC}d zb+J}|5g>I$@Z`~>M|@z~k&!=c9mm6GHKe7u6{0n4UIHi`7#L_T-KlbY1bzJYv799b z2gjzpdlt>3T)SF#>CKM1Q725p%n_73xw$J|AZLea@d@vuc@atzfS6 z{;T#oSVZjU&U;G>pW2NU78gw}){{w%em8s|+j1;E8&kHn-T{cAUSR=!_)zTUcZUr= zgU$$C8kzQGQ#H5qgAsvuhk?Pt7L%21u_Iucqx%+dJN$~NADzCK4&$r0oDl@5VY}45 zI#NJ>Y(mUM{V4w6Xg(yF*S6KH!TqQAI@P6EJUAS-Rje47Xky(-0x)XouWsij6*F$9 z?w5bv)zQR6zyU!^6cb=`r>7{qLBA^>_Wy|oM2!6Z5Tb_(%=xJf z3<>fHz3d&#h7>wUDaWec9?GowePMZdBu|}(gJZ9QCYk@;VXkU5K&SQf^_rR*0AYP` zTwTc}ot>RYE`PF)j+{c;_6{d3<1-*SC=>TXf0{9hKbr{qh5`9ZQ9*Uij;S*>?Xs5l zM#EYxgiPQ8F7E22L043)5sJtB%5Urso};6qU@Q6?wJ&xFLZayuzghhPs73~b3K(`1 z^0}?$Rb-lOPF4jQ*>8*=07o+?Tq&Z1R!&8cHl~6!5zMI!i9xP{nnbDUtVIP2ovw2< zPq6U?13ZfQF#D9RQY1y^aDB{md#_Ju56OHumneGDVZwZW_joxuWBw!By#Z?*8ygdo ztc%=@i3*bG9~&QF$cJEA*yDnz3pZXUqIkRvod`3GMJc!FxrZ4L5Kv|MD_dFP0VFu> zBMg}j5Cf-&VpdzFv*_{MD;XA=e9MZvvj2#*=P zF=w$qKu3c;P@VPe7{UhL+>%qC2&?J#RIN}|RaF`-gO8BApZ!$RQoS0=jE;d}?N<%~ zQ%lEZns_gHdHJPcW)_y^#cG>H$#d1aJl-&52k?jvv^-Hdt3sB{~i8yT4N} z^(IOwC-T|Pv83H@wzjkKd4|PQt=sI)WiqgE{&%J#ipO$#(sq{0dM|B@rS3@B9ltr~ zW^xazgQ$NaRt$oB=Ad8Ejigo4?T1Tq6cHkL@*;PSr0r)T6bVJ>d|z>Ex>h2X)cNel znz;&b>~)T|XUS&BGt2?J#c~+!8?n`-+zc9MsCUN*V6XjxJVmk$MEb8Tf_?gGB*Ks9;z!uQ1 z4fKP#L>xWjmFHwIXBT6kJ;Kck!7Ht1h54xI=vG_&a5WxY?=Qw^tLn=aFIeG1N54NJ zPR8OQSZs^dK%&LP#bBc#GJ)^!{%&fFmYb`)aI>e~((&9tZ$Gl2Uq-iqJz#LXjnn{a 
z(=UK=#UA#y#TSrHZTnx!etv%MPxg=Z2W5MVZ~aEtDCAkD#|AnC=h4vSMmu8FAs%wK zChkKhL^G!*!Vu@=79S`q+!YF1E5D%PG2TO_IG}!VDaXz9rd9KYL z%;eej6_45Lp%pA(Vh8K5VkLAY^C8`2uw`X()5eiFmh7ySK5hl#YbznIiJc&|Fz~ha zDdHDH+jzlYP!cCnb>t8!It>hl-wyZ{^CX)YRljn&pCDz=i|ZGvZZUWzqp`QAy9ada@E&AXdMPQOHO1})a!hCd|8pz9dp&DR(D~@i)uEyJ2>Pf@g=@+?&Ma;PZDAU87 z^H%1%*>mktKxq7cdCJ6dZva#Qr#Ec0sS6yKp8A+%#8mbtx@+RW4oi1A+_6S4<%4Lqsueu zI`IPC#EwG}zGRoDVacGrlwtb)v*M}MZ;v{(ien0H`QqOs#Y=rS(**byK z0E3T#nVFe`747q9`yg?b~>=)FM*aytHJuwp4S&-W^c6jPuFw~z2-l`A0`XMrRkClVRS#8 zm|3Box~(K8d(YX~nZu}epSOp=00X`N$n?&0UESIKR1u@@80!i1iuZdxJp6^cwoCoE ztqmV;J8n&DTGttD<1GXFywnzUur|V?QQ5Cy)hUPwX~3UZYWE*Rx*pB?aH>YkeT%m^ z0j`^uxVF2mZ~1vgVlu04Qy7&5EeesXi!)%9KW#EZKhwxA$zZ6iPzFk5*Cg1urm=sd zXJV>77*>bb|0#Wcdgy+!m(0t{yL+iflfn8mI*4X_pXT@kf}dqQWdLUON7qnxrU%*MhaBb5~tY#NH8@jDWMgwNBgYVhNZ zmZbd0YTr2uMnwm)J_4x~1B}md)4_Kw^)I(HZcqF~;Uw|eE$&?F^hX#(ql3P}U1-NR?8gk=|jEt>`ioR{F5)c8(k%Y8~@Q_62YE%_l&eRQ~4>uSM;0S~*CSTt_Xoe;fS`B|)o$yj6j?;SvuTOh7<=<+ zy>b0^6DFzRN_yyz1QFd|><*vjFH>Za_?=h3M&lC@DDsUHw2(>j(DVlm;b&`DOw4G#`y~|SK$v|e zY{bJd(024k#v%bZxg}F-JDzsc)5QZlvDha5wJ-^?R7Xr(wcMRI1zPW?>_-xxT7Lmi z76`b%QA=#83zIg}3GNlRp9eG%XRC%w=qDL`d8PZd=W9(_jy2+VRQb3~S4|q3N!Ubp zAMF}M76fobr26-*l<2_Tnw~i5zrcjWKs%UG^rX|lCGTPEr93QLat;%d4E&wFu_|qs zyIm}UwlqY_d%<6P?g|#F&jqr3aqR+=GIts=P*NH)W7!vF2XyVWmSeX$U^vYqqb+<+ zd?_e)f2SN-)JogO$*To9BEpi2x}&}S7~=EE(iyvDz0+$A?C9-XiV~*rVjPay-AGR+ zbw@RDB4=q|yeh64V}v%Tc^hq2VH&igw=<5ih^-lkCt_d946V^nO%rpA}- zD@0PSaL7n%&Moz?41@G9We}+TTsdP0YxM zZ6kZv?fTN;-P&-T&A%@xf!yoY_)}8D6i;k0I4%H$>*?urb#;N`{MHc|?i{Yv!{i;B zfY~8u@mOJJ-UW^Q!lbx3Ns=P@V;}oJM*8}X1zr$MKOD%{go$ub?h)p=pZi(p2 z$OKQ7Sp~W;brZQ{E++4W8NDP6qwaQ&pnKJ=v8zc@pbm?mTX?6qSwF8ltz+$TFz zWayRD6c$>w_}=F*?50Sr2KM#x^OLYL_w7cst@_Ig5Oi&gmvudu*HGc;o%<2cWAlgb zz^Ks^#alW#uU=pD9)6m5&?6z4kU+b4T_4$D+6oHnb}MqG-7HxgrB;BhN#Ad8Qbhi9 z$5EkOxHUm*fCC512fIDbmNzYyR0!<{7VQCSt46Q9;!#GS)@v7xDv*>|?n{>axzZ?{ zbW7wrvtW_Z4v{pzE%%T6;&?h)WxJ~JhYW?&9aTdmVJKH3glx!g zyws%bRxedX3eN*AsU;RNHTMg@xj|aP-UMFPqwU&ve{CpxW=Sk-8rzU;T-SnLK}kTq 
zrFyFlKHS+QPAD&yg4s@&`)Suf-)ZI-z5ViA*jwNX_-}jLX}Z46n&XIL91YGLn%rp5 z`~!U%+w{1l0zpUoh-s_MHy@H!+1`!*Tx~h^y-elJ%XI~0KPg=l(vo)WdWEO0j-R-n zG@p94t8Q%-cc>^T%_2D0Z+HB*UJe_7$s*@HL&2kk&$34(GI*CucrTRYnEa^Jq-7#FQ{!t!B3}ujs_P(mG4-z@vJKz)qf!^lDf}-ML*ZoLM@>} zZu|l%Dw;+*qhHYH zNB;v?mY+?x3E74Is|59U*bh^d#`J9?EM(jn*-4pM2iImnuS~SW^B8Q@xG0I8fG1aqB~1*4?BPX90N z%f;{YE=>A*@N+!y3sW%!CB7!j&5BRO9n5}@UB%y!raM3Hoy^?_@i%xqBeb1{TGYOO zfczW>{HGMK2uCvi0Q*FN?n!};fq5MFO#L@DCE|l;lW^UWeta+3-lz1}FX{3~z6tVJ`A$L*KL?j{Auh+;wAvf>TR7ILHwYYnx;;JFl>k$I-6ZNir%%6u%@dM1O7X;D z_V;}|e(HK{2N#ZiA0l%~w{57k<6!O)e^P1X$w)I0n}_i20uS-U!RO{96Jo+Bn>bgure$3CAG^?7eO-ru+`TDC=q$diZvOJ+ zQPEW`gCYXmw%SI1YStTzWNvP5sXLbH_0TVu(Xh=%@-;!q4O*UEw$SkzBn3|YZe`4aiz*O?}jKvGR1I=)-Mrt|%m&&g_R7)*Xvzmg^=TW9{`Z{pPm`#*3 zF)`%EhDw-U1#KhL!(7yzL^$XS+I&Rl?WY1Hdq-qo9h znVG&znIb;ebFZ{11O>H@$|$OdSXfy4y1QMsYX9Edq9*0D%LGwgu4;*mt*uu~=k>Ij z;cwbQeaRM)^!3`iRWv`5XnoDiHS3I^14-+Evcm6HBT(Y$K5RurMHBp%jzcw(OQF2; zzx9_;T-@fi8P`2&adB_Ub6!yEiB3#>53(9<*W=={KbHLX0@d3rFgY2Dx~?1pfScQN z=mldQwuYmRchGYsHIZhwRw*f|&*9;M#|ya4rzHu1VB-;YCkqLI91chy276uYHBvF+ z$7zzaogi)jbhx9f#Hf#%mX@q2T(vnNA)z&hL_$8He)C^qO~WC1aD3By$pPVOz2ufuSmX>BJ6uvaUA z8G#}b5ug|#7SVZMy0<4@=A8i?O|^wV^2bpCL-F6mK3fE3%+Q$0*@^m$hybA8-BO^aNmQ<$u7 zPNXiSHm-3%6LfRj&b=>3?Fsd89yvKxTUpN3(KLB6%%IdHa-hI*!rjd^+4a#4cm^Q= zg?4h5C!U7c&7!??q>XkeRdd6>cDy@bJQ-X3jQez2NP{UE{R(T5mD3P6#Bgjsaav52 zbj-{>cj&s!_S3%H-H8WUzjJwS%heh881D*vDKd!>$ecy&SZUC6#+L;IzJRe_{COw+!}9B-Et(qx z%JrX05r2}eZf4-)mXQt)@ZIK=%yAz)h~7GIb4(bl%F<+7O?&9RVk7pMJ5M2h?U$j^ zY^{yY4eXjm02CA3xN#UMYEQr#Vop+C!?sF0Ks+SJ>xaj~N30cC|ErCF5Z;tq8!$kX zmCO4I);!{FF-1~ofDBQ9oK)tm{&%LXuC6}VPz6QB zoeK<8(o$oS)d8e$27xGdpRL>m8T`!V;r{u>?40~?thX;;zI^`tIY^lb3wxGnXrE3w z&VV$UfuZ5F(|iypU=7pHRV^pM2ftzc(gJV>AIX1cdTX1!+`H7y8H)ky%M|^ukv}8b z-|g4hLlic)-2TG+DTa_BoS=G)GoX@}n)sZNiHZ7lgA}mL9<9FjnK#s-u2ze|f z0g&b?gZf~gzyEl>yATu*$x}o*q6{B$c|VtuibS+pIlJIj?rd=}d#AdJ%8NEcz;DzO zXOM>iSpgPHVHcK%_}LstKy*637S9AH)y&L{vFaJ$bfq8D5`xSwp#xHV|QWW4I)7cCP9G2gp~II$V|3@(+eL+_f!1=@G)qP2OLWPk3%Y%C=K`35=uWAr~1SHB9+#7 
z&AiQT;(9e*1R#^)b#BT$NTqf%kU~;!fwEt}0f-Ur-@iW>pB?RT@CRjYjIy?5;2j5f z*H$jrgV@+ufRJR+K5)%Sf9kLr`d6V^*=azfwRhD(fs9U)dIuWSyCRkV|kr+y* zSn9{WKi)gpU!{sdf~+GJcIf`yF9XEH1Mf0CG+@U46niu$Af0UuS2i&*0c9$hWLM!f zOnkZRM-hOEkAp`ZGhz1HZGvU-ypVS~8s0fpVe0yG6 z;teu3K_ooixQU2}fFohAnWr3Xr`3?gOOV z&Vd82!trQe#ro1p+?m7K&UH4b7xEKnbF^r_JC>sg8S$a9N=Ps!O zP!GFxoLv_L^mSULhACmip4FRf?U5Sf235&Zom|Ts(WK+k{QoZPt4ijLpSOWvqT0#+$WNIa+ z1HDxaWr}ayH+GzexlNv46zftL^y&EA3IGrI-N=3l9$mDbsYa(x+C>j=wb-n>ubz#wH~(N14jrf*kn)C@Ox=nXn0y&YKGAUV-0&{6dsn@jV9-^B=IhH7|N6f1$hFgvJ9YR`(QCJG^F%B`p3H? zBx`GHi<$PoI(BIc+*61Q`gDYn17TX?v3YRfx%9X zcIJh7)A&&!a;n#;lktxyM+U~?tAn$Vo2t84uKQ@D_rHkNurR|e9mJyn=kAw8F2s{p zy>_QsLp(S*i|C|-6sEblGBPqC^Rrq@Lq(+r9#_c*2nhhisvZZRA>R;Rm^KkpJmhz!K-V43SWIp{9GV`q5Ut*?(G<%@?yboblS*q zm+9>c%qG{)(9(3CizutTJTdz?9Nk)BOgxhN@?#fsPwa)q4Rw;E?Yh2fbh!*=?A}b` z=>ipk5N+(W?&;$kKZVaF+LL_kq`AJMSFx*r!Vv(N+#G^#`R^iaej6+$IUC%|jVDZT;Q7^91egvwd*28Ny-BjZg0h<#5B-a_ zO)ek?s}#BZqwVg&i`jo6azF2}OZxU=%?*WZ_?Lw3HKIo6CVvGU+N@%~WHu_1%XIL{ zbkKFNfrD>|#UkP6G#lOicux?Ap#jCof7<=>eG!rd~nDbUFXfB9nIz_%; zrNuw)wx(yCATq*pSa~=cgFrDRCT3;D`0H~ddo3i^sn|qR?hQ&~nTi>~ z0~CLZUIC0i21#zRr&c=EKr#UoSRl{<4p9|;`(v)IzP^4j!I&l;EUVzxz318_gq+8A zcUFWo5-4Nt60$xc!bL^}&-x5aU1V1P0bD9-YCw1~j)*!PjSM!%N{mbfm**X)oips$ zh7BKbIBowX&~^>O24M@&`xCoV?sOf1mOlJq0P_-%-{-mcM`V0rGQ;Xn-lmo#7CjH7 zgC9hDOwIxZ2v}qPGUWH>kV38n%KpVlbP*kV zW{syd3oi^_;au@tb!2!uE!HFJ-wn5VHgmHyOO1a5B_5n9_3uX@MgWDNfgLcRkM1+} z_Zkr(X>3f>*gxE<*kUYmhy3c=FSDF_acb344cGClCS%^+*N8E{q;90A<{_45?cP7W zP#J{8gow*@2;klig?hkIK`h-V6Gsa_5a}N| z@iW6B0^98{AgY?Px**LnCAt%7^uYrWfzi^^(@RQA1IJZYIi~HQk+u!@JDD2GFaJmI z8#<}$>dH^>z%twtLV3q=9M5{5D#mLLne8jHgQ4I;inHH@Pq+l*g<3urQ0t-`VzF@rG z7_R-NNii`m@FgbKYsmxi)XF4{3uY3z$^ch3AIc0XYdPt$(bd&$cPr^8F9;8NNYj4W z_-l8u%j+=iC2MKfE*@JPnA62kRKBOvE&inT_I6Oalrk`FI)=wm%faCg)R2Gp zcNXe*JXkdHi71j3bkbY*ZdidsQ?p)Ue_+Par1UMSn)Lr;YYMkcdoR1!PVh2NE zhUO3?1XIun7@(lfOHG{`#L7AZJ7sp^3k;h84=+Z!JLUPu?ioqvvz<1t@&4x~IFZBs zp--Nt6f5e#$oND9th&_OdoWP>@WFpa_`=SR_A7Rr^%*IW*t4#@_@5vB1fOIwB7!rh 
z=RfGIk)um6IU>ZL8r<94+b{*`9!bj_Alon<{MG}a9z^u$qpUw zlw2&-9K@|{Xg(5o-kH?>1wu%GNmy?q2)=S5o8bV`0XM?>FafdwAr?khaPZZtgQ8k9 z@huY3?>LfhbW;q({EbG>8;DJH9FgEELEF@sRl}i_A60DgC`FgfYJUY!Dr?gI>M;tEj z64lpMhCL2e1T+Eh(Zg3KTdVf4PqicVAZ>2#R~Cah^Lz8?g|5iOUyuHA6|&-iS~AK# zqnUI4O)AOcj4Wt0?@8RgMpe!Ce-5^u^8zzJDQ(BuW+Omgx$LF!f!VZOk1B!c+*9Qn zF|%)booQC*~n8aYQ^*M|s`b|NLb@q)n!3lf+ zOMxbtW(eDOxLWtsTT?R|n^y{-XgsmCs+LTSa|0OvF=Ku0GNoO2x8d=~Hv#`Pgz_3H z(}Oe;QKv`5t&g=nyFHAo8QSYO^ot(*73X>FzcP2)>#`kQkY-@qWHs-Pz4?Xxe_qu8 zsmmt!_xEr4Vg*7T#44wF)?XYH+_`fH@HZTWCPW~HSo^(Du9_BbY@#9}@bK_}Gn31c zTU3GRG(ZfF9cSK0gI8><^L!8wDDejT`gOC#4ZNeoE;XX!;+?2PrKNTo<7J@AA$P(r zz0u@_2?RuR(YQds<+k&KU{z?c@$siX zFx+VbbXDBk+=n0uYF7iK20%}ViIt7Wa)8$-STWZSq3GD9Y8c$|`|1;aifCMLC%=Gz z`FWrL04k`KiaM9;U3v!UA4*D~#`cp?D+Qo8h*pPZ-o8HJ$4zwr)kM9v;K(i_8=sw> z9oM_pCW=FUcE05v;^%iiD79@6WFbKupd$z)6>@VG7Z(SzOrY~6zJLEY8p!o>!1HDp z3Gngf;tj5rQ0_UmuO!^~#f5A`{&2o)f6DVE%dh-Eg!|Us{wBV(!oE7F*CB7PuP?wq zLIg;hI{?k}O#X1oAMCu0^_hAh9v-0Wv)%sPNV)r_a`yVT^fsfiad9b|U^OG(Y#K&8 zg?9-T=go0K7G5Bk{&Tag>zfp7fSvm@$dV#-_pa{9w z&C%y;ANgEnXRV;#W~Y^#`V2Z>oID2yW99(Sa$jP4h0`3tIy%Q1U(;2i!AQ@@ zJZB@scpnl+(8Y9WVZ+uHbYSdZ^CZf(RCJ20Cl42eZ2YswgSnp)J?1U*keF%OyhP5$ z>%c-N4HPi$tZ#(M zw;CB2xC6O%Pc&!Lh}OFv1$VNxmx|v~N_aMI@RI3-KrZFCd*iVdy&w+07&F`}8w0ffL} z{^|mGpqEUob&kx+bJ}Oh6`-JziAp9@y*R9>|0(qyEz>`VYdm$08xL<@YM9G({EdpCla;QZpoXO&k1~7ph zY$-e51J zrZG3hqCw8zDO+eDFGTV^^a2o0AYN6@Y8udJ-x(Q-LlH*pf`mywm7sjy6*tu+K{`xI`n z5FZO}detf$gPFSbdG~Wa3fccr>WX3t@b_PdDq((VkWBX*R6mLxx3qelj<$8Pf%>q3 z+kCtQYh^kunx8gxZ^&@0*uahzRM-i$oIdDbfW3&ZrOj1qwXiuO9|E;zpeh>xp_HV= zsV5gURYqvFerq&Kj;m&F>Rp6P#!CuRpGUvSX<<{o0lHi@wQ*2gvvj(&eQZmNvaW#0 z-f{=Gw6q`u45LNOa?-vvsIGbm&CG&K^*}}qza#eTsPhM10Xah_-jZqv|1Ns1dd%S4 zpn8z7T>~0Lpi5;^D}4&o#Z0^|GY{qnWp-lS&)2l^hUI#~`L88lxNzphJk6kqu-Tw) zf&IPSML|7S`2TI5K&7qt4=orW{1Lch5P{*mnQ_}A&`Fu}miiE@Pm4gF!1s)%reo)IUVE~thg@Bj#gy?Kg>+LB=->l{>lAvG~Y z(2kKGG*R?|X?>m>zFDgiVU+faH#bNuUV1c~4r4i?_-Oowu|}MKcTNShs;hyjHT{``it$#;uw9deGqjmON*S 
zTsoQP5HxumQ9%(QKM9XT1e(DHzd#Q^qTx!acmkZ-NZ7wEX|)IFf?ClkMg?k=m>r=IneQo>5CR(rkFPrZP z_3wa4{p4WH$cg+B0Iv0XFTFD$^z0X5x{L_nm)iAZ)a^|Jy1AwBEhOhDpcJE=iMP3g z7VM%1!1kAWABK1CQQgCs2)nU@3w^c(zCKz-S{9!K%!#lZVCC_T+OK4EPyRW=GPHnQ zVtBGf^s9vH)>hk85->`OY1(=YcNWN(}<&;S12nsEmu`6$BWrZ{VS z7A>=>kdfh$qab9NQdAXzBk9 zKk1R2Y~^QReBc4XlRg+fwR>if*hSQ{aKuY%@hoe`c^O(#`;gb`pm;#BJ<)NR*ZsoP zU;G~a?p?wG`yIcp{(Z*9=!&VU{8Sl*)uf^mn0xtV0 z+=Jhw_pq$Ucu{J@w$<3(cluBEWbu;4*?-83l4t zM0ZxnYc1|3UwS0XXEM6f5fxO;r}?k5U<4EQ)5d5Qh*;CFm2X#y)UQhx64$!uCzN$i zb1U{2L(j&{Z40*B6ZI++Cf|e5^2c{cpuu^8P@%_&l^;hovDHzPvJp+Qi1xa;M@zTH zeZgSK@#9CnEP3jX6sD-;-xJTX*9b~RO*2RB$e){%xo}>&3R%Mt%Jfsbivn5+plAvK zQXNvoqY^wWplE8@32t@xw2>(Zv^@V%DFv>NHt3Cbx~_q|%Id8jfB0@UGmC{H1VC(< zLXNwA6lflLt`aNk(nC`KWpwM1I(@m|4(9lwqm{8|1W!hN-ts# zLhxZm_g;DihIfCmGUn#z1;|3q-fZ2{DTLl4>S=w8TnaVGhOpkTfeo;fK!!K+KpN)9 z!#DdYKdIQ*!g?2lYROoy+Gj98FYVv;F%dNP<1TG}t#6=fRT(rnIs>UOICv{Z;KDE2 zRkq7~Nxy)00W_Hg{Hqh@fA)o_^DPP8m4T+sx3|BrAR@dT->@&K2TkbmjDw{gbTCXt1V`nD4Z!2*&aYjFEpc{u?b=bUr2vIJBc62ee+gBU^ ze8dcxNrc>@(A5tpd28*YfK5`A!Cf7%*`Hc@9fOW zG84)0=15Fqh~IGqE{tYGeapDXu#a^m^e;jmJQ~Iu}Gn zKyg`D2D*}#k}%2r;>q8~U*4~t`tKeh$#w21j|xN&(W~u=uqk#-rzv(CnS|epf+s6f zMZK0+4M})spw|V6U&wfE7C`!~YQdYt;`c`YBkx?@6Usq%(OI$=y!Wls^JXX#?AnGN zj5?%eAvHX1-*{2djig?2PhcO1Jb};Nf94fXWcU1Q4?L301_lQ$%R}ShI+Z~`2q1fN z+CQ|*&4`%f_8fCLjo3g3(|_JlZf53S_hGBqCNDclP-O**E-m4t_e>I}O9 z#i{0ZyN<3dM4c^=hyx^@p?$<476R4&szOkO-#612zukZgx{7wDlzQ(kI6QuRgo^9| zEdK$4YX6!85V0o09P9pM=YXOa=-k}{TI0B%NDcoBJw6xY8U_#*si_HFMA-dOOhz Date: Wed, 4 Jan 2023 17:18:35 -0800 Subject: [PATCH 011/108] Update README.md Fixes incorrect version --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 38e48ed1c8d..eedc4cfa030 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -HDF5 version 1.15.0 currently under development +HDF5 version 1.12.3-1 currently under development ![HDF5 Logo](doxygen/img/HDF5.png) From 363b26fa0d9dadcd96e611ed38956afeaa94851a Mon Sep 17 00:00:00 2001 From: Dana 
Robinson <43805+derobins@users.noreply.github.com> Date: Thu, 5 Jan 2023 08:59:24 -0800 Subject: [PATCH 012/108] Minor change to H5private.h to bring in line with develop (#2391) --- src/H5private.h | 25 +++++-------------------- 1 file changed, 5 insertions(+), 20 deletions(-) diff --git a/src/H5private.h b/src/H5private.h index e470f17f6c5..f54c6dff617 100644 --- a/src/H5private.h +++ b/src/H5private.h @@ -26,25 +26,6 @@ #include "H5public.h" /* Include Public Definitions */ -/* include the pthread header */ -#ifdef H5_HAVE_THREADSAFE -#ifdef H5_HAVE_WIN32_API -#ifndef H5_HAVE_WIN_THREADS -#ifdef H5_HAVE_PTHREAD_H -#include -#endif /* H5_HAVE_PTHREAD_H */ -#endif /* H5_HAVE_WIN_THREADS */ -#else /* H5_HAVE_WIN32_API */ -#ifdef H5_HAVE_PTHREAD_H -#include -#endif /* H5_HAVE_PTHREAD_H */ -#endif /* H5_HAVE_WIN32_API */ -#endif /* H5_HAVE_THREADSAFE */ - -/* - * Include ANSI-C header files. - */ -#ifdef H5_STDC_HEADERS #include #include #include @@ -56,7 +37,6 @@ #include #include #include -#endif /* POSIX headers */ #ifdef H5_HAVE_SYS_TIME_H @@ -82,6 +62,11 @@ #endif #endif +/* Include the Pthreads header, if necessary */ +#if defined(H5_HAVE_THREADSAFE) && defined(H5_HAVE_PTHREAD_H) +#include +#endif + /* * The `struct stat' data type for stat() and fstat(). This is a POSIX file * but often appears on non-POSIX systems also. 
The `struct stat' is required From cedd2a41f5143834084a6e2f609929f19a55cb2b Mon Sep 17 00:00:00 2001 From: Dana Robinson <43805+derobins@users.noreply.github.com> Date: Thu, 5 Jan 2023 12:51:25 -0800 Subject: [PATCH 013/108] Brings mirror VFD test fixes from develop (#2392) --- src/H5FDmirror.c | 72 +- test/mirror_vfd.c | 2913 ++++++++++++------------- test/test_mirror.sh.in | 18 +- test/use_append_chunk_mirror.c | 63 +- utils/mirror_vfd/CMakeLists.txt | 4 +- utils/mirror_vfd/mirror_server.c | 64 +- utils/mirror_vfd/mirror_server_stop.c | 11 + utils/mirror_vfd/mirror_writer.c | 37 +- 8 files changed, 1548 insertions(+), 1634 deletions(-) diff --git a/src/H5FDmirror.c b/src/H5FDmirror.c index f7f79fcecc5..c0efb96d10f 100644 --- a/src/H5FDmirror.c +++ b/src/H5FDmirror.c @@ -244,7 +244,7 @@ H5FD_mirror_init(void) FUNC_ENTER_NOAPI(H5I_INVALID_HID) - LOG_OP_CALL(FUNC); + LOG_OP_CALL(__func__); if (H5I_VFL != H5I_get_type(H5FD_MIRROR_g)) { H5FD_MIRROR_g = H5FD_register(&H5FD_mirror_g, sizeof(H5FD_class_t), FALSE); @@ -268,12 +268,12 @@ H5FD_mirror_init(void) static herr_t H5FD__mirror_term(void) { - FUNC_ENTER_STATIC_NOERR + FUNC_ENTER_PACKAGE_NOERR /* Reset VFL ID */ H5FD_MIRROR_g = 0; - LOG_OP_CALL(FUNC); + LOG_OP_CALL(__func__); FUNC_LEAVE_NOAPI(SUCCEED) } /* end H5FD__mirror_term() */ @@ -337,7 +337,7 @@ H5FD__mirror_xmit_decode_uint32(uint32_t *out, const unsigned char *_buf) /* --------------------------------------------------------------------------- * Function: is_host_little_endian * - * Purpose: Determine whether the host machine is is little-endian. + * Purpose: Determine whether the host machine is little-endian. * * Store an integer with a known value, re-map the memory to a * character array, and inspect the array's contents. 
@@ -1127,9 +1127,9 @@ H5FD__mirror_verify_reply(H5FD_mirror_t *file) ssize_t read_ret = 0; herr_t ret_value = SUCCEED; - FUNC_ENTER_STATIC + FUNC_ENTER_PACKAGE - LOG_OP_CALL(FUNC); + LOG_OP_CALL(__func__); HDassert(file && file->sock_fd); @@ -1182,9 +1182,9 @@ H5FD__mirror_fapl_get(H5FD_t *_file) H5FD_mirror_fapl_t *fa = NULL; void *ret_value = NULL; - FUNC_ENTER_STATIC + FUNC_ENTER_PACKAGE - LOG_OP_CALL(FUNC); + LOG_OP_CALL(__func__); fa = (H5FD_mirror_fapl_t *)H5MM_calloc(sizeof(H5FD_mirror_fapl_t)); if (NULL == fa) @@ -1218,9 +1218,9 @@ H5FD__mirror_fapl_copy(const void *_old_fa) H5FD_mirror_fapl_t *new_fa = NULL; void *ret_value = NULL; - FUNC_ENTER_STATIC + FUNC_ENTER_PACKAGE - LOG_OP_CALL(FUNC); + LOG_OP_CALL(__func__); new_fa = (H5FD_mirror_fapl_t *)H5MM_malloc(sizeof(H5FD_mirror_fapl_t)); if (new_fa == NULL) @@ -1250,9 +1250,9 @@ H5FD__mirror_fapl_free(void *_fa) { H5FD_mirror_fapl_t *fa = (H5FD_mirror_fapl_t *)_fa; - FUNC_ENTER_STATIC_NOERR + FUNC_ENTER_PACKAGE_NOERR - LOG_OP_CALL(FUNC); + LOG_OP_CALL(__func__); /* sanity check */ HDassert(fa != NULL); @@ -1283,7 +1283,7 @@ H5Pget_fapl_mirror(hid_t fapl_id, H5FD_mirror_fapl_t *fa_dst) FUNC_ENTER_API(FAIL) H5TRACE2("e", "i*x", fapl_id, fa_dst); - LOG_OP_CALL(FUNC); + LOG_OP_CALL(__func__); if (NULL == fa_dst) HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "fa_dst is NULL"); @@ -1324,7 +1324,7 @@ H5Pset_fapl_mirror(hid_t fapl_id, H5FD_mirror_fapl_t *fa) FUNC_ENTER_API(FAIL) H5TRACE2("e", "i*x", fapl_id, fa); - LOG_OP_CALL(FUNC); + LOG_OP_CALL(__func__); plist = H5P_object_verify(fapl_id, H5P_FILE_ACCESS); if (NULL == plist) @@ -1368,9 +1368,9 @@ H5FD__mirror_open(const char *name, unsigned flags, hid_t fapl_id, haddr_t maxad H5FD_mirror_xmit_open_t *open_xmit = NULL; H5FD_t *ret_value = NULL; - FUNC_ENTER_STATIC + FUNC_ENTER_PACKAGE - LOG_OP_CALL(FUNC); + LOG_OP_CALL(__func__); /* --------------- */ /* Check arguments */ @@ -1493,9 +1493,9 @@ H5FD__mirror_close(H5FD_t *_file) int xmit_encoded = 0; /* monitor 
point of failure */ herr_t ret_value = SUCCEED; - FUNC_ENTER_STATIC + FUNC_ENTER_PACKAGE - LOG_OP_CALL(FUNC); + LOG_OP_CALL(__func__); /* Sanity check */ HDassert(file); @@ -1565,9 +1565,9 @@ H5FD__mirror_close(H5FD_t *_file) static herr_t H5FD__mirror_query(const H5FD_t H5_ATTR_UNUSED *_file, unsigned long *flags) { - FUNC_ENTER_STATIC_NOERR; + FUNC_ENTER_PACKAGE_NOERR; - LOG_OP_CALL(FUNC); + LOG_OP_CALL(__func__); /* Notice: the Mirror VFD Writer currently uses only the Sec2 driver as * the underlying driver -- as such, the Mirror VFD implementation copies @@ -1602,9 +1602,9 @@ H5FD__mirror_get_eoa(const H5FD_t *_file, H5FD_mem_t H5_ATTR_UNUSED type) { const H5FD_mirror_t *file = (const H5FD_mirror_t *)_file; - FUNC_ENTER_STATIC_NOERR + FUNC_ENTER_PACKAGE_NOERR - LOG_OP_CALL(FUNC); + LOG_OP_CALL(__func__); HDassert(file); @@ -1629,9 +1629,9 @@ H5FD__mirror_set_eoa(H5FD_t *_file, H5FD_mem_t type, haddr_t addr) H5FD_mirror_t *file = (H5FD_mirror_t *)_file; herr_t ret_value = SUCCEED; - FUNC_ENTER_STATIC + FUNC_ENTER_PACKAGE - LOG_OP_CALL(FUNC); + LOG_OP_CALL(__func__); HDassert(file); @@ -1684,9 +1684,9 @@ H5FD__mirror_get_eof(const H5FD_t *_file, H5FD_mem_t H5_ATTR_UNUSED type) { const H5FD_mirror_t *file = (const H5FD_mirror_t *)_file; - FUNC_ENTER_STATIC_NOERR + FUNC_ENTER_PACKAGE_NOERR - LOG_OP_CALL(FUNC); + LOG_OP_CALL(__func__); HDassert(file); @@ -1705,9 +1705,9 @@ static herr_t H5FD__mirror_read(H5FD_t H5_ATTR_UNUSED *_file, H5FD_mem_t H5_ATTR_UNUSED type, hid_t H5_ATTR_UNUSED fapl_id, haddr_t H5_ATTR_UNUSED addr, size_t H5_ATTR_UNUSED size, void H5_ATTR_UNUSED *buf) { - FUNC_ENTER_STATIC_NOERR + FUNC_ENTER_PACKAGE_NOERR - LOG_OP_CALL(FUNC); + LOG_OP_CALL(__func__); FUNC_LEAVE_NOAPI(FAIL) } /* end H5FD__mirror_read() */ @@ -1738,9 +1738,9 @@ H5FD__mirror_write(H5FD_t *_file, H5FD_mem_t type, hid_t H5_ATTR_UNUSED dxpl_id, H5FD_mirror_t *file = (H5FD_mirror_t *)_file; herr_t ret_value = SUCCEED; - FUNC_ENTER_STATIC + FUNC_ENTER_PACKAGE - LOG_OP_CALL(FUNC); + 
LOG_OP_CALL(__func__); HDassert(file); HDassert(buf); @@ -1801,9 +1801,9 @@ H5FD__mirror_truncate(H5FD_t *_file, hid_t H5_ATTR_UNUSED dxpl_id, hbool_t H5_AT H5FD_mirror_t *file = (H5FD_mirror_t *)_file; herr_t ret_value = SUCCEED; - FUNC_ENTER_STATIC + FUNC_ENTER_PACKAGE - LOG_OP_CALL(FUNC); + LOG_OP_CALL(__func__); file->xmit.xmit_count = (file->xmit_i)++; file->xmit.op = H5FD_MIRROR_OP_TRUNCATE; @@ -1849,9 +1849,9 @@ H5FD__mirror_lock(H5FD_t *_file, hbool_t rw) H5FD_mirror_t *file = (H5FD_mirror_t *)_file; herr_t ret_value = SUCCEED; - FUNC_ENTER_STATIC + FUNC_ENTER_PACKAGE - LOG_OP_CALL(FUNC); + LOG_OP_CALL(__func__); file->xmit.xmit_count = (file->xmit_i)++; file->xmit.op = H5FD_MIRROR_OP_LOCK; @@ -1896,9 +1896,9 @@ H5FD__mirror_unlock(H5FD_t *_file) H5FD_mirror_t *file = (H5FD_mirror_t *)_file; herr_t ret_value = SUCCEED; - FUNC_ENTER_STATIC + FUNC_ENTER_PACKAGE - LOG_OP_CALL(FUNC); + LOG_OP_CALL(__func__); file->xmit.xmit_count = (file->xmit_i)++; file->xmit.op = H5FD_MIRROR_OP_UNLOCK; diff --git a/test/mirror_vfd.c b/test/mirror_vfd.c index f8caea8a57c..b4f4c2c185e 100644 --- a/test/mirror_vfd.c +++ b/test/mirror_vfd.c @@ -75,6 +75,25 @@ static unsigned int g_verbosity = DEFAULT_VERBOSITY; #define MIRR_MESG_SIZE 128 static char mesg[MIRR_MESG_SIZE + 1]; +/* ---------------------------------------------------------------------------- + * Structure: struct mt_opts + * + * Purpose: Convenience structure to hold options as parsed from the + * command line. + * + * `portno` (int) + * Port number, as received from arguments. + * + * `ip` (char *) + * IP address string as received from arguments. + * + * ---------------------------------------------------------------------------- + */ +struct mt_opts { + int portno; + char ip[H5FD_MIRROR_MAX_IP_LEN + 1]; +}; + /* Convenience structure for passing file names via helper functions. 
*/ struct mirrortest_filenames { @@ -95,82 +114,57 @@ static herr_t _create_chunking_ids(hid_t file_id, unsigned min_dset, unsigned ma static herr_t _close_chunking_ids(unsigned min_dset, unsigned max_dset, hid_t *dataspace_ids, hid_t *filespace_ids, hid_t *dataset_ids, hid_t *memspace_id); -static herr_t _populate_filepath(const char *dirname, const char *_basename, hid_t fapl_id, char *path_out, - hbool_t h5suffix); - -static hid_t create_mirroring_split_fapl(const char *_basename, struct mirrortest_filenames *names); - -static void mybzero(void *dest, size_t size); +static herr_t populate_filepath(const char *dirname, const char *_basename, hid_t fapl_id, char *path_out, + hbool_t h5suffix); -/* ---------------------------------------------------------------------------- - * Function: mybzero - * - * Purpose: Have bzero simplicity and abstraction in (possible) absence of - * it being available. - * - * Programmer: Jacob Smith - * 2020-03-30 - * ---------------------------------------------------------------------------- - */ -static void -mybzero(void *dest, size_t size) -{ - size_t i = 0; - char *s = NULL; - HDassert(dest != NULL); - s = (char *)dest; - for (i = 0; i < size; i++) { - *(s + i) = 0; - } -} /* end mybzero() */ +static hid_t create_mirroring_split_fapl(const char *_basename, struct mirrortest_filenames *names, + const struct mt_opts *opts); /* ---------------------------------------------------------------------------- - * Function: _populate_filepath + * Function: populate_filepath * * Purpose: Given a directory name and a base name, concatenate the two and * run h5fixname() to get the "actual" path to the intended target. * `h5suffix' should be FALSE to keep the base name unaltered; * TRUE will append the '.h5' h5suffix to the basename... 
* FALSE -> h5fixname_no_suffix(), TRUE -> h5fixname() - * / / <_basename> + * / / * - * Programmer: Jacob Smith - * 2019-08-16 * ---------------------------------------------------------------------------- */ static herr_t -_populate_filepath(const char *dirname, const char *_basename, hid_t fapl_id, char *path_out, - hbool_t h5suffix) +populate_filepath(const char *dirname, const char *basename, hid_t fapl_id, char *path_out, hbool_t h5suffix) { - char _path[H5FD_SPLITTER_PATH_MAX]; + char *path = NULL; - if ((_basename == NULL) || (*_basename == 0) || (dirname == NULL) || (*dirname == 0) || - (path_out == NULL)) { + if ((basename == NULL) || (*basename == 0) || (dirname == NULL) || (*dirname == 0) || (path_out == NULL)) + TEST_ERROR; + + if (NULL == (path = HDcalloc(H5FD_SPLITTER_PATH_MAX, sizeof(char)))) TEST_ERROR; - } - if (HDsnprintf(_path, H5FD_SPLITTER_PATH_MAX, "%s%s%s", dirname, + if (HDsnprintf(path, H5FD_SPLITTER_PATH_MAX, "%s%s%s", dirname, (dirname[HDstrlen(dirname)] == '/') ? "" : "/", /* slash iff needed */ - _basename) > H5FD_SPLITTER_PATH_MAX) { + basename) > H5FD_SPLITTER_PATH_MAX) TEST_ERROR; - } if (h5suffix == TRUE) { - if (h5_fixname(_path, fapl_id, path_out, H5FD_SPLITTER_PATH_MAX) == NULL) { + if (h5_fixname(path, fapl_id, path_out, H5FD_SPLITTER_PATH_MAX) == NULL) TEST_ERROR; - } } else { - if (h5_fixname_no_suffix(_path, fapl_id, path_out, H5FD_SPLITTER_PATH_MAX) == NULL) { + if (h5_fixname_no_suffix(path, fapl_id, path_out, H5FD_SPLITTER_PATH_MAX) == NULL) TEST_ERROR; - } } + HDfree(path); + return SUCCEED; error: + HDfree(path); return FAIL; -} /* end _populate_filepath() */ +} /* end populate_filepath() */ /* --------------------------------------------------------------------------- * Function: build_paths @@ -179,39 +173,37 @@ _populate_filepath(const char *dirname, const char *_basename, hid_t fapl_id, ch * most mirror tests. 
* * Return: SUCCEED/FAIL - * - * Programmer: Jacob Smith - * 2019-08-16 * --------------------------------------------------------------------------- */ static herr_t -build_paths(const char *_basename, H5FD_splitter_vfd_config_t *splitter_config, +build_paths(const char *basename, H5FD_splitter_vfd_config_t *splitter_config, struct mirrortest_filenames *names) { - char baselogname[H5FD_SPLITTER_PATH_MAX]; + char *baselogname = NULL; - if (_populate_filepath(MIRROR_RW_DIR, _basename, splitter_config->rw_fapl_id, names->rw, TRUE) == FAIL) { + if (NULL == (baselogname = HDcalloc(H5FD_SPLITTER_PATH_MAX, sizeof(char)))) TEST_ERROR; - } - if (_populate_filepath(MIRROR_WO_DIR, _basename, splitter_config->wo_fapl_id, names->wo, TRUE) == FAIL) { + if (populate_filepath(MIRROR_RW_DIR, basename, splitter_config->rw_fapl_id, names->rw, TRUE) < 0) TEST_ERROR; - } - if (_basename == NULL || *_basename == 0) - return FAIL; - if (HDsnprintf(baselogname, H5FD_SPLITTER_PATH_MAX, "%s_err.log", _basename) > H5FD_SPLITTER_PATH_MAX) { + if (populate_filepath(MIRROR_WO_DIR, basename, splitter_config->wo_fapl_id, names->wo, TRUE) < 0) TEST_ERROR; - } - if (_populate_filepath(MIRROR_WO_DIR, baselogname, splitter_config->wo_fapl_id, names->log, FALSE) == - FAIL) { + if (basename == NULL || *basename == 0) + TEST_ERROR; + if (HDsnprintf(baselogname, H5FD_SPLITTER_PATH_MAX, "%s_err.log", basename) > H5FD_SPLITTER_PATH_MAX) TEST_ERROR; - } + + if (populate_filepath(MIRROR_WO_DIR, baselogname, splitter_config->wo_fapl_id, names->log, FALSE) < 0) + TEST_ERROR; + + HDfree(baselogname); return SUCCEED; error: + HDfree(baselogname); return FAIL; } /* end build_paths() */ @@ -222,15 +214,12 @@ build_paths(const char *_basename, H5FD_splitter_vfd_config_t *splitter_config, * * Return: Success: 0 * Failure: -1 - * - * Programmer: Jacob Smith - * 2019-03-12 * --------------------------------------------------------------------------- */ static int test_fapl_configuration(void) { - hid_t fapl_id; + 
hid_t fapl_id = H5I_INVALID_HID; H5FD_mirror_fapl_t mirror_conf = { H5FD_MIRROR_FAPL_MAGIC, /* magic */ H5FD_MIRROR_CURR_FAPL_T_VERSION, /* version */ @@ -241,42 +230,39 @@ test_fapl_configuration(void) TESTING("Mirror fapl configuration (set/get)"); - fapl_id = H5Pcreate(H5P_FILE_ACCESS); - if (H5I_INVALID_HID == fapl_id) { + if ((fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0) TEST_ERROR; - } - if (H5Pset_fapl_mirror(fapl_id, &mirror_conf) == FAIL) { + if (H5Pset_fapl_mirror(fapl_id, &mirror_conf) < 0) TEST_ERROR; - } - if (H5Pget_fapl_mirror(fapl_id, &fa_out) == FAIL) { + if (H5Pget_fapl_mirror(fapl_id, &fa_out) < 0) TEST_ERROR; - } - if (H5FD_MIRROR_FAPL_MAGIC != fa_out.magic) { + + if (H5FD_MIRROR_FAPL_MAGIC != fa_out.magic) TEST_ERROR; - } - if (H5FD_MIRROR_CURR_FAPL_T_VERSION != fa_out.version) { + + if (H5FD_MIRROR_CURR_FAPL_T_VERSION != fa_out.version) TEST_ERROR; - } - if (SERVER_HANDSHAKE_PORT != fa_out.handshake_port) { + + if (SERVER_HANDSHAKE_PORT != fa_out.handshake_port) TEST_ERROR; - } - if (HDstrncmp(SERVER_IP_ADDRESS, (const char *)fa_out.remote_ip, H5FD_MIRROR_MAX_IP_LEN)) { + + if (HDstrncmp(SERVER_IP_ADDRESS, (const char *)fa_out.remote_ip, H5FD_MIRROR_MAX_IP_LEN)) TEST_ERROR; - } - if (H5Pclose(fapl_id) == FAIL) { + if (H5Pclose(fapl_id) < 0) TEST_ERROR; - } PASSED(); return 0; error: - if (H5I_INVALID_HID != fapl_id) { - (void)H5Pclose(fapl_id); + H5E_BEGIN_TRY + { + H5Pclose(fapl_id); } + H5E_END_TRY; return -1; } /* end test_fapl_configuration() */ @@ -301,931 +287,904 @@ test_fapl_configuration(void) } \ } while (0); /* end PRINT_BUFFER_DIFF */ -/* --------------------------------------------------------------------------- - * Function: test_xmit_encode_decode - * - * Purpose: Test byte-encoding operations for network transport. 
- * - * Return: Success: 0 - * Failure: -1 - * - * Programmer: Jacob Smith - * 2020-02-02 - * --------------------------------------------------------------------------- - */ +/*******************************************/ +/* Encode/decode tests for various C types */ +/*******************************************/ + +/* Test uint8_t encode/decode */ static int -test_xmit_encode_decode(void) +test_encdec_uint8_t(void) { - H5FD_mirror_xmit_t xmit_mock; /* re-used header in various xmit tests */ + unsigned char buf[8]; + unsigned char expected[8]; + const uint8_t v = 200; + unsigned char out = 0; - TESTING("Mirror encode/decode of xmit elements"); + TESTING("Mirror encode/decode of uint8_t data"); - /* Set bogus values matching expected; encoding doesn't care - * Use sequential values to easily generate the expected buffer with a - * for loop. - */ - xmit_mock.magic = 0x00010203; - xmit_mock.version = 0x04; - xmit_mock.session_token = 0x05060708; - xmit_mock.xmit_count = 0x090A0B0C; - xmit_mock.op = 0x0D; + /* Start of buffer uint8_t */ + HDmemset(buf, 0, 8); + HDmemset(expected, 0, 8); + expected[0] = 200; + out = 0; + if (H5FD__mirror_xmit_encode_uint8(buf, v) != 1) + TEST_ERROR; + if (HDmemcmp(buf, expected, 8) != 0) { + PRINT_BUFFER_DIFF(buf, expected, 8); + TEST_ERROR; + } + if (H5FD__mirror_xmit_decode_uint8(&out, buf) != 1) + TEST_ERROR; + if (v != out) + TEST_ERROR; - /* Test uint8_t encode/decode - */ - do { - unsigned char buf[8]; - unsigned char expected[8]; - const uint8_t v = 200; - unsigned char out = 0; + /* Middle of buffer uint8_t */ + HDmemset(buf, 0, 8); + HDmemset(expected, 0, 8); + expected[3] = v; + out = 0; + if (H5FD__mirror_xmit_encode_uint8((buf + 3), v) != 1) + TEST_ERROR; + if (HDmemcmp(buf, expected, 8) != 0) { + PRINT_BUFFER_DIFF(buf, expected, 8); + TEST_ERROR; + } + if (H5FD__mirror_xmit_decode_uint8(&out, (buf + 3)) != 1) + TEST_ERROR; + if (v != out) + TEST_ERROR; - /* Start of buffer uint8_t - */ - mybzero(buf, 8); - 
mybzero(expected, 8); - expected[0] = 200; - out = 0; - if (H5FD__mirror_xmit_encode_uint8(buf, v) != 1) { - TEST_ERROR; - } - if (HDmemcmp(buf, expected, 8) != 0) { - PRINT_BUFFER_DIFF(buf, expected, 8); - TEST_ERROR; - } - if (H5FD__mirror_xmit_decode_uint8(&out, buf) != 1) { - TEST_ERROR; - } - if (v != out) { - TEST_ERROR; - } + /* End of buffer uint8_t */ + HDmemset(buf, 0, 8); + HDmemset(expected, 0, 8); + expected[7] = v; + out = 0; + if (H5FD__mirror_xmit_encode_uint8((buf + 7), v) != 1) + TEST_ERROR; + if (HDmemcmp(buf, expected, 8) != 0) { + PRINT_BUFFER_DIFF(buf, expected, 8); + TEST_ERROR; + } + if (H5FD__mirror_xmit_decode_uint8(&out, (buf + 7)) != 1) + TEST_ERROR; + if (v != out) + TEST_ERROR; - /* Middle of buffer uint8_t - */ - mybzero(buf, 8); - mybzero(expected, 8); - expected[3] = v; - out = 0; - if (H5FD__mirror_xmit_encode_uint8((buf + 3), v) != 1) { - TEST_ERROR; - } - if (HDmemcmp(buf, expected, 8) != 0) { - PRINT_BUFFER_DIFF(buf, expected, 8); - TEST_ERROR; - } - if (H5FD__mirror_xmit_decode_uint8(&out, (buf + 3)) != 1) { - TEST_ERROR; - } - if (v != out) { - TEST_ERROR; - } + PASSED(); + return 0; - /* End of buffer uint8_t - */ - mybzero(buf, 8); - mybzero(expected, 8); - expected[7] = v; - out = 0; - if (H5FD__mirror_xmit_encode_uint8((buf + 7), v) != 1) { - TEST_ERROR; - } - if (HDmemcmp(buf, expected, 8) != 0) { - PRINT_BUFFER_DIFF(buf, expected, 8); - TEST_ERROR; - } - if (H5FD__mirror_xmit_decode_uint8(&out, (buf + 7)) != 1) { - TEST_ERROR; - } - if (v != out) { - TEST_ERROR; - } +error: + return -1; +} - } while (0); /* end uint8_t en/decode */ +/* Test uint16_t encode/decode */ +static int +test_encdec_uint16_t(void) +{ + unsigned char buf[8]; + unsigned char expected[8]; + const uint16_t v = 0x8F02; + uint16_t out = 0; - /* Test uint16_t encode/decode - */ - do { - unsigned char buf[8]; - unsigned char expected[8]; - const uint16_t v = 0x8F02; - uint16_t out = 0; + TESTING("Mirror encode/decode of uint16_t data"); - /* Start of 
buffer uint16_t - */ - mybzero(buf, 8); - mybzero(expected, 8); - expected[0] = 0x8F; - expected[1] = 0x02; - out = 0; - if (H5FD__mirror_xmit_encode_uint16(buf, v) != 2) { - TEST_ERROR; - } - if (HDmemcmp(buf, expected, 8) != 0) { - PRINT_BUFFER_DIFF(buf, expected, 8); - TEST_ERROR; - } - if (H5FD__mirror_xmit_decode_uint16(&out, buf) != 2) { - TEST_ERROR; - } - if (out != v) { - TEST_ERROR; - } + /* Start of buffer uint16_t */ + HDmemset(buf, 0, 8); + HDmemset(expected, 0, 8); + expected[0] = 0x8F; + expected[1] = 0x02; + out = 0; + if (H5FD__mirror_xmit_encode_uint16(buf, v) != 2) + TEST_ERROR; + if (HDmemcmp(buf, expected, 8) != 0) { + PRINT_BUFFER_DIFF(buf, expected, 8); + TEST_ERROR; + } + if (H5FD__mirror_xmit_decode_uint16(&out, buf) != 2) + TEST_ERROR; + if (out != v) + TEST_ERROR; - /* Middle of buffer uint16_t - */ - mybzero(buf, 8); - mybzero(expected, 8); - expected[3] = 0x8F; - expected[4] = 0x02; - out = 0; - if (H5FD__mirror_xmit_encode_uint16((buf + 3), v) != 2) { - TEST_ERROR; - } - if (HDmemcmp(buf, expected, 8) != 0) { - PRINT_BUFFER_DIFF(buf, expected, 8); - TEST_ERROR; - } - if (H5FD__mirror_xmit_decode_uint16(&out, (buf + 3)) != 2) { - TEST_ERROR; - } - if (out != v) { - TEST_ERROR; - } - /* slice */ - if (H5FD__mirror_xmit_decode_uint16(&out, (buf + 4)) != 2) { - TEST_ERROR; - } - if (out != 0x0200) { - TEST_ERROR; - } + /* Middle of buffer uint16_t */ + HDmemset(buf, 0, 8); + HDmemset(expected, 0, 8); + expected[3] = 0x8F; + expected[4] = 0x02; + out = 0; + if (H5FD__mirror_xmit_encode_uint16((buf + 3), v) != 2) + TEST_ERROR; + if (HDmemcmp(buf, expected, 8) != 0) { + PRINT_BUFFER_DIFF(buf, expected, 8); + TEST_ERROR; + } + if (H5FD__mirror_xmit_decode_uint16(&out, (buf + 3)) != 2) + TEST_ERROR; + if (out != v) + TEST_ERROR; - /* End of buffer uint16_t - */ - mybzero(buf, 8); - mybzero(expected, 8); - expected[6] = 0x8F; - expected[7] = 0x02; - out = 0; - if (H5FD__mirror_xmit_encode_uint16((buf + 6), v) != 2) { - TEST_ERROR; - } - if 
(HDmemcmp(buf, expected, 8) != 0) { - PRINT_BUFFER_DIFF(buf, expected, 8); - TEST_ERROR; - } - if (H5FD__mirror_xmit_decode_uint16(&out, (buf + 6)) != 2) { - TEST_ERROR; - } - if (out != v) { - TEST_ERROR; - } + /* slice */ + if (H5FD__mirror_xmit_decode_uint16(&out, (buf + 4)) != 2) + TEST_ERROR; + if (out != 0x0200) + TEST_ERROR; - } while (0); /* end uint16_t en/decode */ + /* End of buffer uint16_t */ + HDmemset(buf, 0, 8); + HDmemset(expected, 0, 8); + expected[6] = 0x8F; + expected[7] = 0x02; + out = 0; + if (H5FD__mirror_xmit_encode_uint16((buf + 6), v) != 2) + TEST_ERROR; + if (HDmemcmp(buf, expected, 8) != 0) { + PRINT_BUFFER_DIFF(buf, expected, 8); + TEST_ERROR; + } + if (H5FD__mirror_xmit_decode_uint16(&out, (buf + 6)) != 2) + TEST_ERROR; + if (out != v) + TEST_ERROR; - /* Test uint32_t encode/decode - */ - do { - unsigned char buf[8]; - unsigned char expected[8]; - const uint32_t v = 0x8F020048; - uint32_t out = 0; + PASSED(); + return 0; - /* Start of buffer uint32_t - */ - mybzero(buf, 8); - mybzero(expected, 8); - expected[0] = 0x8F; - expected[1] = 0x02; - expected[2] = 0x00; - expected[3] = 0x48; - out = 0; - if (H5FD__mirror_xmit_encode_uint32(buf, v) != 4) { - TEST_ERROR; - } - if (HDmemcmp(buf, expected, 8) != 0) { - PRINT_BUFFER_DIFF(buf, expected, 8); - TEST_ERROR; - } - if (H5FD__mirror_xmit_decode_uint32(&out, buf) != 4) { - TEST_ERROR; - } - if (out != v) { - TEST_ERROR; - } +error: + return -1; +} - /* Middle of buffer uint32_t - */ - mybzero(buf, 8); - mybzero(expected, 8); - expected[3] = 0x8F; - expected[4] = 0x02; - expected[5] = 0x00; - expected[6] = 0x48; - out = 0; - if (H5FD__mirror_xmit_encode_uint32((buf + 3), v) != 4) { - TEST_ERROR; - } - if (HDmemcmp(buf, expected, 8) != 0) { - PRINT_BUFFER_DIFF(buf, expected, 8); - TEST_ERROR; - } - if (H5FD__mirror_xmit_decode_uint32(&out, (buf + 3)) != 4) { - TEST_ERROR; - } - if (out != v) { - TEST_ERROR; - } - /* slice */ - if (H5FD__mirror_xmit_decode_uint32(&out, (buf + 4)) != 4) { - 
TEST_ERROR; - } - if (out != 0x02004800) { - TEST_ERROR; - } +/* Test uint32_t encode/decode */ +static int +test_encdec_uint32_t(void) +{ + unsigned char buf[8]; + unsigned char expected[8]; + const uint32_t v = 0x8F020048; + uint32_t out = 0; - /* End of buffer uint32_t - */ - mybzero(buf, 8); - mybzero(expected, 8); - expected[4] = 0x8F; - expected[5] = 0x02; - expected[6] = 0x00; - expected[7] = 0x48; - out = 0; - if (H5FD__mirror_xmit_encode_uint32((buf + 4), v) != 4) { - TEST_ERROR; - } - if (HDmemcmp(buf, expected, 8) != 0) { - PRINT_BUFFER_DIFF(buf, expected, 8); - TEST_ERROR; - } - if (H5FD__mirror_xmit_decode_uint32(&out, (buf + 4)) != 4) { - TEST_ERROR; - } - if (out != v) { - TEST_ERROR; - } + TESTING("Mirror encode/decode of uint32_t data"); - } while (0); /* end uint32_t en/decode */ + /* Start of buffer uint32_t */ + HDmemset(buf, 0, 8); + HDmemset(expected, 0, 8); + expected[0] = 0x8F; + expected[1] = 0x02; + expected[2] = 0x00; + expected[3] = 0x48; + out = 0; + if (H5FD__mirror_xmit_encode_uint32(buf, v) != 4) + TEST_ERROR; + if (HDmemcmp(buf, expected, 8) != 0) { + PRINT_BUFFER_DIFF(buf, expected, 8); + TEST_ERROR; + } + if (H5FD__mirror_xmit_decode_uint32(&out, buf) != 4) + TEST_ERROR; + if (out != v) + TEST_ERROR; - /* Test uint64_t encode/decode - */ - do { - unsigned char buf[16]; - unsigned char expected[16]; - const uint64_t v = 0x90DCBE17939CE4BB; - uint64_t out = 0; + /* Middle of buffer uint32_t */ + HDmemset(buf, 0, 8); + HDmemset(expected, 0, 8); + expected[3] = 0x8F; + expected[4] = 0x02; + expected[5] = 0x00; + expected[6] = 0x48; + out = 0; + if (H5FD__mirror_xmit_encode_uint32((buf + 3), v) != 4) + TEST_ERROR; + if (HDmemcmp(buf, expected, 8) != 0) { + PRINT_BUFFER_DIFF(buf, expected, 8); + TEST_ERROR; + } + if (H5FD__mirror_xmit_decode_uint32(&out, (buf + 3)) != 4) + TEST_ERROR; + if (out != v) + TEST_ERROR; + /* slice */ + if (H5FD__mirror_xmit_decode_uint32(&out, (buf + 4)) != 4) + TEST_ERROR; + if (out != 0x02004800) + 
TEST_ERROR; - /* Start of buffer uint64_t - */ - mybzero(buf, 16); - mybzero(expected, 16); - expected[0] = 0x90; - expected[1] = 0xDC; - expected[2] = 0xBE; - expected[3] = 0x17; - expected[4] = 0x93; - expected[5] = 0x9C; - expected[6] = 0xE4; - expected[7] = 0xBB; - out = 0; - if (H5FD__mirror_xmit_encode_uint64(buf, v) != 8) { - TEST_ERROR; - } - if (HDmemcmp(buf, expected, 16) != 0) { - PRINT_BUFFER_DIFF(buf, expected, 16); - TEST_ERROR; - } - if (H5FD__mirror_xmit_decode_uint64(&out, buf) != 8) { - TEST_ERROR; - } - if (out != v) { - TEST_ERROR; - } + /* End of buffer uint32_t */ + HDmemset(buf, 0, 8); + HDmemset(expected, 0, 8); + expected[4] = 0x8F; + expected[5] = 0x02; + expected[6] = 0x00; + expected[7] = 0x48; + out = 0; + if (H5FD__mirror_xmit_encode_uint32((buf + 4), v) != 4) + TEST_ERROR; + if (HDmemcmp(buf, expected, 8) != 0) { + PRINT_BUFFER_DIFF(buf, expected, 8); + TEST_ERROR; + } + if (H5FD__mirror_xmit_decode_uint32(&out, (buf + 4)) != 4) + TEST_ERROR; + if (out != v) + TEST_ERROR; - /* Middle of buffer uint64_t - */ - mybzero(buf, 16); - mybzero(expected, 16); - expected[3] = 0x90; - expected[4] = 0xDC; - expected[5] = 0xBE; - expected[6] = 0x17; - expected[7] = 0x93; - expected[8] = 0x9C; - expected[9] = 0xE4; - expected[10] = 0xBB; - out = 0; - if (H5FD__mirror_xmit_encode_uint64((buf + 3), v) != 8) { - TEST_ERROR; - } - if (HDmemcmp(buf, expected, 16) != 0) { - PRINT_BUFFER_DIFF(buf, expected, 16); - TEST_ERROR; - } - if (H5FD__mirror_xmit_decode_uint64(&out, (buf + 3)) != 8) { - TEST_ERROR; - } - if (out != v) { - TEST_ERROR; - } - /* slice */ - if (H5FD__mirror_xmit_decode_uint64(&out, (buf + 6)) != 8) { - TEST_ERROR; - } - if (out != 0x17939CE4BB000000) { - TEST_ERROR; - } + PASSED(); + return 0; - /* End of buffer uint64_t - */ - mybzero(buf, 16); - mybzero(expected, 16); - expected[8] = 0x90; - expected[9] = 0xDC; - expected[10] = 0xBE; - expected[11] = 0x17; - expected[12] = 0x93; - expected[13] = 0x9C; - expected[14] = 0xE4; - 
expected[15] = 0xBB; - out = 0; - if (H5FD__mirror_xmit_encode_uint64((buf + 8), v) != 8) { - TEST_ERROR; - } - if (HDmemcmp(buf, expected, 16) != 0) { - PRINT_BUFFER_DIFF(buf, expected, 16); - TEST_ERROR; - } - if (H5FD__mirror_xmit_decode_uint64(&out, (buf + 8)) != 8) { - TEST_ERROR; - } - if (out != v) { - TEST_ERROR; - } +error: + return -1; +} - } while (0); /* end uint64_t en/decode */ +/* Test uint64_t encode/decode */ +static int +test_encdec_uint64_t(void) +{ + unsigned char buf[16]; + unsigned char expected[16]; + const uint64_t v = 0x90DCBE17939CE4BB; + uint64_t out = 0; - /* Test xmit header structure encode/decode - * Write bogus but easily verifiable data to inside a buffer, and compare. - * Then decode the buffer and compare the structure contents. - * Then repeat from a different offset in the buffer and compare. - */ - do { - unsigned char buf[H5FD_MIRROR_XMIT_HEADER_SIZE + 8]; - unsigned char expected[H5FD_MIRROR_XMIT_HEADER_SIZE + 8]; - H5FD_mirror_xmit_t xmit_out; - size_t i = 0; - - /* sanity check */ - if (14 != H5FD_MIRROR_XMIT_HEADER_SIZE) { - FAIL_PUTS_ERROR("Header size definition does not match test\n"); - } + TESTING("Mirror encode/decode of uint64_t data"); - /* Populate the expected buffer; expect end padding of 0xFF - */ - HDmemset(expected, 0xFF, H5FD_MIRROR_XMIT_HEADER_SIZE + 8); - for (i = 0; i < H5FD_MIRROR_XMIT_HEADER_SIZE; i++) { - expected[i + 2] = (unsigned char)i; - } + /* Start of buffer uint64_t */ + HDmemset(buf, 0, 16); + HDmemset(expected, 0, 16); + expected[0] = 0x90; + expected[1] = 0xDC; + expected[2] = 0xBE; + expected[3] = 0x17; + expected[4] = 0x93; + expected[5] = 0x9C; + expected[6] = 0xE4; + expected[7] = 0xBB; + out = 0; + if (H5FD__mirror_xmit_encode_uint64(buf, v) != 8) + TEST_ERROR; + if (HDmemcmp(buf, expected, 16) != 0) { + PRINT_BUFFER_DIFF(buf, expected, 16); + TEST_ERROR; + } + if (H5FD__mirror_xmit_decode_uint64(&out, buf) != 8) + TEST_ERROR; + if (out != v) + TEST_ERROR; - /* Encode, and compare 
buffer contents - * Initial buffer is filled with 0xFF to match expected padding - */ - HDmemset(buf, 0xFF, H5FD_MIRROR_XMIT_HEADER_SIZE + 8); - if (H5FD_mirror_xmit_encode_header((buf + 2), &xmit_mock) != H5FD_MIRROR_XMIT_HEADER_SIZE) { - TEST_ERROR; - } - if (HDmemcmp(buf, expected, H5FD_MIRROR_XMIT_HEADER_SIZE + 8) != 0) { - PRINT_BUFFER_DIFF(buf, expected, H5FD_MIRROR_XMIT_HEADER_SIZE + 8); - TEST_ERROR; - } + /* Middle of buffer uint64_t */ + HDmemset(buf, 0, 16); + HDmemset(expected, 0, 16); + expected[3] = 0x90; + expected[4] = 0xDC; + expected[5] = 0xBE; + expected[6] = 0x17; + expected[7] = 0x93; + expected[8] = 0x9C; + expected[9] = 0xE4; + expected[10] = 0xBB; + out = 0; + if (H5FD__mirror_xmit_encode_uint64((buf + 3), v) != 8) + TEST_ERROR; + if (HDmemcmp(buf, expected, 16) != 0) { + PRINT_BUFFER_DIFF(buf, expected, 16); + TEST_ERROR; + } + if (H5FD__mirror_xmit_decode_uint64(&out, (buf + 3)) != 8) + TEST_ERROR; + if (out != v) + TEST_ERROR; + /* Slice */ + if (H5FD__mirror_xmit_decode_uint64(&out, (buf + 6)) != 8) + TEST_ERROR; + if (out != 0x17939CE4BB000000) + TEST_ERROR; - /* Decode from buffer - */ - if (H5FD_mirror_xmit_decode_header(&xmit_out, (buf + 2)) != H5FD_MIRROR_XMIT_HEADER_SIZE) { - TEST_ERROR; - } - if (xmit_out.magic != xmit_mock.magic) - TEST_ERROR; - if (xmit_out.version != xmit_mock.version) - TEST_ERROR; - if (xmit_out.session_token != xmit_mock.session_token) - TEST_ERROR; - if (xmit_out.xmit_count != xmit_mock.xmit_count) - TEST_ERROR; - if (xmit_out.op != xmit_mock.op) - TEST_ERROR; + /* End of buffer uint64_t */ + HDmemset(buf, 0, 16); + HDmemset(expected, 0, 16); + expected[8] = 0x90; + expected[9] = 0xDC; + expected[10] = 0xBE; + expected[11] = 0x17; + expected[12] = 0x93; + expected[13] = 0x9C; + expected[14] = 0xE4; + expected[15] = 0xBB; + out = 0; + if (H5FD__mirror_xmit_encode_uint64((buf + 8), v) != 8) + TEST_ERROR; + if (HDmemcmp(buf, expected, 16) != 0) { + PRINT_BUFFER_DIFF(buf, expected, 16); + TEST_ERROR; + } + if 
(H5FD__mirror_xmit_decode_uint64(&out, (buf + 8)) != 8) + TEST_ERROR; + if (out != v) + TEST_ERROR; - /* Decode from different offset in buffer - * Observe changes when ingesting the padding - */ - if (H5FD_mirror_xmit_decode_header(&xmit_out, (buf)) != H5FD_MIRROR_XMIT_HEADER_SIZE) { - TEST_ERROR; - } - if (xmit_out.magic != 0xFFFF0001) - TEST_ERROR; - if (xmit_out.version != 0x02) - TEST_ERROR; - if (xmit_out.session_token != 0x03040506) - TEST_ERROR; - if (xmit_out.xmit_count != 0x0708090A) - TEST_ERROR; - if (xmit_out.op != 0x0B) - TEST_ERROR; + PASSED(); + return 0; - } while (0); /* end xmit header en/decode */ +error: + return -1; +} - /* Test xmit set-eoa structure encode/decode - * Write bogus but easily verifiable data to inside a buffer, and compare. - * Then decode the buffer and compare the structure contents. - * Then repeat from a different offset in the buffer and compare. - */ - do { - unsigned char buf[H5FD_MIRROR_XMIT_EOA_SIZE + 8]; - unsigned char expected[H5FD_MIRROR_XMIT_EOA_SIZE + 8]; - H5FD_mirror_xmit_eoa_t xmit_in; - H5FD_mirror_xmit_eoa_t xmit_out; - size_t i = 0; - - /* sanity check */ - if ((14 + 9) != H5FD_MIRROR_XMIT_EOA_SIZE) { - FAIL_PUTS_ERROR("Header size definition does not match test\n"); - } - if (xmit_mock.op != 0x0D) { - FAIL_PUTS_ERROR("shared header structure is not in expected state"); - } +/*****************************/ +/* Other Encode/decode tests */ +/*****************************/ - /* Populate the expected buffer; expect end padding of 0xFF - */ - HDmemset(expected, 0xFF, H5FD_MIRROR_XMIT_EOA_SIZE + 8); - for (i = 0; i < H5FD_MIRROR_XMIT_EOA_SIZE; i++) { - expected[i + 2] = (unsigned char)i; - } +/* Test xmit header structure encode/decode + * Write bogus but easily verifiable data to inside a buffer, and compare. + * Then decode the buffer and compare the structure contents. + * Then repeat from a different offset in the buffer and compare. 
+ */ +static int +test_encdec_header(H5FD_mirror_xmit_t xmit_mock) +{ + unsigned char buf[H5FD_MIRROR_XMIT_HEADER_SIZE + 8]; + unsigned char expected[H5FD_MIRROR_XMIT_HEADER_SIZE + 8]; + H5FD_mirror_xmit_t xmit_out; + size_t i = 0; - /* Set xmit_in - */ - xmit_in.pub = xmit_mock; /* shared/common */ - xmit_in.type = 0x0E; - xmit_in.eoa_addr = 0x0F10111213141516; + TESTING("Mirror encode/decode of xmit header"); - /* Encode, and compare buffer contents - * Initial buffer is filled with 0xFF to match expected padding - */ - HDmemset(buf, 0xFF, H5FD_MIRROR_XMIT_EOA_SIZE + 8); - if (H5FD_mirror_xmit_encode_set_eoa((buf + 2), &xmit_in) != H5FD_MIRROR_XMIT_EOA_SIZE) { - TEST_ERROR; - } - if (HDmemcmp(buf, expected, H5FD_MIRROR_XMIT_EOA_SIZE + 8) != 0) { - PRINT_BUFFER_DIFF(buf, expected, H5FD_MIRROR_XMIT_EOA_SIZE + 8); - TEST_ERROR; - } + /* Sanity check */ + if (14 != H5FD_MIRROR_XMIT_HEADER_SIZE) + FAIL_PUTS_ERROR("Header size definition does not match test\n"); - /* Decode from buffer - */ - if (H5FD_mirror_xmit_decode_set_eoa(&xmit_out, (buf + 2)) != H5FD_MIRROR_XMIT_EOA_SIZE) { - TEST_ERROR; - } - if (xmit_out.pub.magic != xmit_mock.magic) - TEST_ERROR; - if (xmit_out.pub.version != xmit_mock.version) - TEST_ERROR; - if (xmit_out.pub.session_token != xmit_mock.session_token) - TEST_ERROR; - if (xmit_out.pub.xmit_count != xmit_mock.xmit_count) - TEST_ERROR; - if (xmit_out.pub.op != xmit_mock.op) - TEST_ERROR; - if (xmit_out.type != 0x0E) - TEST_ERROR; - if (xmit_out.eoa_addr != 0x0F10111213141516) - TEST_ERROR; + /* Populate the expected buffer; expect end padding of 0xFF */ + HDmemset(expected, 0xFF, H5FD_MIRROR_XMIT_HEADER_SIZE + 8); + for (i = 0; i < H5FD_MIRROR_XMIT_HEADER_SIZE; i++) { + expected[i + 2] = (unsigned char)i; + } - /* Decode from different offset in buffer - * Observe changes when ingesting the padding - */ - if (H5FD_mirror_xmit_decode_set_eoa(&xmit_out, (buf)) != H5FD_MIRROR_XMIT_EOA_SIZE) { - TEST_ERROR; - } - if (xmit_out.pub.magic != 
0xFFFF0001) - TEST_ERROR; - if (xmit_out.pub.version != 0x02) - TEST_ERROR; - if (xmit_out.pub.session_token != 0x03040506) - TEST_ERROR; - if (xmit_out.pub.xmit_count != 0x0708090A) - TEST_ERROR; - if (xmit_out.pub.op != 0x0B) - TEST_ERROR; - if (xmit_out.type != 0x0C) - TEST_ERROR; - if (xmit_out.eoa_addr != 0x0D0E0F1011121314) - TEST_ERROR; + /* Encode, and compare buffer contents + * Initial buffer is filled with 0xFF to match expected padding + */ + HDmemset(buf, 0xFF, H5FD_MIRROR_XMIT_HEADER_SIZE + 8); + if (H5FD_mirror_xmit_encode_header((buf + 2), &xmit_mock) != H5FD_MIRROR_XMIT_HEADER_SIZE) + TEST_ERROR; + if (HDmemcmp(buf, expected, H5FD_MIRROR_XMIT_HEADER_SIZE + 8) != 0) { + PRINT_BUFFER_DIFF(buf, expected, H5FD_MIRROR_XMIT_HEADER_SIZE + 8); + TEST_ERROR; + } - } while (0); /* end xmit set-eoa en/decode */ + /* Decode from buffer */ + if (H5FD_mirror_xmit_decode_header(&xmit_out, (buf + 2)) != H5FD_MIRROR_XMIT_HEADER_SIZE) + TEST_ERROR; + if (xmit_out.magic != xmit_mock.magic) + TEST_ERROR; + if (xmit_out.version != xmit_mock.version) + TEST_ERROR; + if (xmit_out.session_token != xmit_mock.session_token) + TEST_ERROR; + if (xmit_out.xmit_count != xmit_mock.xmit_count) + TEST_ERROR; + if (xmit_out.op != xmit_mock.op) + TEST_ERROR; - /* Test xmit lock structure encode/decode - * Write bogus but easily verifiable data to inside a buffer, and compare. - * Then decode the buffer and compare the structure contents. - * Then repeat from a different offset in the buffer and compare. 
+ /* Decode from different offset in buffer + * Observe changes when ingesting the padding */ - do { - unsigned char buf[H5FD_MIRROR_XMIT_LOCK_SIZE + 8]; - unsigned char expected[H5FD_MIRROR_XMIT_LOCK_SIZE + 8]; - H5FD_mirror_xmit_lock_t xmit_in; - H5FD_mirror_xmit_lock_t xmit_out; - size_t i = 0; - - /* sanity check */ - if ((14 + 8) != H5FD_MIRROR_XMIT_LOCK_SIZE) { - FAIL_PUTS_ERROR("Header size definition does not match test\n"); - } - if (xmit_mock.op != 0x0D) { - FAIL_PUTS_ERROR("shared header structure is not in expected state"); - } + if (H5FD_mirror_xmit_decode_header(&xmit_out, buf) != H5FD_MIRROR_XMIT_HEADER_SIZE) + TEST_ERROR; + if (xmit_out.magic != 0xFFFF0001) + TEST_ERROR; + if (xmit_out.version != 0x02) + TEST_ERROR; + if (xmit_out.session_token != 0x03040506) + TEST_ERROR; + if (xmit_out.xmit_count != 0x0708090A) + TEST_ERROR; + if (xmit_out.op != 0x0B) + TEST_ERROR; - /* Populate the expected buffer; expect end padding of 0xFF - */ - HDmemset(expected, 0xFF, H5FD_MIRROR_XMIT_LOCK_SIZE + 8); - for (i = 0; i < H5FD_MIRROR_XMIT_LOCK_SIZE; i++) { - expected[i + 2] = (unsigned char)i; - } + PASSED(); + return 0; - /* Set xmit_in - */ - xmit_in.pub = xmit_mock; /* shared/common */ - xmit_in.rw = 0x0E0F101112131415; +error: + return -1; +} - /* Encode, and compare buffer contents - * Initial buffer is filled with 0xFF to match expected padding - */ - HDmemset(buf, 0xFF, H5FD_MIRROR_XMIT_LOCK_SIZE + 8); - if (H5FD_mirror_xmit_encode_lock((buf + 2), &xmit_in) != H5FD_MIRROR_XMIT_LOCK_SIZE) { - TEST_ERROR; - } - if (HDmemcmp(buf, expected, H5FD_MIRROR_XMIT_LOCK_SIZE + 8) != 0) { - PRINT_BUFFER_DIFF(buf, expected, H5FD_MIRROR_XMIT_LOCK_SIZE + 8); - TEST_ERROR; - } +/* Test xmit set-eoa structure encode/decode + * Write bogus but easily verifiable data to inside a buffer, and compare. + * Then decode the buffer and compare the structure contents. + * Then repeat from a different offset in the buffer and compare. 
+ */ +static int +test_encdec_set_eoa(H5FD_mirror_xmit_t xmit_mock) +{ + unsigned char buf[H5FD_MIRROR_XMIT_EOA_SIZE + 8]; + unsigned char expected[H5FD_MIRROR_XMIT_EOA_SIZE + 8]; + H5FD_mirror_xmit_eoa_t xmit_in; + H5FD_mirror_xmit_eoa_t xmit_out; + size_t i = 0; + + TESTING("Mirror encode/decode of xmit set-eoa"); + + /* Sanity check */ + if ((14 + 9) != H5FD_MIRROR_XMIT_EOA_SIZE) + FAIL_PUTS_ERROR("Header size definition does not match test\n"); + if (xmit_mock.op != 0x0D) + FAIL_PUTS_ERROR("shared header structure is not in expected state"); + + /* Populate the expected buffer; expect end padding of 0xFF */ + HDmemset(expected, 0xFF, H5FD_MIRROR_XMIT_EOA_SIZE + 8); + for (i = 0; i < H5FD_MIRROR_XMIT_EOA_SIZE; i++) + expected[i + 2] = (unsigned char)i; + + /* Set xmit_in */ + xmit_in.pub = xmit_mock; /* shared/common */ + xmit_in.type = 0x0E; + xmit_in.eoa_addr = 0x0F10111213141516; + + /* Encode, and compare buffer contents + * Initial buffer is filled with 0xFF to match expected padding + */ + HDmemset(buf, 0xFF, H5FD_MIRROR_XMIT_EOA_SIZE + 8); + if (H5FD_mirror_xmit_encode_set_eoa((buf + 2), &xmit_in) != H5FD_MIRROR_XMIT_EOA_SIZE) + TEST_ERROR; + if (HDmemcmp(buf, expected, H5FD_MIRROR_XMIT_EOA_SIZE + 8) != 0) { + PRINT_BUFFER_DIFF(buf, expected, H5FD_MIRROR_XMIT_EOA_SIZE + 8); + TEST_ERROR; + } - /* Decode from buffer - */ - if (H5FD_mirror_xmit_decode_lock(&xmit_out, (buf + 2)) != H5FD_MIRROR_XMIT_LOCK_SIZE) { - TEST_ERROR; - } - if (xmit_out.pub.magic != xmit_mock.magic) - TEST_ERROR; - if (xmit_out.pub.version != xmit_mock.version) - TEST_ERROR; - if (xmit_out.pub.session_token != xmit_mock.session_token) - TEST_ERROR; - if (xmit_out.pub.xmit_count != xmit_mock.xmit_count) - TEST_ERROR; - if (xmit_out.pub.op != xmit_mock.op) - TEST_ERROR; - if (xmit_out.rw != 0x0E0F101112131415) - TEST_ERROR; + /* Decode from buffer */ + if (H5FD_mirror_xmit_decode_set_eoa(&xmit_out, (buf + 2)) != H5FD_MIRROR_XMIT_EOA_SIZE) + TEST_ERROR; + if (xmit_out.pub.magic != 
xmit_mock.magic) + TEST_ERROR; + if (xmit_out.pub.version != xmit_mock.version) + TEST_ERROR; + if (xmit_out.pub.session_token != xmit_mock.session_token) + TEST_ERROR; + if (xmit_out.pub.xmit_count != xmit_mock.xmit_count) + TEST_ERROR; + if (xmit_out.pub.op != xmit_mock.op) + TEST_ERROR; + if (xmit_out.type != 0x0E) + TEST_ERROR; + if (xmit_out.eoa_addr != 0x0F10111213141516) + TEST_ERROR; - /* Decode from different offset in buffer - * Observe changes when ingesting the padding - */ - if (H5FD_mirror_xmit_decode_lock(&xmit_out, (buf)) != H5FD_MIRROR_XMIT_LOCK_SIZE) { - TEST_ERROR; - } - if (xmit_out.pub.magic != 0xFFFF0001) - TEST_ERROR; - if (xmit_out.pub.version != 0x02) - TEST_ERROR; - if (xmit_out.pub.session_token != 0x03040506) - TEST_ERROR; - if (xmit_out.pub.xmit_count != 0x0708090A) - TEST_ERROR; - if (xmit_out.pub.op != 0x0B) - TEST_ERROR; - if (xmit_out.rw != 0x0C0D0E0F10111213) - TEST_ERROR; + /* Decode from different offset in buffer + * Observe changes when ingesting the padding + */ + if (H5FD_mirror_xmit_decode_set_eoa(&xmit_out, buf) != H5FD_MIRROR_XMIT_EOA_SIZE) + TEST_ERROR; + if (xmit_out.pub.magic != 0xFFFF0001) + TEST_ERROR; + if (xmit_out.pub.version != 0x02) + TEST_ERROR; + if (xmit_out.pub.session_token != 0x03040506) + TEST_ERROR; + if (xmit_out.pub.xmit_count != 0x0708090A) + TEST_ERROR; + if (xmit_out.pub.op != 0x0B) + TEST_ERROR; + if (xmit_out.type != 0x0C) + TEST_ERROR; + if (xmit_out.eoa_addr != 0x0D0E0F1011121314) + TEST_ERROR; + + PASSED(); + return 0; - } while (0); /* end xmit lock en/decode */ +error: + return -1; +} - /* Test xmit open structure encode/decode - * Write bogus but easily verifiable data to inside a buffer, and compare. - * Then decode the buffer and compare the structure contents. - * Then repeat from a different offset in the buffer and compare. - * - * Verifies that the first zero character in the filepath will end the - * string, with all following bytes in the encoded buffer being zeroed. 
+/* Test xmit lock structure encode/decode + * Write bogus but easily verifiable data to inside a buffer, and compare. + * Then decode the buffer and compare the structure contents. + * Then repeat from a different offset in the buffer and compare. + */ +static int +test_encdec_lock(H5FD_mirror_xmit_t xmit_mock) +{ + unsigned char buf[H5FD_MIRROR_XMIT_LOCK_SIZE + 8]; + unsigned char expected[H5FD_MIRROR_XMIT_LOCK_SIZE + 8]; + H5FD_mirror_xmit_lock_t xmit_in; + H5FD_mirror_xmit_lock_t xmit_out; + size_t i = 0; + + TESTING("Mirror encode/decode of xmit lock"); + + /* Sanity check */ + if ((14 + 8) != H5FD_MIRROR_XMIT_LOCK_SIZE) + FAIL_PUTS_ERROR("Header size definition does not match test\n"); + if (xmit_mock.op != 0x0D) + FAIL_PUTS_ERROR("shared header structure is not in expected state"); + + /* Populate the expected buffer; expect end padding of 0xFF */ + HDmemset(expected, 0xFF, H5FD_MIRROR_XMIT_LOCK_SIZE + 8); + for (i = 0; i < H5FD_MIRROR_XMIT_LOCK_SIZE; i++) + expected[i + 2] = (unsigned char)i; + + /* Set xmit_in */ + xmit_in.pub = xmit_mock; /* shared/common */ + xmit_in.rw = 0x0E0F101112131415; + + /* Encode, and compare buffer contents + * Initial buffer is filled with 0xFF to match expected padding */ - do { - unsigned char buf[H5FD_MIRROR_XMIT_OPEN_SIZE + 8]; - unsigned char expected[H5FD_MIRROR_XMIT_OPEN_SIZE + 8]; - H5FD_mirror_xmit_open_t xmit_in; - H5FD_mirror_xmit_open_t xmit_out; - size_t i = 0; - - /* sanity check */ - if ((14 + 20 + 4097) != H5FD_MIRROR_XMIT_OPEN_SIZE) { - FAIL_PUTS_ERROR("Header size definition does not match test\n"); - } - if (xmit_mock.op != 0x0D) { - FAIL_PUTS_ERROR("shared header structure is not in expected state"); - } + HDmemset(buf, 0xFF, H5FD_MIRROR_XMIT_LOCK_SIZE + 8); + if (H5FD_mirror_xmit_encode_lock((buf + 2), &xmit_in) != H5FD_MIRROR_XMIT_LOCK_SIZE) + TEST_ERROR; + if (HDmemcmp(buf, expected, H5FD_MIRROR_XMIT_LOCK_SIZE + 8) != 0) { + PRINT_BUFFER_DIFF(buf, expected, H5FD_MIRROR_XMIT_LOCK_SIZE + 8); + TEST_ERROR; 
+ } - /* Populate the expected buffer; expect end padding of 0xFF - */ - HDmemset(expected, 0xFF, H5FD_MIRROR_XMIT_OPEN_SIZE + 8); - for (i = 0; i < H5FD_MIRROR_XMIT_OPEN_SIZE; i++) { - /* 0x100 is "zero" in a byte, so encode will treat it as a NULL- - * terminator in the filepath string. Expect all zeroes following. - */ - expected[i + 2] = (i > 0xFF) ? 0 : (unsigned char)i; - } + /* Decode from buffer */ + if (H5FD_mirror_xmit_decode_lock(&xmit_out, (buf + 2)) != H5FD_MIRROR_XMIT_LOCK_SIZE) + TEST_ERROR; + if (xmit_out.pub.magic != xmit_mock.magic) + TEST_ERROR; + if (xmit_out.pub.version != xmit_mock.version) + TEST_ERROR; + if (xmit_out.pub.session_token != xmit_mock.session_token) + TEST_ERROR; + if (xmit_out.pub.xmit_count != xmit_mock.xmit_count) + TEST_ERROR; + if (xmit_out.pub.op != xmit_mock.op) + TEST_ERROR; + if (xmit_out.rw != 0x0E0F101112131415) + TEST_ERROR; + + /* Decode from different offset in buffer + * Observe changes when ingesting the padding + */ + if (H5FD_mirror_xmit_decode_lock(&xmit_out, buf) != H5FD_MIRROR_XMIT_LOCK_SIZE) + TEST_ERROR; + if (xmit_out.pub.magic != 0xFFFF0001) + TEST_ERROR; + if (xmit_out.pub.version != 0x02) + TEST_ERROR; + if (xmit_out.pub.session_token != 0x03040506) + TEST_ERROR; + if (xmit_out.pub.xmit_count != 0x0708090A) + TEST_ERROR; + if (xmit_out.pub.op != 0x0B) + TEST_ERROR; + if (xmit_out.rw != 0x0C0D0E0F10111213) + TEST_ERROR; + + PASSED(); + return 0; - /* Set xmit_in - */ - xmit_in.pub = xmit_mock; /* shared/common */ - xmit_in.flags = 0x0E0F1011; - xmit_in.maxaddr = 0x1213141516171819; - xmit_in.size_t_blob = 0x1A1B1C1D1E1F2021; - for (i = 0x22; i < H5FD_MIRROR_XMIT_FILEPATH_MAX + 0x22; i++) { - /* nonzero values repeat after 0x100, but will not be encoded */ - xmit_in.filename[i - 0x22] = (char)(i % 0x100); - } - xmit_in.filename[H5FD_MIRROR_XMIT_FILEPATH_MAX - 1] = 0; +error: + return -1; +} - /* Encode, and compare buffer contents - * Initial buffer is filled with 0xFF to match expected padding +/* Test 
xmit open structure encode/decode + * Write bogus but easily verifiable data to inside a buffer, and compare. + * Then decode the buffer and compare the structure contents. + * Then repeat from a different offset in the buffer and compare. + * + * Verifies that the first zero character in the filepath will end the + * string, with all following bytes in the encoded buffer being zeroed. + */ +static int +test_encdec_open(H5FD_mirror_xmit_t xmit_mock) +{ + unsigned char *buf = NULL; + unsigned char *expected = NULL; + H5FD_mirror_xmit_open_t *xmit_in = NULL; + H5FD_mirror_xmit_open_t *xmit_out = NULL; + + TESTING("Mirror encode/decode of xmit open"); + + /* Sanity check */ + if ((14 + 20 + 4097) != H5FD_MIRROR_XMIT_OPEN_SIZE) + FAIL_PUTS_ERROR("Header size definition does not match test\n"); + if (xmit_mock.op != 0x0D) + FAIL_PUTS_ERROR("shared header structure is not in expected state"); + + /* Allocate memory */ + if (NULL == (buf = HDmalloc((H5FD_MIRROR_XMIT_OPEN_SIZE + 8) * sizeof(unsigned char)))) + FAIL_PUTS_ERROR("Unable to allocate memory for buf"); + if (NULL == (expected = HDmalloc((H5FD_MIRROR_XMIT_OPEN_SIZE + 8) * sizeof(unsigned char)))) + FAIL_PUTS_ERROR("Unable to allocate memory for expected"); + if (NULL == (xmit_in = HDmalloc(sizeof(H5FD_mirror_xmit_open_t)))) + FAIL_PUTS_ERROR("Unable to allocate memory for xmit_in"); + if (NULL == (xmit_out = HDmalloc(sizeof(H5FD_mirror_xmit_open_t)))) + FAIL_PUTS_ERROR("Unable to allocate memory for xmit_out"); + + /* Populate the expected buffer; expect end padding of 0xFF */ + HDmemset(expected, 0xFF, H5FD_MIRROR_XMIT_OPEN_SIZE + 8); + for (size_t i = 0; i < H5FD_MIRROR_XMIT_OPEN_SIZE; i++) { + /* 0x100 is "zero" in a byte, so encode will treat it as a NULL- + * terminator in the filepath string. Expect all zeroes following. 
*/ - HDmemset(buf, 0xFF, H5FD_MIRROR_XMIT_OPEN_SIZE + 8); - if (H5FD_mirror_xmit_encode_open((buf + 2), &xmit_in) != H5FD_MIRROR_XMIT_OPEN_SIZE) { - TEST_ERROR; - } - if (HDmemcmp(buf, expected, H5FD_MIRROR_XMIT_OPEN_SIZE + 8) != 0) { - PRINT_BUFFER_DIFF(buf, expected, H5FD_MIRROR_XMIT_OPEN_SIZE + 8); - TEST_ERROR; - } + expected[i + 2] = (i > 0xFF) ? 0 : (unsigned char)i; + } - /* Decode from buffer - */ - if (H5FD_mirror_xmit_decode_open(&xmit_out, (buf + 2)) != H5FD_MIRROR_XMIT_OPEN_SIZE) { - TEST_ERROR; - } - if (xmit_out.pub.magic != xmit_mock.magic) - TEST_ERROR; - if (xmit_out.pub.version != xmit_mock.version) - TEST_ERROR; - if (xmit_out.pub.session_token != xmit_mock.session_token) - TEST_ERROR; - if (xmit_out.pub.xmit_count != xmit_mock.xmit_count) - TEST_ERROR; - if (xmit_out.pub.op != xmit_mock.op) - TEST_ERROR; - if (xmit_out.flags != xmit_in.flags) - TEST_ERROR; - if (xmit_out.maxaddr != xmit_in.maxaddr) - TEST_ERROR; - if (xmit_out.size_t_blob != xmit_in.size_t_blob) - TEST_ERROR; - if (HDstrncmp(xmit_out.filename, xmit_in.filename, H5FD_MIRROR_XMIT_FILEPATH_MAX) != 0) { - PRINT_BUFFER_DIFF(xmit_out.filename, xmit_in.filename, H5FD_MIRROR_XMIT_FILEPATH_MAX); - TEST_ERROR; - } + /* Set xmit_in */ + xmit_in->pub = xmit_mock; /* shared/common */ + xmit_in->flags = 0x0E0F1011; + xmit_in->maxaddr = 0x1213141516171819; + xmit_in->size_t_blob = 0x1A1B1C1D1E1F2021; + for (size_t i = 0x22; i < H5FD_MIRROR_XMIT_FILEPATH_MAX + 0x22; i++) { + /* Non-zero values repeat after 0x100, but will not be encoded */ + xmit_in->filename[i - 0x22] = (char)(i % 0x100); + } + xmit_in->filename[H5FD_MIRROR_XMIT_FILEPATH_MAX - 1] = 0; - /* Decode from different offset in buffer - * Observe changes when ingesting the padding - */ - if (H5FD_mirror_xmit_decode_open(&xmit_out, (buf)) != H5FD_MIRROR_XMIT_OPEN_SIZE) { - TEST_ERROR; - } - if (xmit_out.pub.magic != 0xFFFF0001) - TEST_ERROR; - if (xmit_out.pub.version != 0x02) - TEST_ERROR; - if (xmit_out.pub.session_token != 
0x03040506) - TEST_ERROR; - if (xmit_out.pub.xmit_count != 0x0708090A) - TEST_ERROR; - if (xmit_out.pub.op != 0x0B) - TEST_ERROR; - if (xmit_out.flags != 0x0C0D0E0F) - TEST_ERROR; - if (xmit_out.maxaddr != 0x1011121314151617) - TEST_ERROR; - if (xmit_out.size_t_blob != 0x18191A1B1C1D1E1F) - TEST_ERROR; - /* update expected "filepath" in structure */ - for (i = 0x20; i < H5FD_MIRROR_XMIT_FILEPATH_MAX + 0x20; i++) { - xmit_in.filename[i - 0x20] = (i > 0xFF) ? 0 : (char)i; - } - if (HDstrncmp(xmit_out.filename, xmit_in.filename, H5FD_MIRROR_XMIT_FILEPATH_MAX) != 0) { - PRINT_BUFFER_DIFF(xmit_out.filename, xmit_in.filename, H5FD_MIRROR_XMIT_FILEPATH_MAX); - TEST_ERROR; - } + /* Encode, and compare buffer contents + * Initial buffer is filled with 0xFF to match expected padding + */ + HDmemset(buf, 0xFF, H5FD_MIRROR_XMIT_OPEN_SIZE + 8); + if (H5FD_mirror_xmit_encode_open((buf + 2), xmit_in) != H5FD_MIRROR_XMIT_OPEN_SIZE) + TEST_ERROR; + if (HDmemcmp(buf, expected, H5FD_MIRROR_XMIT_OPEN_SIZE + 8) != 0) { + PRINT_BUFFER_DIFF(buf, expected, H5FD_MIRROR_XMIT_OPEN_SIZE + 8); + TEST_ERROR; + } - } while (0); /* end xmit open en/decode */ + /* Decode from buffer */ + if (H5FD_mirror_xmit_decode_open(xmit_out, (buf + 2)) != H5FD_MIRROR_XMIT_OPEN_SIZE) + TEST_ERROR; + if (xmit_out->pub.magic != xmit_mock.magic) + TEST_ERROR; + if (xmit_out->pub.version != xmit_mock.version) + TEST_ERROR; + if (xmit_out->pub.session_token != xmit_mock.session_token) + TEST_ERROR; + if (xmit_out->pub.xmit_count != xmit_mock.xmit_count) + TEST_ERROR; + if (xmit_out->pub.op != xmit_mock.op) + TEST_ERROR; + if (xmit_out->flags != xmit_in->flags) + TEST_ERROR; + if (xmit_out->maxaddr != xmit_in->maxaddr) + TEST_ERROR; + if (xmit_out->size_t_blob != xmit_in->size_t_blob) + TEST_ERROR; + if (HDstrncmp(xmit_out->filename, xmit_in->filename, H5FD_MIRROR_XMIT_FILEPATH_MAX) != 0) { + PRINT_BUFFER_DIFF(xmit_out->filename, xmit_in->filename, H5FD_MIRROR_XMIT_FILEPATH_MAX); + TEST_ERROR; + } - /* Test xmit 
reply structure encode/decode - * Write bogus but easily verifiable data to inside a buffer, and compare. - * Then decode the buffer and compare the structure contents. - * Then repeat from a different offset in the buffer and compare. - * - * Verifies that the first zero character in the filepath will end the - * string, with all following bytes in the encoded buffer being zeroed. + /* Decode from different offset in buffer + * Observe changes when ingesting the padding */ - do { - unsigned char buf[H5FD_MIRROR_XMIT_REPLY_SIZE + 8]; - unsigned char expected[H5FD_MIRROR_XMIT_REPLY_SIZE + 8]; - H5FD_mirror_xmit_reply_t xmit_in; - H5FD_mirror_xmit_reply_t xmit_out; - size_t i = 0; - - /* sanity check */ - if ((14 + 4 + 256) != H5FD_MIRROR_XMIT_REPLY_SIZE) { - FAIL_PUTS_ERROR("Header size definition does not match test\n"); - } - if (xmit_mock.op != 0x0D) { - FAIL_PUTS_ERROR("shared header structure is not in expected state"); - } + if (H5FD_mirror_xmit_decode_open(xmit_out, buf) != H5FD_MIRROR_XMIT_OPEN_SIZE) + TEST_ERROR; + if (xmit_out->pub.magic != 0xFFFF0001) + TEST_ERROR; + if (xmit_out->pub.version != 0x02) + TEST_ERROR; + if (xmit_out->pub.session_token != 0x03040506) + TEST_ERROR; + if (xmit_out->pub.xmit_count != 0x0708090A) + TEST_ERROR; + if (xmit_out->pub.op != 0x0B) + TEST_ERROR; + if (xmit_out->flags != 0x0C0D0E0F) + TEST_ERROR; + if (xmit_out->maxaddr != 0x1011121314151617) + TEST_ERROR; + if (xmit_out->size_t_blob != 0x18191A1B1C1D1E1F) + TEST_ERROR; + /* Update expected "filepath" in structure */ + for (size_t i = 0x20; i < H5FD_MIRROR_XMIT_FILEPATH_MAX + 0x20; i++) + xmit_in->filename[i - 0x20] = (i > 0xFF) ? 
0 : (char)i; + if (HDstrncmp(xmit_out->filename, xmit_in->filename, H5FD_MIRROR_XMIT_FILEPATH_MAX) != 0) { + PRINT_BUFFER_DIFF(xmit_out->filename, xmit_in->filename, H5FD_MIRROR_XMIT_FILEPATH_MAX); + TEST_ERROR; + } - /* Populate the expected buffer; expect end padding of 0xFF - */ - HDmemset(expected, 0xFF, H5FD_MIRROR_XMIT_REPLY_SIZE + 8); - for (i = 0; i < H5FD_MIRROR_XMIT_REPLY_SIZE; i++) { - /* 0x100 is "zero" in a byte, so encode will treat it as a NULL- - * terminator in the filepath string. Expect all zeroes following. - */ - expected[i + 2] = (i > 0xFF) ? 0 : (unsigned char)i; - } + HDfree(buf); + HDfree(expected); + HDfree(xmit_in); + HDfree(xmit_out); - /* Set xmit_in - */ - xmit_in.pub = xmit_mock; /* shared/common */ - xmit_in.status = 0x0E0F1011; - for (i = 0x12; i < H5FD_MIRROR_STATUS_MESSAGE_MAX + 0x12; i++) { - /* nonzero values repeat after 0x100, but will not be encoded */ - xmit_in.message[i - 0x12] = (char)(i % 0x100); - } - xmit_in.message[H5FD_MIRROR_STATUS_MESSAGE_MAX - 1] = 0; + PASSED(); + return 0; - /* Encode, and compare buffer contents - * Initial buffer is filled with 0xFF to match expected padding - */ - HDmemset(buf, 0xFF, H5FD_MIRROR_XMIT_REPLY_SIZE + 8); - if (H5FD_mirror_xmit_encode_reply((buf + 2), &xmit_in) != H5FD_MIRROR_XMIT_REPLY_SIZE) { - TEST_ERROR; - } - if (HDmemcmp(buf, expected, H5FD_MIRROR_XMIT_REPLY_SIZE + 8) != 0) { - PRINT_BUFFER_DIFF(buf, expected, H5FD_MIRROR_XMIT_REPLY_SIZE + 8); - TEST_ERROR; - } +error: + HDfree(buf); + HDfree(expected); + HDfree(xmit_in); + HDfree(xmit_out); - /* Decode from buffer - */ - if (H5FD_mirror_xmit_decode_reply(&xmit_out, (buf + 2)) != H5FD_MIRROR_XMIT_REPLY_SIZE) { - TEST_ERROR; - } - if (xmit_out.pub.magic != xmit_mock.magic) - TEST_ERROR; - if (xmit_out.pub.version != xmit_mock.version) - TEST_ERROR; - if (xmit_out.pub.session_token != xmit_mock.session_token) - TEST_ERROR; - if (xmit_out.pub.xmit_count != xmit_mock.xmit_count) - TEST_ERROR; - if (xmit_out.pub.op != 
xmit_mock.op) - TEST_ERROR; - if (xmit_out.status != xmit_in.status) - TEST_ERROR; - if (HDstrncmp(xmit_out.message, xmit_in.message, H5FD_MIRROR_STATUS_MESSAGE_MAX) != 0) { - PRINT_BUFFER_DIFF(xmit_out.message, xmit_in.message, H5FD_MIRROR_STATUS_MESSAGE_MAX); - TEST_ERROR; - } + return -1; +} - /* Decode from different offset in buffer - * Observe changes when ingesting the padding +/* Test xmit reply structure encode/decode + * Write bogus but easily verifiable data to inside a buffer, and compare. + * Then decode the buffer and compare the structure contents. + * Then repeat from a different offset in the buffer and compare. + * + * Verifies that the first zero character in the filepath will end the + * string, with all following bytes in the encoded buffer being zeroed. + */ +static int +test_encdec_reply(H5FD_mirror_xmit_t xmit_mock) +{ + unsigned char buf[H5FD_MIRROR_XMIT_REPLY_SIZE + 8]; + unsigned char expected[H5FD_MIRROR_XMIT_REPLY_SIZE + 8]; + H5FD_mirror_xmit_reply_t xmit_in; + H5FD_mirror_xmit_reply_t xmit_out; + size_t i = 0; + + TESTING("Mirror encode/decode of xmit reply"); + + /* Sanity check */ + if ((14 + 4 + 256) != H5FD_MIRROR_XMIT_REPLY_SIZE) + FAIL_PUTS_ERROR("Header size definition does not match test\n"); + if (xmit_mock.op != 0x0D) + FAIL_PUTS_ERROR("shared header structure is not in expected state"); + + /* Populate the expected buffer; expect end padding of 0xFF */ + HDmemset(expected, 0xFF, H5FD_MIRROR_XMIT_REPLY_SIZE + 8); + for (i = 0; i < H5FD_MIRROR_XMIT_REPLY_SIZE; i++) { + /* 0x100 is "zero" in a byte, so encode will treat it as a NULL- + * terminator in the filepath string. Expect all zeroes following. 
*/ - if (H5FD_mirror_xmit_decode_reply(&xmit_out, (buf)) != H5FD_MIRROR_XMIT_REPLY_SIZE) { - TEST_ERROR; - } - if (xmit_out.pub.magic != 0xFFFF0001) - TEST_ERROR; - if (xmit_out.pub.version != 0x02) - TEST_ERROR; - if (xmit_out.pub.session_token != 0x03040506) - TEST_ERROR; - if (xmit_out.pub.xmit_count != 0x0708090A) - TEST_ERROR; - if (xmit_out.pub.op != 0x0B) - TEST_ERROR; - if (xmit_out.status != 0x0C0D0E0F) - TEST_ERROR; - /* update expected "message" in structure */ - for (i = 0x10; i < H5FD_MIRROR_STATUS_MESSAGE_MAX + 0x10; i++) { - xmit_in.message[i - 0x10] = (i > 0xFF) ? 0 : (char)i; - } - if (HDstrncmp(xmit_out.message, xmit_in.message, H5FD_MIRROR_STATUS_MESSAGE_MAX) != 0) { - PRINT_BUFFER_DIFF(xmit_out.message, xmit_in.message, H5FD_MIRROR_STATUS_MESSAGE_MAX); - TEST_ERROR; - } + expected[i + 2] = (i > 0xFF) ? 0 : (unsigned char)i; + } - } while (0); /* end xmit reply en/decode */ + /* Set xmit_in */ + xmit_in.pub = xmit_mock; /* shared/common */ + xmit_in.status = 0x0E0F1011; + for (i = 0x12; i < H5FD_MIRROR_STATUS_MESSAGE_MAX + 0x12; i++) { + /* Non-zero values repeat after 0x100, but will not be encoded */ + xmit_in.message[i - 0x12] = (char)(i % 0x100); + } + xmit_in.message[H5FD_MIRROR_STATUS_MESSAGE_MAX - 1] = 0; - /* Test xmit write structure encode/decode - * Write bogus but easily verifiable data to inside a buffer, and compare. - * Then decode the buffer and compare the structure contents. - * Then repeat from a different offset in the buffer and compare. 
+ /* Encode, and compare buffer contents + * Initial buffer is filled with 0xFF to match expected padding */ - do { - unsigned char buf[H5FD_MIRROR_XMIT_WRITE_SIZE + 8]; - unsigned char expected[H5FD_MIRROR_XMIT_WRITE_SIZE + 8]; - H5FD_mirror_xmit_write_t xmit_in; - H5FD_mirror_xmit_write_t xmit_out; - size_t i = 0; - - /* sanity check */ - if ((14 + 17) != H5FD_MIRROR_XMIT_WRITE_SIZE) { - FAIL_PUTS_ERROR("Header size definition does not match test\n"); - } - if (xmit_mock.op != 0x0D) { - FAIL_PUTS_ERROR("shared header structure is not in expected state"); - } + HDmemset(buf, 0xFF, H5FD_MIRROR_XMIT_REPLY_SIZE + 8); + if (H5FD_mirror_xmit_encode_reply((buf + 2), &xmit_in) != H5FD_MIRROR_XMIT_REPLY_SIZE) + TEST_ERROR; + if (HDmemcmp(buf, expected, H5FD_MIRROR_XMIT_REPLY_SIZE + 8) != 0) { + PRINT_BUFFER_DIFF(buf, expected, H5FD_MIRROR_XMIT_REPLY_SIZE + 8); + TEST_ERROR; + } - /* Populate the expected buffer; expect end padding of 0xFF - */ - HDmemset(expected, 0xFF, H5FD_MIRROR_XMIT_WRITE_SIZE + 8); - for (i = 0; i < H5FD_MIRROR_XMIT_WRITE_SIZE; i++) { - expected[i + 2] = (unsigned char)i; - } + /* Decode from buffer */ + if (H5FD_mirror_xmit_decode_reply(&xmit_out, (buf + 2)) != H5FD_MIRROR_XMIT_REPLY_SIZE) + TEST_ERROR; + if (xmit_out.pub.magic != xmit_mock.magic) + TEST_ERROR; + if (xmit_out.pub.version != xmit_mock.version) + TEST_ERROR; + if (xmit_out.pub.session_token != xmit_mock.session_token) + TEST_ERROR; + if (xmit_out.pub.xmit_count != xmit_mock.xmit_count) + TEST_ERROR; + if (xmit_out.pub.op != xmit_mock.op) + TEST_ERROR; + if (xmit_out.status != xmit_in.status) + TEST_ERROR; + if (HDstrncmp(xmit_out.message, xmit_in.message, H5FD_MIRROR_STATUS_MESSAGE_MAX) != 0) { + PRINT_BUFFER_DIFF(xmit_out.message, xmit_in.message, H5FD_MIRROR_STATUS_MESSAGE_MAX); + TEST_ERROR; + } - /* Set xmit_in - */ - xmit_in.pub = xmit_mock; /* shared/common */ - xmit_in.type = 0x0E; - xmit_in.offset = 0x0F10111213141516; - xmit_in.size = 0x1718191A1B1C1D1E; + /* Decode from 
different offset in buffer + * Observe changes when ingesting the padding + */ + if (H5FD_mirror_xmit_decode_reply(&xmit_out, buf) != H5FD_MIRROR_XMIT_REPLY_SIZE) + TEST_ERROR; + if (xmit_out.pub.magic != 0xFFFF0001) + TEST_ERROR; + if (xmit_out.pub.version != 0x02) + TEST_ERROR; + if (xmit_out.pub.session_token != 0x03040506) + TEST_ERROR; + if (xmit_out.pub.xmit_count != 0x0708090A) + TEST_ERROR; + if (xmit_out.pub.op != 0x0B) + TEST_ERROR; + if (xmit_out.status != 0x0C0D0E0F) + TEST_ERROR; + /* Update expected "message" in structure */ + for (i = 0x10; i < H5FD_MIRROR_STATUS_MESSAGE_MAX + 0x10; i++) + xmit_in.message[i - 0x10] = (i > 0xFF) ? 0 : (char)i; + if (HDstrncmp(xmit_out.message, xmit_in.message, H5FD_MIRROR_STATUS_MESSAGE_MAX) != 0) { + PRINT_BUFFER_DIFF(xmit_out.message, xmit_in.message, H5FD_MIRROR_STATUS_MESSAGE_MAX); + TEST_ERROR; + } - /* Encode, and compare buffer contents - * Initial buffer is filled with 0xFF to match expected padding - */ - HDmemset(buf, 0xFF, H5FD_MIRROR_XMIT_WRITE_SIZE + 8); - if (H5FD_mirror_xmit_encode_write((buf + 2), &xmit_in) != H5FD_MIRROR_XMIT_WRITE_SIZE) { - TEST_ERROR; - } - if (HDmemcmp(buf, expected, H5FD_MIRROR_XMIT_WRITE_SIZE + 8) != 0) { - PRINT_BUFFER_DIFF(buf, expected, H5FD_MIRROR_XMIT_WRITE_SIZE + 8); - TEST_ERROR; - } + PASSED(); + return 0; - /* Decode from buffer - */ - if (H5FD_mirror_xmit_decode_write(&xmit_out, (buf + 2)) != H5FD_MIRROR_XMIT_WRITE_SIZE) { - TEST_ERROR; - } - if (xmit_out.pub.magic != xmit_mock.magic) - TEST_ERROR; - if (xmit_out.pub.version != xmit_mock.version) - TEST_ERROR; - if (xmit_out.pub.session_token != xmit_mock.session_token) - TEST_ERROR; - if (xmit_out.pub.xmit_count != xmit_mock.xmit_count) - TEST_ERROR; - if (xmit_out.pub.op != xmit_mock.op) - TEST_ERROR; - if (xmit_out.type != 0x0E) - TEST_ERROR; - if (xmit_out.offset != 0x0F10111213141516) - TEST_ERROR; - if (xmit_out.size != 0x1718191A1B1C1D1E) - TEST_ERROR; +error: + return -1; +} - /* Decode from different offset in 
buffer - * Observe changes when ingesting the padding - */ - if (H5FD_mirror_xmit_decode_write(&xmit_out, (buf)) != H5FD_MIRROR_XMIT_WRITE_SIZE) { - TEST_ERROR; - } - if (xmit_out.pub.magic != 0xFFFF0001) - TEST_ERROR; - if (xmit_out.pub.version != 0x02) - TEST_ERROR; - if (xmit_out.pub.session_token != 0x03040506) - TEST_ERROR; - if (xmit_out.pub.xmit_count != 0x0708090A) - TEST_ERROR; - if (xmit_out.pub.op != 0x0B) - TEST_ERROR; - if (xmit_out.type != 0x0C) - TEST_ERROR; - if (xmit_out.offset != 0x0D0E0F1011121314) - TEST_ERROR; - if (xmit_out.size != 0x15161718191A1B1C) - TEST_ERROR; +/* Test xmit write structure encode/decode + * Write bogus but easily verifiable data to inside a buffer, and compare. + * Then decode the buffer and compare the structure contents. + * Then repeat from a different offset in the buffer and compare. + */ +static int +test_encdec_write(H5FD_mirror_xmit_t xmit_mock) +{ + unsigned char buf[H5FD_MIRROR_XMIT_WRITE_SIZE + 8]; + unsigned char expected[H5FD_MIRROR_XMIT_WRITE_SIZE + 8]; + H5FD_mirror_xmit_write_t xmit_in; + H5FD_mirror_xmit_write_t xmit_out; + size_t i = 0; + + TESTING("Mirror encode/decode of xmit write"); + + /* Sanity check */ + if ((14 + 17) != H5FD_MIRROR_XMIT_WRITE_SIZE) + FAIL_PUTS_ERROR("Header size definition does not match test\n"); + if (xmit_mock.op != 0x0D) + FAIL_PUTS_ERROR("shared header structure is not in expected state"); + + /* Populate the expected buffer; expect end padding of 0xFF */ + HDmemset(expected, 0xFF, H5FD_MIRROR_XMIT_WRITE_SIZE + 8); + for (i = 0; i < H5FD_MIRROR_XMIT_WRITE_SIZE; i++) + expected[i + 2] = (unsigned char)i; + + /* Set xmit_in */ + xmit_in.pub = xmit_mock; /* shared/common */ + xmit_in.type = 0x0E; + xmit_in.offset = 0x0F10111213141516; + xmit_in.size = 0x1718191A1B1C1D1E; + + /* Encode, and compare buffer contents + * Initial buffer is filled with 0xFF to match expected padding + */ + HDmemset(buf, 0xFF, H5FD_MIRROR_XMIT_WRITE_SIZE + 8); + if (H5FD_mirror_xmit_encode_write((buf 
+ 2), &xmit_in) != H5FD_MIRROR_XMIT_WRITE_SIZE) + TEST_ERROR; + if (HDmemcmp(buf, expected, H5FD_MIRROR_XMIT_WRITE_SIZE + 8) != 0) { + PRINT_BUFFER_DIFF(buf, expected, H5FD_MIRROR_XMIT_WRITE_SIZE + 8); + TEST_ERROR; + } + + /* Decode from buffer */ + if (H5FD_mirror_xmit_decode_write(&xmit_out, (buf + 2)) != H5FD_MIRROR_XMIT_WRITE_SIZE) + TEST_ERROR; + if (xmit_out.pub.magic != xmit_mock.magic) + TEST_ERROR; + if (xmit_out.pub.version != xmit_mock.version) + TEST_ERROR; + if (xmit_out.pub.session_token != xmit_mock.session_token) + TEST_ERROR; + if (xmit_out.pub.xmit_count != xmit_mock.xmit_count) + TEST_ERROR; + if (xmit_out.pub.op != xmit_mock.op) + TEST_ERROR; + if (xmit_out.type != 0x0E) + TEST_ERROR; + if (xmit_out.offset != 0x0F10111213141516) + TEST_ERROR; + if (xmit_out.size != 0x1718191A1B1C1D1E) + TEST_ERROR; - } while (0); /* end xmit write en/decode */ + /* Decode from different offset in buffer + * Observe changes when ingesting the padding + */ + if (H5FD_mirror_xmit_decode_write(&xmit_out, buf) != H5FD_MIRROR_XMIT_WRITE_SIZE) + TEST_ERROR; + if (xmit_out.pub.magic != 0xFFFF0001) + TEST_ERROR; + if (xmit_out.pub.version != 0x02) + TEST_ERROR; + if (xmit_out.pub.session_token != 0x03040506) + TEST_ERROR; + if (xmit_out.pub.xmit_count != 0x0708090A) + TEST_ERROR; + if (xmit_out.pub.op != 0x0B) + TEST_ERROR; + if (xmit_out.type != 0x0C) + TEST_ERROR; + if (xmit_out.offset != 0x0D0E0F1011121314) + TEST_ERROR; + if (xmit_out.size != 0x15161718191A1B1C) + TEST_ERROR; PASSED(); return 0; error: return -1; -} /* end test_xmit_encode_decode */ +} /* --------------------------------------------------------------------------- * Function: create_mirroring_split_fapl @@ -1240,100 +1199,84 @@ test_xmit_encode_decode(void) * Return: Success: HID of the top-level (splitter) FAPL, a non-negative * value. * Failure: H5I_INVALID_HID, a negative value. 
- * - * Programmer: Jacob Smith - * 2019 * --------------------------------------------------------------------------- */ static hid_t -create_mirroring_split_fapl(const char *_basename, struct mirrortest_filenames *names) +create_mirroring_split_fapl(const char *basename, struct mirrortest_filenames *names, + const struct mt_opts *opts) { - H5FD_splitter_vfd_config_t splitter_config; - H5FD_mirror_fapl_t mirror_conf; - hid_t ret_value = H5I_INVALID_HID; + H5FD_splitter_vfd_config_t *splitter_config = NULL; + H5FD_mirror_fapl_t mirror_conf; + hid_t ret_value = H5I_INVALID_HID; - if (_basename == NULL || *_basename == '\0') { + if (NULL == (splitter_config = HDmalloc(sizeof(H5FD_splitter_vfd_config_t)))) TEST_ERROR; - } - - splitter_config.magic = H5FD_SPLITTER_MAGIC; - splitter_config.version = H5FD_CURR_SPLITTER_VFD_CONFIG_VERSION; - splitter_config.ignore_wo_errs = FALSE; - /* Create Splitter R/W channel driver (sec2) + /* Initialize the fapls, too, so the library doesn't try to + * close non-existing IDs on errors */ - splitter_config.rw_fapl_id = H5Pcreate(H5P_FILE_ACCESS); - if (H5I_INVALID_HID == splitter_config.rw_fapl_id) { + splitter_config->magic = H5FD_SPLITTER_MAGIC; + splitter_config->version = H5FD_CURR_SPLITTER_VFD_CONFIG_VERSION; + splitter_config->ignore_wo_errs = FALSE; + splitter_config->rw_fapl_id = H5I_INVALID_HID; + splitter_config->wo_fapl_id = H5I_INVALID_HID; + + if (basename == NULL || *basename == '\0') TEST_ERROR; - } - if (H5Pset_fapl_sec2(splitter_config.rw_fapl_id) == FAIL) { + + /* Create Splitter R/W channel driver (sec2) */ + if ((splitter_config->rw_fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0) + TEST_ERROR; + if (H5Pset_fapl_sec2(splitter_config->rw_fapl_id) < 0) TEST_ERROR; - } - /* Create Splitter W/O channel driver (mirror) - */ + /* Create Splitter W/O channel driver (mirror) */ mirror_conf.magic = H5FD_MIRROR_FAPL_MAGIC; mirror_conf.version = H5FD_MIRROR_CURR_FAPL_T_VERSION; - mirror_conf.handshake_port = SERVER_HANDSHAKE_PORT; 
- if (HDstrncpy(mirror_conf.remote_ip, SERVER_IP_ADDRESS, H5FD_MIRROR_MAX_IP_LEN) == NULL) { + mirror_conf.handshake_port = opts->portno; + if (HDstrncpy(mirror_conf.remote_ip, opts->ip, H5FD_MIRROR_MAX_IP_LEN) == NULL) TEST_ERROR; - } - splitter_config.wo_fapl_id = H5Pcreate(H5P_FILE_ACCESS); - if (H5I_INVALID_HID == splitter_config.wo_fapl_id) { + if ((splitter_config->wo_fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0) TEST_ERROR; - } - if (H5Pset_fapl_mirror(splitter_config.wo_fapl_id, &mirror_conf) == FAIL) { + if (H5Pset_fapl_mirror(splitter_config->wo_fapl_id, &mirror_conf) < 0) TEST_ERROR; - } - /* Build r/w, w/o, and log file paths - */ - if (build_paths(_basename, &splitter_config, names) < 0) { + /* Build r/w, w/o, and log file paths */ + if (build_paths(basename, splitter_config, names) < 0) TEST_ERROR; - } - /* Set file paths for w/o and logfile - */ - if (HDstrncpy(splitter_config.wo_path, (const char *)names->wo, H5FD_SPLITTER_PATH_MAX) == NULL) { + /* Set file paths for w/o and logfile */ + if (HDstrncpy(splitter_config->wo_path, (const char *)names->wo, H5FD_SPLITTER_PATH_MAX) == NULL) TEST_ERROR; - } - if (HDstrncpy(splitter_config.log_file_path, (const char *)names->log, H5FD_SPLITTER_PATH_MAX) == NULL) { + if (HDstrncpy(splitter_config->log_file_path, (const char *)names->log, H5FD_SPLITTER_PATH_MAX) == NULL) TEST_ERROR; - } - /* Create Splitter FAPL - */ - ret_value = H5Pcreate(H5P_FILE_ACCESS); - if (H5I_INVALID_HID == ret_value) { + /* Create Splitter FAPL */ + if ((ret_value = H5Pcreate(H5P_FILE_ACCESS)) < 0) TEST_ERROR; - } - if (H5Pset_fapl_splitter(ret_value, &splitter_config) == FAIL) { + if (H5Pset_fapl_splitter(ret_value, splitter_config) < 0) TEST_ERROR; - } - /* Close FAPLs created for child channels - */ - if (H5Pclose(splitter_config.rw_fapl_id) < 0) { + /* Close FAPLs created for child channels */ + if (H5Pclose(splitter_config->rw_fapl_id) < 0) TEST_ERROR; - } - splitter_config.rw_fapl_id = H5I_INVALID_HID; - if 
(H5Pclose(splitter_config.wo_fapl_id) < 0) { + if (H5Pclose(splitter_config->wo_fapl_id) < 0) TEST_ERROR; - } - splitter_config.wo_fapl_id = H5I_INVALID_HID; + + HDfree(splitter_config); return ret_value; error: - if (splitter_config.wo_fapl_id >= 0) { - (void)H5Pclose(splitter_config.wo_fapl_id); - } - if (splitter_config.rw_fapl_id >= 0) { - (void)H5Pclose(splitter_config.rw_fapl_id); - } - if (ret_value >= 0) { - (void)H5Pclose(ret_value); + HDfree(splitter_config); + H5E_BEGIN_TRY + { + H5Pclose(splitter_config->wo_fapl_id); + H5Pclose(splitter_config->rw_fapl_id); + H5Pclose(ret_value); } + H5E_END_TRY; + return H5I_INVALID_HID; } /* end create_mirroring_split_fapl() */ @@ -1348,55 +1291,43 @@ create_mirroring_split_fapl(const char *_basename, struct mirrortest_filenames * * * Return: Success: 0 * Failure: -1 - * - * Programmer: Jacob Smith - * 2019-12-17 * --------------------------------------------------------------------------- */ static int -test_create_and_close(void) +test_create_and_close(const struct mt_opts *opts) { - struct mirrortest_filenames names; - hid_t file_id = H5I_INVALID_HID; - hid_t fapl_id = H5P_DEFAULT; + struct mirrortest_filenames *names = NULL; + hid_t file_id = H5I_INVALID_HID; + hid_t fapl_id = H5I_INVALID_HID; TESTING("File creation and immediate close"); - /* Create FAPL for Splitter[sec2|mirror] - */ - fapl_id = create_mirroring_split_fapl("basic_create", &names); - if (H5I_INVALID_HID == fapl_id) { + if (NULL == (names = HDmalloc(sizeof(struct mirrortest_filenames)))) TEST_ERROR; - } - - /* -------------------- */ - /* TEST: Create and Close */ - file_id = H5Fcreate(names.rw, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id); - if (H5I_INVALID_HID == file_id) { + /* Create FAPL for splitter[sec2|mirror] */ + if ((fapl_id = create_mirroring_split_fapl("basic_create", names, opts)) < 0) TEST_ERROR; - } - /* -------------------- */ - /* Standard cleanup */ + if ((file_id = H5Fcreate(names->rw, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id)) < 0) + 
TEST_ERROR; - if (H5Fclose(file_id) == FAIL) { + if (H5Fclose(file_id) < 0) TEST_ERROR; - } - if (fapl_id != H5P_DEFAULT && fapl_id >= 0) { - if (H5Pclose(fapl_id) == FAIL) { - TEST_ERROR; - } - } + if (H5Pclose(fapl_id) < 0) + TEST_ERROR; + + HDfree(names); PASSED(); return 0; error: + HDfree(names); H5E_BEGIN_TRY { - (void)H5Fclose(file_id); - (void)H5Pclose(fapl_id); + H5Fclose(file_id); + H5Pclose(fapl_id); } H5E_END_TRY; return -1; @@ -1412,9 +1343,6 @@ test_create_and_close(void) * and values. * * Return: SUCCEED/FAIL - * - * Programmer: Jacob Smith - * 2019-08-14 * ---------------------------------------------------------------------------- */ static herr_t @@ -1437,10 +1365,7 @@ create_datasets(hid_t file_id, unsigned min_dset, unsigned max_dset) LOGPRINT(2, "create_dataset()\n"); - /* --------------------------------- - * "Clear" ID arrays - */ - + /* Initialize ID arrays */ for (i = 0; i < MAX_DSET_COUNT; i++) { LOGPRINT(3, "clearing IDs [%d]\n", i); dataspace_ids[i] = H5I_INVALID_HID; @@ -1448,20 +1373,14 @@ create_datasets(hid_t file_id, unsigned min_dset, unsigned max_dset) filespace_ids[i] = H5I_INVALID_HID; } - /* --------------------------------- - * Generate dataspace, dataset, and 'filespace' IDs - */ - + /* Generate dataspace, dataset, and 'filespace' IDs */ if (_create_chunking_ids(file_id, min_dset, max_dset, chunk_dims, dset_dims, dataspace_ids, filespace_ids, - dataset_ids, &memspace_id) == FAIL) { + dataset_ids, &memspace_id) < 0) TEST_ERROR; - } - /* --------------------------------- - * Initialize (write) all datasets in a "round robin"... + /* Initialize (write) all datasets in a "round robin"... * for a given chunk 'location', write chunk data to each dataset. 
*/ - for (i = 0; i < DSET_DIM; i += CHUNK_DIM) { LOGPRINT(3, "i: %d\n", i); for (j = 0; j < DSET_DIM; j += CHUNK_DIM) { @@ -1475,39 +1394,28 @@ create_datasets(hid_t file_id, unsigned min_dset, unsigned max_dset) } } - /* select on disk hyperslab */ + /* Select on disk hyperslab */ offset[0] = (hsize_t)i; offset[1] = (hsize_t)j; LOGPRINT(3, " H5Sselect_hyperslab()\n"); - if (H5Sselect_hyperslab(filespace_ids[m], H5S_SELECT_SET, offset, NULL, a_size, NULL) < 0) { + if (H5Sselect_hyperslab(filespace_ids[m], H5S_SELECT_SET, offset, NULL, a_size, NULL) < 0) TEST_ERROR; - } LOGPRINT(3, " H5Dwrite()\n"); if (H5Dwrite(dataset_ids[m], H5T_NATIVE_INT, memspace_id, filespace_ids[m], H5P_DEFAULT, - data_chunk) < 0) { + data_chunk) < 0) TEST_ERROR; - } } } } - /* --------------------------------- - * Read and verify data from datasets - */ - - if (_verify_datasets(min_dset, max_dset, filespace_ids, dataset_ids, memspace_id) == FAIL) { + /* Read and verify data from datasets */ + if (_verify_datasets(min_dset, max_dset, filespace_ids, dataset_ids, memspace_id) < 0) TEST_ERROR; - } - - /* --------------------------------- - * Cleanup - */ - if (_close_chunking_ids(min_dset, max_dset, dataspace_ids, filespace_ids, dataset_ids, &memspace_id) == - FAIL) { + /* Cleanup */ + if (_close_chunking_ids(min_dset, max_dset, dataspace_ids, filespace_ids, dataset_ids, &memspace_id) < 0) TEST_ERROR; - } return SUCCEED; @@ -1523,9 +1431,6 @@ create_datasets(hid_t file_id, unsigned min_dset, unsigned max_dset) * Purpose: Create new IDs to be used with the associated file. 
* * Return: SUCCEED/FAIL - * - * Programer: Jacob Smith - * 2019 * ---------------------------------------------------------------------------- */ static herr_t @@ -1533,92 +1438,69 @@ _create_chunking_ids(hid_t file_id, unsigned min_dset, unsigned max_dset, hsize_ hsize_t *dset_dims, hid_t *dataspace_ids, hid_t *filespace_ids, hid_t *dataset_ids, hid_t *memspace_id) { - char dset_name[DSET_NAME_LEN + 1]; - unsigned m = 0; - hid_t dcpl_id = H5I_INVALID_HID; + char dset_name[DSET_NAME_LEN + 1]; + hid_t dcpl_id = H5I_INVALID_HID; LOGPRINT(2, "_create_chunking_ids()\n"); - /* -------------------- - * Create chunking DCPL - */ - - dcpl_id = H5Pcreate(H5P_DATASET_CREATE); - if (dcpl_id < 0) { + /* Create chunking DCPL */ + if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0) TEST_ERROR; - } - if (H5Pset_chunk(dcpl_id, 2, chunk_dims) == FAIL) { + if (H5Pset_chunk(dcpl_id, 2, chunk_dims) < 0) TEST_ERROR; - } - - /* -------------------- - * Create dataspace IDs - */ - for (m = min_dset; m <= max_dset; m++) { - dataspace_ids[m] = H5Screate_simple(2, dset_dims, NULL); - if (dataspace_ids[m] < 0) { + /* Create dataspace IDs */ + for (unsigned m = min_dset; m <= max_dset; m++) { + if ((dataspace_ids[m] = H5Screate_simple(2, dset_dims, NULL)) < 0) { HDsnprintf(mesg, MIRR_MESG_SIZE, "unable to create dataspace ID %d\n", m); FAIL_PUTS_ERROR(mesg); } } - /* -------------------- - * Create dataset IDs - */ - - for (m = min_dset; m <= max_dset; m++) { + /* Create dataset IDs */ + for (unsigned m = min_dset; m <= max_dset; m++) { if (HDsnprintf(dset_name, DSET_NAME_LEN, "/dset%03d", m) > DSET_NAME_LEN) { HDsnprintf(mesg, MIRR_MESG_SIZE, "unable to compose dset name %d\n", m); FAIL_PUTS_ERROR(mesg); } - dataset_ids[m] = H5Dcreate2(file_id, dset_name, H5T_STD_I32BE, dataspace_ids[m], H5P_DEFAULT, dcpl_id, - H5P_DEFAULT); - if (dataset_ids[m] < 0) { + if ((dataset_ids[m] = H5Dcreate2(file_id, dset_name, H5T_STD_I32BE, dataspace_ids[m], H5P_DEFAULT, + dcpl_id, H5P_DEFAULT)) < 0) { 
HDsnprintf(mesg, MIRR_MESG_SIZE, "unable to create dset ID %d\n", m); FAIL_PUTS_ERROR(mesg); } } - /* -------------------- - * Get file space IDs - */ - - for (m = min_dset; m <= max_dset; m++) { - filespace_ids[m] = H5Dget_space(dataset_ids[m]); - if (filespace_ids[m] < 0) { + /* Get file space IDs */ + for (unsigned m = min_dset; m <= max_dset; m++) { + if ((filespace_ids[m] = H5Dget_space(dataset_ids[m])) < 0) { HDsnprintf(mesg, MIRR_MESG_SIZE, "unable to create filespace ID %d\n", m); FAIL_PUTS_ERROR(mesg); } } - /* -------------------- - * Create mem space to be used to read and write chunks - */ - - *memspace_id = H5Screate_simple(2, chunk_dims, NULL); - if (*memspace_id < 0) { + /* Create mem space to be used to read and write chunks */ + if ((*memspace_id = H5Screate_simple(2, chunk_dims, NULL)) < 0) TEST_ERROR; - } - /* -------------------- - * Clean up the DCPL, even if there were errors before - */ - - if (dcpl_id != H5P_DEFAULT && dcpl_id != H5I_INVALID_HID) { - if (H5Pclose(dcpl_id) == FAIL) { - TEST_ERROR; - } - } + /* Clean up the DCPL */ + if (H5Pclose(dcpl_id) < 0) + TEST_ERROR; return SUCCEED; error: - if (dcpl_id != H5P_DEFAULT && dcpl_id != H5I_INVALID_HID) { - (void)H5Pclose(dcpl_id); + H5E_BEGIN_TRY + { + /* Note that it's the caller's responsibility to clean up any IDs + * passed back via out parameters + */ + H5Pclose(dcpl_id); } + H5E_END_TRY; + LOGPRINT(1, "_create_chunking_ids() FAILED\n"); + return FAIL; } /* end _create_chunking_ids() */ @@ -1628,61 +1510,47 @@ _create_chunking_ids(hid_t file_id, unsigned min_dset, unsigned max_dset, hsize_ * Purpose: Open/access IDs from the given file. 
* * Return: SUCCEED/FAIL - * - * Programmer: Jacob Smith - * 2019 * ---------------------------------------------------------------------------- */ static herr_t _open_chunking_ids(hid_t file_id, unsigned min_dset, unsigned max_dset, hsize_t *chunk_dims, hid_t *filespace_ids, hid_t *dataset_ids, hid_t *memspace_id) { - char dset_name[DSET_NAME_LEN + 1]; - unsigned m = 0; + char dset_name[DSET_NAME_LEN + 1]; LOGPRINT(2, "_open_chunking_ids()\n"); - /* -------------------- - * Open dataset IDs - */ - - for (m = min_dset; m <= max_dset; m++) { + /* Open dataset IDs */ + for (unsigned m = min_dset; m <= max_dset; m++) { if (HDsnprintf(dset_name, DSET_NAME_LEN, "/dset%03d", m) > DSET_NAME_LEN) { HDsnprintf(mesg, MIRR_MESG_SIZE, "unable to compose dset name %d\n", m); FAIL_PUTS_ERROR(mesg); } - dataset_ids[m] = H5Dopen2(file_id, dset_name, H5P_DEFAULT); - if (dataset_ids[m] < 0) { + if ((dataset_ids[m] = H5Dopen2(file_id, dset_name, H5P_DEFAULT)) < 0) { HDsnprintf(mesg, MIRR_MESG_SIZE, "unable to open dset ID %d\n", m); FAIL_PUTS_ERROR(mesg); } } - /* -------------------- - * Open filespace IDs - */ - - for (m = min_dset; m <= max_dset; m++) { - filespace_ids[m] = H5Dget_space(dataset_ids[m]); - if (filespace_ids[m] < 0) { + /* Open filespace IDs */ + for (unsigned m = min_dset; m <= max_dset; m++) { + if ((filespace_ids[m] = H5Dget_space(dataset_ids[m])) < 0) { HDsnprintf(mesg, MIRR_MESG_SIZE, "unable to get filespace ID %d\n", m); FAIL_PUTS_ERROR(mesg); } } - /* -------------------- - * Create mem space to be used to read and write chunks - */ - - *memspace_id = H5Screate_simple(2, chunk_dims, NULL); - if (*memspace_id < 0) { + /* Create mem space to be used to read and write chunks */ + if ((*memspace_id = H5Screate_simple(2, chunk_dims, NULL)) < 0) TEST_ERROR; - } return SUCCEED; error: + /* Note that the caller is responsible for cleaning up IDs returned + * as out parameters + */ LOGPRINT(1, "_open_chunking_ids() FAILED\n"); return FAIL; } /* end 
_open_chunking_ids() */ @@ -1695,20 +1563,15 @@ _open_chunking_ids(hid_t file_id, unsigned min_dset, unsigned max_dset, hsize_t * _open_chunking_ids(). (as opposed to created IDs) * * Return: SUCCEED/FAIL - * - * Programmer: Jacob Smith - * 2019 * --------------------------------------------------------------------------- */ static herr_t _close_chunking_ids(unsigned min_dset, unsigned max_dset, hid_t *dataspace_ids, hid_t *filespace_ids, hid_t *dataset_ids, hid_t *memspace_id) { - unsigned m; - LOGPRINT(2, "_close_chunking_ids()\n"); - for (m = min_dset; m <= max_dset; m++) { + for (unsigned m = min_dset; m <= max_dset; m++) { LOGPRINT(3, "closing ids[%d]\n", m); if (dataspace_ids) { if (H5Sclose(dataspace_ids[m]) < 0) { @@ -1726,9 +1589,9 @@ _close_chunking_ids(unsigned min_dset, unsigned max_dset, hid_t *dataspace_ids, } } - if ((*memspace_id != H5I_INVALID_HID) && (H5Sclose(*memspace_id) < 0)) { - TEST_ERROR; - } + if (*memspace_id != H5I_INVALID_HID) + if (H5Sclose(*memspace_id) < 0) + TEST_ERROR; return SUCCEED; @@ -1744,9 +1607,6 @@ _close_chunking_ids(unsigned min_dset, unsigned max_dset, hid_t *dataspace_ids, * to create_datasets(). * * Return: SUCCEED/FAIL - * - * Programmer: Jacob Smith - * 2019 * --------------------------------------------------------------------------- */ static herr_t @@ -1770,9 +1630,8 @@ _verify_datasets(unsigned min_dset, unsigned max_dset, hid_t *filespace_ids, hid /* select on disk hyperslab */ offset[0] = (hsize_t)i; offset[1] = (hsize_t)j; - if (H5Sselect_hyperslab(filespace_ids[m], H5S_SELECT_SET, offset, NULL, a_size, NULL) < 0) { + if (H5Sselect_hyperslab(filespace_ids[m], H5S_SELECT_SET, offset, NULL, a_size, NULL) < 0) TEST_ERROR; - } if (H5Dread(dataset_ids[m], H5T_NATIVE_INT, memspace_id, filespace_ids[m], H5P_DEFAULT, data_chunk) < 0) { @@ -1809,19 +1668,15 @@ _verify_datasets(unsigned min_dset, unsigned max_dset, hid_t *filespace_ids, hid * tears down accessor information. 
* * Return: SUCCEED/FAIL - * - * Programmer: Jacob Smith - * 2019 * --------------------------------------------------------------------------- */ static herr_t verify_datasets(hid_t file_id, unsigned min_dset, unsigned max_dset) { - hid_t dataset_ids[MAX_DSET_COUNT + 1]; - hid_t filespace_ids[MAX_DSET_COUNT + 1]; - unsigned i; - hid_t memspace_id = H5I_INVALID_HID; - hsize_t chunk_dims[2] = {CHUNK_DIM, CHUNK_DIM}; + hid_t dataset_ids[MAX_DSET_COUNT + 1]; + hid_t filespace_ids[MAX_DSET_COUNT + 1]; + hid_t memspace_id = H5I_INVALID_HID; + hsize_t chunk_dims[2] = {CHUNK_DIM, CHUNK_DIM}; HDassert(file_id >= 0); HDassert(min_dset <= max_dset); @@ -1829,40 +1684,25 @@ verify_datasets(hid_t file_id, unsigned min_dset, unsigned max_dset) LOGPRINT(2, "verify_datasets()\n"); - /* --------------------------------- - * "Clear" ID arrays - */ - - for (i = 0; i < MAX_DSET_COUNT; i++) { + /* Initialize ID arrays */ + for (unsigned i = 0; i < MAX_DSET_COUNT; i++) { LOGPRINT(3, "clearing IDs [%d]\n", i); dataset_ids[i] = H5I_INVALID_HID; filespace_ids[i] = H5I_INVALID_HID; } - /* --------------------------------- - * Generate dataspace, dataset, and 'filespace' IDs - */ - + /* Generate dataspace, dataset, and 'filespace' IDs */ if (_open_chunking_ids(file_id, min_dset, max_dset, chunk_dims, filespace_ids, dataset_ids, - &memspace_id) == FAIL) { + &memspace_id) < 0) TEST_ERROR; - } - - /* --------------------------------- - * Read and verify data from datasets - */ - if (_verify_datasets(min_dset, max_dset, filespace_ids, dataset_ids, memspace_id) == FAIL) { + /* Read and verify data from datasets */ + if (_verify_datasets(min_dset, max_dset, filespace_ids, dataset_ids, memspace_id) < 0) TEST_ERROR; - } - - /* --------------------------------- - * Cleanup - */ - if (_close_chunking_ids(min_dset, max_dset, NULL, filespace_ids, dataset_ids, &memspace_id) == FAIL) { + /* Cleanup */ + if (_close_chunking_ids(min_dset, max_dset, NULL, filespace_ids, dataset_ids, &memspace_id) < 0) 
TEST_ERROR; - } return SUCCEED; @@ -1883,40 +1723,34 @@ verify_datasets(hid_t file_id, unsigned min_dset, unsigned max_dset) * * Return: Success: 0 * Failure: -1 - * - * Programmer: Jacob Smith - * 2019 * --------------------------------------------------------------------------- */ static int -test_basic_dataset_write(void) +test_basic_dataset_write(const struct mt_opts *opts) { - struct mirrortest_filenames names; - hid_t file_id = H5I_INVALID_HID; - hid_t fapl_id = H5P_DEFAULT; - hid_t dset_id = H5I_INVALID_HID; - hid_t dspace_id = H5I_INVALID_HID; - hid_t dtype_id = H5T_NATIVE_INT; - hsize_t dims[2] = {DATABUFFER_SIZE, DATABUFFER_SIZE}; - int *buf = NULL; - int i = 0; - int j = 0; + struct mirrortest_filenames *names = NULL; + hid_t file_id = H5I_INVALID_HID; + hid_t fapl_id = H5I_INVALID_HID; + hid_t dset_id = H5I_INVALID_HID; + hid_t dspace_id = H5I_INVALID_HID; + hid_t dtype_id = H5T_NATIVE_INT; + hsize_t dims[2] = {DATABUFFER_SIZE, DATABUFFER_SIZE}; + int *buf = NULL; + int i = 0; + int j = 0; TESTING("Mirror open and dataset writing"); - /* Create FAPL for Splitter[sec2|mirror] - */ - fapl_id = create_mirroring_split_fapl("basic_write", &names); - if (H5I_INVALID_HID == fapl_id) { + if (NULL == (names = HDmalloc(sizeof(struct mirrortest_filenames)))) TEST_ERROR; - } - /* Prepare data to be written - */ - buf = (int *)HDmalloc(DATABUFFER_SIZE * DATABUFFER_SIZE * sizeof(int)); - if (NULL == buf) { + /* Create FAPL for Splitter[sec2|mirror] */ + if ((fapl_id = create_mirroring_split_fapl("basic_write", names, opts)) < 0) + TEST_ERROR; + + /* Prepare data to be written */ + if (NULL == (buf = HDmalloc(DATABUFFER_SIZE * DATABUFFER_SIZE * sizeof(int)))) TEST_ERROR; - } for (i = 0; i < DATABUFFER_SIZE; i++) { for (j = 0; j < DATABUFFER_SIZE; j++) { int k = i * DATABUFFER_SIZE + j; @@ -1927,78 +1761,59 @@ test_basic_dataset_write(void) /* -------------------- */ /* TEST: Create and Close */ - file_id = H5Fcreate(names.rw, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id); - if 
(H5I_INVALID_HID == file_id) { + if ((file_id = H5Fcreate(names->rw, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id)) < 0) TEST_ERROR; - } - if (H5Fclose(file_id) == FAIL) { + if (H5Fclose(file_id) < 0) TEST_ERROR; - } - file_id = H5I_INVALID_HID; /* -------------------- */ /* TEST: Repoen and Write */ - file_id = H5Fopen(names.rw, H5F_ACC_RDWR, fapl_id); - if (H5I_INVALID_HID == file_id) { + if ((file_id = H5Fopen(names->rw, H5F_ACC_RDWR, fapl_id)) < 0) TEST_ERROR; - } - dspace_id = H5Screate_simple(2, dims, NULL); - if (H5I_INVALID_HID == dspace_id) { + if ((dspace_id = H5Screate_simple(2, dims, NULL)) < 0) TEST_ERROR; - } - dset_id = H5Dcreate2(file_id, "dataset", dtype_id, dspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); - if (H5I_INVALID_HID == dset_id) { + if ((dset_id = + H5Dcreate2(file_id, "dataset", dtype_id, dspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR; - } - if (H5Dwrite(dset_id, dtype_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) == FAIL) { + if (H5Dwrite(dset_id, dtype_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) TEST_ERROR; - } - /* -------------------- */ - /* Standard cleanup */ + /* Cleanup */ - HDfree(buf); - buf = NULL; - if (H5Dclose(dset_id) == FAIL) { + if (H5Dclose(dset_id) < 0) TEST_ERROR; - } - if (H5Sclose(dspace_id) == FAIL) { + if (H5Sclose(dspace_id) < 0) TEST_ERROR; - } - if (H5Fclose(file_id) == FAIL) { + if (H5Fclose(file_id) < 0) + TEST_ERROR; + if (H5Pclose(fapl_id) < 0) TEST_ERROR; - } - if (fapl_id != H5P_DEFAULT && fapl_id > 0) { - if (H5Pclose(fapl_id) == FAIL) { - TEST_ERROR; - } - } /* -------------------- */ /* TEST: Verify that the R/W and W/O files are identical */ - if (h5_compare_file_bytes(names.rw, names.wo) < 0) { + if (h5_compare_file_bytes(names->rw, names->wo) < 0) TEST_ERROR; - } + + HDfree(buf); + HDfree(names); PASSED(); return 0; error: + HDfree(buf); + HDfree(names); + H5E_BEGIN_TRY { - (void)H5Fclose(file_id); - if (buf) { - HDfree(buf); - } - (void)H5Dclose(dset_id); - (void)H5Sclose(dspace_id); - if 
(fapl_id != H5P_DEFAULT && fapl_id > 0) { - (void)H5Pclose(fapl_id); - } + H5Fclose(file_id); + H5Dclose(dset_id); + H5Sclose(dspace_id); + H5Pclose(fapl_id); } H5E_END_TRY; return -1; @@ -2014,104 +1829,79 @@ test_basic_dataset_write(void) * * Return: Success: 0 * Failure: -1 - * - * Programmer: Jacob Smith - * 2019 * --------------------------------------------------------------------------- */ static int -test_chunked_dataset_write(void) +test_chunked_dataset_write(const struct mt_opts *opts) { - struct mirrortest_filenames names; - hid_t file_id = H5I_INVALID_HID; - hid_t fapl_id = H5P_DEFAULT; + struct mirrortest_filenames *names = NULL; + + hid_t file_id = H5I_INVALID_HID; + hid_t fapl_id = H5P_DEFAULT; TESTING("Mirror open and dataset writing (chunked)"); - /* Create FAPL for Splitter[sec2|mirror] - */ - fapl_id = create_mirroring_split_fapl("chunked_write", &names); - if (H5I_INVALID_HID == fapl_id) { + if (NULL == (names = HDmalloc(sizeof(struct mirrortest_filenames)))) + TEST_ERROR; + + /* Create FAPL for Splitter[sec2|mirror] */ + if ((fapl_id = create_mirroring_split_fapl("chunked_write", names, opts)) < 0) TEST_ERROR; - } /* -------------------- */ /* TEST: Create and Close */ - file_id = H5Fcreate(names.rw, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id); - if (H5I_INVALID_HID == file_id) { + if ((file_id = H5Fcreate(names->rw, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id)) < 0) TEST_ERROR; - } - if (H5Fclose(file_id) == FAIL) { + if (H5Fclose(file_id) < 0) TEST_ERROR; - } - file_id = H5I_INVALID_HID; /* -------------------- */ /* TEST: Reopen and Write */ - file_id = H5Fopen(names.rw, H5F_ACC_RDWR, fapl_id); - if (H5I_INVALID_HID == file_id) { + if ((file_id = H5Fopen(names->rw, H5F_ACC_RDWR, fapl_id)) < 0) TEST_ERROR; - } - /* Write datasets to file - */ - if (create_datasets(file_id, 0, MAX_DSET_COUNT) == FAIL) { + /* Write datasets to file */ + if (create_datasets(file_id, 0, MAX_DSET_COUNT) < 0) TEST_ERROR; - } - /* Close to 'flush to disk', and reopen file - */ - 
if (H5Fclose(file_id) == FAIL) { + /* Close to 'flush to disk', and reopen file */ + if (H5Fclose(file_id) < 0) TEST_ERROR; - } - file_id = H5I_INVALID_HID; - /* Reopen file - */ - file_id = H5Fopen(names.rw, H5F_ACC_RDWR, fapl_id); - if (H5I_INVALID_HID == file_id) { + /* Reopen file */ + if ((file_id = H5Fopen(names->rw, H5F_ACC_RDWR, fapl_id)) < 0) TEST_ERROR; - } - /* Verify written data integrity - */ - if (verify_datasets(file_id, 0, MAX_DSET_COUNT) == FAIL) { + /* Verify written data integrity */ + if (verify_datasets(file_id, 0, MAX_DSET_COUNT) < 0) TEST_ERROR; - } - - /* -------------------- */ - /* Standard cleanup */ - if (H5Fclose(file_id) == FAIL) { + /* Cleanup */ + if (H5Fclose(file_id) < 0) + TEST_ERROR; + if (H5Pclose(fapl_id) < 0) TEST_ERROR; - } - file_id = H5I_INVALID_HID; - if (fapl_id != H5P_DEFAULT && fapl_id > 0) { - if (H5Pclose(fapl_id) == FAIL) { - TEST_ERROR; - } - fapl_id = H5I_INVALID_HID; - } /* -------------------- */ /* TEST: Verify that the R/W and W/O files are identical */ - if (h5_compare_file_bytes(names.rw, names.wo) < 0) { + if (h5_compare_file_bytes(names->rw, names->wo) < 0) { TEST_ERROR; } + HDfree(names); + PASSED(); return 0; error: + HDfree(names); H5E_BEGIN_TRY { - (void)H5Fclose(file_id); - if (fapl_id != H5P_DEFAULT && fapl_id > 0) { - (void)H5Pclose(fapl_id); - } + H5Fclose(file_id); + H5Pclose(fapl_id); } H5E_END_TRY; return -1; @@ -2128,99 +1918,82 @@ test_chunked_dataset_write(void) * * Return: Success: 0 * Failure: -1 - * - * Programmer: Jacob Smith - * 2019 * --------------------------------------------------------------------------- */ static int -test_on_disk_zoo(void) +test_on_disk_zoo(const struct mt_opts *opts) { - const char grp_name[] = "/only"; - struct mirrortest_filenames names; - hid_t file_id = H5I_INVALID_HID; - hid_t grp_id = H5I_INVALID_HID; - hid_t fapl_id = H5P_DEFAULT; + const char grp_name[] = "/only"; + struct mirrortest_filenames *names = NULL; + hid_t file_id = H5I_INVALID_HID; + hid_t 
grp_id = H5I_INVALID_HID; + hid_t fapl_id = H5I_INVALID_HID; TESTING("'Zoo' of on-disk structures"); - /* Create FAPL for Splitter[sec2|mirror] - */ - fapl_id = create_mirroring_split_fapl("zoo", &names); - if (H5I_INVALID_HID == fapl_id) { + if (NULL == (names = HDmalloc(sizeof(struct mirrortest_filenames)))) + TEST_ERROR; + + /* Create FAPL for Splitter[sec2|mirror] */ + if ((fapl_id = create_mirroring_split_fapl("zoo", names, opts)) < 0) TEST_ERROR; - } /* -------------------- */ /* TEST: Create file */ - file_id = H5Fcreate(names.rw, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id); - if (H5I_INVALID_HID == file_id) { + if ((file_id = H5Fcreate(names->rw, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id)) < 0) TEST_ERROR; - } - grp_id = H5Gcreate2(file_id, grp_name, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); - if (grp_id == H5I_INVALID_HID) { + if ((grp_id = H5Gcreate2(file_id, grp_name, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR; - } /* Create datasets in file, close (flush) and reopen, validate. * Use of ( pass ) a conceit required for using create_ and validate_zoo() * from cache_common and/or genall5. 
*/ - if (pass) { + if (pass) create_zoo(file_id, grp_name, 0); - } if (pass) { - if (H5Fclose(file_id) == FAIL) { + if (H5Fclose(file_id) < 0) TEST_ERROR; - } - file_id = H5Fopen(names.rw, H5F_ACC_RDWR, fapl_id); - if (H5I_INVALID_HID == file_id) { + if ((file_id = H5Fopen(names->rw, H5F_ACC_RDWR, fapl_id)) < 0) TEST_ERROR; - } } - if (pass) { + if (pass) validate_zoo(file_id, grp_name, 0); /* sanity-check */ - } + if (!pass) { - HDprintf(failure_mssg); + HDprintf("%s", failure_mssg); TEST_ERROR; } - /* -------------------- */ - /* Standard cleanup */ + /* Cleanup */ - if (fapl_id != H5P_DEFAULT && fapl_id >= 0) { - if (H5Pclose(fapl_id) == FAIL) { - TEST_ERROR; - } - } - if (H5Gclose(grp_id) == FAIL) { + if (H5Pclose(fapl_id) < 0) TEST_ERROR; - } - if (H5Fclose(file_id) == FAIL) { + if (H5Gclose(grp_id) < 0) + TEST_ERROR; + if (H5Fclose(file_id) < 0) TEST_ERROR; - } /* -------------------- */ /* TEST: Verify that the R/W and W/O files are identical */ - if (h5_compare_file_bytes(names.rw, names.wo) < 0) { + if (h5_compare_file_bytes(names->rw, names->wo) < 0) TEST_ERROR; - } + + HDfree(names); PASSED(); return 0; error: + HDfree(names); H5E_BEGIN_TRY { - (void)H5Fclose(file_id); - (void)H5Gclose(grp_id); - if (fapl_id != H5P_DEFAULT && fapl_id > 0) { - (void)H5Pclose(fapl_id); - } + H5Fclose(file_id); + H5Gclose(grp_id); + H5Pclose(fapl_id); } H5E_END_TRY; return -1; @@ -2241,164 +2014,128 @@ test_on_disk_zoo(void) * * Return: Success: 0 * Failure: -1 - * - * Programmer: Jacob Smith - * 2019 * --------------------------------------------------------------------------- */ static int -test_vanishing_datasets(void) +test_vanishing_datasets(const struct mt_opts *opts) { - struct mirrortest_filenames names; - hid_t file_id = H5I_INVALID_HID; - hid_t fapl_id = H5I_INVALID_HID; - hid_t dset_id = H5I_INVALID_HID; - hid_t dspace_id = H5I_INVALID_HID; - hid_t mirror_fapl_id = H5I_INVALID_HID; - hsize_t dims[2] = {DATABUFFER_SIZE, DATABUFFER_SIZE}; - uint32_t 
buf[DATABUFFER_SIZE][DATABUFFER_SIZE]; /* consider malloc? */ - H5G_info_t group_info; - unsigned int i, j, k; - const unsigned int max_loops = 20; - const unsigned int max_at_one_time = 3; + struct mirrortest_filenames *names = NULL; + hid_t file_id = H5I_INVALID_HID; + hid_t fapl_id = H5I_INVALID_HID; + hid_t dset_id = H5I_INVALID_HID; + hid_t dspace_id = H5I_INVALID_HID; + hsize_t dims[2] = {DATABUFFER_SIZE, DATABUFFER_SIZE}; + H5G_info_t group_info; + unsigned int i, j, k; + const unsigned int max_loops = 20; + const unsigned int max_at_one_time = 3; + struct { + uint32_t arr[DATABUFFER_SIZE][DATABUFFER_SIZE]; + } *buf = NULL; TESTING("Vanishing Datasets"); + if (NULL == (names = HDmalloc(sizeof(struct mirrortest_filenames)))) + TEST_ERROR; + if (NULL == (buf = HDcalloc(1, sizeof(*buf)))) + TEST_ERROR; + /* -------------------- */ /* Set up recurrent data (FAPL, dataspace) */ - /* Create FAPL for Splitter[sec2|mirror] - */ - fapl_id = create_mirroring_split_fapl("vanishing", &names); - if (H5I_INVALID_HID == fapl_id) { + /* Create FAPL for Splitter[sec2|mirror] */ + if ((fapl_id = create_mirroring_split_fapl("vanishing", names, opts)) < 0) TEST_ERROR; - } - dspace_id = H5Screate_simple(2, dims, NULL); - if (dspace_id < 0) { + if ((dspace_id = H5Screate_simple(2, dims, NULL)) < 0) TEST_ERROR; - } - /* create file */ - file_id = H5Fcreate(names.rw, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id); - if (H5I_INVALID_HID == file_id) { + /* Create file */ + if ((file_id = H5Fcreate(names->rw, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id)) < 0) TEST_ERROR; - } for (i = 0; i < max_loops; i++) { char namebuf[DSET_NAME_LEN + 1]; - /* deleting datasets */ + /* Delete datasets */ if (i >= max_at_one_time) { - if (HDsnprintf(namebuf, DSET_NAME_LEN, "/dset%02d", (i - max_at_one_time)) > DSET_NAME_LEN) { + if (HDsnprintf(namebuf, DSET_NAME_LEN, "/dset%02d", (i - max_at_one_time)) > DSET_NAME_LEN) TEST_ERROR; - } - if (H5Ldelete(file_id, namebuf, H5P_DEFAULT) < 0) { + if (H5Ldelete(file_id, 
namebuf, H5P_DEFAULT) < 0) TEST_ERROR; - } - } /* end if deleting a dataset */ + } - /* writing datasets */ + /* Write to datasets */ if (i < (max_loops - max_at_one_time)) { - if (HDsnprintf(namebuf, DSET_NAME_LEN, "/dset%02d", i) > DSET_NAME_LEN) { + if (HDsnprintf(namebuf, DSET_NAME_LEN, "/dset%02d", i) > DSET_NAME_LEN) TEST_ERROR; - } - dset_id = - H5Dcreate2(file_id, namebuf, H5T_STD_U32LE, dspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); - if (H5I_INVALID_HID == dset_id) { + if ((dset_id = H5Dcreate2(file_id, namebuf, H5T_STD_U32LE, dspace_id, H5P_DEFAULT, H5P_DEFAULT, + H5P_DEFAULT)) < 0) TEST_ERROR; - } - for (j = 0; j < DATABUFFER_SIZE; j++) { - for (k = 0; k < DATABUFFER_SIZE; k++) { - buf[j][k] = (uint32_t)i; - } - } + for (j = 0; j < DATABUFFER_SIZE; j++) + for (k = 0; k < DATABUFFER_SIZE; k++) + buf->arr[j][k] = (uint32_t)i; - if (H5Dwrite(dset_id, H5T_STD_U32LE, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) { + if (H5Dwrite(dset_id, H5T_STD_U32LE, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) TEST_ERROR; - } - if (H5Dclose(dset_id) < 0) { + if (H5Dclose(dset_id) < 0) TEST_ERROR; - } - dset_id = H5I_INVALID_HID; - } /* end if writing a dataset */ + } } /* end for dataset create-destroy cycles */ - if (H5Fclose(file_id) < 0) { + if (H5Fclose(file_id) < 0) TEST_ERROR; - } - file_id = H5I_INVALID_HID; /* verify there are no datasets in file */ - file_id = H5Fopen(names.rw, H5F_ACC_RDONLY, H5P_DEFAULT); - if (file_id < 0) { + if ((file_id = H5Fopen(names->rw, H5F_ACC_RDONLY, fapl_id)) < 0) TEST_ERROR; - } - if (H5Gget_info(file_id, &group_info) < 0) { + if (H5Gget_info(file_id, &group_info) < 0) TEST_ERROR; - } if (group_info.nlinks > 0) { HDfprintf(stderr, "links in rw file: %" PRIuHSIZE "\n", group_info.nlinks); - HDfflush(stderr); TEST_ERROR; } - if (H5Fclose(file_id) < 0) { + if (H5Fclose(file_id) < 0) TEST_ERROR; - } - file_id = H5Fopen(names.wo, H5F_ACC_RDONLY, H5P_DEFAULT); - if (file_id < 0) { + if ((file_id = H5Fopen(names->wo, H5F_ACC_RDONLY, fapl_id)) < 
0) TEST_ERROR; - } - if (H5Gget_info(file_id, &group_info) < 0) { + if (H5Gget_info(file_id, &group_info) < 0) TEST_ERROR; - } if (group_info.nlinks > 0) { HDfprintf(stderr, "links in wo file: %" PRIuHSIZE "\n", group_info.nlinks); - HDfflush(stderr); TEST_ERROR; } - if (H5Fclose(file_id) < 0) { + if (H5Fclose(file_id) < 0) TEST_ERROR; - } - file_id = H5I_INVALID_HID; - if (h5_compare_file_bytes(names.rw, names.wo) < 0) + if (h5_compare_file_bytes(names->rw, names->wo) < 0) TEST_ERROR; - /* -------------------- */ /* Teardown */ - if (H5Sclose(dspace_id) < 0) { + if (H5Sclose(dspace_id) < 0) TEST_ERROR; - } - if (H5Pclose(fapl_id) < 0) { + if (H5Pclose(fapl_id) < 0) TEST_ERROR; - } + + HDfree(names); + HDfree(buf); PASSED(); return 0; error: + HDfree(names); + HDfree(buf); H5E_BEGIN_TRY { - if (mirror_fapl_id != H5I_INVALID_HID) { - H5Pclose(mirror_fapl_id); - } - if (fapl_id != H5I_INVALID_HID) { - H5Pclose(fapl_id); - } - if (file_id != H5I_INVALID_HID) { - H5Fclose(file_id); - } - if (dset_id != H5I_INVALID_HID) { - H5Dclose(dset_id); - } - if (dspace_id != H5I_INVALID_HID) { - H5Sclose(dspace_id); - } + H5Pclose(fapl_id); + H5Fclose(file_id); + H5Dclose(dset_id); + H5Sclose(dspace_id); } H5E_END_TRY; return -1; @@ -2413,20 +2150,20 @@ test_vanishing_datasets(void) * * Return: Success: 0 * Failure: -1 - * - * Programmer: Jacob Smith - * 2020-03-09 * --------------------------------------------------------------------------- */ static int -test_concurrent_access(void) +test_concurrent_access(const struct mt_opts *opts) { struct file_bundle { struct mirrortest_filenames names; hid_t dset_id; hid_t fapl_id; hid_t file_id; - } bundle[CONCURRENT_COUNT]; + }; + + struct file_bundle *bundle = NULL; + hid_t dspace_id = H5I_INVALID_HID; hid_t dtype_id = H5T_NATIVE_INT; hsize_t dims[2] = {DATABUFFER_SIZE, DATABUFFER_SIZE}; @@ -2436,7 +2173,10 @@ test_concurrent_access(void) TESTING("Concurrent opened mirrored files"); - /* blank bundle */ + if (NULL == (bundle = 
HDmalloc(sizeof(struct file_bundle) * CONCURRENT_COUNT))) + TEST_ERROR; + + /* Initialize bundle */ for (i = 0; i < CONCURRENT_COUNT; i++) { bundle[i].dset_id = H5I_INVALID_HID; bundle[i].fapl_id = H5I_INVALID_HID; @@ -2446,25 +2186,20 @@ test_concurrent_access(void) *bundle[i].names.log = '\0'; } - /* Create FAPL for Splitter[sec2|mirror] - */ + /* Create FAPL for Splitter[sec2|mirror] */ for (i = 0; i < CONCURRENT_COUNT; i++) { - char _name[16] = ""; - hid_t _fapl_id = H5I_INVALID_HID; - HDsnprintf(_name, 15, "concurrent%d", i); - _fapl_id = create_mirroring_split_fapl(_name, &bundle[i].names); - if (H5I_INVALID_HID == _fapl_id) { + char name[16] = ""; + hid_t fapl_id = H5I_INVALID_HID; + + HDsnprintf(name, 15, "concurrent%d", i); + if ((fapl_id = create_mirroring_split_fapl(name, &bundle[i].names, opts)) < 0) TEST_ERROR; - } - bundle[i].fapl_id = _fapl_id; + bundle[i].fapl_id = fapl_id; } - /* Prepare data to be written - */ - buf = (int *)HDmalloc(DATABUFFER_SIZE * DATABUFFER_SIZE * sizeof(int)); - if (NULL == buf) { + /* Prepare data to be written */ + if (NULL == (buf = HDmalloc(DATABUFFER_SIZE * DATABUFFER_SIZE * sizeof(int)))) TEST_ERROR; - } for (i = 0; i < DATABUFFER_SIZE; i++) { for (j = 0; j < DATABUFFER_SIZE; j++) { int k = i * DATABUFFER_SIZE + j; @@ -2472,76 +2207,59 @@ test_concurrent_access(void) } } - /* Prepare generic dataspace - */ - dspace_id = H5Screate_simple(2, dims, NULL); - if (H5I_INVALID_HID == dspace_id) { + /* Prepare generic dataspace */ + if ((dspace_id = H5Screate_simple(2, dims, NULL)) < 0) TEST_ERROR; - } /* -------------------- */ /* TEST: Create file and open elements */ for (i = 0; i < CONCURRENT_COUNT; i++) { - hid_t _file_id = H5I_INVALID_HID; - hid_t _dset_id = H5I_INVALID_HID; + hid_t file_id = H5I_INVALID_HID; + hid_t dset_id = H5I_INVALID_HID; - _file_id = H5Fcreate(bundle[i].names.rw, H5F_ACC_TRUNC, H5P_DEFAULT, bundle[i].fapl_id); - if (H5I_INVALID_HID == _file_id) { + if ((file_id = H5Fcreate(bundle[i].names.rw, 
H5F_ACC_TRUNC, H5P_DEFAULT, bundle[i].fapl_id)) < 0) TEST_ERROR; - } - bundle[i].file_id = _file_id; + bundle[i].file_id = file_id; - _dset_id = - H5Dcreate2(_file_id, "dataset", dtype_id, dspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); - if (H5I_INVALID_HID == _dset_id) { + if ((dset_id = H5Dcreate2(file_id, "dataset", dtype_id, dspace_id, H5P_DEFAULT, H5P_DEFAULT, + H5P_DEFAULT)) < 0) TEST_ERROR; - } - bundle[i].dset_id = _dset_id; + bundle[i].dset_id = dset_id; } /* -------------------- */ /* TEST: Write to files */ - for (i = 0; i < CONCURRENT_COUNT; i++) { - if (H5Dwrite(bundle[i].dset_id, dtype_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) == FAIL) { + for (i = 0; i < CONCURRENT_COUNT; i++) + if (H5Dwrite(bundle[i].dset_id, dtype_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) TEST_ERROR; - } - } /* -------------------- */ /* TEST: Close elements */ for (i = 0; i < CONCURRENT_COUNT; i++) { - if (H5Dclose(bundle[i].dset_id) == FAIL) { + if (H5Dclose(bundle[i].dset_id) < 0) TEST_ERROR; - } - if (H5Fclose(bundle[i].file_id) == FAIL) { + if (H5Fclose(bundle[i].file_id) < 0) TEST_ERROR; - } - if (H5Pclose(bundle[i].fapl_id) == FAIL) { + if (H5Pclose(bundle[i].fapl_id) < 0) TEST_ERROR; - } } - /* -------------------- */ - /* Standard cleanup */ - - HDfree(buf); - buf = NULL; - if (H5Sclose(dspace_id) == FAIL) { + if (H5Sclose(dspace_id) < 0) TEST_ERROR; - } /* -------------------- */ /* TEST: Verify that the R/W and W/O files are identical */ - for (i = 0; i < CONCURRENT_COUNT; i++) { - if (h5_compare_file_bytes(bundle[i].names.rw, bundle[i].names.wo) < 0) { + for (i = 0; i < CONCURRENT_COUNT; i++) + if (h5_compare_file_bytes(bundle[i].names.rw, bundle[i].names.wo) < 0) TEST_ERROR; - } - } + + HDfree(bundle); + HDfree(buf); PASSED(); return 0; @@ -2549,36 +2267,134 @@ test_concurrent_access(void) error: H5E_BEGIN_TRY { - if (buf) { - HDfree(buf); - } - (void)H5Sclose(dspace_id); + H5Sclose(dspace_id); for (i = 0; i < CONCURRENT_COUNT; i++) { - 
(void)H5Dclose(bundle[i].dset_id); - (void)H5Fclose(bundle[i].file_id); - (void)H5Pclose(bundle[i].fapl_id); + H5Dclose(bundle[i].dset_id); + H5Fclose(bundle[i].file_id); + H5Pclose(bundle[i].fapl_id); } } H5E_END_TRY; + HDfree(bundle); + HDfree(buf); return -1; } /* end test_concurrent_access() */ +/* ---------------------------------------------------------------------------- + * Function: parse_args + * + * Purpose: Parse command-line arguments, populating the options struct + * pointer as appropriate. + * Default values will be set for unspecified options. + * + * Return: 0 on success, negative (-1) if error. + * ---------------------------------------------------------------------------- + */ +static int +parse_args(int argc, char **argv, struct mt_opts *opts) +{ + int i = 0; + + opts->portno = SERVER_HANDSHAKE_PORT; + HDstrncpy(opts->ip, SERVER_IP_ADDRESS, H5FD_MIRROR_MAX_IP_LEN); + + for (i = 1; i < argc; i++) { /* start with first possible option argument */ + if (!HDstrncmp(argv[i], "--ip=", 5)) + HDstrncpy(opts->ip, argv[i] + 5, H5FD_MIRROR_MAX_IP_LEN); + else if (!HDstrncmp(argv[i], "--port=", 7)) + opts->portno = HDatoi(argv[i] + 7); + else { + HDprintf("Unrecognized option: '%s'\n", argv[i]); + return -1; + } + } /* end for each argument from command line */ + + /* Auto-replace 'localhost' with numeric IP */ + if (!HDstrncmp(opts->ip, "localhost", 10)) /* include NUL terminator */ + HDstrncpy(opts->ip, "127.0.0.1", H5FD_MIRROR_MAX_IP_LEN); + + return 0; +} /* end parse_args() */ + +/* ---------------------------------------------------------------------------- + * Function: confirm_server + * + * Purpose: Create socket and confirm remote server is available. + * + * Return: 0 on success, negative (-1) if error. 
+ * ---------------------------------------------------------------------------- + */ +static int +confirm_server(struct mt_opts *opts) +{ + char mybuf[16]; + int live_socket; + struct sockaddr_in target_addr; + unsigned attempt = 0; + + live_socket = HDsocket(AF_INET, SOCK_STREAM, 0); + if (live_socket < 0) { + HDprintf("ERROR socket()\n"); + return -1; + } + + target_addr.sin_family = AF_INET; + target_addr.sin_port = HDhtons((uint16_t)opts->portno); + target_addr.sin_addr.s_addr = HDinet_addr(opts->ip); + HDmemset(target_addr.sin_zero, '\0', sizeof(target_addr.sin_zero)); + + while (1) { + if (HDconnect(live_socket, (struct sockaddr *)&target_addr, (socklen_t)sizeof(target_addr)) < 0) { + if (attempt > 10) { + HDprintf("ERROR connect() (%d)\n%s\n", errno, HDstrerror(errno)); + return -1; + } + attempt++; + HDsleep(1); + HDprintf("attempt #%u: ERROR connect() (%d)\n%s\n", attempt, errno, HDstrerror(errno)); + } + else + break; + } + + /* Request confirmation from the server */ + if (HDwrite(live_socket, "CONFIRM", 8) == -1) { + HDprintf("ERROR write() (%d)\n%s\n", errno, HDstrerror(errno)); + return -1; + } + + /* Read & verify response from port connection. */ + if (HDread(live_socket, &mybuf, sizeof(mybuf)) == -1) { + HDprintf("ERROR read() can't receive data\n"); + return -1; + } + if (HDstrncmp("ALIVE", mybuf, 6)) { + HDprintf("ERROR read() didn't receive data from server\n"); + return -1; + } + + if (HDclose(live_socket) < 0) { + HDprintf("ERROR close() can't close socket\n"); + return -1; + } + + return 0; +} /* end confirm_server() */ + /* --------------------------------------------------------------------------- * Function: main * - * Purpose: Run tests. 
+ * Purpose: Run tests * * Return: Success: 0 * Failure: 1 - * - * Programmer: Jacob Smith - * 2019 * --------------------------------------------------------------------------- */ int -main(void) +main(int argc, char **argv) { - int nerrors = 0; + struct mt_opts opts; + int nerrors = 0; h5_reset(); @@ -2586,35 +2402,63 @@ main(void) HDprintf("Testing Mirror VFD functionality.\n"); - /* -------------------- */ /* SETUP */ - /* Create directories for test-generated .h5 files - */ + /* Create directories for test-generated .h5 files */ if (nerrors == 0) { - if ((HDmkdir(MIRROR_RW_DIR, (mode_t)0755) < 0) && (errno != EEXIST)) { + if ((HDmkdir(MIRROR_RW_DIR, (mode_t)0755) < 0) && (errno != EEXIST)) nerrors++; - } } if (nerrors == 0) { - if ((HDmkdir(MIRROR_WO_DIR, (mode_t)0755) < 0) && (errno != EEXIST)) { + if ((HDmkdir(MIRROR_WO_DIR, (mode_t)0755) < 0) && (errno != EEXIST)) nerrors++; - } } - /* -------------------- */ + if (parse_args(argc, argv, &opts) < 0) { + HDprintf("Unable to parse arguments\n"); + HDexit(EXIT_FAILURE); + } + + if (confirm_server(&opts) < 0) { + HDprintf("Unable to confirm server is running\n"); + HDexit(EXIT_FAILURE); + } + /* TESTS */ - /* Tests return negative values; `-=' increments nerrors count */ if (nerrors == 0) { - nerrors -= test_fapl_configuration(); - nerrors -= test_xmit_encode_decode(); - nerrors -= test_create_and_close(); - nerrors -= test_basic_dataset_write(); - nerrors -= test_chunked_dataset_write(); - nerrors -= test_on_disk_zoo(); - nerrors -= test_vanishing_datasets(); - nerrors -= test_concurrent_access(); + H5FD_mirror_xmit_t xmit_mock; /* Re-used header in various xmit tests */ + + /* Set bogus values matching expected; encoding doesn't care + * Use sequential values to easily generate the expected buffer with a + * for loop. 
+ */ + xmit_mock.magic = 0x00010203; + xmit_mock.version = 0x04; + xmit_mock.session_token = 0x05060708; + xmit_mock.xmit_count = 0x090A0B0C; + xmit_mock.op = 0x0D; + + nerrors += test_fapl_configuration() < 0 ? 1 : 0; + + nerrors += test_encdec_uint8_t() < 0 ? 1 : 0; + nerrors += test_encdec_uint16_t() < 0 ? 1 : 0; + nerrors += test_encdec_uint32_t() < 0 ? 1 : 0; + nerrors += test_encdec_uint64_t() < 0 ? 1 : 0; + + nerrors += test_encdec_header(xmit_mock) < 0 ? 1 : 0; + nerrors += test_encdec_set_eoa(xmit_mock) < 0 ? 1 : 0; + nerrors += test_encdec_lock(xmit_mock) < 0 ? 1 : 0; + nerrors += test_encdec_open(xmit_mock) < 0 ? 1 : 0; + nerrors += test_encdec_reply(xmit_mock) < 0 ? 1 : 0; + nerrors += test_encdec_write(xmit_mock) < 0 ? 1 : 0; + + nerrors += test_create_and_close(&opts) < 0 ? 1 : 0; + nerrors += test_basic_dataset_write(&opts) < 0 ? 1 : 0; + nerrors += test_chunked_dataset_write(&opts) < 0 ? 1 : 0; + nerrors += test_on_disk_zoo(&opts) < 0 ? 1 : 0; + nerrors += test_vanishing_datasets(&opts) < 0 ? 1 : 0; + nerrors += test_concurrent_access(&opts) < 0 ? 1 : 0; } if (nerrors) { @@ -2631,7 +2475,6 @@ main(void) int main(void) { - h5_reset(); HDprintf("Testing Mirror VFD functionality.\n"); HDprintf("SKIPPED - Mirror VFD not built.\n"); return EXIT_SUCCESS; diff --git a/test/test_mirror.sh.in b/test/test_mirror.sh.in index b3e75c67e73..fbc7edecb77 100644 --- a/test/test_mirror.sh.in +++ b/test/test_mirror.sh.in @@ -22,7 +22,10 @@ nerrors=0 SERVER_VERBOSITY="--verbosity=1" -SERVER_PORT="--port=3000" +# Choose random ephemeral port number +RANDOM_PORT=$[ $RANDOM % 16384 + 49152 ] +echo "Using port: $RANDOM_PORT" +SERVER_PORT="--port=$RANDOM_PORT" ############################################################################### @@ -45,7 +48,9 @@ done RUN_DIR=mirror_vfd_test MIRROR_UTILS=../utils/mirror_vfd # TODO: presupposes from test/ -mkdir $RUN_DIR +if [[ ! 
-d $RUN_DIR ]] ; then + mkdir $RUN_DIR +fi # Copy program files into dedicated test directory for FILE in $MIRROR_UTILS/mirror_* ; do @@ -61,6 +66,10 @@ cp mirror_vfd $RUN_DIR # wrapper script. Copy these libs builds if appropriate. if [ -f $MIRROR_UTILS/.libs/mirror_server ] ; then RUN_LIBS=$RUN_DIR/.libs + # Delete previous .libs directory, to remove any generated libtool files + if [[ -d $RUN_LIBS ]] ; then + rm -rf $RUN_LIBS + fi mkdir $RUN_LIBS for FILE in $MIRROR_UTILS/.libs/mirror_* ; do case "$FILE" in @@ -77,12 +86,15 @@ echo "Launching Mirror Server" SERVER_ARGS="$SERVER_PORT $SERVER_VERBOSITY" ./mirror_server $SERVER_ARGS & -./mirror_vfd +./mirror_vfd $SERVER_PORT nerrors=$? echo "Stopping Mirror Server" ./mirror_server_stop $SERVER_PORT +# Wait for background server process to exit +wait + ############################################################################### ## Report and exit ############################################################################### diff --git a/test/use_append_chunk_mirror.c b/test/use_append_chunk_mirror.c index b19c510d3c5..788cf839c0f 100644 --- a/test/use_append_chunk_mirror.c +++ b/test/use_append_chunk_mirror.c @@ -139,7 +139,7 @@ setup_parameters(int argc, char *const argv[], options_t *opts) opts->nplanes = (hsize_t)opts->chunksize; show_parameters(opts); - return (0); + return 0; } /* setup_parameters() */ /* Overall Algorithm: @@ -152,17 +152,17 @@ setup_parameters(int argc, char *const argv[], options_t *opts) int main(int argc, char *argv[]) { - pid_t childpid = 0; - pid_t mypid, tmppid; - int child_status; - int child_wait_option = 0; - int ret_value = 0; - int child_ret_value; - hbool_t send_wait = FALSE; - hid_t fid = -1; /* File ID */ - H5FD_mirror_fapl_t mirr_fa; - H5FD_splitter_vfd_config_t split_fa; - hid_t mirr_fapl_id = H5I_INVALID_HID; + pid_t childpid = 0; + pid_t mypid, tmppid; + int child_status; + int child_wait_option = 0; + int ret_value = 0; + int child_ret_value; + hbool_t send_wait = 
FALSE; + hid_t fid = H5I_INVALID_HID; + H5FD_mirror_fapl_t mirr_fa; + H5FD_splitter_vfd_config_t *split_fa = NULL; + hid_t mirr_fapl_id = H5I_INVALID_HID; if (setup_parameters(argc, argv, &UC_opts) < 0) { Hgoto_error(1); @@ -173,13 +173,18 @@ main(int argc, char *argv[]) mirr_fa.handshake_port = SERVER_PORT; HDstrncpy(mirr_fa.remote_ip, SERVER_IP, H5FD_MIRROR_MAX_IP_LEN); - split_fa.wo_fapl_id = H5I_INVALID_HID; - split_fa.rw_fapl_id = H5I_INVALID_HID; - split_fa.magic = H5FD_SPLITTER_MAGIC; - split_fa.version = H5FD_CURR_SPLITTER_VFD_CONFIG_VERSION; - split_fa.log_file_path[0] = '\0'; /* none */ - split_fa.ignore_wo_errs = FALSE; - HDstrncpy(split_fa.wo_path, MIRROR_FILE_NAME, H5FD_SPLITTER_PATH_MAX); + if (NULL == (split_fa = HDcalloc(1, sizeof(H5FD_splitter_vfd_config_t)))) { + HDfprintf(stderr, "can't allocate memory for splitter config\n"); + Hgoto_error(1); + } + + split_fa->wo_fapl_id = H5I_INVALID_HID; + split_fa->rw_fapl_id = H5I_INVALID_HID; + split_fa->magic = H5FD_SPLITTER_MAGIC; + split_fa->version = H5FD_CURR_SPLITTER_VFD_CONFIG_VERSION; + split_fa->log_file_path[0] = '\0'; /* none */ + split_fa->ignore_wo_errs = FALSE; + HDstrncpy(split_fa->wo_path, MIRROR_FILE_NAME, H5FD_SPLITTER_PATH_MAX); /* Determine the need to send/wait message file*/ if (UC_opts.launch == UC_READWRITE) { @@ -211,14 +216,14 @@ main(int argc, char *argv[]) } /* Prepare parent "splitter" driver in UC_opts */ - split_fa.wo_fapl_id = mirr_fapl_id; - split_fa.rw_fapl_id = H5P_DEFAULT; - UC_opts.fapl_id = H5Pcreate(H5P_FILE_ACCESS); + split_fa->wo_fapl_id = mirr_fapl_id; + split_fa->rw_fapl_id = H5P_DEFAULT; + UC_opts.fapl_id = H5Pcreate(H5P_FILE_ACCESS); if (UC_opts.fapl_id == H5I_INVALID_HID) { HDfprintf(stderr, "can't create creation FAPL\n"); Hgoto_error(1); } - if (H5Pset_fapl_splitter(UC_opts.fapl_id, &split_fa) < 0) { + if (H5Pset_fapl_splitter(UC_opts.fapl_id, split_fa) < 0) { HDfprintf(stderr, "can't set creation FAPL\n"); H5Eprint2(H5E_DEFAULT, stdout); Hgoto_error(1); @@ 
-300,14 +305,14 @@ main(int argc, char *argv[]) } /* Prepare parent "splitter" driver in UC_opts */ - split_fa.wo_fapl_id = mirr_fapl_id; - split_fa.rw_fapl_id = H5P_DEFAULT; - UC_opts.fapl_id = H5Pcreate(H5P_FILE_ACCESS); + split_fa->wo_fapl_id = mirr_fapl_id; + split_fa->rw_fapl_id = H5P_DEFAULT; + UC_opts.fapl_id = H5Pcreate(H5P_FILE_ACCESS); if (UC_opts.fapl_id == H5I_INVALID_HID) { HDfprintf(stderr, "can't create creation FAPL\n"); Hgoto_error(1); } - if (H5Pset_fapl_splitter(UC_opts.fapl_id, &split_fa) < 0) { + if (H5Pset_fapl_splitter(UC_opts.fapl_id, split_fa) < 0) { HDfprintf(stderr, "can't set creation FAPL\n"); H5Eprint2(H5E_DEFAULT, stdout); Hgoto_error(1); @@ -368,6 +373,8 @@ main(int argc, char *argv[]) } done: + HDfree(split_fa); + if (ret_value != 0) { HDprintf("Error(s) encountered\n"); } @@ -375,7 +382,7 @@ main(int argc, char *argv[]) HDprintf("All passed\n"); } - return (ret_value); + return ret_value; } #else /* H5_HAVE_MIRROR_VFD */ diff --git a/utils/mirror_vfd/CMakeLists.txt b/utils/mirror_vfd/CMakeLists.txt index 92212e0be49..4765e2ce406 100644 --- a/utils/mirror_vfd/CMakeLists.txt +++ b/utils/mirror_vfd/CMakeLists.txt @@ -12,7 +12,7 @@ set (mirror_server_SOURCES ${HDF5_UTILS_MIRRORVFD_SOURCE_DIR}/mirror_remote.h ) add_executable (mirror_server ${mirror_server_SOURCES}) -target_include_directories (mirror_server PRIVATE "${HDF5_UITLS_DIR};${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (mirror_server PRIVATE "${HDF5_UTILS_DIR};${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (mirror_server STATIC) target_link_libraries (mirror_server PRIVATE ${HDF5_LIB_TARGET}) @@ -37,7 +37,7 @@ endif () set (mirror_server_stop_SOURCES ${HDF5_UTILS_MIRRORVFD_SOURCE_DIR}/mirror_server_stop.c) add_executable (mirror_server_stop ${mirror_server_stop_SOURCES}) -target_include_directories (mirror_server_stop PRIVATE 
"${HDF5_UITLS_DIR};${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (mirror_server_stop PRIVATE "${HDF5_UTILS_DIR};${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (mirror_server_stop STATIC) target_link_libraries (mirror_server_stop PRIVATE ${HDF5_LIB_TARGET}) diff --git a/utils/mirror_vfd/mirror_server.c b/utils/mirror_vfd/mirror_server.c index ef41aa59948..79c9d80c9ee 100644 --- a/utils/mirror_vfd/mirror_server.c +++ b/utils/mirror_vfd/mirror_server.c @@ -46,11 +46,8 @@ #ifdef H5_HAVE_MIRROR_VFD -#define MAXBUF 2048 /* max buffer length. */ -#define LISTENQ 80 /* max pending mirrorS requests */ -#define DEFAULT_PORT 3000 /* default listening port */ -#define MAX_PORT_LOOPS 20 /* max iteratations through port range */ -#define PORT_LOOP_RETRY_DELAY 1 /* seconds to wait between port scans */ +#define LISTENQ 80 /* max pending mirrorS requests */ +#define DEFAULT_PORT 3000 /* default listening port */ /* semi-unique "magic" numbers to sanity-check structure pointers */ #define OP_ARGS_MAGIC 0xCF074379u @@ -211,8 +208,8 @@ parse_args(int argc, char **argv, struct op_args *args_out) return -1; } - /* Loop over arguments after program name and writer_path */ - for (i = 2; i < argc; i++) { + /* Loop over arguments after program name */ + for (i = 1; i < argc; i++) { if (!HDstrncmp(argv[i], "-h", 3) || !HDstrncmp(argv[i], "--help", 7)) { mirror_log(NULL, V_INFO, "found help argument"); args_out->help = 1; @@ -474,12 +471,13 @@ wait_for_child(int H5_ATTR_UNUSED sig) static int handle_requests(struct server_run *run) { - int connfd = -1; /**/ - char mybuf[H5FD_MIRROR_XMIT_OPEN_SIZE]; /**/ - ssize_t ret; /* general-purpose error-checking */ - int pid; /* process ID of fork */ - struct sigaction sa; - int ret_value = 0; + int connfd = -1; + char *mybuf = NULL; + ssize_t ret; /* general-purpose error-checking */ + int pid; /* process ID of fork */ + struct 
sigaction sa; + H5FD_mirror_xmit_open_t *xopen = NULL; + int ret_value = 0; if (run == NULL || run->magic != SERVER_RUN_MAGIC) { mirror_log(NULL, V_ERR, "invalid server_run pointer"); @@ -504,6 +502,15 @@ handle_requests(struct server_run *run) return 1; } + if (NULL == (mybuf = HDmalloc(H5FD_MIRROR_XMIT_OPEN_SIZE * sizeof(char)))) { + mirror_log(NULL, V_ERR, "out of memory"); + goto error; + } + if (NULL == (xopen = HDmalloc(sizeof(H5FD_mirror_xmit_open_t)))) { + mirror_log(NULL, V_ERR, "out of memory"); + goto error; + } + /* Keep listening for attempts to connect. */ @@ -521,7 +528,7 @@ handle_requests(struct server_run *run) /* Read handshake from port connection. */ - if ((ret = HDread(connfd, &mybuf, H5FD_MIRROR_XMIT_OPEN_SIZE)) < 0) { + if ((ret = HDread(connfd, mybuf, H5FD_MIRROR_XMIT_OPEN_SIZE)) < 0) { mirror_log(run->loginfo, V_ERR, "read:%d", ret); goto error; } @@ -536,17 +543,33 @@ handle_requests(struct server_run *run) if (!HDstrncmp("SHUTDOWN", mybuf, 8)) { /* Stop operation if told to stop */ mirror_log(run->loginfo, V_INFO, "received SHUTDOWN!", ret); + + /* Confirm operation */ + if ((ret = HDwrite(connfd, "CLOSING", 8)) < 0) { + mirror_log(run->loginfo, V_ERR, "write:%d", ret); + HDclose(connfd); + connfd = -1; + goto error; + } + HDclose(connfd); connfd = -1; goto done; } /* end if explicit "SHUTDOWN" directive */ + if (!HDstrncmp("CONFIRM", mybuf, 7)) { + /* Confirm operation */ + if ((ret = HDwrite(connfd, "ALIVE", 6)) < 0) { + mirror_log(run->loginfo, V_ERR, "write:%d", ret); + goto error; + } + HDclose(connfd); + } /* end if "CONFIRM" directive */ else if (H5FD_MIRROR_XMIT_OPEN_SIZE == ret) { - H5FD_mirror_xmit_open_t xopen; mirror_log(run->loginfo, V_INFO, "probable OPEN xmit received"); - H5FD_mirror_xmit_decode_open(&xopen, (const unsigned char *)mybuf); - if (FALSE == H5FD_mirror_xmit_is_open(&xopen)) { + H5FD_mirror_xmit_decode_open(xopen, (const unsigned char *)mybuf); + if (FALSE == H5FD_mirror_xmit_is_open(xopen)) { 
mirror_log(run->loginfo, V_WARN, "expected OPEN xmit was malformed"); HDclose(connfd); continue; @@ -561,7 +584,7 @@ handle_requests(struct server_run *run) } /* end if fork error */ else if (pid == 0) { /* child process (writer side of fork) */ mirror_log(run->loginfo, V_INFO, "executing writer"); - if (run_writer(connfd, &xopen) < 0) { + if (run_writer(connfd, xopen) < 0) { HDprintf("can't run writer\n"); } else { @@ -591,12 +614,17 @@ handle_requests(struct server_run *run) HDclose(connfd); } + HDfree(mybuf); + HDfree(xopen); + return ret_value; error: if (connfd >= 0) { HDclose(connfd); } + HDfree(mybuf); + HDfree(xopen); return -1; } /* end handle_requests() */ diff --git a/utils/mirror_vfd/mirror_server_stop.c b/utils/mirror_vfd/mirror_server_stop.c index abc4c1e8906..44386bff8ac 100644 --- a/utils/mirror_vfd/mirror_server_stop.c +++ b/utils/mirror_vfd/mirror_server_stop.c @@ -128,6 +128,7 @@ parse_args(int argc, char **argv, struct mshs_opts *opts) static int send_shutdown(struct mshs_opts *opts) { + char mybuf[16]; int live_socket; struct sockaddr_in target_addr; @@ -157,6 +158,16 @@ send_shutdown(struct mshs_opts *opts) return -1; } + /* Read & verify response from port connection. 
*/ + if (HDread(live_socket, &mybuf, sizeof(mybuf)) == -1) { + HDprintf("ERROR read() can't receive data\n"); + return -1; + } + if (HDstrncmp("CLOSING", mybuf, 8)) { + HDprintf("ERROR read() didn't receive data from server\n"); + return -1; + } + if (HDclose(live_socket) < 0) { HDprintf("ERROR close() can't close socket\n"); return -1; diff --git a/utils/mirror_vfd/mirror_writer.c b/utils/mirror_vfd/mirror_writer.c index a69ea738faa..ad7cd911985 100644 --- a/utils/mirror_vfd/mirror_writer.c +++ b/utils/mirror_vfd/mirror_writer.c @@ -929,25 +929,33 @@ static int process_instructions(struct mirror_session *session) { struct sock_comm comm; - char xmit_buf[H5FD_MIRROR_XMIT_BUFFER_MAX]; /* raw bytes */ - H5FD_mirror_xmit_t xmit_recd; /* for decoded xmit header */ + char *xmit_buf = NULL; /* raw bytes */ + size_t buf_size; + H5FD_mirror_xmit_t xmit_recd; /* for decoded xmit header */ HDassert(session && (session->magic == MW_SESSION_MAGIC)); mirror_log(session->loginfo, V_INFO, "process_instructions()"); + buf_size = H5FD_MIRROR_XMIT_BUFFER_MAX * sizeof(char); + + if (NULL == (xmit_buf = HDmalloc(buf_size))) { + mirror_log(session->loginfo, V_ERR, "out of memory"); + goto error; + } + comm.magic = MW_SOCK_COMM_MAGIC; comm.recd_die = 0; /* Flag for program to terminate */ comm.xmit_recd = &xmit_recd; comm.raw = xmit_buf; - comm.raw_size = sizeof(xmit_buf); + comm.raw_size = buf_size; while (1) { /* sill-listening infinite loop */ /* Use convenience structure for raw/decoded info in/out */ if (receive_communique(session, &comm) < 0) { mirror_log(session->loginfo, V_ERR, "problem reading socket"); - return -1; + goto error; } if (comm.recd_die) { @@ -957,42 +965,42 @@ process_instructions(struct mirror_session *session) switch (xmit_recd.op) { case H5FD_MIRROR_OP_CLOSE: if (do_close(session) < 0) { - return -1; + goto error; } goto done; case H5FD_MIRROR_OP_LOCK: if (do_lock(session, (const unsigned char *)xmit_buf) < 0) { - return -1; + goto error; } break; case 
H5FD_MIRROR_OP_OPEN: mirror_log(session->loginfo, V_ERR, "OPEN xmit during session"); reply_error(session, "illegal OPEN xmit during session"); - return -1; + goto error; case H5FD_MIRROR_OP_SET_EOA: if (do_set_eoa(session, (const unsigned char *)xmit_buf) < 0) { - return -1; + goto error; } break; case H5FD_MIRROR_OP_TRUNCATE: if (do_truncate(session) < 0) { - return -1; + goto error; } break; case H5FD_MIRROR_OP_UNLOCK: if (do_unlock(session) < 0) { - return -1; + goto error; } break; case H5FD_MIRROR_OP_WRITE: if (do_write(session, (const unsigned char *)xmit_buf) < 0) { - return -1; + goto error; } break; default: mirror_log(session->loginfo, V_ERR, "unrecognized transmission"); reply_error(session, "unrecognized transmission"); - return -1; + goto error; } /* end switch (xmit_recd.op) */ } /* end while still listening */ @@ -1000,7 +1008,12 @@ process_instructions(struct mirror_session *session) done: comm.magic = 0; /* invalidate structure, on principle */ xmit_recd.magic = 0; /* invalidate structure, on principle */ + HDfree(xmit_buf); return 0; + +error: + HDfree(xmit_buf); + return -1; } /* end process_instructions() */ /* --------------------------------------------------------------------------- From e2c71ed273592b90a30c88c9ee48b30d8b518cfc Mon Sep 17 00:00:00 2001 From: Larry Knox Date: Wed, 25 Jan 2023 17:52:23 -0600 Subject: [PATCH 014/108] CMake: Find MPI in HDF5 CMake config (#2400) (#2411) Co-authored-by: kwryankrattiger <80296582+kwryankrattiger@users.noreply.github.com> --- config/cmake/hdf5-config.cmake.in | 2 ++ 1 file changed, 2 insertions(+) diff --git a/config/cmake/hdf5-config.cmake.in b/config/cmake/hdf5-config.cmake.in index 35cee4f6062..1a3fb7bbf2f 100644 --- a/config/cmake/hdf5-config.cmake.in +++ b/config/cmake/hdf5-config.cmake.in @@ -63,6 +63,8 @@ if (${HDF5_PACKAGE_NAME}_ENABLE_PARALLEL) set (${HDF5_PACKAGE_NAME}_MPI_Fortran_INCLUDE_PATH "@MPI_Fortran_INCLUDE_DIRS@") set (${HDF5_PACKAGE_NAME}_MPI_Fortran_LIBRARIES 
"@MPI_Fortran_LIBRARIES@") endif () + + find_package(MPI QUIET REQUIRED) endif () if (${HDF5_PACKAGE_NAME}_BUILD_JAVA) From dcd4e2d7b9f3b8e4cf9b04588b777a9ad4a323f0 Mon Sep 17 00:00:00 2001 From: Dana Robinson <43805+derobins@users.noreply.github.com> Date: Sat, 11 Feb 2023 04:59:07 -0800 Subject: [PATCH 015/108] Update CODEOWNERS given personnel changes (#2455) --- .github/CODEOWNERS | 37 +++---------------------------------- 1 file changed, 3 insertions(+), 34 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index af0eab2c4b6..8d736843989 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -2,41 +2,10 @@ # Each line is a file pattern followed by one or more owners. # These owners will be the default owners for everything in the repo. -* @lrknox +* @lrknox @derobins @byrnHDF @fortnern @jhendersonHDF @vchoi-hdfgroup @bmribler @raylu-hdf @mattjala @brtnfld # Order is important. The last matching pattern has the most precedence. # So if a pull request only touches javascript files, only these owners # will be requested to review. 
-*.cmake @byrnHDF @ChristopherHogan @gnuoyd @derobins -CMakeLists.txt @byrnHDF @ChristopherHogan @gnuoyd @derobins -CMakeTests.* @byrnHDF @ChristopherHogan @gnuoyd @derobins - -/bin/ @lrknox @ChristopherHogan @gnuoyd @derobins - -/c++/ @bmribler @byrnHDF @ChristopherHogan @gnuoyd @derobins - -/config/ @lrknox @byrnHDF @ChristopherHogan @gnuoyd @derobins - -/doc/ @ChristopherHogan @gnuoyd @jrmainzer - -/examples/ @lrknox @bmribler @ChristopherHogan @gnuoyd @derobins - -/fortran/ @brtnfld @epourmal - -/hl/ @bmribler @byrnHDF @ChristopherHogan @gnuoyd @derobins - -/java/ @jhendersonHDF @byrnHDF - -/m4/ @lrknox @ChristopherHogan @gnuoyd @derobins - -/release_docs/ @lrknox @bmribler @byrnHDF - -/src/ @jhendersonHDF @fortnern @soumagne @vchoi-hdfgroup @ChristopherHogan @gnuoyd @derobins @jrmainzer - -/test/ @jhendersonHDF @fortnern @soumagne @vchoi-hdfgroup @ChristopherHogan @gnuoyd @derobins @jrmainzer - -/testpar/ @jhendersonHDF @ChristopherHogan @gnuoyd @jrmainzer - -/tools/ @byrnHDF @bmribler @ChristopherHogan @gnuoyd @derobins - -/utils/ @lrknox @byrnHDF @ChristopherHogan @gnuoyd @derobins +/fortran/ @brtnfld @derobins +/java/ @jhendersonHDF @byrnHDF @derobins From 606dc11623b57987d77a7640b8687d2f6df6abda Mon Sep 17 00:00:00 2001 From: Dana Robinson <43805+derobins@users.noreply.github.com> Date: Sun, 12 Feb 2023 17:03:37 -0800 Subject: [PATCH 016/108] Drop non-1.12 branches in main.yml (#2447) There's no reason to list develop, etc. in the list of branches where this flavor of main.yml applies. Those branches have their own main.yml files. 
--- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 4c2159f9d09..36b190bdaa4 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -5,7 +5,7 @@ on: workflow_dispatch: push: pull_request: - branches: [ develop, hdf5_1_14, hdf5_1_12, hdf5_1_10, hdf5_1_8 ] + branches: [ hdf5_1_12 ] paths-ignore: - '.github/CODEOWNERS' - '.github/FUNDING.yml' From 593484fa0ee30e42a7b735e6a08744b736885d11 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Fri, 17 Feb 2023 16:17:58 -0600 Subject: [PATCH 017/108] 1.12 Merge Correct pkgconfig variables and libraries #2259 (#2478) * Merge Correct pkgconfig variables and libraries #2259 * Fix spacing --- c++/src/CMakeLists.txt | 8 ++++++-- config/libhdf5.fpc.in | 2 +- config/libhdf5.pc.in | 2 +- fortran/src/CMakeLists.txt | 8 ++++++-- hl/c++/src/CMakeLists.txt | 8 ++++++-- hl/fortran/src/CMakeLists.txt | 8 ++++++-- hl/src/CMakeLists.txt | 8 ++++++-- release_docs/RELEASE.txt | 13 +++++++++++++ src/CMakeLists.txt | 18 ++++++++++++++---- 9 files changed, 59 insertions(+), 16 deletions(-) diff --git a/c++/src/CMakeLists.txt b/c++/src/CMakeLists.txt index afb1c9fdbb3..b4374578b84 100644 --- a/c++/src/CMakeLists.txt +++ b/c++/src/CMakeLists.txt @@ -177,14 +177,18 @@ set (_PKG_CONFIG_LIBDIR \${exec_prefix}/${HDF5_INSTALL_LIB_DIR}) set (_PKG_CONFIG_INCLUDEDIR \${prefix}/${HDF5_INSTALL_INCLUDE_DIR}) set (_PKG_CONFIG_LIBNAME "${HDF5_CPP_LIB_CORENAME}") set (_PKG_CONFIG_VERSION "${HDF5_PACKAGE_VERSION}") +set (PKG_CONFIG_LIBNAME "${HDF5_CPP_LIB_CORENAME}") +if (${HDF_CFG_NAME} MATCHES "Debug" OR ${HDF_CFG_NAME} MATCHES "Developer") + set (PKG_CONFIG_LIBNAME "${PKG_CONFIG_LIBNAME}${CMAKE_DEBUG_POSTFIX}") +endif () set (_PKG_CONFIG_LIBS_PRIVATE) if (NOT ONLY_SHARED_LIBS) - set (_PKG_CONFIG_LIBS "${_PKG_CONFIG_LIBS} -l${HDF5_CPP_LIB_CORENAME}") + set (_PKG_CONFIG_LIBS 
"${_PKG_CONFIG_LIBS} -l${PKG_CONFIG_LIBNAME}") endif () if (BUILD_SHARED_LIBS) - set (_PKG_CONFIG_SH_LIBS "${_PKG_CONFIG_SH_LIBS} -l${HDF5_CPP_LIB_CORENAME}") + set (_PKG_CONFIG_SH_LIBS "${_PKG_CONFIG_SH_LIBS} -l${PKG_CONFIG_LIBNAME}") endif () set (_PKG_CONFIG_REQUIRES "${HDF5_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}") diff --git a/config/libhdf5.fpc.in b/config/libhdf5.fpc.in index c28c2f9c62a..d4ac61508fd 100644 --- a/config/libhdf5.fpc.in +++ b/config/libhdf5.fpc.in @@ -12,5 +12,5 @@ Cflags: -I${includedir} Fflags: -I${moddir} Libs: -L${libdir} @_PKG_CONFIG_SH_LIBS@ Requires: @_PKG_CONFIG_REQUIRES@ -Libs.private: @_PKG_CONFIG_LIBS_PRIVATE@ @_PKG_CONFIG_LIBS@ +Libs.private: @_PKG_CONFIG_LIBS@ @_PKG_CONFIG_LIBS_PRIVATE@ Requires.private: @_PKG_CONFIG_REQUIRES_PRIVATE@ diff --git a/config/libhdf5.pc.in b/config/libhdf5.pc.in index 4a2ebaab474..3cb42d280e9 100644 --- a/config/libhdf5.pc.in +++ b/config/libhdf5.pc.in @@ -10,5 +10,5 @@ Version: @_PKG_CONFIG_VERSION@ Cflags: -I${includedir} Libs: -L${libdir} @_PKG_CONFIG_SH_LIBS@ Requires: @_PKG_CONFIG_REQUIRES@ -Libs.private: @_PKG_CONFIG_LIBS_PRIVATE@ @_PKG_CONFIG_LIBS@ +Libs.private: @_PKG_CONFIG_LIBS@ @_PKG_CONFIG_LIBS_PRIVATE@ Requires.private: @_PKG_CONFIG_REQUIRES_PRIVATE@ diff --git a/fortran/src/CMakeLists.txt b/fortran/src/CMakeLists.txt index 409af0e753d..b763883eaaf 100644 --- a/fortran/src/CMakeLists.txt +++ b/fortran/src/CMakeLists.txt @@ -542,14 +542,18 @@ set (_PKG_CONFIG_INCLUDEDIR \${prefix}/${HDF5_INSTALL_INCLUDE_DIR}) set (_PKG_CONFIG_MODULEDIR \${prefix}/${HDF5_INSTALL_MODULE_DIR}) set (_PKG_CONFIG_LIBNAME "${HDF5_F90_LIB_CORENAME}") set (_PKG_CONFIG_VERSION "${HDF5_PACKAGE_VERSION}") +set (PKG_CONFIG_LIBNAME "${HDF5_F90_LIB_CORENAME}") +if (${HDF_CFG_NAME} MATCHES "Debug" OR ${HDF_CFG_NAME} MATCHES "Developer") + set (PKG_CONFIG_LIBNAME "${PKG_CONFIG_LIBNAME}${CMAKE_DEBUG_POSTFIX}") +endif () set (_PKG_CONFIG_LIBS_PRIVATE) if (NOT ONLY_SHARED_LIBS) - set (_PKG_CONFIG_LIBS "${_PKG_CONFIG_LIBS} 
-l${HDF5_F90_LIB_CORENAME}") + set (_PKG_CONFIG_LIBS "${_PKG_CONFIG_LIBS} -l${PKG_CONFIG_LIBNAME}") endif () if (BUILD_SHARED_LIBS) - set (_PKG_CONFIG_SH_LIBS "${_PKG_CONFIG_SH_LIBS} -l${HDF5_F90_LIB_CORENAME}") + set (_PKG_CONFIG_SH_LIBS "${_PKG_CONFIG_SH_LIBS} -l${PKG_CONFIG_LIBNAME}") endif () set (_PKG_CONFIG_REQUIRES "${HDF5_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}") diff --git a/hl/c++/src/CMakeLists.txt b/hl/c++/src/CMakeLists.txt index ab09d743bee..2483dd43f8b 100644 --- a/hl/c++/src/CMakeLists.txt +++ b/hl/c++/src/CMakeLists.txt @@ -95,14 +95,18 @@ set (_PKG_CONFIG_LIBDIR \${exec_prefix}/${HDF5_INSTALL_LIB_DIR}) set (_PKG_CONFIG_INCLUDEDIR \${prefix}/${HDF5_INSTALL_INCLUDE_DIR}) set (_PKG_CONFIG_LIBNAME "${HDF5_HL_CPP_LIB_CORENAME}") set (_PKG_CONFIG_VERSION "${HDF5_PACKAGE_VERSION}") +set (PKG_CONFIG_LIBNAME "${HDF5_HL_CPP_LIB_CORENAME}") +if (${HDF_CFG_NAME} MATCHES "Debug" OR ${HDF_CFG_NAME} MATCHES "Developer") + set (PKG_CONFIG_LIBNAME "${PKG_CONFIG_LIBNAME}${CMAKE_DEBUG_POSTFIX}") +endif () set (_PKG_CONFIG_LIBS_PRIVATE) if (NOT ONLY_SHARED_LIBS) - set (_PKG_CONFIG_LIBS "${_PKG_CONFIG_LIBS} -l${HDF5_HL_CPP_LIB_CORENAME}") + set (_PKG_CONFIG_LIBS "${_PKG_CONFIG_LIBS} -l${PKG_CONFIG_LIBNAME}") endif () if (BUILD_SHARED_LIBS) - set (_PKG_CONFIG_SH_LIBS "${_PKG_CONFIG_SH_LIBS} -l${HDF5_HL_CPP_LIB_CORENAME}") + set (_PKG_CONFIG_SH_LIBS "${_PKG_CONFIG_SH_LIBS} -l${PKG_CONFIG_LIBNAME}") endif () set (_PKG_CONFIG_REQUIRES "${HDF5_HL_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}") diff --git a/hl/fortran/src/CMakeLists.txt b/hl/fortran/src/CMakeLists.txt index ad462ea9d9f..f218b13cbe3 100644 --- a/hl/fortran/src/CMakeLists.txt +++ b/hl/fortran/src/CMakeLists.txt @@ -329,14 +329,18 @@ set (_PKG_CONFIG_INCLUDEDIR \${prefix}/${HDF5_INSTALL_INCLUDE_DIR}) set (_PKG_CONFIG_MODULEDIR \${prefix}/${HDF5_INSTALL_MODULE_DIR}) set (_PKG_CONFIG_LIBNAME "${HDF5_HL_F90_LIB_CORENAME}") set (_PKG_CONFIG_VERSION "${HDF5_PACKAGE_VERSION}") +set (PKG_CONFIG_LIBNAME 
"${HDF5_HL_F90_LIB_CORENAME}") +if (${HDF_CFG_NAME} MATCHES "Debug" OR ${HDF_CFG_NAME} MATCHES "Developer") + set (PKG_CONFIG_LIBNAME "${PKG_CONFIG_LIBNAME}${CMAKE_DEBUG_POSTFIX}") +endif () set (_PKG_CONFIG_LIBS_PRIVATE) if (NOT ONLY_SHARED_LIBS) - set (_PKG_CONFIG_LIBS "${_PKG_CONFIG_LIBS} -l${HDF5_HL_F90_LIB_CORENAME}") + set (_PKG_CONFIG_LIBS "${_PKG_CONFIG_LIBS} -l${PKG_CONFIG_LIBNAME}") endif () if (BUILD_SHARED_LIBS) - set (_PKG_CONFIG_SH_LIBS "${_PKG_CONFIG_SH_LIBS} -l${HDF5_HL_F90_LIB_CORENAME}") + set (_PKG_CONFIG_SH_LIBS "${_PKG_CONFIG_SH_LIBS} -l${PKG_CONFIG_LIBNAME}") endif () set (_PKG_CONFIG_REQUIRES "${HDF5_F90_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}") diff --git a/hl/src/CMakeLists.txt b/hl/src/CMakeLists.txt index 6dd5be70dc6..f52faf6ccf0 100644 --- a/hl/src/CMakeLists.txt +++ b/hl/src/CMakeLists.txt @@ -127,14 +127,18 @@ set (_PKG_CONFIG_LIBDIR \${exec_prefix}/${HDF5_INSTALL_LIB_DIR}) set (_PKG_CONFIG_INCLUDEDIR \${prefix}/${HDF5_INSTALL_INCLUDE_DIR}) set (_PKG_CONFIG_LIBNAME "${HDF5_HL_LIB_CORENAME}") set (_PKG_CONFIG_VERSION "${HDF5_PACKAGE_VERSION}") +set (PKG_CONFIG_LIBNAME "${HDF5_HL_LIB_CORENAME}") +if (${HDF_CFG_NAME} MATCHES "Debug" OR ${HDF_CFG_NAME} MATCHES "Developer") + set (PKG_CONFIG_LIBNAME "${PKG_CONFIG_LIBNAME}${CMAKE_DEBUG_POSTFIX}") +endif () set (_PKG_CONFIG_LIBS_PRIVATE) if (NOT ONLY_SHARED_LIBS) - set (_PKG_CONFIG_LIBS "${_PKG_CONFIG_LIBS} -l${HDF5_HL_LIB_CORENAME}") + set (_PKG_CONFIG_LIBS "${_PKG_CONFIG_LIBS} -l${PKG_CONFIG_LIBNAME}") endif () if (BUILD_SHARED_LIBS) - set (_PKG_CONFIG_SH_LIBS "${_PKG_CONFIG_SH_LIBS} -l${HDF5_HL_LIB_CORENAME}") + set (_PKG_CONFIG_SH_LIBS "${_PKG_CONFIG_SH_LIBS} -l${PKG_CONFIG_LIBNAME}") endif () set (_PKG_CONFIG_REQUIRES "${HDF5_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}") diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 022010cfb8e..32261658907 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -248,6 +248,19 @@ Bug Fixes since HDF5-1.12.1 release 
Configuration ------------- + - Correct the CMake generated pkg-config file + + The pkg-config file generated by CMake had the order and placement of the + libraries wrong. Also added support for debug library names. + + Changed the order of Libs.private libraries so that dependencies come after + dependents. Did not move the compression libraries into Requires.private + because there was not a way to determine if the compression libraries had + supported pkconfig files. Still recommend that the CMake config file method + be used for building projects with CMake. + + (ADB - 2023/02/16 GH-1546,GH-2259) + - Change the settings of the *pc files to use the correct format The pkg-config files generated by CMake uses incorrect syntax for the 'Requires' diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index c6294c41af1..e74cbb1e961 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -1292,20 +1292,30 @@ set (_PKG_CONFIG_LIBDIR \${exec_prefix}/${HDF5_INSTALL_LIB_DIR}) set (_PKG_CONFIG_INCLUDEDIR \${prefix}/${HDF5_INSTALL_INCLUDE_DIR}) set (_PKG_CONFIG_LIBNAME "${HDF5_LIB_CORENAME}") set (_PKG_CONFIG_VERSION "${HDF5_PACKAGE_VERSION}") +set (PKGCONFIG_LIBNAME "${HDF5_LIB_CORENAME}") +if (${HDF_CFG_NAME} MATCHES "Debug" OR ${HDF_CFG_NAME} MATCHES "Developer") + set (PKGCONFIG_LIBNAME "${PKGCONFIG_LIBNAME}${CMAKE_DEBUG_POSTFIX}") +endif () + +foreach (libs ${LINK_LIBS}) + set (_PKG_CONFIG_LIBS_PRIVATE "${_PKG_CONFIG_LIBS_PRIVATE} -l${libs}") +endforeach () -foreach (libs ${LINK_LIBS} ${LINK_COMP_LIBS}) +# The settings for the compression libs depends on if they have pkconfig support +# Assuming they don't +foreach (libs ${LINK_COMP_LIBS}) +# set (_PKG_CONFIG_REQUIRES_PRIVATE "${_PKG_CONFIG_REQUIRES_PRIVATE} -l${libs}") set (_PKG_CONFIG_LIBS_PRIVATE "${_PKG_CONFIG_LIBS_PRIVATE} -l${libs}") endforeach () if (NOT ONLY_SHARED_LIBS) - set (_PKG_CONFIG_LIBS "${_PKG_CONFIG_LIBS} -l${HDF5_LIB_CORENAME}") + set (_PKG_CONFIG_LIBS "${_PKG_CONFIG_LIBS} -l${PKGCONFIG_LIBNAME}") 
endif () if (BUILD_SHARED_LIBS) - set (_PKG_CONFIG_SH_LIBS "${_PKG_CONFIG_SH_LIBS} -l${HDF5_LIB_CORENAME}") + set (_PKG_CONFIG_SH_LIBS "${_PKG_CONFIG_SH_LIBS} -l${PKGCONFIG_LIBNAME}") endif () set (_PKG_CONFIG_REQUIRES) -set (_PKG_CONFIG_REQUIRES_PRIVATE) configure_file ( ${HDF_CONFIG_DIR}/libhdf5.pc.in From 79ed1cc15f0926a577bd8da96fc551dfea5e8c40 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Fri, 17 Feb 2023 16:37:48 -0600 Subject: [PATCH 018/108] Merge toolchain lowercase names and updates #2426 (#2468) --- .github/workflows/main.yml | 2 +- config/cmake/HDFCXXCompilerFlags.cmake | 6 +++--- config/cmake/HDFCompilerFlags.cmake | 7 +++---- config/sanitizer/code-coverage.cmake | 12 ++++++------ config/sanitizer/sanitizers.cmake | 2 +- config/toolchain/ icc.cmake | 11 +++++++++++ config/toolchain/aarch64.cmake | 17 +++++++++++++++++ config/toolchain/gcc.cmake | 11 +++++++++++ config/toolchain/intel.cmake | 13 +++---------- config/toolchain/{PGI.cmake => pgi.cmake} | 0 10 files changed, 56 insertions(+), 25 deletions(-) create mode 100644 config/toolchain/ icc.cmake create mode 100644 config/toolchain/aarch64.cmake create mode 100644 config/toolchain/gcc.cmake rename config/toolchain/{PGI.cmake => pgi.cmake} (100%) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 36b190bdaa4..4541473a273 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -91,7 +91,7 @@ jobs: parallel: OFF mirror_vfd: ON direct_vfd: ON - toolchain: "config/toolchain/GCC.cmake" + toolchain: "config/toolchain/gcc.cmake" generator: "-G Ninja" run_tests: true diff --git a/config/cmake/HDFCXXCompilerFlags.cmake b/config/cmake/HDFCXXCompilerFlags.cmake index dcc96cf0a2b..e6561465cb0 100644 --- a/config/cmake/HDFCXXCompilerFlags.cmake +++ b/config/cmake/HDFCXXCompilerFlags.cmake @@ -135,7 +135,7 @@ else () ADD_H5_FLAGS (HDF5_CMAKE_CXX_FLAGS "${HDF5_SOURCE_DIR}/config/gnu-warnings/cxx-general") ADD_H5_FLAGS 
(H5_CXXFLAGS "${HDF5_SOURCE_DIR}/config/gnu-warnings/cxx-error-general") endif () - elseif (CMAKE_CXX_COMPILER_ID MATCHES "[Cc]lang") + elseif (CMAKE_CXX_COMPILER_ID MATCHES "IntelLLVM" OR CMAKE_CXX_COMPILER_ID MATCHES "[Cc]lang") ADD_H5_FLAGS (HDF5_CMAKE_CXX_FLAGS "${HDF5_SOURCE_DIR}/config/clang-warnings/general") elseif (CMAKE_CXX_COMPILER_ID STREQUAL "PGI") list (APPEND HDF5_CMAKE_CXX_FLAGS "-Minform=inform") @@ -156,14 +156,14 @@ if (HDF5_ENABLE_DEV_WARNINGS) elseif (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") # Use the C warnings as CXX warnings are the same ADD_H5_FLAGS (H5_CXXFLAGS "${HDF5_SOURCE_DIR}/config/gnu-warnings/developer-general") - elseif (CMAKE_CXX_COMPILER_ID MATCHES "[Cc]lang") + elseif (CMAKE_CXX_COMPILER_ID MATCHES "IntelLLVM" OR CMAKE_CXX_COMPILER_ID MATCHES "[Cc]lang") ADD_H5_FLAGS (H5_CXXFLAGS "${HDF5_SOURCE_DIR}/config/clang-warnings/developer-general") endif () else () if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") # Use the C warnings as CXX warnings are the same ADD_H5_FLAGS (H5_CXXFLAGS "${HDF5_SOURCE_DIR}/config/gnu-warnings/no-developer-general") - elseif (CMAKE_CXX_COMPILER_ID MATCHES "[Cc]lang") + elseif (CMAKE_CXX_COMPILER_ID MATCHES "IntelLLVM" OR CMAKE_CXX_COMPILER_ID MATCHES "[Cc]lang") ADD_H5_FLAGS (H5_CXXFLAGS "${HDF5_SOURCE_DIR}/config/clang-warnings/no-developer-general") endif () endif () diff --git a/config/cmake/HDFCompilerFlags.cmake b/config/cmake/HDFCompilerFlags.cmake index 61218dc3841..15b158da8ae 100644 --- a/config/cmake/HDFCompilerFlags.cmake +++ b/config/cmake/HDFCompilerFlags.cmake @@ -34,7 +34,6 @@ endif() # future if(MSVC OR _INTEL_WINDOWS OR _CLANG_MSVC_WINDOWS) add_definitions(-D_CRT_SECURE_NO_DEPRECATE -D_CRT_NONSTDC_NO_DEPRECATE) -else() endif() if(MSVC) @@ -158,7 +157,7 @@ else () # gcc automatically inlines based on the optimization level # this is just a failsafe list (APPEND H5_CFLAGS "-finline-functions") - elseif (CMAKE_C_COMPILER_ID MATCHES "[Cc]lang") + elseif (CMAKE_C_COMPILER_ID MATCHES "IntelLLVM" OR 
CMAKE_C_COMPILER_ID MATCHES "[Cc]lang") ADD_H5_FLAGS (HDF5_CMAKE_C_FLAGS "${HDF5_SOURCE_DIR}/config/clang-warnings/general") ADD_H5_FLAGS (H5_CFLAGS "${HDF5_SOURCE_DIR}/config/clang-warnings/error-general") elseif (CMAKE_C_COMPILER_ID STREQUAL "PGI") @@ -184,13 +183,13 @@ if (HDF5_ENABLE_DEV_WARNINGS) endif () elseif (CMAKE_C_COMPILER_ID STREQUAL "GNU" AND CMAKE_C_COMPILER_VERSION VERSION_GREATER_EQUAL 4.8) ADD_H5_FLAGS (H5_CFLAGS "${HDF5_SOURCE_DIR}/config/gnu-warnings/developer-general") - elseif (CMAKE_C_COMPILER_ID MATCHES "[Cc]lang") + elseif (CMAKE_C_COMPILER_ID MATCHES "IntelLLVM" OR CMAKE_C_COMPILER_ID MATCHES "[Cc]lang") ADD_H5_FLAGS (H5_CFLAGS "${HDF5_SOURCE_DIR}/config/clang-warnings/developer-general") endif () else () if (CMAKE_C_COMPILER_ID STREQUAL "GNU" AND CMAKE_C_COMPILER_VERSION VERSION_GREATER_EQUAL 4.8) ADD_H5_FLAGS (H5_CFLAGS "${HDF5_SOURCE_DIR}/config/gnu-warnings/no-developer-general") - elseif (CMAKE_C_COMPILER_ID MATCHES "[Cc]lang") + elseif (CMAKE_C_COMPILER_ID MATCHES "IntelLLVM" OR CMAKE_C_COMPILER_ID MATCHES "[Cc]lang") ADD_H5_FLAGS (H5_CFLAGS "${HDF5_SOURCE_DIR}/config/clang-warnings/no-developer-general") endif () endif () diff --git a/config/sanitizer/code-coverage.cmake b/config/sanitizer/code-coverage.cmake index 3a99024cf84..4a927af5ee8 100644 --- a/config/sanitizer/code-coverage.cmake +++ b/config/sanitizer/code-coverage.cmake @@ -106,7 +106,7 @@ if(CODE_COVERAGE AND NOT CODE_COVERAGE_ADDED) ${CMAKE_COVERAGE_OUTPUT_DIRECTORY} DEPENDS ccov-clean) - if(CMAKE_C_COMPILER_ID MATCHES "[Cc]lang" OR CMAKE_CXX_COMPILER_ID MATCHES "[Cc]lang") + if(CMAKE_C_COMPILER_ID MATCHES "IntelLLVM" OR CMAKE_CXX_COMPILER_ID MATCHES "IntelLLVM" OR CMAKE_C_COMPILER_ID MATCHES "[Cc]lang" OR CMAKE_CXX_COMPILER_ID MATCHES "[Cc]lang") # Messages message(STATUS "Building with llvm Code Coverage Tools") @@ -212,7 +212,7 @@ function(target_code_coverage TARGET_NAME) if(CODE_COVERAGE) # Add code coverage instrumentation to the target's linker command - 
if(CMAKE_C_COMPILER_ID MATCHES "[Cc]lang" OR CMAKE_CXX_COMPILER_ID MATCHES "[Cc]lang") + if(CMAKE_C_COMPILER_ID MATCHES "IntelLLVM" OR CMAKE_CXX_COMPILER_ID MATCHES "IntelLLVM" OR CMAKE_C_COMPILER_ID MATCHES "[Cc]lang" OR CMAKE_CXX_COMPILER_ID MATCHES "[Cc]lang") target_compile_options(${TARGET_NAME} PRIVATE -fprofile-instr-generate -fcoverage-mapping --coverage) set_property( @@ -234,7 +234,7 @@ function(target_code_coverage TARGET_NAME) # Add shared library to processing for 'all' targets if(target_type STREQUAL "SHARED_LIBRARY" AND target_code_coverage_ALL) - if(CMAKE_C_COMPILER_ID MATCHES "[Cc]lang" OR CMAKE_CXX_COMPILER_ID MATCHES "[Cc]lang") + if(CMAKE_C_COMPILER_ID MATCHES "IntelLLVM" OR CMAKE_CXX_COMPILER_ID MATCHES "IntelLLVM" OR CMAKE_C_COMPILER_ID MATCHES "[Cc]lang" OR CMAKE_CXX_COMPILER_ID MATCHES "[Cc]lang") add_custom_target( ccov-run-${TARGET_NAME} COMMAND echo "-object=$" >> @@ -254,7 +254,7 @@ function(target_code_coverage TARGET_NAME) # For executables add targets to run and produce output if(target_type STREQUAL "EXECUTABLE") - if(CMAKE_C_COMPILER_ID MATCHES "[Cc]lang" OR CMAKE_CXX_COMPILER_ID MATCHES "(Apple)?Cc]lang") + if(CMAKE_C_COMPILER_ID MATCHES "IntelLLVM" OR CMAKE_CXX_COMPILER_ID MATCHES "IntelLLVM" OR CMAKE_C_COMPILER_ID MATCHES "[Cc]lang" OR CMAKE_CXX_COMPILER_ID MATCHES "(Apple)?Cc]lang") # If there are shared objects to also work with, generate the string to # add them here @@ -412,7 +412,7 @@ endfunction() # any subdirectories. To add coverage instrumentation to only specific targets, # use `target_code_coverage`. 
function(add_code_coverage) - if(CMAKE_C_COMPILER_ID MATCHES "[Cc]lang" OR CMAKE_CXX_COMPILER_ID MATCHES "[Cc]lang") + if(CMAKE_C_COMPILER_ID MATCHES "IntelLLVM" OR CMAKE_CXX_COMPILER_ID MATCHES "IntelLLVM" OR CMAKE_C_COMPILER_ID MATCHES "[Cc]lang" OR CMAKE_CXX_COMPILER_ID MATCHES "[Cc]lang") add_compile_options(-fprofile-instr-generate -fcoverage-mapping --coverage) add_link_options(-fprofile-instr-generate -fcoverage-mapping --coverage) elseif(CMAKE_C_COMPILER_ID MATCHES "GNU" OR CMAKE_CXX_COMPILER_ID MATCHES "GNU") @@ -437,7 +437,7 @@ function(add_code_coverage_all_targets) "${multi_value_keywords}" ${ARGN}) if(CODE_COVERAGE) - if(CMAKE_C_COMPILER_ID MATCHES "[Cc]lang" OR CMAKE_CXX_COMPILER_ID MATCHES "[Cc]lang") + if(CMAKE_C_COMPILER_ID MATCHES "IntelLLVM" OR CMAKE_CXX_COMPILER_ID MATCHES "IntelLLVM" OR CMAKE_C_COMPILER_ID MATCHES "[Cc]lang" OR CMAKE_CXX_COMPILER_ID MATCHES "[Cc]lang") # Merge the profile data for all of the run executables add_custom_target( diff --git a/config/sanitizer/sanitizers.cmake b/config/sanitizer/sanitizers.cmake index b06992fa600..4ba043bac08 100644 --- a/config/sanitizer/sanitizers.cmake +++ b/config/sanitizer/sanitizers.cmake @@ -30,7 +30,7 @@ endfunction() message(STATUS "USE_SANITIZER=${USE_SANITIZER}, CMAKE_C_COMPILER_ID=${CMAKE_C_COMPILER_ID}") if(USE_SANITIZER) - if(INTEL_CLANG OR CMAKE_C_COMPILER_ID MATCHES "[Cc]lang") + if(CMAKE_C_COMPILER_ID MATCHES "IntelLLVM" OR CMAKE_C_COMPILER_ID MATCHES "[Cc]lang") set(CMAKE_EXPORT_COMPILE_COMMANDS ON) if(UNIX) diff --git a/config/toolchain/ icc.cmake b/config/toolchain/ icc.cmake new file mode 100644 index 00000000000..97f6a64985f --- /dev/null +++ b/config/toolchain/ icc.cmake @@ -0,0 +1,11 @@ +# Uncomment the following to use cross-compiling +#set(CMAKE_SYSTEM_NAME Linux) + +set(CMAKE_COMPILER_VENDOR "intel") + +set(CMAKE_C_COMPILER icc) +set(CMAKE_CXX_COMPILER icpc) +set(CMAKE_Fortran_COMPILER ifort) + +# the following is used if cross-compiling +set(CMAKE_CROSSCOMPILING_EMULATOR 
"") diff --git a/config/toolchain/aarch64.cmake b/config/toolchain/aarch64.cmake new file mode 100644 index 00000000000..adb86390e46 --- /dev/null +++ b/config/toolchain/aarch64.cmake @@ -0,0 +1,17 @@ +set(TOOLCHAIN_PREFIX aarch64-linux-gnu) +set(ANDROID_NDK /opt/android-ndk-r25b-linux/android-ndk-r25b) +set (CMAKE_SYSTEM_NAME Android) +set (CMAKE_ANDROID_ARCH_ABI x86_64) +#set (CMAKE_ANDROID_STANDALONE_TOOLCHAIN ${ANDROID_NDK}/build/cmake/andriod.toolchain.cmake) +set (CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}-gcc) +set (CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}-g++) +#set (CMAKE_RC_COMPILER ${TOOLCHAIN_PREFIX}-windres) +set (CMAKE_Fortran_COMPILER ${TOOLCHAIN_PREFIX}-gfortran) +set (CMAKE_FIND_ROOT_PATH /usr/${TOOLCHAIN_PREFIX}) +set (CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) +set (CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) +set (CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) +set (CMAKE_CROSSCOMPILING_EMULATOR qemu-aarch64) + +include_directories(/usr/${TOOLCHAIN_PREFIX}/include) + diff --git a/config/toolchain/gcc.cmake b/config/toolchain/gcc.cmake new file mode 100644 index 00000000000..c41d0cadb31 --- /dev/null +++ b/config/toolchain/gcc.cmake @@ -0,0 +1,11 @@ +# Uncomment the following line and the correct system name to use cross-compiling +#set(CMAKE_SYSTEM_NAME Linux) + +set(CMAKE_COMPILER_VENDOR "GCC") + +set(CMAKE_C_COMPILER cc) +set(CMAKE_CXX_COMPILER c++) +set(CMAKE_Fortran_COMPILER gfortran) + +# the following is used if cross-compiling +set(CMAKE_CROSSCOMPILING_EMULATOR "") diff --git a/config/toolchain/intel.cmake b/config/toolchain/intel.cmake index ae1d2f8fa86..f8f60b28b46 100644 --- a/config/toolchain/intel.cmake +++ b/config/toolchain/intel.cmake @@ -3,16 +3,9 @@ set(CMAKE_COMPILER_VENDOR "intel") -if(USE_SANITIZER) - set(CMAKE_C_COMPILER icl) - set(CMAKE_CXX_COMPILER icl++) - set(CMAKE_Fortran_COMPILER ifort) - set(INTEL_CLANG ON) -else () - set(CMAKE_C_COMPILER icc) - set(CMAKE_CXX_COMPILER icpc) - set(CMAKE_Fortran_COMPILER ifort) -endif () 
+set(CMAKE_C_COMPILER icx) +set(CMAKE_CXX_COMPILER icpx) +set(CMAKE_Fortran_COMPILER ifx) # the following is used if cross-compiling set(CMAKE_CROSSCOMPILING_EMULATOR "") diff --git a/config/toolchain/PGI.cmake b/config/toolchain/pgi.cmake similarity index 100% rename from config/toolchain/PGI.cmake rename to config/toolchain/pgi.cmake From 04f0647727677d716a3c1c772d35a660a8ea0dc5 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Fri, 17 Feb 2023 16:38:27 -0600 Subject: [PATCH 019/108] Merge Add build options for CMake packaging #2347 (#2465) --- CMakeInstallation.cmake | 54 +++++++++++++++++++++++------------------ 1 file changed, 30 insertions(+), 24 deletions(-) diff --git a/CMakeInstallation.cmake b/CMakeInstallation.cmake index 2c4c304964c..018a36462d1 100644 --- a/CMakeInstallation.cmake +++ b/CMakeInstallation.cmake @@ -391,24 +391,29 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES) set (CPACK_PACKAGING_INSTALL_PREFIX "/${CPACK_PACKAGE_INSTALL_DIRECTORY}") set (CPACK_COMPONENTS_ALL_IN_ONE_PACKAGE ON) - list (APPEND CPACK_GENERATOR "DEB") - set (CPACK_DEBIAN_PACKAGE_SECTION "Libraries") - set (CPACK_DEBIAN_PACKAGE_MAINTAINER "${HDF5_PACKAGE_BUGREPORT}") - - list (APPEND CPACK_GENERATOR "RPM") - set (CPACK_RPM_PACKAGE_RELEASE "1") - set (CPACK_RPM_PACKAGE_RELEASE_DIST ON) - set (CPACK_RPM_COMPONENT_INSTALL ON) - set (CPACK_RPM_PACKAGE_RELOCATABLE ON) - set (CPACK_RPM_FILE_NAME "RPM-DEFAULT") - set (CPACK_RPM_PACKAGE_NAME "${CPACK_PACKAGE_NAME}") - set (CPACK_RPM_PACKAGE_VERSION "${CPACK_PACKAGE_VERSION}") - set (CPACK_RPM_PACKAGE_VENDOR "${CPACK_PACKAGE_VENDOR}") - set (CPACK_RPM_PACKAGE_LICENSE "BSD-style") - set (CPACK_RPM_PACKAGE_GROUP "Development/Libraries") - set (CPACK_RPM_PACKAGE_URL "${HDF5_PACKAGE_URL}") - set (CPACK_RPM_PACKAGE_SUMMARY "HDF5 is a unique technology suite that makes possible the management of extremely large and complex data collections.") - set (CPACK_RPM_PACKAGE_DESCRIPTION + 
find_program (DPKGSHLIB_EXE dpkg-shlibdeps) + if (DPKGSHLIB_EXE) + list (APPEND CPACK_GENERATOR "DEB") + set (CPACK_DEBIAN_PACKAGE_SECTION "Libraries") + set (CPACK_DEBIAN_PACKAGE_MAINTAINER "${HDF5_PACKAGE_BUGREPORT}") + endif () + + find_program (RPMBUILD_EXE rpmbuild) + if (RPMBUILD_EXE) + list (APPEND CPACK_GENERATOR "RPM") + set (CPACK_RPM_PACKAGE_RELEASE "1") + set (CPACK_RPM_PACKAGE_RELEASE_DIST ON) + set (CPACK_RPM_COMPONENT_INSTALL ON) + set (CPACK_RPM_PACKAGE_RELOCATABLE ON) + set (CPACK_RPM_FILE_NAME "RPM-DEFAULT") + set (CPACK_RPM_PACKAGE_NAME "${CPACK_PACKAGE_NAME}") + set (CPACK_RPM_PACKAGE_VERSION "${CPACK_PACKAGE_VERSION}") + set (CPACK_RPM_PACKAGE_VENDOR "${CPACK_PACKAGE_VENDOR}") + set (CPACK_RPM_PACKAGE_LICENSE "BSD-style") + set (CPACK_RPM_PACKAGE_GROUP "Development/Libraries") + set (CPACK_RPM_PACKAGE_URL "${HDF5_PACKAGE_URL}") + set (CPACK_RPM_PACKAGE_SUMMARY "HDF5 is a unique technology suite that makes possible the management of extremely large and complex data collections.") + set (CPACK_RPM_PACKAGE_DESCRIPTION "The HDF5 technology suite includes: * A versatile data model that can represent very complex data objects and a wide variety of metadata. @@ -423,13 +428,14 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES) The HDF5 data model, file format, API, library, and tools are open and distributed without charge. 
" - ) + ) - #----------------------------------------------------------------------------- - # Configure the spec file for the install RPM - #----------------------------------------------------------------------------- -# configure_file ("${HDF5_RESOURCES_DIR}/hdf5.spec.in" "${CMAKE_CURRENT_BINARY_DIR}/${HDF5_PACKAGE_NAME}.spec" @ONLY IMMEDIATE) -# set (CPACK_RPM_USER_BINARY_SPECFILE "${CMAKE_CURRENT_BINARY_DIR}/${HDF5_PACKAGE_NAME}.spec") + #----------------------------------------------------------------------------- + # Configure the spec file for the install RPM + #----------------------------------------------------------------------------- +# configure_file ("${HDF5_RESOURCES_DIR}/hdf5.spec.in" "${CMAKE_CURRENT_BINARY_DIR}/${HDF5_PACKAGE_NAME}.spec" @ONLY IMMEDIATE) +# set (CPACK_RPM_USER_BINARY_SPECFILE "${CMAKE_CURRENT_BINARY_DIR}/${HDF5_PACKAGE_NAME}.spec") + endif () endif () # By default, do not warn when built on machines using only VS Express: From d58db07b3dd16565b924c0dad411cfae4dcf2d5f Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Tue, 21 Feb 2023 08:26:15 -0600 Subject: [PATCH 020/108] Remove space from toolchain name #2482 (#2484) --- config/toolchain/{ icc.cmake => icc.cmake} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename config/toolchain/{ icc.cmake => icc.cmake} (100%) diff --git a/config/toolchain/ icc.cmake b/config/toolchain/icc.cmake similarity index 100% rename from config/toolchain/ icc.cmake rename to config/toolchain/icc.cmake From a7dd6452a0be68e3bd3af74dd583f959a9d6e65c Mon Sep 17 00:00:00 2001 From: Dana Robinson <43805+derobins@users.noreply.github.com> Date: Tue, 21 Feb 2023 09:08:09 -0800 Subject: [PATCH 021/108] 1.12: Fix files codespell has issues with (#2489) * Fix files codespell has issues with * Exclude h5repack .dat test files * Fix characters in C++ HTML file * Fix header.html characters --- .github/workflows/codespell.yml | 2 +- c++/src/C2Cppfunction_map.htm | 84 
++++++++++++++++----------------- c++/src/header.html | 2 +- 3 files changed, 44 insertions(+), 44 deletions(-) diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml index a281fc7f1b4..ddd17004b35 100644 --- a/.github/workflows/codespell.yml +++ b/.github/workflows/codespell.yml @@ -11,5 +11,5 @@ jobs: - uses: actions/checkout@v3 - uses: codespell-project/actions-codespell@master with: - skip: ./bin/trace,./hl/tools/h5watch/h5watch.c,./tools/test/h5jam/tellub.c,./config/sanitizer/LICENSE + skip: ./bin/trace,./hl/tools/h5watch/h5watch.c,./tools/test/h5jam/tellub.c,./config/sanitizer/LICENSE,./tools/test/h5repack/testfiles/*.dat ignore_words_list: isnt,inout,nd,parms,parm,ba,offsetP,ser,ois,had,fiter,fo,clude,refere,minnum,offsetp,creat,ans:,eiter,lastr,ans,isn't,ifset,sur,trun,dne,tthe,hda,filname,te,htmp,minnum,ro,oce,ot diff --git a/c++/src/C2Cppfunction_map.htm b/c++/src/C2Cppfunction_map.htm index a9e0a2779d5..665bc63f3a3 100644 --- a/c++/src/C2Cppfunction_map.htm +++ b/c++/src/C2Cppfunction_map.htm @@ -10402,13 +10402,13 @@

void H5Location::moveLink(const char* src_name,

const Group& + normal'>            const Group& dst, const char* dst_name,

const + normal'>            const LinkCreatPropList& lcpl = LinkCreatPropList::DEFAULT,

const + normal'>            const LinkAccPropList& lapl = LinkAccPropList::DEFAULT)

void H5Location::moveLink(const H5std_string& src_name,

const Group& + normal'>            const Group& dst, const H5std_string& dst_name,

const + normal'>            const LinkCreatPropList& lcpl = LinkCreatPropList::DEFAULT,

const + normal'>            const LinkAccPropList& lapl = LinkAccPropList::DEFAULT)

void H5Location::moveLink(const char* src_name, const char* dst_name,

const + normal'>            const LinkCreatPropList& lcpl = LinkCreatPropList::DEFAULT,

const + normal'>            const LinkAccPropList& lapl = LinkAccPropList::DEFAULT)

void H5Location::moveLink(const H5std_string& src_name,

const + normal'>            const H5std_string& dst_name,

const + normal'>            const LinkCreatPropList& lcpl = LinkCreatPropList::DEFAULT,

const + normal'>            const LinkAccPropList& lapl = LinkAccPropList::DEFAULT)

const LinkCreatPropList& lcpl = LinkCreatPropList::DEFAULT,

const LinkAccPropList& + normal'>const  LinkAccPropList& lapl = LinkAccPropList::DEFAULT)

void H5Location::link(const char *curr_name,

const Group& + normal'>             const Group& new_loc, const char *new_name,

const + normal'>             const LinkCreatPropList& lcpl = LinkCreatPropList::DEFAULT,

const + normal'>             const LinkAccPropList& lapl = LinkAccPropList::DEFAULT)

void H5Location::link(const H5std_string& curr_name,

const Group& + normal'>             const Group& new_loc, const H5std_string& new_name,

const + normal'>             const LinkCreatPropList& lcpl = LinkCreatPropList::DEFAULT,

const + normal'>             const LinkAccPropList& lapl = LinkAccPropList::DEFAULT)

void H5Location::link(const char *curr_name,

const hid_t + normal'>             const hid_t same_loc, const char *new_name,

const + normal'>             const LinkCreatPropList& lcpl = LinkCreatPropList::DEFAULT,

const + normal'>             const LinkAccPropList& lapl = LinkAccPropList::DEFAULT)

void H5Location::link(const H5std_string& curr_name,

const hid_t + normal'>             const hid_t same_loc, const H5std_string& new_name,

const + normal'>             const LinkCreatPropList& lcpl = LinkCreatPropList::DEFAULT,

const + normal'>             const LinkAccPropList& lapl = LinkAccPropList::DEFAULT)

void H5Location::link(const char *target_name, const char *link_name,

const + normal'>             const LinkCreatPropList& lcpl = LinkCreatPropList::DEFAULT,

const + normal'>             const LinkAccPropList& lapl = LinkAccPropList::DEFAULT)

void H5Location::link(const H5std_string& target_name,

const + normal'>             const H5std_string& link_name,

const + normal'>             const LinkCreatPropList& lcpl = LinkCreatPropList::DEFAULT,

const + normal'>             const LinkAccPropList& lapl = LinkAccPropList::DEFAULT)

void CommonFG::unlink( const char* name,

const + normal'>             const LinkAccPropList& lapl = LinkAccPropList::DEFAULT)

void CommonFG::unlink( const H5std_string& name,

const + normal'>             const LinkAccPropList& lapl = LinkAccPropList::DEFAULT)

void H5Location::unlink( const char* name,

const + normal'>             const LinkAccPropList& lapl = LinkAccPropList::DEFAULT)

void H5Location::unlink( const H5std_string& name,

const LinkAccPropList& + normal'>             const LinkAccPropList& lapl = LinkAccPropList::DEFAULT)

H5L_info2_t getLinkInfo(const char* link_name,

const + normal'>               const LinkAccPropList& lapl = LinkAccPropList::DEFAULT)

H5L_info2_t getLinkInfo(const H5std_string& link_name,

const + normal'>               const LinkAccPropList& lapl = LinkAccPropList::DEFAULT)

void DSetCreatPropList::setFilter(H5Z_filter_t filter_id, unsigned int flags,

size_t cd_nelmts, const + normal'>        size_t cd_nelmts, const unsigned int cd_values[])

H5Z_filter_t DSetCreatPropList::getFilter(int filter_number,

unsigned int + normal'>        unsigned int &flags, size_t &cd_nelmts, unsigned int* cd_values,

size_t namelen, char + normal'>        size_t namelen, char name[], unsigned int& filter_config)

void DSetCreatPropList::getFilterById(H5Z_filter_t filter_id,

unsigned int + normal'>        unsigned int &flags, size_t &cd_nelmts, unsigned int* cd_values,

size_t namelen, char + normal'>        size_t namelen, char name[], unsigned int &filter_config)

void H5Library::setFreeListLimits(int reg_global_lim, int reg_list_lim,

int arr_global_lim, int + normal'>        int arr_global_lim, int arr_list_lim, int blk_global_lim,

        int blk_list_lim)

 

From 063a61c36b189bb9b8249f495043a32967eda9d7 Mon Sep 17 00:00:00 2001 From: vchoi-hdfgroup <55293060+vchoi-hdfgroup@users.noreply.github.com> Date: Sun, 26 Feb 2023 12:07:52 -0600 Subject: [PATCH 022/108] =?UTF-8?q?Fix=20for=20HDFFV-11052:=20h5debug=20fa?= =?UTF-8?q?ils=20on=20a=20corrupted=20file=20(h5=5Fnrefs=5FPOC)=E2=80=A6?= =?UTF-8?q?=20(#2291)=20(#2496)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Fix for HDFFV-11052: h5debug fails on a corrupted file (h5_nrefs_POC) producing a core dump. When h5debug closes the corrupted file, the library calls H5F__dest() which performs all the closing operations for the file "f" (H5F_t *) but just keeping note of errors in "ret_value" all the way till the end of the routine. The user-provided corrupted file has an illegal file size causing failure when reading the image during the closing process. At the end of this routine it sets f->shared to NULL and then frees "f". This is done whether there is error or not in "ret_value". Due to the failure in reading the file earlier, the routine then returns error. The error return from H5F__dest() causes the file object "f" not being removed from the ID node table. When the library finally exits, it will try to close the file objects in the table. This causes assert failure when H5F_ID_EXISTS(f) or H5F_NREFS(f). Fix: a) H5F_dest(): free the f only when there is no error in "ret_value" at the end of the routine. b) H5VL__native_file_close(): if f->shared is NULL, free "f"; otherwise, perform closing on "f" as before. c) h5debug.c main(): track error return from H5Fclose(). 
* Committing clang-format changes Co-authored-by: vchoi Co-authored-by: github-actions <41898282+github-actions[bot]@users.noreply.github.com> --- release_docs/RELEASE.txt | 19 ++++++++++++++ src/H5Fint.c | 4 ++- src/H5VLnative_file.c | 52 ++++++++++++++++++++++----------------- test/CMakeTests.cmake | 1 + test/cve_2020_10812.h5 | Bin 0 -> 2565 bytes test/tmisc.c | 39 +++++++++++++++++++++++++++++ tools/src/misc/h5debug.c | 8 ++++-- 7 files changed, 97 insertions(+), 26 deletions(-) create mode 100755 test/cve_2020_10812.h5 diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 32261658907..acbde91243b 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -180,6 +180,25 @@ Bug Fixes since HDF5-1.12.1 release =================================== Library ------- + - Seg fault on file close + + h5debug fails at file close with core dump on a file that has an + illegal file size in its cache image. In H5F_dest(), the library + performs all the closing operations for the file and keeps track of + the error encountered when reading the file cache image. + At the end of the routine, it frees the file's file structure and + returns error. Due to the error return, the file object is not removed + from the ID node table. This eventually causes assertion failure in + H5VL__native_file_close() when the library finally exits and tries to + access that file object in the table for closing. + + The closing routine, H5F_dest(), will not free the file structure if + there is error, keeping a valid file structure in the ID node table. + It will be freed later in H5VL__native_file_close() when the + library exits and terminates the file package. 
+ + (VC - 2022/12/14, HDFFV-11052, CVE-2020-10812) + - Fixed an issue with variable length attributes Previously, if a variable length attribute was held open while its file diff --git a/src/H5Fint.c b/src/H5Fint.c index dc86390746a..1bdf09bd220 100644 --- a/src/H5Fint.c +++ b/src/H5Fint.c @@ -1648,7 +1648,9 @@ H5F__dest(H5F_t *f, hbool_t flush) if (H5FO_top_dest(f) < 0) HDONE_ERROR(H5E_FILE, H5E_CANTINIT, FAIL, "problems closing file") f->shared = NULL; - f = H5FL_FREE(H5F_t, f); + + if (ret_value >= 0) + f = H5FL_FREE(H5F_t, f); FUNC_LEAVE_NOAPI(ret_value) } /* end H5F__dest() */ diff --git a/src/H5VLnative_file.c b/src/H5VLnative_file.c index 3803e6b1bab..a1a5c7d738b 100644 --- a/src/H5VLnative_file.c +++ b/src/H5VLnative_file.c @@ -813,29 +813,35 @@ H5VL__native_file_close(void *file, hid_t H5_ATTR_UNUSED dxpl_id, void H5_ATTR_U FUNC_ENTER_PACKAGE /* This routine should only be called when a file ID's ref count drops to zero */ - HDassert(H5F_ID_EXISTS(f)); - - /* Flush file if this is the last reference to this id and we have write - * intent, unless it will be flushed by the "shared" file being closed. - * This is only necessary to replicate previous behaviour, and could be - * disabled by an option/property to improve performance. 
- */ - if ((H5F_NREFS(f) > 1) && (H5F_INTENT(f) & H5F_ACC_RDWR)) { - /* Get the file ID corresponding to the H5F_t struct */ - if (H5I_find_id(f, H5I_FILE, &file_id) < 0 || H5I_INVALID_HID == file_id) - HGOTO_ERROR(H5E_ATOM, H5E_CANTGET, FAIL, "invalid atom") - - /* Get the number of references outstanding for this file ID */ - if ((nref = H5I_get_ref(file_id, FALSE)) < 0) - HGOTO_ERROR(H5E_ATOM, H5E_CANTGET, FAIL, "can't get ID ref count") - if (nref == 1) - if (H5F__flush(f) < 0) - HGOTO_ERROR(H5E_CACHE, H5E_CANTFLUSH, FAIL, "unable to flush cache") - } /* end if */ - - /* Close the file */ - if (H5F__close(f) < 0) - HGOTO_ERROR(H5E_FILE, H5E_CANTDEC, FAIL, "can't close file") + HDassert(f->shared == NULL || H5F_ID_EXISTS(f)); + + if (f->shared == NULL) + f = H5FL_FREE(H5F_t, f); + + else { + + /* Flush file if this is the last reference to this id and we have write + * intent, unless it will be flushed by the "shared" file being closed. + * This is only necessary to replicate previous behaviour, and could be + * disabled by an option/property to improve performance. 
+ */ + if ((H5F_NREFS(f) > 1) && (H5F_INTENT(f) & H5F_ACC_RDWR)) { + /* Get the file ID corresponding to the H5F_t struct */ + if (H5I_find_id(f, H5I_FILE, &file_id) < 0 || H5I_INVALID_HID == file_id) + HGOTO_ERROR(H5E_ATOM, H5E_CANTGET, FAIL, "invalid atom") + + /* Get the number of references outstanding for this file ID */ + if ((nref = H5I_get_ref(file_id, FALSE)) < 0) + HGOTO_ERROR(H5E_ATOM, H5E_CANTGET, FAIL, "can't get ID ref count") + if (nref == 1) + if (H5F__flush(f) < 0) + HGOTO_ERROR(H5E_CACHE, H5E_CANTFLUSH, FAIL, "unable to flush cache") + } /* end if */ + + /* Close the file */ + if (H5F__close(f) < 0) + HGOTO_ERROR(H5E_FILE, H5E_CANTDEC, FAIL, "can't close file") + } done: FUNC_LEAVE_NOAPI(ret_value) diff --git a/test/CMakeTests.cmake b/test/CMakeTests.cmake index 3dafb2e6008..661604279dc 100644 --- a/test/CMakeTests.cmake +++ b/test/CMakeTests.cmake @@ -126,6 +126,7 @@ set (HDF5_REFERENCE_TEST_FILES btree_idx_1_6.h5 btree_idx_1_8.h5 corrupt_stab_msg.h5 + cve_2020_10812.h5 deflate.h5 family_v16_00000.h5 family_v16_00001.h5 diff --git a/test/cve_2020_10812.h5 b/test/cve_2020_10812.h5 new file mode 100755 index 0000000000000000000000000000000000000000..a20369da0db50a0d12400e9f886f2a431fbdfe6e GIT binary patch literal 2565 zcmeD5aB<`1lHy|G;9!6O11N))3&J=7<-f7G)6K{Lf(#5DP%#OH1_l!_n-L@o1_BU@ zi(vvMgaxDj(+B`_7|ufp*`h{i7i;8UmvsK 0) H5Pclose(fapl); - if (fid > 0) - H5Fclose(fid); + if (fid > 0) { + if (H5Fclose(fid) < 0) { + HDfprintf(stderr, "Error in closing file!\n"); + exit_value = 1; + } + } /* Pop API context */ if (api_ctx_pushed) From 4e645aeedf621b341c6b6d5665fecdd08314bda9 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Tue, 28 Feb 2023 19:06:50 -0600 Subject: [PATCH 023/108] 1.12 Add fetchcontent for compression libs and fix cmake config #2487 (#2493) * Add fetchcontent for compression libs and fix cmake config #2487 * Add CI test for FetchContent * Use LINK_COMP_LIBS instead of STATIC_LIBRARY for 
depends --- .github/workflows/codespell.yml | 2 +- .github/workflows/main.yml | 8 +- CMakeFilters.cmake | 123 ++-- CMakeLists.txt | 23 +- config/cmake/LIBAEC/CMakeLists.txt | 519 ++++++++++++++++ config/cmake/LIBAEC/CPack.Info.plist.in | 26 + config/cmake/LIBAEC/config.h.in | 36 ++ .../LIBAEC/libaec-config-version.cmake.in | 42 ++ config/cmake/LIBAEC/libaec-config.cmake.in | 59 ++ config/cmake/ZLIB/CMakeLists.txt | 572 ++++++++++++++++++ config/cmake/ZLIB/CPack.Info.plist.in | 26 + config/cmake/ZLIB/zconf.h.in | 536 ++++++++++++++++ .../cmake/ZLIB/zlib-config-version.cmake.in | 42 ++ config/cmake/ZLIB/zlib-config.cmake.in | 58 ++ config/cmake/cacheinit.cmake | 15 +- config/cmake_ext_mod/HDFLibMacros.cmake | 126 ++-- config/cmake_ext_mod/HDFMacros.cmake | 2 + config/sanitizer/tools.cmake | 2 +- config/toolchain/GCC.cmake | 11 - config/toolchain/aarch64.cmake | 2 +- release_docs/INSTALL_CMake.txt | 48 +- release_docs/RELEASE.txt | 25 + 22 files changed, 2162 insertions(+), 141 deletions(-) create mode 100644 config/cmake/LIBAEC/CMakeLists.txt create mode 100644 config/cmake/LIBAEC/CPack.Info.plist.in create mode 100644 config/cmake/LIBAEC/config.h.in create mode 100644 config/cmake/LIBAEC/libaec-config-version.cmake.in create mode 100644 config/cmake/LIBAEC/libaec-config.cmake.in create mode 100644 config/cmake/ZLIB/CMakeLists.txt create mode 100644 config/cmake/ZLIB/CPack.Info.plist.in create mode 100644 config/cmake/ZLIB/zconf.h.in create mode 100644 config/cmake/ZLIB/zlib-config-version.cmake.in create mode 100644 config/cmake/ZLIB/zlib-config.cmake.in delete mode 100644 config/toolchain/GCC.cmake diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml index ddd17004b35..1bd7ebc7ec8 100644 --- a/.github/workflows/codespell.yml +++ b/.github/workflows/codespell.yml @@ -12,4 +12,4 @@ jobs: - uses: codespell-project/actions-codespell@master with: skip: 
./bin/trace,./hl/tools/h5watch/h5watch.c,./tools/test/h5jam/tellub.c,./config/sanitizer/LICENSE,./tools/test/h5repack/testfiles/*.dat - ignore_words_list: isnt,inout,nd,parms,parm,ba,offsetP,ser,ois,had,fiter,fo,clude,refere,minnum,offsetp,creat,ans:,eiter,lastr,ans,isn't,ifset,sur,trun,dne,tthe,hda,filname,te,htmp,minnum,ro,oce,ot + ignore_words_list: isnt,inout,nd,parms,parm,ba,offsetP,ser,ois,had,fiter,fo,clude,refere,minnum,offsetp,creat,ans:,eiter,lastr,ans,isn't,ifset,sur,trun,dne,tthe,hda,filname,te,htmp,minnum,ro,oce,ot,msdos diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 4541473a273..80278ad18c6 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -73,6 +73,8 @@ jobs: cpp: ON fortran: OFF java: ON + libaecfc: ON + zlibfc: ON parallel: OFF mirror_vfd: OFF direct_vfd: OFF @@ -88,6 +90,8 @@ jobs: cpp: ON fortran: ON java: ON + libaecfc: ON + zlibfc: ON parallel: OFF mirror_vfd: ON direct_vfd: ON @@ -143,6 +147,8 @@ jobs: cpp: ON fortran: OFF java: ON + libaecfc: ON + zlibfc: ON parallel: OFF mirror_vfd: ON direct_vfd: OFF @@ -361,7 +367,7 @@ jobs: run: | mkdir "${{ runner.workspace }}/build" cd "${{ runner.workspace }}/build" - cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=${{ matrix.cpp }} -DHDF5_BUILD_FORTRAN=${{ matrix.fortran }} -DHDF5_BUILD_JAVA=${{ matrix.java }} -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} $GITHUB_WORKSPACE + cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=${{ matrix.cpp }} -DHDF5_BUILD_FORTRAN=${{ 
matrix.fortran }} -DHDF5_BUILD_JAVA=${{ matrix.java }} -DBUILD_SZIP_WITH_FETCHCONTENT=${{ matrix.libaecfc }} -DBUILD_ZLIB_WITH_FETCHCONTENT=${{ matrix.zlibfc }} -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} $GITHUB_WORKSPACE shell: bash if: (matrix.generator != 'autogen') && (! matrix.thread_safe.enabled) diff --git a/CMakeFilters.cmake b/CMakeFilters.cmake index 725390b31b5..cac35d42805 100644 --- a/CMakeFilters.cmake +++ b/CMakeFilters.cmake @@ -11,16 +11,44 @@ # option (USE_LIBAEC "Use AEC library as SZip Filter" OFF) option (USE_LIBAEC_STATIC "Use static AEC library " OFF) +option (ZLIB_USE_EXTERNAL "Use External Library Building for ZLIB" 0) +option (SZIP_USE_EXTERNAL "Use External Library Building for SZIP" 0) -include (ExternalProject) -include (FetchContent) +option (BUILD_ZLIB_WITH_FETCHCONTENT "Use FetchContent to use original source files" OFF) +if (BUILD_ZLIB_WITH_FETCHCONTENT) + set (ZLIB_USE_EXTERNAL "Use External Library Building for ZLIB" 1) + if (NOT ZLIB_USE_LOCALCONTENT) + set (ZLIB_URL ${ZLIB_TGZ_ORIGPATH}/${ZLIB_TGZ_ORIGNAME}) + else () + set (ZLIB_URL ${TGZPATH}/${ZLIB_TGZ_ORIGNAME}) + endif () + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "Filter ZLIB file is ${ZLIB_URL}") + endif () +endif () + +option (BUILD_SZIP_WITH_FETCHCONTENT "Use FetchContent to use original source files" OFF) +if (BUILD_SZIP_WITH_FETCHCONTENT) + # Only libaec library is usable + set (USE_LIBAEC ON CACHE BOOL "Use libaec szip replacement" FORCE) + set (SZIP_USE_EXTERNAL "Use External Library Building for SZIP" 1) + if (NOT LIBAEC_USE_LOCALCONTENT) + set (SZIP_URL ${LIBAEC_TGZ_ORIGPATH}/${LIBAEC_TGZ_ORIGNAME}) + else () + set (SZIP_URL ${TGZPATH}/${LIBAEC_TGZ_ORIGNAME}) + endif () + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "Filter SZIP file is ${SZIP_URL}") + endif () +endif () +include (ExternalProject) #option (HDF5_ALLOW_EXTERNAL_SUPPORT "Allow 
External Library Building (NO GIT TGZ)" "NO") set (HDF5_ALLOW_EXTERNAL_SUPPORT "NO" CACHE STRING "Allow External Library Building (NO GIT TGZ)") set_property (CACHE HDF5_ALLOW_EXTERNAL_SUPPORT PROPERTY STRINGS NO GIT TGZ) if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ") - option (ZLIB_USE_EXTERNAL "Use External Library Building for HDF5_ZLIB" 1) - option (SZIP_USE_EXTERNAL "Use External Library Building for SZIP" 1) + set (ZLIB_USE_EXTERNAL "Use External Library Building for ZLIB" 1) + set (SZIP_USE_EXTERNAL "Use External Library Building for SZIP" 1) if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT") set (ZLIB_URL ${ZLIB_GIT_URL} CACHE STRING "Path to zlib git repository") set (ZLIB_BRANCH ${ZLIB_GIT_BRANCH}) @@ -30,16 +58,20 @@ if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MAT if (NOT TGZPATH) set (TGZPATH ${HDF5_SOURCE_DIR}) endif () - set (ZLIB_URL ${TGZPATH}/${ZLIB_TGZ_NAME}) + if (NOT BUILD_ZLIB_WITH_FETCHCONTENT) + set (ZLIB_URL ${TGZPATH}/${ZLIB_TGZ_NAME}) + endif () if (NOT EXISTS "${ZLIB_URL}") set (HDF5_ENABLE_Z_LIB_SUPPORT OFF CACHE BOOL "" FORCE) if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") message (VERBOSE "Filter ZLIB file ${ZLIB_URL} not found") endif () endif () - set (SZIP_URL ${TGZPATH}/${SZIP_TGZ_NAME}) - if (USE_LIBAEC) - set (SZIP_URL ${TGZPATH}/${SZAEC_TGZ_NAME}) + if (NOT BUILD_SZIP_WITH_FETCHCONTENT) + set (SZIP_URL ${TGZPATH}/${SZIP_TGZ_NAME}) + if (USE_LIBAEC) + set (SZIP_URL ${TGZPATH}/${SZAEC_TGZ_NAME}) + endif () endif () if (NOT EXISTS "${SZIP_URL}") set (HDF5_ENABLE_SZIP_SUPPORT OFF CACHE BOOL "" FORCE) @@ -63,20 +95,28 @@ if (HDF5_ENABLE_Z_LIB_SUPPORT) find_package (ZLIB NAMES ${ZLIB_PACKAGE_NAME}${HDF_PACKAGE_EXT} COMPONENTS static shared) if (NOT ZLIB_FOUND) find_package (ZLIB) # Legacy find - if (ZLIB_FOUND) - set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${ZLIB_LIBRARIES}) - endif () endif () - endif () - if (ZLIB_FOUND) - set (H5_HAVE_FILTER_DEFLATE 1) - set 
(H5_HAVE_ZLIB_H 1) - set (H5_HAVE_LIBZ 1) - set (H5_ZLIB_HEADER "zlib.h") - set (ZLIB_INCLUDE_DIR_GEN ${ZLIB_INCLUDE_DIR}) - set (ZLIB_INCLUDE_DIRS ${ZLIB_INCLUDE_DIRS} ${ZLIB_INCLUDE_DIR}) + if (ZLIB_FOUND) + set (H5_HAVE_FILTER_DEFLATE 1) + set (H5_HAVE_ZLIB_H 1) + set (H5_HAVE_LIBZ 1) + set (H5_ZLIB_HEADER "zlib.h") + set (ZLIB_INCLUDE_DIR_GEN ${ZLIB_INCLUDE_DIR}) + set (ZLIB_INCLUDE_DIRS ${ZLIB_INCLUDE_DIRS} ${ZLIB_INCLUDE_DIR}) + set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${ZLIB_LIBRARIES}) + endif () else () - if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ") + if (BUILD_ZLIB_WITH_FETCHCONTENT) + # Only tgz files available + ORIGINAL_ZLIB_LIBRARY ("TGZ") + set (H5_HAVE_FILTER_DEFLATE 1) + set (H5_HAVE_ZLIB_H 1) + set (H5_HAVE_LIBZ 1) + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "HDF5_ZLIB is built from fetch content") + endif () + set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${ZLIB_STATIC_LIBRARY}) + elseif (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ") EXTERNAL_ZLIB_LIBRARY (${HDF5_ALLOW_EXTERNAL_SUPPORT}) set (H5_HAVE_FILTER_DEFLATE 1) set (H5_HAVE_ZLIB_H 1) @@ -84,6 +124,7 @@ if (HDF5_ENABLE_Z_LIB_SUPPORT) if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") message (VERBOSE "Filter HDF5_ZLIB is built") endif () + set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${ZLIB_STATIC_LIBRARY}) else () message (FATAL_ERROR " ZLib is Required for ZLib support in HDF5") endif () @@ -97,7 +138,6 @@ if (HDF5_ENABLE_Z_LIB_SUPPORT) if (H5_HAVE_FILTER_DEFLATE) set (EXTERNAL_FILTERS "${EXTERNAL_FILTERS} DEFLATE") endif () - set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${ZLIB_STATIC_LIBRARY}) INCLUDE_DIRECTORIES (${ZLIB_INCLUDE_DIRS}) if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") message (VERBOSE "Filter HDF5_ZLIB is ON") @@ -119,25 +159,40 @@ if (HDF5_ENABLE_SZIP_SUPPORT) set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_LIBRARIES}) endif () endif () - if (NOT SZIP_FOUND) find_package (SZIP NAMES 
${SZIP_PACKAGE_NAME}${HDF_PACKAGE_EXT} COMPONENTS static shared) if (NOT SZIP_FOUND) find_package (SZIP) # Legacy find - if (SZIP_FOUND) - set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_LIBRARIES}) - endif () + endif () + if (SZIP_FOUND) + set (H5_HAVE_FILTER_SZIP 1) + set (H5_HAVE_SZLIB_H 1) + set (H5_HAVE_LIBSZ 1) + set (SZIP_INCLUDE_DIR_GEN ${SZIP_INCLUDE_DIR}) + set (SZIP_INCLUDE_DIRS ${SZIP_INCLUDE_DIRS} ${SZIP_INCLUDE_DIR}) + set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_LIBRARIES}) endif () endif () - endif () - if (SZIP_FOUND) - set (H5_HAVE_FILTER_SZIP 1) - set (H5_HAVE_SZLIB_H 1) - set (H5_HAVE_LIBSZ 1) - set (SZIP_INCLUDE_DIR_GEN ${SZIP_INCLUDE_DIR}) - set (SZIP_INCLUDE_DIRS ${SZIP_INCLUDE_DIRS} ${SZIP_INCLUDE_DIR}) else () - if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ") + if (BUILD_SZIP_WITH_FETCHCONTENT) + # Only tgz files available + ORIGINAL_SZIP_LIBRARY ("TGZ" ${HDF5_ENABLE_SZIP_ENCODING}) + set (H5_HAVE_FILTER_SZIP 1) + set (H5_HAVE_SZLIB_H 1) + set (H5_HAVE_LIBSZ 1) + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "SZIP is built from fetch content") + endif () + if (USE_LIBAEC) + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "... 
with library AEC") + endif () + set (SZIP_PACKAGE_NAME ${LIBAEC_PACKAGE_NAME}) + else () + set (SZIP_PACKAGE_NAME ${SZIP_PACKAGE_NAME}) + endif () + set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_STATIC_LIBRARY}) + elseif (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ") EXTERNAL_SZIP_LIBRARY (${HDF5_ALLOW_EXTERNAL_SUPPORT} ${HDF5_ENABLE_SZIP_ENCODING}) set (H5_HAVE_FILTER_SZIP 1) set (H5_HAVE_SZLIB_H 1) @@ -153,11 +208,11 @@ if (HDF5_ENABLE_SZIP_SUPPORT) else () set (SZIP_PACKAGE_NAME ${SZIP_PACKAGE_NAME}) endif () + set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_STATIC_LIBRARY}) else () message (FATAL_ERROR "SZIP is Required for SZIP support in HDF5") endif () endif () - set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_STATIC_LIBRARY}) INCLUDE_DIRECTORIES (${SZIP_INCLUDE_DIRS}) if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") message (VERBOSE "Filter SZIP is ON") diff --git a/CMakeLists.txt b/CMakeLists.txt index 73bf6bf4b53..b038da03354 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -12,6 +12,11 @@ if (POLICY CMP0083) cmake_policy (SET CMP0083 NEW) endif () +# Avoid warning about DOWNLOAD_EXTRACT_TIMESTAMP in CMake 3.24: +if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.24.0") + cmake_policy(SET CMP0135 NEW) +endif() + #----------------------------------------------------------------------------- # Instructions for use : Normal Build # @@ -800,11 +805,11 @@ option (HDF5_PACKAGE_EXTLIBS "CPACK - include external libraries" OFF) if (NOT HDF5_EXTERNALLY_CONFIGURED) if (HDF5_PACKAGE_EXTLIBS) set (HDF5_NO_PACKAGES OFF CACHE BOOL "CPACK - Disable packaging" FORCE) - if (HDF5_ENABLE_Z_LIB_SUPPORT AND ZLIB_FOUND) + if (HDF5_ENABLE_Z_LIB_SUPPORT AND ZLIB_FOUND AND NOT BUILD_ZLIB_WITH_FETCHCONTENT) PACKAGE_ZLIB_LIBRARY (${HDF5_ALLOW_EXTERNAL_SUPPORT}) endif () - if (HDF5_ENABLE_SZIP_SUPPORT AND SZIP_FOUND) + if (HDF5_ENABLE_SZIP_SUPPORT AND SZIP_FOUND AND NOT BUILD_SZIP_WITH_FETCHCONTENT) PACKAGE_SZIP_LIBRARY (${HDF5_ALLOW_EXTERNAL_SUPPORT}) 
endif () endif () @@ -889,20 +894,12 @@ endif () add_subdirectory (src) if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ") - if (ZLIB_FOUND AND ZLIB_USE_EXTERNAL) - if (NOT ONLY_SHARED_LIBS) - add_dependencies (${HDF5_LIB_TARGET} HDF5_ZLIB) - endif () - if (BUILD_SHARED_LIBS) - add_dependencies (${HDF5_LIBSH_TARGET} HDF5_ZLIB) - endif () - endif () - if (SZIP_FOUND AND SZIP_USE_EXTERNAL) + if ((ZLIB_FOUND AND ZLIB_USE_EXTERNAL) OR (SZIP_FOUND AND SZIP_USE_EXTERNAL)) if (NOT ONLY_SHARED_LIBS) - add_dependencies (${HDF5_LIB_TARGET} SZIP) + add_dependencies (${HDF5_LIB_TARGET} ${LINK_COMP_LIBS}) endif () if (BUILD_SHARED_LIBS) - add_dependencies (${HDF5_LIBSH_TARGET} SZIP) + add_dependencies (${HDF5_LIBSH_TARGET} ${LINK_COMP_LIBS}) endif () endif () endif () diff --git a/config/cmake/LIBAEC/CMakeLists.txt b/config/cmake/LIBAEC/CMakeLists.txt new file mode 100644 index 00000000000..212c9bfe886 --- /dev/null +++ b/config/cmake/LIBAEC/CMakeLists.txt @@ -0,0 +1,519 @@ +cmake_minimum_required (VERSION 3.10) +PROJECT (LIBAEC C) + +#----------------------------------------------------------------------------- +# Basic LIBAEC stuff here +#----------------------------------------------------------------------------- +set(CMAKE_C_STANDARD 99) + +set (LIBAEC_PACKAGE_EXT ${HDF_PACKAGE_EXT}) +set (HDF_USE_GNU_DIRS ${HDF5_USE_GNU_DIRS}) +set (CMAKE_OSX_ARCHITECTURES ${CMAKE_OSX_ARCHITECTURES}) +set (CMAKE_TOOLCHAIN_FILE ${CMAKE_TOOLCHAIN_FILE}) +set (PACKAGE_NAMESPACE ${HDF_PACKAGE_NAMESPACE}) +if (MINGW) + set (WINDOWS 1) # MinGW tries to imitate Windows +endif () +if (WINDOWS) + set (HAVE_SYS_STAT_H 1) + set (HAVE_SYS_TYPES_H 1) +endif () + +if (NOT WINDOWS) + TEST_BIG_ENDIAN (WORDS_BIGENDIAN) +endif () + +# Check for __builtin_clzll for faster decoding +check_c_source_compiles( + "int main(void)\n{return __builtin_clzll(1LL);}" + HAVE_DECL___BUILTIN_CLZLL) + +if(NOT HAVE_DECL___BUILTIN_CLZLL) + # With MSVC we can use _BitScanReverse64 + 
check_c_source_compiles( + "int main(void){unsigned long foo; unsigned __int64 bar=1LL; +return _BitScanReverse64(&foo, bar);}" + HAVE_BSR64) +endif() + +#----------------------------------------------------------------------------- +# Define some CMake variables for use later in the project +#----------------------------------------------------------------------------- +set (LIBAEC_RESOURCES_DIR ${HDF_RESOURCES_DIR}/LIBAEC) +set (LIBAEC_SRC_DIR ${LIBAEC_SOURCE_DIR}/src) +set (LIBAEC_INC_DIR ${LIBAEC_SOURCE_DIR}/include) + +#----------------------------------------------------------------------------- +# Set the core names of all the libraries +#----------------------------------------------------------------------------- +set (LIBAEC_LIB_CORENAME "aec") +set (SZIP_LIB_CORENAME "szaec") + +#----------------------------------------------------------------------------- +# Set the true names of all the libraries if customized by external project +#----------------------------------------------------------------------------- +set (LIBAEC_LIB_NAME "${LIBAEC_EXTERNAL_LIB_PREFIX}${LIBAEC_LIB_CORENAME}") +set (SZIP_LIB_NAME "${LIBAEC_EXTERNAL_LIB_PREFIX}${SZIP_LIB_CORENAME}") + +#----------------------------------------------------------------------------- +# Set the target names of all the libraries +#----------------------------------------------------------------------------- +set (LIBAEC_LIB_TARGET "${LIBAEC_LIB_CORENAME}-static") +set (SZIP_LIB_TARGET "${SZIP_LIB_CORENAME}-static") + +set (libaec_VERS_MAJOR 1) +set (libaec_VERS_MINOR 0) +set (libaec_VERS_RELEASE 6) + +#----------------------------------------------------------------------------- +set (LIBAEC_PACKAGE "libaec") +set (LIBAEC_PACKAGE_NAME "LIBAEC") +set (LIBAEC_PACKAGE_VERSION "${libaec_VERS_MAJOR}.${libaec_VERS_MINOR}") +set (LIBAEC_PACKAGE_VERSION_MAJOR "${libaec_VERS_MAJOR}.${libaec_VERS_MINOR}") +set (LIBAEC_PACKAGE_VERSION_MINOR "${libaec_VERS_RELEASE}") +set (LIBAEC_PACKAGE_STRING 
"${LIBAEC_PACKAGE_NAME} ${LIBAEC_PACKAGE_VERSION}") +set (LIBAEC_PACKAGE_TARNAME "${LIBAEC_PACKAGE_NAME}${LIBAEC_PACKAGE_EXT}") +set (LIBAEC_PACKAGE_URL "http://www.hdfgroup.org") +set (LIBAEC_PACKAGE_BUGREPORT "help@hdfgroup.org") +set (LIBAEC_PACKAGE_SOVERSION "${libaec_VERS_MAJOR}.${libaec_VERS_MINOR}.${libaec_VERS_RELEASE}") +set (LIBAEC_PACKAGE_SOVERSION_MAJOR "${libaec_VERS_MAJOR}") + + +HDF_DIR_PATHS(${LIBAEC_PACKAGE_NAME}) + +#----------------------------------------------------------------------------- +# Targets built within this project are exported at Install time for use +# by other projects +#----------------------------------------------------------------------------- +if (NOT LIBAEC_EXPORTED_TARGETS) + set (LIBAEC_EXPORTED_TARGETS "libaec-targets") +endif () + +#----------------------------------------------------------------------------- +# To include a library in the list exported by the project AT BUILD TIME, +# add it to this variable. This is NOT used by Make Install, but for projects +# which include SZIP as a sub-project within their build tree +#----------------------------------------------------------------------------- +set_global_variable (LIBAEC_LIBRARIES_TO_EXPORT "") + +#----------------------------------------------------------------------------- +# Mac OS X Options +#----------------------------------------------------------------------------- +if (LIBAEC_BUILD_FRAMEWORKS AND NOT BUILD_SHARED_LIBS) + set (BUILD_SHARED_LIBS ON CACHE BOOL "Build Shared Libraries") +endif () + +set (CMAKE_POSITION_INDEPENDENT_CODE ON) + +#----------------------------------------------------------------------------- +# When building utility executables that generate other (source) files : +# we make use of the following variables defined in the root CMakeLists. 
+# Certain systems may add /Debug or /Release to output paths +# and we need to call the executable from inside the CMake configuration +#----------------------------------------------------------------------------- +set (EXE_EXT "") +if (WIN32) + set (EXE_EXT ".exe") + add_definitions (-D_BIND_TO_CURRENT_VCLIBS_VERSION=1) + add_definitions (-D_CRT_SECURE_NO_WARNINGS) + add_definitions (-D_CONSOLE) +endif () + +if (MSVC) + set (CMAKE_MFC_FLAG 0) +endif () + +set (MAKE_SYSTEM) +if (CMAKE_BUILD_TOOL MATCHES "make") + set (MAKE_SYSTEM 1) +endif () + +set (CFG_INIT "/${CMAKE_CFG_INTDIR}") +if (MAKE_SYSTEM) + set (CFG_INIT "") +endif () + +#----------------------------------------------------------------------------- +# Compiler specific flags : Shouldn't there be compiler tests for these +#----------------------------------------------------------------------------- +if (CMAKE_C_COMPILER_ID MATCHES "IntelLLVM" OR CMAKE_C_COMPILER_ID MATCHES "[Cc]lang") + set (CMAKE_C_FLAGS "${CMAKE_ANSI_CFLAGS} ${CMAKE_C_FLAGS} -Wno-deprecated-non-prototype") +endif () + +#----------------------------------------------------------------------------- +# This is in here to help some of the GCC based IDES like Eclipse +# and code blocks parse the compiler errors and warnings better. 
+#----------------------------------------------------------------------------- +if (CMAKE_COMPILER_IS_GNUCC) + set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fmessage-length=0") +endif () + +#----------------------------------------------------------------------------- +# Generate the aec_config.h file containing user settings needed by compilation +#----------------------------------------------------------------------------- +configure_file (${LIBAEC_RESOURCES_DIR}/config.h.in ${CMAKE_CURRENT_BINARY_DIR}/config.h) + +#----------------------------------------------------------------------------- +# All libs/tests/examples need the main include directories +#----------------------------------------------------------------------------- +INCLUDE_DIRECTORIES (${LIBAEC_BINARY_DIR} ${LIBAEC_SOURCE_DIR}/src ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}) + +#----------------------------------------------------------------------------- +# Define LIBAEC Library +#----------------------------------------------------------------------------- +set(LIBAEC_SRCS + ${LIBAEC_SRC_DIR}/encode.c + ${LIBAEC_SRC_DIR}/encode_accessors.c + ${LIBAEC_SRC_DIR}/decode.c +) + +set (LIBAEC_PUBLIC_HEADERS + ${LIBAEC_INC_DIR}/libaec.h +) + +add_library (${LIBAEC_LIB_TARGET} STATIC ${LIBAEC_SRCS} ${LIBAEC_PUBLIC_HEADERS}) +target_include_directories (${LIBAEC_LIB_TARGET} PUBLIC + "$" + "$" + "$" + "$") +TARGET_C_PROPERTIES (${LIBAEC_LIB_TARGET} STATIC) +target_link_libraries (${LIBAEC_LIB_TARGET} PRIVATE ${LINK_LIBS}) +H5_SET_LIB_OPTIONS (${LIBAEC_LIB_TARGET} ${LIBAEC_LIB_NAME} STATIC 0) +set_target_properties (${LIBAEC_LIB_TARGET} PROPERTIES + VERSION 0.0.12 SOVERSION 0 + PUBLIC_HEADER "${LIBAEC_PUBLIC_HEADERS}" + LINKER_LANGUAGE C + INTERFACE_INCLUDE_DIRECTORIES "$/include>" +) +set_global_variable (LIBAEC_LIBRARIES_TO_EXPORT ${LIBAEC_LIB_TARGET}) +set (install_targets ${LIBAEC_LIB_TARGET}) + +set(SZIP_SRCS + ${LIBAEC_SRC_DIR}/sz_compat.c +) + +set (SZIP_PUBLIC_HEADERS + ${LIBAEC_INC_DIR}/szlib.h +) + +add_library 
(${SZIP_LIB_TARGET} STATIC ${SZIP_SRCS} ${SZIP_PUBLIC_HEADERS}) +target_include_directories (${SZIP_LIB_TARGET} PUBLIC "${LIBAEC_SOURCE_DIR};${LIBAEC_SOURCE_DIR}/include;${CMAKE_BINARY_DIR}") +TARGET_C_PROPERTIES (${SZIP_LIB_TARGET} STATIC) +target_link_libraries (${SZIP_LIB_TARGET} PRIVATE ${LIBAEC_LIB_TARGET}) +H5_SET_LIB_OPTIONS (${SZIP_LIB_TARGET} ${SZIP_LIB_NAME} STATIC 0) +set_target_properties (${SZIP_LIB_TARGET} PROPERTIES + VERSION 2.0.1 SOVERSION 2 + PUBLIC_HEADER "${SZIP_PUBLIC_HEADERS}" + LINKER_LANGUAGE C + INTERFACE_INCLUDE_DIRECTORIES "$/include>" +) +set_global_variable (LIBAEC_LIBRARIES_TO_EXPORT "${LIBAEC_LIBRARIES_TO_EXPORT};${SZIP_LIB_TARGET}") +set (install_targets ${install_targets} ${SZIP_LIB_TARGET}) + +#----------------------------------------------------------------------------- +# Add Target(s) to CMake Install for import into other projects +#----------------------------------------------------------------------------- +if (LIBAEC_EXPORTED_TARGETS) + INSTALL_TARGET_PDB (${LIBAEC_LIB_TARGET} ${LIBAEC_INSTALL_BIN_DIR} libraries) + + install ( + TARGETS + ${install_targets} + EXPORT + ${LIBAEC_EXPORTED_TARGETS} + LIBRARY DESTINATION ${LIBAEC_INSTALL_LIB_DIR} COMPONENT libraries + ARCHIVE DESTINATION ${LIBAEC_INSTALL_LIB_DIR} COMPONENT libraries + RUNTIME DESTINATION ${LIBAEC_INSTALL_BIN_DIR} COMPONENT libraries + FRAMEWORK DESTINATION ${LIBAEC_INSTALL_FWRK_DIR} COMPONENT libraries + PUBLIC_HEADER DESTINATION ${LIBAEC_INSTALL_INCLUDE_DIR} COMPONENT headers + ) +endif () + +include (CMakePackageConfigHelpers) + +#----------------------------------------------------------------------------- +# Check for Installation Utilities +#----------------------------------------------------------------------------- +if (WIN32) + set (PF_ENV_EXT "(x86)") + find_program (NSIS_EXECUTABLE NSIS.exe PATHS "$ENV{ProgramFiles}\\NSIS" "$ENV{ProgramFiles${PF_ENV_EXT}}\\NSIS") + if(NOT CPACK_WIX_ROOT) + file(TO_CMAKE_PATH "$ENV{WIX}" CPACK_WIX_ROOT) + endif () + 
find_program (WIX_EXECUTABLE candle PATHS "${CPACK_WIX_ROOT}/bin") +endif () + +#----------------------------------------------------------------------------- +# Add file(s) to CMake Install +#----------------------------------------------------------------------------- +#if (NOT LIBAEC_INSTALL_NO_DEVELOPMENT) +# install ( +# FILES ${PROJECT_BINARY_DIR}/aec_config.h +# DESTINATION ${LIBAEC_INSTALL_INCLUDE_DIR} +# COMPONENT headers +# ) +#endif () + +#----------------------------------------------------------------------------- +# Add Target(s) to CMake Install for import into other projects +#----------------------------------------------------------------------------- +if (NOT LIBAEC_EXTERNALLY_CONFIGURED) + install ( + EXPORT ${LIBAEC_EXPORTED_TARGETS} + DESTINATION ${LIBAEC_INSTALL_CMAKE_DIR} + FILE ${LIBAEC_PACKAGE}${LIBAEC_PACKAGE_EXT}-targets.cmake + NAMESPACE ${PACKAGE_NAMESPACE} + COMPONENT configinstall + ) +endif () + +#----------------------------------------------------------------------------- +# Export all exported targets to the build tree for use by parent project +#----------------------------------------------------------------------------- +if (NOT LIBAEC_EXTERNALLY_CONFIGURED) + export ( + TARGETS ${LIBAEC_LIBRARIES_TO_EXPORT} ${LIBAEC_LIB_DEPENDENCIES} + FILE ${LIBAEC_PACKAGE}${LIBAEC_PACKAGE_EXT}-targets.cmake + NAMESPACE ${PACKAGE_NAMESPACE} + ) + export (PACKAGE ${LIBAEC_PACKAGE}${LIBAEC_PACKAGE_EXT}) +endif () + +#----------------------------------------------------------------------------- +# Set includes needed for build +#----------------------------------------------------------------------------- +set (LIBAEC_INCLUDES_BUILD_TIME + ${LIBAEC_SRC_DIR} ${LIBAEC_INC_DIR} ${LIBAEC_BINARY_DIR} +) + +#----------------------------------------------------------------------------- +# Set variables needed for installation +#----------------------------------------------------------------------------- +set (LIBAEC_VERSION_STRING 
${LIBAEC_PACKAGE_VERSION}) +set (LIBAEC_VERSION_MAJOR ${LIBAEC_PACKAGE_VERSION_MAJOR}) +set (LIBAEC_VERSION_MINOR ${LIBAEC_PACKAGE_VERSION_MINOR}) + +#----------------------------------------------------------------------------- +# Configure the libaec-config.cmake file for the build directory +#----------------------------------------------------------------------------- +set (INCLUDE_INSTALL_DIR ${LIBAEC_INSTALL_INCLUDE_DIR}) +set (SHARE_INSTALL_DIR "${CMAKE_CURRENT_BINARY_DIR}/${LIBAEC_INSTALL_CMAKE_DIR}" ) +set (CURRENT_BUILD_DIR "${CMAKE_CURRENT_BINARY_DIR}" ) +configure_package_config_file ( + ${LIBAEC_RESOURCES_DIR}/libaec-config.cmake.in + "${LIBAEC_BINARY_DIR}/${LIBAEC_PACKAGE}${LIBAEC_PACKAGE_EXT}-config.cmake" + INSTALL_DESTINATION "${LIBAEC_INSTALL_CMAKE_DIR}" + PATH_VARS INCLUDE_INSTALL_DIR SHARE_INSTALL_DIR CURRENT_BUILD_DIR + INSTALL_PREFIX "${CMAKE_CURRENT_BINARY_DIR}" +) + +#----------------------------------------------------------------------------- +# Configure the libaec-config.cmake file for the install directory +#----------------------------------------------------------------------------- +set (INCLUDE_INSTALL_DIR ${LIBAEC_INSTALL_INCLUDE_DIR}) +set (SHARE_INSTALL_DIR "${CMAKE_INSTALL_PREFIX}/${LIBAEC_INSTALL_CMAKE_DIR}" ) +set (CURRENT_BUILD_DIR "${CMAKE_INSTALL_PREFIX}") +configure_package_config_file ( + ${LIBAEC_RESOURCES_DIR}/libaec-config.cmake.in + "${LIBAEC_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/${LIBAEC_PACKAGE}${LIBAEC_PACKAGE_EXT}-config.cmake" + INSTALL_DESTINATION "${LIBAEC_INSTALL_CMAKE_DIR}" + PATH_VARS INCLUDE_INSTALL_DIR SHARE_INSTALL_DIR CURRENT_BUILD_DIR +) +if (NOT LIBAEC_EXTERNALLY_CONFIGURED) + install ( + FILES ${LIBAEC_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/${LIBAEC_PACKAGE}${LIBAEC_PACKAGE_EXT}-config.cmake + DESTINATION ${LIBAEC_INSTALL_CMAKE_DIR} + COMPONENT configinstall + ) +endif () + +#----------------------------------------------------------------------------- +# Configure the libaec-config-version.cmake file for 
the install directory +#----------------------------------------------------------------------------- +if (NOT LIBAEC_EXTERNALLY_CONFIGURED) + configure_file ( + ${LIBAEC_RESOURCES_DIR}/libaec-config-version.cmake.in + ${LIBAEC_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/${LIBAEC_PACKAGE}${LIBAEC_PACKAGE_EXT}-config-version.cmake @ONLY + ) + install ( + FILES ${LIBAEC_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/${LIBAEC_PACKAGE}${LIBAEC_PACKAGE_EXT}-config-version.cmake + DESTINATION ${LIBAEC_INSTALL_CMAKE_DIR} + COMPONENT configinstall + ) +endif () + +#----------------------------------------------------------------------------- +# Add Document File(s) to CMake Install +#----------------------------------------------------------------------------- +if (NOT LIBAEC_EXTERNALLY_CONFIGURED) + install ( + FILES + ${LIBAEC_SOURCE_DIR}/README.md + ${LIBAEC_SOURCE_DIR}/INSTALL.md + ${LIBAEC_SOURCE_DIR}/CHANGELOG.md + DESTINATION ${LIBAEC_INSTALL_DATA_DIR} + COMPONENT documents + ) +endif () + +#----------------------------------------------------------------------------- +# Check for Installation Utilities +#----------------------------------------------------------------------------- +if (WIN32) + set (PF_ENV_EXT "(x86)") + find_program (NSIS_EXECUTABLE NSIS.exe PATHS "$ENV{ProgramFiles}\\NSIS" "$ENV{ProgramFiles${PF_ENV_EXT}}\\NSIS") + if(NOT CPACK_WIX_ROOT) + file(TO_CMAKE_PATH "$ENV{WIX}" CPACK_WIX_ROOT) + endif() + find_program (WIX_EXECUTABLE candle PATHS "${CPACK_WIX_ROOT}/bin") +endif () + +#----------------------------------------------------------------------------- +# Set the cpack variables +#----------------------------------------------------------------------------- +if (NOT LIBAEC_EXTERNALLY_CONFIGURED) + set (CPACK_PACKAGE_VENDOR "HDF_Group") + set (CPACK_PACKAGE_NAME "${LIBAEC_PACKAGE_NAME}") + if (CDASH_LOCAL) + set (CPACK_PACKAGE_VERSION "${LIBAEC_PACKAGE_VERSION}") + else () + set (CPACK_PACKAGE_VERSION "${LIBAEC_PACKAGE_VERSION_STRING}") + endif () + set 
(CPACK_PACKAGE_VERSION_MAJOR "${LIBAEC_PACKAGE_VERSION_MAJOR}") + set (CPACK_PACKAGE_VERSION_MINOR "${LIBAEC_PACKAGE_VERSION_MINOR}") + set (CPACK_PACKAGE_VERSION_PATCH "") + set (CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_SOURCE_DIR}/LICENSE.txt") + set (CPACK_PACKAGE_DESCRIPTION_FILE "${CMAKE_CURRENT_SOURCE_DIR}/README.SZIP") + set (CPACK_RESOURCE_FILE_README "${CMAKE_CURRENT_SOURCE_DIR}/README.md") + set (CPACK_PACKAGE_RELOCATABLE TRUE) + set (CPACK_PACKAGE_DESCRIPTION_SUMMARY "libaec - Adaptive Entropy Coding library by Deutsches Klimarechenzentrum GmbH") + set (CPACK_PACKAGE_INSTALL_DIRECTORY "${CPACK_PACKAGE_VENDOR}/${CPACK_PACKAGE_NAME}/${CPACK_PACKAGE_VERSION}") + + set (CPACK_GENERATOR "TGZ") + if (WIN32) + set (CPACK_GENERATOR "ZIP") + + if (NSIS_EXECUTABLE) + list (APPEND CPACK_GENERATOR "NSIS") + endif () + # Installers for 32- vs. 64-bit CMake: + # - Root install directory (displayed to end user at installer-run time) + # - "NSIS package/display name" (text used in the installer GUI) + # - Registry key used to store info about the installation + set (CPACK_NSIS_PACKAGE_NAME "${LIBAEC_PACKAGE_STRING}") + if (CMAKE_CL_64) + set (CPACK_NSIS_INSTALL_ROOT "$PROGRAMFILES64") + set (CPACK_PACKAGE_INSTALL_REGISTRY_KEY "${CPACK_PACKAGE_NAME}-${CPACK_PACKAGE_VERSION} (Win64)") + else () + set (CPACK_NSIS_INSTALL_ROOT "$PROGRAMFILES") + set (CPACK_PACKAGE_INSTALL_REGISTRY_KEY "${CPACK_PACKAGE_NAME}-${CPACK_PACKAGE_VERSION}") + endif () + set (CPACK_NSIS_DISPLAY_NAME "${CPACK_NSIS_PACKAGE_NAME}") + set (CPACK_PACKAGE_INSTALL_DIRECTORY "${CPACK_PACKAGE_VENDOR}\\\\${CPACK_PACKAGE_NAME}\\\\${CPACK_PACKAGE_VERSION}") + set (CPACK_NSIS_CONTACT "${LIBAEC_PACKAGE_BUGREPORT}") + set (CPACK_NSIS_MODIFY_PATH ON) + set (CPACK_NSIS_PACKAGE_NAME "LIBAEC ${LIBAEC_PACKAGE_VERSION}") + if (WIX_EXECUTABLE) + list (APPEND CPACK_GENERATOR "WIX") + endif () +#WiX variables + set (CPACK_WIX_UNINSTALL "1") + set (CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_SOURCE_DIR}/LICENSE.txt") 
+ elseif (APPLE) + list (APPEND CPACK_GENERATOR "STGZ") + list (APPEND CPACK_GENERATOR "DragNDrop") + set (CPACK_COMPONENTS_ALL_IN_ONE_PACKAGE ON) + set (CPACK_PACKAGING_INSTALL_PREFIX "/${CPACK_PACKAGE_INSTALL_DIRECTORY}") + #set (CPACK_PACKAGE_ICON "${LIBAEC_RESOURCES_DIR}/hdf.icns") + + option (LIBAEC_PACK_MACOSX_FRAMEWORK "Package the LIBAEC Library in a Framework" OFF) + if (LIBAEC_PACK_MACOSX_FRAMEWORK AND LIBAEC_BUILD_FRAMEWORKS) + set (CPACK_BUNDLE_NAME "${LIBAEC_PACKAGE_STRING}") + set (CPACK_BUNDLE_LOCATION "/") # make sure CMAKE_INSTALL_PREFIX ends in / + set (CMAKE_INSTALL_PREFIX "/${CPACK_BUNDLE_NAME}.framework/Versions/${CPACK_PACKAGE_VERSION}/${CPACK_PACKAGE_NAME}/") + set (CPACK_SHORT_VERSION_STRING "${CPACK_PACKAGE_VERSION}") + #----------------------------------------------------------------------------- + # Configure the Info.plist file for the install bundle + #----------------------------------------------------------------------------- + configure_file ( + ${LIBAEC_RESOURCES_DIR}/CPack.Info.plist.in + ${LIBAEC_BINARY_DIR}/CMakeFiles/Info.plist @ONLY + ) + configure_file ( + ${LIBAEC_RESOURCES_DIR}/PkgInfo.in + ${LIBAEC_BINARY_DIR}/CMakeFiles/PkgInfo @ONLY + ) + install ( + FILES ${LIBAEC_BINARY_DIR}/CMakeFiles/PkgInfo + DESTINATION .. 
+ ) + endif () + else () + list (APPEND CPACK_GENERATOR "STGZ") + set (CPACK_PACKAGING_INSTALL_PREFIX "/${CPACK_PACKAGE_INSTALL_DIRECTORY}") + set (CPACK_COMPONENTS_ALL_IN_ONE_PACKAGE ON) + + set (CPACK_DEBIAN_PACKAGE_SECTION "Libraries") + set (CPACK_DEBIAN_PACKAGE_MAINTAINER "${LIBAEC_PACKAGE_BUGREPORT}") + +# list (APPEND CPACK_GENERATOR "RPM") + set (CPACK_RPM_PACKAGE_RELEASE "1") + set (CPACK_RPM_COMPONENT_INSTALL ON) + set (CPACK_RPM_PACKAGE_RELOCATABLE ON) + endif () + + # By default, do not warn when built on machines using only VS Express: + if (NOT DEFINED CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS_NO_WARNINGS) + set (CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS_NO_WARNINGS ON) + endif () + include (InstallRequiredSystemLibraries) + + set (CPACK_INSTALL_CMAKE_PROJECTS "${LIBAEC_BINARY_DIR};LIBAEC;libraries;/") + set (CPACK_INSTALL_CMAKE_PROJECTS "${LIBAEC_BINARY_DIR};LIBAEC;headers;/") + set (CPACK_INSTALL_CMAKE_PROJECTS "${LIBAEC_BINARY_DIR};LIBAEC;configinstall;/") + + set (CPACK_ALL_INSTALL_TYPES Full Developer User) + set (CPACK_INSTALL_TYPE_FULL_DISPLAY_NAME "Everything") + + set(CPACK_COMPONENTS_ALL libraries headers documents configinstall) + + include (CPack) + + cpack_add_component_group(Runtime) + + cpack_add_component_group(Documents + EXPANDED + DESCRIPTION "Release notes for libaec" + ) + + cpack_add_component_group(Development + EXPANDED + DESCRIPTION "All of the tools you'll need to develop applications" + ) + + cpack_add_component (libraries + DISPLAY_NAME "LIBAEC Libraries" + REQUIRED + GROUP Runtime + INSTALL_TYPES Full Developer User + ) + cpack_add_component (headers + DISPLAY_NAME "LIBAEC Headers" + DEPENDS libraries + GROUP Development + INSTALL_TYPES Full Developer + ) + cpack_add_component (documents + DISPLAY_NAME "LIBAEC Documents" + GROUP Documents + INSTALL_TYPES Full Developer + ) + cpack_add_component (configinstall + DISPLAY_NAME "LIBAEC CMake files" + DEPENDS libraries + GROUP Development + INSTALL_TYPES Full Developer User + ) + +endif () 
diff --git a/config/cmake/LIBAEC/CPack.Info.plist.in b/config/cmake/LIBAEC/CPack.Info.plist.in new file mode 100644 index 00000000000..08d371bd5d9 --- /dev/null +++ b/config/cmake/LIBAEC/CPack.Info.plist.in @@ -0,0 +1,26 @@ +<?xml version="1.0" encoding="UTF-8"?> +<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> +<plist version="1.0"> +<dict> + <key>CFBundleDevelopmentRegion</key> + <string>English</string> + <key>CFBundleExecutable</key> + <string>@CPACK_PACKAGE_FILE_NAME@</string> + <key>CFBundleIconFile</key> + <string>@CPACK_BUNDLE_ICON@</string> + <key>CFBundleIdentifier</key> + <string>org.@CPACK_PACKAGE_VENDOR@.@CPACK_PACKAGE_NAME@@CPACK_MODULE_VERSION_SUFFIX@</string> + <key>CFBundleInfoDictionaryVersion</key> + <string>6.0</string> + <key>CFBundlePackageType</key> + <string>FMWK</string> + <key>CFBundleSignature</key> + <string>????</string> + <key>CFBundleVersion</key> + <string>@CPACK_PACKAGE_VERSION@</string> + <key>CFBundleShortVersionString</key> + <string>@CPACK_SHORT_VERSION_STRING@</string> + <key>CSResourcesFileMapped</key> + <true/> +</dict> +</plist> diff --git a/config/cmake/LIBAEC/config.h.in b/config/cmake/LIBAEC/config.h.in new file mode 100644 index 00000000000..04425480729 --- /dev/null +++ b/config/cmake/LIBAEC/config.h.in @@ -0,0 +1,36 @@ +#cmakedefine WORDS_BIGENDIAN +#cmakedefine01 HAVE_DECL___BUILTIN_CLZLL +#cmakedefine01 HAVE_BSR64 +#cmakedefine HAVE_SNPRINTF +#cmakedefine HAVE__SNPRINTF +#cmakedefine HAVE__SNPRINTF_S + +/* Name of package */ +#cmakedefine LIBAEC_PACKAGE "@LIBAEC_PACKAGE@" + +/* Define to the address where bug reports for this package should be sent. */ +#cmakedefine LIBAEC_PACKAGE_BUGREPORT "@LIBAEC_PACKAGE_BUGREPORT@" + +/* Define to the full name of this package. */ +#cmakedefine LIBAEC_PACKAGE_NAME "@LIBAEC_PACKAGE_NAME@" + +/* Define to the full name and version of this package. */ +#cmakedefine LIBAEC_PACKAGE_STRING "@LIBAEC_PACKAGE_STRING@" + +/* Define to the one symbol short name of this package. */ +#cmakedefine LIBAEC_PACKAGE_TARNAME "@LIBAEC_PACKAGE_TARNAME@" + +/* Define to the version of this package. */ +#cmakedefine LIBAEC_PACKAGE_VERSION "@LIBAEC_PACKAGE_VERSION@" + +/* Define to 1 if you have the ANSI C header files. 
*/ +#cmakedefine STDC_HEADERS @STDC_HEADERS@ + +/* Version number of package */ +#define VERSION "@LIBAEC_PACKAGE_VERSION@" + +/* Define to empty if `const' does not conform to ANSI C. */ +#cmakedefine const + +/* Define to `unsigned int' if <sys/types.h> does not define. */ +#cmakedefine size_t diff --git a/config/cmake/LIBAEC/libaec-config-version.cmake.in b/config/cmake/LIBAEC/libaec-config-version.cmake.in new file mode 100644 index 00000000000..4f0e7ae9ec7 --- /dev/null +++ b/config/cmake/LIBAEC/libaec-config-version.cmake.in @@ -0,0 +1,42 @@ +#----------------------------------------------------------------------------- +# LIBAEC Version file for install directory +#----------------------------------------------------------------------------- + +set (PACKAGE_VERSION "@LIBAEC_VERSION_STRING@") + +if("${PACKAGE_VERSION}" VERSION_LESS "${PACKAGE_FIND_VERSION}" ) + set(PACKAGE_VERSION_COMPATIBLE FALSE) +else() + if ("${PACKAGE_FIND_VERSION_MAJOR}" STREQUAL "@LIBAEC_VERSION_MAJOR@") + + # exact match for version @LIBAEC_VERSION_MAJOR@.@LIBAEC_VERSION_MINOR@ + if ("${PACKAGE_FIND_VERSION_MINOR}" STREQUAL "@LIBAEC_VERSION_MINOR@") + + # compatible with any version @LIBAEC_VERSION_MAJOR@.@LIBAEC_VERSION_MINOR@.x + set (PACKAGE_VERSION_COMPATIBLE TRUE) + + if ("${PACKAGE_FIND_VERSION_PATCH}" STREQUAL "@LIBAEC_VERSION_RELEASE@") + set (PACKAGE_VERSION_EXACT TRUE) + + if ("${PACKAGE_FIND_VERSION_TWEAK}" STREQUAL "@LIBAEC_VERSION_SUBRELEASE@") + # not using this yet + endif () + endif () + else () + set (PACKAGE_VERSION_COMPATIBLE FALSE) + endif () + endif () +endif () + +# if the installed or the using project don't have CMAKE_SIZEOF_VOID_P set, ignore it: +if("${CMAKE_SIZEOF_VOID_P}" STREQUAL "" OR "@CMAKE_SIZEOF_VOID_P@" STREQUAL "") + return() +endif() + +# check that the installed version has the same 32/64bit-ness as the one which is currently searching: +if(NOT "${CMAKE_SIZEOF_VOID_P}" STREQUAL "@CMAKE_SIZEOF_VOID_P@") + math(EXPR installedBits "@CMAKE_SIZEOF_VOID_P@ * 8") + 
set(PACKAGE_VERSION "${PACKAGE_VERSION} (${installedBits}bit)") + set(PACKAGE_VERSION_UNSUITABLE TRUE) +endif() + diff --git a/config/cmake/LIBAEC/libaec-config.cmake.in b/config/cmake/LIBAEC/libaec-config.cmake.in new file mode 100644 index 00000000000..36a116a67f0 --- /dev/null +++ b/config/cmake/LIBAEC/libaec-config.cmake.in @@ -0,0 +1,59 @@ +#----------------------------------------------------------------------------- +# LIBAEC Config file for compiling against LIBAEC build directory +#----------------------------------------------------------------------------- +@PACKAGE_INIT@ + +string(TOUPPER @LIBAEC_PACKAGE@ LIBAEC_PACKAGE_NAME) + +set (${LIBAEC_PACKAGE_NAME}_VALID_COMPONENTS static shared) + +#----------------------------------------------------------------------------- +# User Options +#----------------------------------------------------------------------------- +set (${LIBAEC_PACKAGE_NAME}_ENABLE_ENCODING @LIBAEC_ENABLE_ENCODING@) +set (${LIBAEC_PACKAGE_NAME}_BUILD_SHARED_LIBS @BUILD_SHARED_LIBS@) +set (${LIBAEC_PACKAGE_NAME}_EXPORT_LIBRARIES @LIBAEC_LIBRARIES_TO_EXPORT@) + +#----------------------------------------------------------------------------- +# Directories +#----------------------------------------------------------------------------- +set (${LIBAEC_PACKAGE_NAME}_INCLUDE_DIR "@PACKAGE_INCLUDE_INSTALL_DIR@") + +set (${LIBAEC_PACKAGE_NAME}_SHARE_DIR "@PACKAGE_SHARE_INSTALL_DIR@") +set_and_check (${LIBAEC_PACKAGE_NAME}_BUILD_DIR "@PACKAGE_CURRENT_BUILD_DIR@") + +#----------------------------------------------------------------------------- +# Version Strings +#----------------------------------------------------------------------------- +set (${LIBAEC_PACKAGE_NAME}_VERSION_STRING @LIBAEC_VERSION_STRING@) +set (${LIBAEC_PACKAGE_NAME}_VERSION_MAJOR @LIBAEC_VERSION_MAJOR@) +set (${LIBAEC_PACKAGE_NAME}_VERSION_MINOR @LIBAEC_VERSION_MINOR@) + +#----------------------------------------------------------------------------- +# Don't include targets if 
this file is being picked up by another +# project which has already build LIBAEC as a subproject +#----------------------------------------------------------------------------- +if (NOT TARGET "@LIBAEC_PACKAGE@") + include (@PACKAGE_SHARE_INSTALL_DIR@/@LIBAEC_PACKAGE@@LIBAEC_PACKAGE_EXT@-targets.cmake) +endif () + +# Handle default component(static) : +if (NOT ${LIBAEC_PACKAGE_NAME}_FIND_COMPONENTS) + set (${LIBAEC_PACKAGE_NAME}_FIND_COMPONENTS static) + set (${LIBAEC_PACKAGE_NAME}_FIND_REQUIRED_static true) +endif () + +# Handle requested components: +list (REMOVE_DUPLICATES ${LIBAEC_PACKAGE_NAME}_FIND_COMPONENTS) +foreach (comp IN LISTS ${LIBAEC_PACKAGE_NAME}_FIND_COMPONENTS) + list (FIND ${LIBAEC_PACKAGE_NAME}_EXPORT_LIBRARIES "@LIBAEC_LIB_CORENAME@-${comp}" HAVE_COMP) + if (${HAVE_COMP} LESS 0) + set (${LIBAEC_PACKAGE_NAME}_${comp}_FOUND 0) + else () + set (${LIBAEC_PACKAGE_NAME}_${comp}_FOUND 1) + string(TOUPPER ${LIBAEC_PACKAGE_NAME}_${comp}_LIBRARY COMP_LIBRARY) + set (${COMP_LIBRARY} ${${COMP_LIBRARY}} @LIBAEC_LIB_CORENAME@-${comp}) + endif () +endforeach () + +check_required_components (${LIBAEC_PACKAGE_NAME}) diff --git a/config/cmake/ZLIB/CMakeLists.txt b/config/cmake/ZLIB/CMakeLists.txt new file mode 100644 index 00000000000..c74ecea9dd9 --- /dev/null +++ b/config/cmake/ZLIB/CMakeLists.txt @@ -0,0 +1,572 @@ +cmake_minimum_required (VERSION 3.12) +PROJECT (ZLIB C) + +#----------------------------------------------------------------------------- +# Basic ZLIB stuff here +#----------------------------------------------------------------------------- +set (ZLIB_PACKAGE_EXT ${HDF_PACKAGE_EXT}) +set (HDF_USE_GNU_DIRS ${HDF5_USE_GNU_DIRS}) +set (CMAKE_OSX_ARCHITECTURES ${CMAKE_OSX_ARCHITECTURES}) +set (CMAKE_TOOLCHAIN_FILE ${CMAKE_TOOLCHAIN_FILE}) +set (PACKAGE_NAMESPACE ${HDF_PACKAGE_NAMESPACE}) +if (MINGW) + set (WINDOWS 1) # MinGW tries to imitate Windows +endif () +if (WINDOWS) + set (HAVE_STDDEF_H 1) + set (HAVE_SYS_TYPES_H 1) +endif () +# +# Check for 
unistd.h +# +check_include_file(unistd.h Z_HAVE_UNISTD_H) +CHECK_FUNCTION_EXISTS (memcpy HAVE_MEMCPY) +CHECK_FUNCTION_EXISTS (vsnprintf HAVE_VSNPRINTF) + +#----------------------------------------------------------------------------- +# Define some CMake variables for use later in the project +#----------------------------------------------------------------------------- +set (ZLIB_RESOURCES_DIR ${HDF_RESOURCES_DIR}/ZLIB) +set (ZLIB_SRC_DIR ${ZLIB_SOURCE_DIR}) + +#----------------------------------------------------------------------------- +# Set the core names of all the libraries +#----------------------------------------------------------------------------- +set (ZLIB_LIB_CORENAME "zlib") + +#----------------------------------------------------------------------------- +# Set the true names of all the libraries if customized by external project +#----------------------------------------------------------------------------- +set (ZLIB_LIB_NAME "${ZLIB_EXTERNAL_LIB_PREFIX}${ZLIB_LIB_CORENAME}") + +#----------------------------------------------------------------------------- +# Set the target names of all the libraries +#----------------------------------------------------------------------------- +set (ZLIB_LIB_TARGET "${ZLIB_LIB_CORENAME}-static") + +set(ZLIB_PC ${CMAKE_CURRENT_BINARY_DIR}/zlib.pc) +configure_file( ${CMAKE_CURRENT_SOURCE_DIR}/zlib.pc.cmakein ${ZLIB_PC} @ONLY) + +#----------------------------------------------------------------------------- +# Generate the zconf.h file containing user settings needed by compilation +#----------------------------------------------------------------------------- +configure_file(${CMAKE_CURRENT_SOURCE_DIR}/zconf.h.cmakein ${CMAKE_CURRENT_BINARY_DIR}/zconf.h @ONLY) + +if (DEFINED ADDITIONAL_CMAKE_PREFIX_PATH AND EXISTS "${ADDITIONAL_CMAKE_PREFIX_PATH}") + set (CMAKE_PREFIX_PATH ${ADDITIONAL_CMAKE_PREFIX_PATH} ${CMAKE_PREFIX_PATH}) +endif () + 
+#----------------------------------------------------------------------------- +# parse the full version number from zlib.h and include in ZLIB_VERS_INFO +#----------------------------------------------------------------------------- +file (READ ${ZLIB_SRC_DIR}/zlib.h _zlib_h_contents) +string (REGEX REPLACE ".*#define[ \t]+ZLIB_VER_MAJOR[ \t]+([0-9]*).*$" + "\\1" ZLIB_VERS_MAJOR ${_zlib_h_contents}) +string (REGEX REPLACE ".*#define[ \t]+ZLIB_VER_MINOR[ \t]+([0-9]*).*$" + "\\1" ZLIB_VERS_MINOR ${_zlib_h_contents}) +string (REGEX REPLACE ".*#define[ \t]+ZLIB_VER_REVISION[ \t]+([0-9]*).*$" + "\\1" ZLIB_VERS_RELEASE ${_zlib_h_contents}) +string (REGEX REPLACE ".*#define[ \t]+ZLIB_VER_SUBREVISION[ \t]+([0-9]*).*$" + "\\1" ZLIB_VERS_SUBRELEASE ${_zlib_h_contents}) +#message (STATUS "VERSION: ${ZLIB_VERS_MAJOR}.${ZLIB_VERS_MINOR}.${ZLIB_VERS_RELEASE}-${ZLIB_VERS_SUBRELEASE}") +string (REGEX REPLACE ".*#define[ \t]+ZLIB_VERSION[ \t]+\"([0-9A-Za-z.]+)\".*" + "\\1" ZLIB_FULL_VERSION ${_zlib_h_contents}) +#message (STATUS "VERSION: ${ZLIB_FULL_VERSION}") + +#----------------------------------------------------------------------------- +set (ZLIB_PACKAGE "zlib") +set (ZLIB_PACKAGE_NAME "ZLIB") +set (ZLIB_PACKAGE_VERSION "${ZLIB_VERS_MAJOR}.${ZLIB_VERS_MINOR}") +set (ZLIB_PACKAGE_VERSION_MAJOR "${ZLIB_VERS_MAJOR}.${ZLIB_VERS_MINOR}") +set (ZLIB_PACKAGE_VERSION_MINOR "${ZLIB_VERS_RELEASE}") +set (ZLIB_PACKAGE_STRING "${ZLIB_PACKAGE_NAME} ${ZLIB_PACKAGE_VERSION}-${ZLIB_VERS_SUBRELEASE}") +set (ZLIB_PACKAGE_TARNAME "zlib") +set (ZLIB_PACKAGE_URL "http://www.hdfgroup.org") +set (ZLIB_PACKAGE_BUGREPORT "help@hdfgroup.org") +set (ZLIB_PACKAGE_SOVERSION "${ZLIB_VERS_MAJOR}.${ZLIB_VERS_MINOR}.${ZLIB_VERS_RELEASE}") +set (ZLIB_PACKAGE_SOVERSION_MAJOR "${ZLIB_VERS_MAJOR}") + + +HDF_DIR_PATHS(${ZLIB_PACKAGE_NAME}) + +#----------------------------------------------------------------------------- +# Targets built within this project are exported at Install time for use +# by other 
projects +#----------------------------------------------------------------------------- +if (NOT ZLIB_EXPORTED_TARGETS) + set (ZLIB_EXPORTED_TARGETS "zlib-targets") +endif () + +#----------------------------------------------------------------------------- +# To include a library in the list exported by the project AT BUILD TIME, +# add it to this variable. This is NOT used by Make Install, but for projects +# which include zlib as a sub-project within their build tree +#----------------------------------------------------------------------------- +set_global_variable (ZLIB_LIBRARIES_TO_EXPORT "") + +set (CMAKE_POSITION_INDEPENDENT_CODE ON) + +#----------------------------------------------------------------------------- +# When building utility executables that generate other (source) files : +# we make use of the following variables defined in the root CMakeLists. +# Certain systems may add /Debug or /Release to output paths +# and we need to call the executable from inside the CMake configuration +#----------------------------------------------------------------------------- +set (EXE_EXT "") +if (WIN32) + set (EXE_EXT ".exe") + add_definitions (-D_BIND_TO_CURRENT_VCLIBS_VERSION=1) + add_definitions (-D_CRT_SECURE_NO_WARNINGS) + add_definitions (-D_CONSOLE) + add_definitions (-D_CRT_NONSTDC_NO_DEPRECATE) +endif () + +if (MSVC) + set (CMAKE_MFC_FLAG 0) +endif () + +set (MAKE_SYSTEM) +if (CMAKE_BUILD_TOOL MATCHES "make") + set (MAKE_SYSTEM 1) +endif () + +set (CFG_INIT "/${CMAKE_CFG_INTDIR}") +if (MAKE_SYSTEM) + set (CFG_INIT "") +endif () + +#----------------------------------------------------------------------------- +# Compiler specific flags : Shouldn't there be compiler tests for these +#----------------------------------------------------------------------------- +if (CMAKE_COMPILER_IS_GNUCC) + set (CMAKE_C_FLAGS "${CMAKE_ANSI_CFLAGS} ${CMAKE_C_FLAGS} -Wno-strict-prototypes") +endif () +if (CMAKE_C_COMPILER_ID MATCHES "IntelLLVM" OR CMAKE_C_COMPILER_ID 
MATCHES "[Cc]lang") + set (CMAKE_C_FLAGS "${CMAKE_ANSI_CFLAGS} ${CMAKE_C_FLAGS} -Wno-deprecated-non-prototype -Wno-implicit-function-declaration") +endif () + +#----------------------------------------------------------------------------- +# This is in here to help some of the GCC based IDES like Eclipse +# and code blocks parse the compiler errors and warnings better. +#----------------------------------------------------------------------------- +if (CMAKE_COMPILER_IS_GNUCC) + set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fmessage-length=0") +endif () + +#----------------------------------------------------------------------------- +# All libs/tests/examples need the main include directories +#----------------------------------------------------------------------------- +INCLUDE_DIRECTORIES (${ZLIB_BINARY_DIR} ${ZLIB_SOURCE_DIR} ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}) + +#============================================================================ +# zlib +#============================================================================ + +#----------------------------------------------------------------------------- +# Define zlib Library +#----------------------------------------------------------------------------- +set(ZLIB_PUBLIC_HDRS + ${CMAKE_CURRENT_BINARY_DIR}/zconf.h + zlib.h +) +set(ZLIB_PRIVATE_HDRS + crc32.h + deflate.h + gzguts.h + inffast.h + inffixed.h + inflate.h + inftrees.h + trees.h + zutil.h +) +set(ZLIB_SRCS + adler32.c + compress.c + crc32.c + deflate.c + gzclose.c + gzlib.c + gzread.c + gzwrite.c + inflate.c + infback.c + inftrees.c + inffast.c + trees.c + uncompr.c + zutil.c +) + +if(NOT MINGW) + set(ZLIB_DLL_SRCS + win32/zlib1.rc # If present will override custom build rule below. 
+ ) +endif() + +if(CMAKE_COMPILER_IS_GNUCC) + if(ASM686) + set(ZLIB_ASMS contrib/asm686/match.S) + elseif (AMD64) + set(ZLIB_ASMS contrib/amd64/amd64-match.S) + endif () + + if(ZLIB_ASMS) + add_definitions(-DASMV) + set_source_files_properties (${ZLIB_ASMS} PROPERTIES LANGUAGE C COMPILE_FLAGS -DNO_UNDERLINE) + endif() +endif() + +if(MSVC) + if(ASM686) + enable_language(ASM_MASM) + set(ZLIB_ASMS + contrib/masmx86/inffas32.asm + contrib/masmx86/match686.asm + ) + elseif (AMD64) + enable_language(ASM_MASM) + set(ZLIB_ASMS + contrib/masmx64/gvmat64.asm + contrib/masmx64/inffasx64.asm + ) + endif() + + if(ZLIB_ASMS) + add_definitions(-DASMV -DASMINF) + endif() +endif() + +if(MINGW) + add_custom_command(OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/zlib1rc.obj + COMMAND ${CMAKE_RC_COMPILER} + -D GCC_WINDRES + -I ${CMAKE_CURRENT_SOURCE_DIR} + -I ${CMAKE_CURRENT_BINARY_DIR} + -o ${CMAKE_CURRENT_BINARY_DIR}/zlib1rc.obj + -i ${CMAKE_CURRENT_SOURCE_DIR}/win32/zlib1.rc) + set(ZLIB_DLL_SRCS ${CMAKE_CURRENT_BINARY_DIR}/zlib1rc.obj) +endif() + +add_library (${ZLIB_LIB_TARGET} STATIC ${ZLIB_SRCS} ${ZLIB_PRIVATE_HDRS} ${ZLIB_PUBLIC_HDRS}) +if (MSVC AND CMAKE_CL_64) + set_target_properties (${ZLIB_LIB_TARGET} PROPERTIES STATIC_LIBRARY_FLAGS "/machine:x64") +endif () +target_include_directories(${ZLIB_LIB_TARGET} PRIVATE "${CMAKE_BINARY_DIR}") +TARGET_C_PROPERTIES (${ZLIB_LIB_TARGET} STATIC) +target_link_libraries (${ZLIB_LIB_TARGET} PRIVATE ${LINK_LIBS}) +H5_SET_LIB_OPTIONS (${ZLIB_LIB_TARGET} ${ZLIB_LIB_NAME} STATIC 0) +set_target_properties(${ZLIB_LIB_TARGET} PROPERTIES + PUBLIC_HEADER "${ZLIB_PUBLIC_HDRS}" + LINKER_LANGUAGE C + INTERFACE_INCLUDE_DIRECTORIES "$<INSTALL_INTERFACE:${CMAKE_INSTALL_PREFIX}/include>" +) +set_global_variable (ZLIB_LIBRARIES_TO_EXPORT ${ZLIB_LIB_TARGET}) +set (install_targets ${ZLIB_LIB_TARGET}) + +#----------------------------------------------------------------------------- +# Add Target(s) to CMake Install for import into other projects 
+#----------------------------------------------------------------------------- +if (ZLIB_EXPORTED_TARGETS) + INSTALL_TARGET_PDB (${ZLIB_LIB_TARGET} ${ZLIB_INSTALL_BIN_DIR} libraries) + + install ( + TARGETS + ${install_targets} + EXPORT + ${ZLIB_EXPORTED_TARGETS} + LIBRARY DESTINATION ${ZLIB_INSTALL_LIB_DIR} COMPONENT libraries + ARCHIVE DESTINATION ${ZLIB_INSTALL_LIB_DIR} COMPONENT libraries + RUNTIME DESTINATION ${ZLIB_INSTALL_BIN_DIR} COMPONENT libraries + FRAMEWORK DESTINATION ${ZLIB_INSTALL_FWRK_DIR} COMPONENT libraries + PUBLIC_HEADER DESTINATION ${ZLIB_INSTALL_INCLUDE_DIR} COMPONENT headers + ) +endif () + +include (CMakePackageConfigHelpers) + +#----------------------------------------------------------------------------- +# Check for Installation Utilities +#----------------------------------------------------------------------------- +if (WIN32) + set (PF_ENV_EXT "(x86)") + find_program (NSIS_EXECUTABLE NSIS.exe PATHS "$ENV{ProgramFiles}\\NSIS" "$ENV{ProgramFiles${PF_ENV_EXT}}\\NSIS") + if(NOT CPACK_WIX_ROOT) + file(TO_CMAKE_PATH "$ENV{WIX}" CPACK_WIX_ROOT) + endif () + find_program (WIX_EXECUTABLE candle PATHS "${CPACK_WIX_ROOT}/bin") +endif () + +#----------------------------------------------------------------------------- +# Add file(s) to CMake Install +#----------------------------------------------------------------------------- +if (NOT ZLIB_INSTALL_NO_DEVELOPMENT) + install ( + FILES ${PROJECT_BINARY_DIR}/zconf.h + DESTINATION ${ZLIB_INSTALL_INCLUDE_DIR} + COMPONENT headers + ) +endif () + +#----------------------------------------------------------------------------- +# Add Target(s) to CMake Install for import into other projects +#----------------------------------------------------------------------------- +if (NOT ZLIB_EXTERNALLY_CONFIGURED) + install ( + EXPORT ${ZLIB_EXPORTED_TARGETS} + DESTINATION ${ZLIB_INSTALL_CMAKE_DIR} + FILE ${ZLIB_PACKAGE}${ZLIB_PACKAGE_EXT}-targets.cmake + NAMESPACE ${PACKAGE_NAMESPACE} + COMPONENT configinstall + 
) +endif () + +#----------------------------------------------------------------------------- +# Export all exported targets to the build tree for use by parent project +#----------------------------------------------------------------------------- +if (NOT ZLIB_EXTERNALLY_CONFIGURED) + export ( + TARGETS ${ZLIB_LIBRARIES_TO_EXPORT} ${ZLIB_LIB_DEPENDENCIES} + FILE ${ZLIB_PACKAGE}${ZLIB_PACKAGE_EXT}-targets.cmake + NAMESPACE ${PACKAGE_NAMESPACE} + ) + export (PACKAGE ${ZLIB_PACKAGE}${ZLIB_PACKAGE_EXT}) +endif () + +#----------------------------------------------------------------------------- +# Set includes needed for build +#----------------------------------------------------------------------------- +set (ZLIB_INCLUDES_BUILD_TIME + ${ZLIB_SRC_DIR} ${ZLIB_BINARY_DIR} +) + +#----------------------------------------------------------------------------- +# Set variables needed for installation +#----------------------------------------------------------------------------- +set (ZLIB_VERSION_STRING ${ZLIB_PACKAGE_VERSION}) +set (ZLIB_VERSION_MAJOR ${ZLIB_PACKAGE_VERSION_MAJOR}) +set (ZLIB_VERSION_MINOR ${ZLIB_PACKAGE_VERSION_MINOR}) + +#----------------------------------------------------------------------------- +# Configure the zlib-config.cmake file for the build directory +#----------------------------------------------------------------------------- +set (INCLUDE_INSTALL_DIR ${ZLIB_INSTALL_INCLUDE_DIR}) +set (SHARE_INSTALL_DIR "${CMAKE_CURRENT_BINARY_DIR}/${ZLIB_INSTALL_CMAKE_DIR}" ) +set (CURRENT_BUILD_DIR "${CMAKE_CURRENT_BINARY_DIR}" ) +configure_package_config_file ( + ${ZLIB_RESOURCES_DIR}/zlib-config.cmake.in + "${ZLIB_BINARY_DIR}/${ZLIB_PACKAGE}${ZLIB_PACKAGE_EXT}-config.cmake" + INSTALL_DESTINATION "${ZLIB_INSTALL_CMAKE_DIR}" + PATH_VARS INCLUDE_INSTALL_DIR SHARE_INSTALL_DIR CURRENT_BUILD_DIR + INSTALL_PREFIX "${CMAKE_CURRENT_BINARY_DIR}" +) + +#----------------------------------------------------------------------------- +# Configure the 
zlib-config.cmake file for the install directory +#----------------------------------------------------------------------------- +set (INCLUDE_INSTALL_DIR ${ZLIB_INSTALL_INCLUDE_DIR}) +set (SHARE_INSTALL_DIR "${CMAKE_INSTALL_PREFIX}/${ZLIB_INSTALL_CMAKE_DIR}" ) +set (CURRENT_BUILD_DIR "${CMAKE_INSTALL_PREFIX}" ) +configure_package_config_file ( + ${ZLIB_RESOURCES_DIR}/zlib-config.cmake.in + "${ZLIB_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/${ZLIB_PACKAGE}${ZLIB_PACKAGE_EXT}-config.cmake" + INSTALL_DESTINATION "${ZLIB_INSTALL_CMAKE_DIR}" + PATH_VARS INCLUDE_INSTALL_DIR SHARE_INSTALL_DIR CURRENT_BUILD_DIR +) +if (NOT ZLIB_EXTERNALLY_CONFIGURED) + install ( + FILES ${ZLIB_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/${ZLIB_PACKAGE}${ZLIB_PACKAGE_EXT}-config.cmake + DESTINATION ${ZLIB_INSTALL_CMAKE_DIR} + COMPONENT configinstall + ) +endif () + +#----------------------------------------------------------------------------- +# Configure the ZLIB-config-version.cmake file for the install directory +#----------------------------------------------------------------------------- +if (NOT ZLIB_EXTERNALLY_CONFIGURED) + configure_file ( + ${ZLIB_RESOURCES_DIR}/zlib-config-version.cmake.in + ${ZLIB_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/${ZLIB_PACKAGE}${ZLIB_PACKAGE_EXT}-config-version.cmake @ONLY + ) + install ( + FILES ${ZLIB_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/${ZLIB_PACKAGE}${ZLIB_PACKAGE_EXT}-config-version.cmake + DESTINATION ${ZLIB_INSTALL_CMAKE_DIR} + COMPONENT configinstall + ) +endif () + +#----------------------------------------------------------------------------- +# Add Document File(s) to CMake Install +#----------------------------------------------------------------------------- +if (NOT ZLIB_EXTERNALLY_CONFIGURED) + install ( + FILES + ${ZLIB_SOURCE_DIR}/FAQ + ${ZLIB_SOURCE_DIR}/README + ${ZLIB_SOURCE_DIR}/INDEX + DESTINATION ${ZLIB_INSTALL_DATA_DIR} + COMPONENT documents + ) +endif () + +#----------------------------------------------------------------------------- +# Check 
for Installation Utilities +#----------------------------------------------------------------------------- +if (WIN32) + set (PF_ENV_EXT "(x86)") + find_program (NSIS_EXECUTABLE NSIS.exe PATHS "$ENV{ProgramFiles}\\NSIS" "$ENV{ProgramFiles${PF_ENV_EXT}}\\NSIS") + if(NOT CPACK_WIX_ROOT) + file(TO_CMAKE_PATH "$ENV{WIX}" CPACK_WIX_ROOT) + endif() + find_program (WIX_EXECUTABLE candle PATHS "${CPACK_WIX_ROOT}/bin") +endif () + +#----------------------------------------------------------------------------- +# Set the cpack variables +#----------------------------------------------------------------------------- +if (NOT ZLIB_EXTERNALLY_CONFIGURED) + set (CPACK_PACKAGE_VENDOR "HDF_Group") + set (CPACK_PACKAGE_NAME "${ZLIB_PACKAGE_NAME}") + if (CDASH_LOCAL) + set (CPACK_PACKAGE_VERSION "${ZLIB_PACKAGE_VERSION}") + else () + set (CPACK_PACKAGE_VERSION "${ZLIB_PACKAGE_VERSION_STRING}") + endif () + set (CPACK_PACKAGE_VERSION_MAJOR "${ZLIB_PACKAGE_VERSION_MAJOR}") + set (CPACK_PACKAGE_VERSION_MINOR "${ZLIB_PACKAGE_VERSION_MINOR}") + set (CPACK_PACKAGE_VERSION_PATCH "") + set (CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_SOURCE_DIR}/README") + set (CPACK_PACKAGE_DESCRIPTION_FILE "${CMAKE_CURRENT_SOURCE_DIR}/README") + set (CPACK_RESOURCE_FILE_README "${CMAKE_CURRENT_SOURCE_DIR}/README") + set (CPACK_PACKAGE_RELOCATABLE TRUE) + set (CPACK_PACKAGE_DESCRIPTION_SUMMARY "zlib Installation") + set (CPACK_PACKAGE_INSTALL_DIRECTORY "${CPACK_PACKAGE_VENDOR}/${CPACK_PACKAGE_NAME}/${CPACK_PACKAGE_VERSION}") + + set (CPACK_GENERATOR "TGZ") + if (WIN32) + set (CPACK_GENERATOR "ZIP") + + if (NSIS_EXECUTABLE) + list (APPEND CPACK_GENERATOR "NSIS") + endif () + # Installers for 32- vs. 
64-bit CMake: + # - Root install directory (displayed to end user at installer-run time) + # - "NSIS package/display name" (text used in the installer GUI) + # - Registry key used to store info about the installation + set (CPACK_NSIS_PACKAGE_NAME "${ZLIB_PACKAGE_STRING}") + if (CMAKE_CL_64) + set (CPACK_NSIS_INSTALL_ROOT "$PROGRAMFILES64") + set (CPACK_PACKAGE_INSTALL_REGISTRY_KEY "${CPACK_PACKAGE_NAME}-${CPACK_PACKAGE_VERSION} (Win64)") + else () + set (CPACK_NSIS_INSTALL_ROOT "$PROGRAMFILES") + set (CPACK_PACKAGE_INSTALL_REGISTRY_KEY "${CPACK_PACKAGE_NAME}-${CPACK_PACKAGE_VERSION}") + endif () + set (CPACK_NSIS_DISPLAY_NAME "${CPACK_NSIS_PACKAGE_NAME}") + set (CPACK_PACKAGE_INSTALL_DIRECTORY "${CPACK_PACKAGE_VENDOR}\\\\${CPACK_PACKAGE_NAME}\\\\${CPACK_PACKAGE_VERSION}") + set (CPACK_NSIS_CONTACT "${ZLIB_PACKAGE_BUGREPORT}") + set (CPACK_NSIS_MODIFY_PATH ON) + set (CPACK_NSIS_PACKAGE_NAME "ZLIB ${ZLIB_PACKAGE_VERSION}") + if (WIX_EXECUTABLE) + list (APPEND CPACK_GENERATOR "WIX") + endif () +#WiX variables + set (CPACK_WIX_UNINSTALL "1") + set (CPACK_RESOURCE_FILE_LICENSE "${JPEG_BINARY_DIR}/README") + elseif (APPLE) + list (APPEND CPACK_GENERATOR "STGZ") + list (APPEND CPACK_GENERATOR "DragNDrop") + set (CPACK_COMPONENTS_ALL_IN_ONE_PACKAGE ON) + set (CPACK_PACKAGING_INSTALL_PREFIX "/${CPACK_PACKAGE_INSTALL_DIRECTORY}") + #set (CPACK_PACKAGE_ICON "${ZLIB_RESOURCES_DIR}/hdf.icns") + + option (ZLIB_PACK_MACOSX_FRAMEWORK "Package the ZLIB Library in a Framework" OFF) + if (ZLIB_PACK_MACOSX_FRAMEWORK AND ZLIB_BUILD_FRAMEWORKS) + set (CPACK_BUNDLE_NAME "${ZLIB_PACKAGE_STRING}") + set (CPACK_BUNDLE_LOCATION "/") # make sure CMAKE_INSTALL_PREFIX ends in / + set (CMAKE_INSTALL_PREFIX "/${CPACK_BUNDLE_NAME}.framework/Versions/${CPACK_PACKAGE_VERSION}/${CPACK_PACKAGE_NAME}/") + set (CPACK_SHORT_VERSION_STRING "${CPACK_PACKAGE_VERSION}") + #----------------------------------------------------------------------------- + # Configure the Info.plist file for the install bundle + 
#----------------------------------------------------------------------------- + configure_file ( + ${ZLIB_RESOURCES_DIR}/CPack.Info.plist.in + ${ZLIB_BINARY_DIR}/CMakeFiles/Info.plist @ONLY + ) + configure_file ( + ${ZLIB_RESOURCES_DIR}/PkgInfo.in + ${ZLIB_BINARY_DIR}/CMakeFiles/PkgInfo @ONLY + ) + install ( + FILES ${ZLIB_BINARY_DIR}/CMakeFiles/PkgInfo + DESTINATION .. + ) + endif () + else () + list (APPEND CPACK_GENERATOR "STGZ") + set (CPACK_PACKAGING_INSTALL_PREFIX "/${CPACK_PACKAGE_INSTALL_DIRECTORY}") + set (CPACK_COMPONENTS_ALL_IN_ONE_PACKAGE ON) + + set (CPACK_DEBIAN_PACKAGE_SECTION "Libraries") + set (CPACK_DEBIAN_PACKAGE_MAINTAINER "${ZLIB_PACKAGE_BUGREPORT}") + +# list (APPEND CPACK_GENERATOR "RPM") + set (CPACK_RPM_PACKAGE_RELEASE "1") + set (CPACK_RPM_COMPONENT_INSTALL ON) + set (CPACK_RPM_PACKAGE_RELOCATABLE ON) + endif () + + # By default, do not warn when built on machines using only VS Express: + if (NOT DEFINED CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS_NO_WARNINGS) + set (CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS_NO_WARNINGS ON) + endif () + include (InstallRequiredSystemLibraries) + + set (CPACK_INSTALL_CMAKE_PROJECTS "${ZLIB_BINARY_DIR};ZLIB;libraries;/") + set (CPACK_INSTALL_CMAKE_PROJECTS "${ZLIB_BINARY_DIR};ZLIB;headers;/") + set (CPACK_INSTALL_CMAKE_PROJECTS "${ZLIB_BINARY_DIR};ZLIB;configinstall;/") + + set (CPACK_ALL_INSTALL_TYPES Full Developer User) + set (CPACK_INSTALL_TYPE_FULL_DISPLAY_NAME "Everything") + + set(CPACK_COMPONENTS_ALL libraries headers documents configinstall) + + include (CPack) + + cpack_add_component_group(Runtime) + + cpack_add_component_group(Documents + EXPANDED + DESCRIPTION "Release notes for zlib" + ) + + cpack_add_component_group(Development + EXPANDED + DESCRIPTION "All of the tools you'll need to develop applications" + ) + + cpack_add_component (libraries + DISPLAY_NAME "ZLIB Libraries" + REQUIRED + GROUP Runtime + INSTALL_TYPES Full Developer User + ) + cpack_add_component (headers + DISPLAY_NAME "ZLIB Headers" + DEPENDS 
libraries + GROUP Development + INSTALL_TYPES Full Developer + ) + cpack_add_component (documents + DISPLAY_NAME "ZLIB Documents" + GROUP Documents + INSTALL_TYPES Full Developer + ) + cpack_add_component (configinstall + DISPLAY_NAME "ZLIB CMake files" + DEPENDS libraries + GROUP Development + INSTALL_TYPES Full Developer User + ) + +endif () diff --git a/config/cmake/ZLIB/CPack.Info.plist.in b/config/cmake/ZLIB/CPack.Info.plist.in new file mode 100644 index 00000000000..08d371bd5d9 --- /dev/null +++ b/config/cmake/ZLIB/CPack.Info.plist.in @@ -0,0 +1,26 @@ + + + + + CFBundleDevelopmentRegion + English + CFBundleExecutable + @CPACK_PACKAGE_FILE_NAME@ + CFBundleIconFile + @CPACK_BUNDLE_ICON@ + CFBundleIdentifier + org.@CPACK_PACKAGE_VENDOR@.@CPACK_PACKAGE_NAME@@CPACK_MODULE_VERSION_SUFFIX@ + CFBundleInfoDictionaryVersion + 6.0 + CFBundlePackageType + FMWK + CFBundleSignature + ???? + CFBundleVersion + @CPACK_PACKAGE_VERSIO@ + CFBundleShortVersionString + @CPACK_SHORT_VERSION_STRING@ + CSResourcesFileMapped + + + diff --git a/config/cmake/ZLIB/zconf.h.in b/config/cmake/ZLIB/zconf.h.in new file mode 100644 index 00000000000..a7f24cce60f --- /dev/null +++ b/config/cmake/ZLIB/zconf.h.in @@ -0,0 +1,536 @@ +/* zconf.h -- configuration of the zlib compression library + * Copyright (C) 1995-2016 Jean-loup Gailly, Mark Adler + * For conditions of distribution and use, see copyright notice in zlib.h + */ + +/* @(#) $Id$ */ + +#ifndef ZCONF_H +#define ZCONF_H +#cmakedefine Z_PREFIX +#cmakedefine Z_HAVE_UNISTD_H + +/* + * If you *really* need a unique prefix for all types and library functions, + * compile with -DZ_PREFIX. The "standard" zlib should be compiled without it. + * Even better than compiling with -DZ_PREFIX would be to use configure to set + * this permanently in zconf.h using "./configure --zprefix". 
+ */ +#ifdef Z_PREFIX /* may be set to #if 1 by ./configure */ +# define Z_PREFIX_SET + +/* all linked symbols and init macros */ +# define _dist_code z__dist_code +# define _length_code z__length_code +# define _tr_align z__tr_align +# define _tr_flush_bits z__tr_flush_bits +# define _tr_flush_block z__tr_flush_block +# define _tr_init z__tr_init +# define _tr_stored_block z__tr_stored_block +# define _tr_tally z__tr_tally +# define adler32 z_adler32 +# define adler32_combine z_adler32_combine +# define adler32_combine64 z_adler32_combine64 +# define adler32_z z_adler32_z +# ifndef Z_SOLO +# define compress z_compress +# define compress2 z_compress2 +# define compressBound z_compressBound +# endif +# define crc32 z_crc32 +# define crc32_combine z_crc32_combine +# define crc32_combine64 z_crc32_combine64 +# define crc32_z z_crc32_z +# define deflate z_deflate +# define deflateBound z_deflateBound +# define deflateCopy z_deflateCopy +# define deflateEnd z_deflateEnd +# define deflateGetDictionary z_deflateGetDictionary +# define deflateInit z_deflateInit +# define deflateInit2 z_deflateInit2 +# define deflateInit2_ z_deflateInit2_ +# define deflateInit_ z_deflateInit_ +# define deflateParams z_deflateParams +# define deflatePending z_deflatePending +# define deflatePrime z_deflatePrime +# define deflateReset z_deflateReset +# define deflateResetKeep z_deflateResetKeep +# define deflateSetDictionary z_deflateSetDictionary +# define deflateSetHeader z_deflateSetHeader +# define deflateTune z_deflateTune +# define deflate_copyright z_deflate_copyright +# define get_crc_table z_get_crc_table +# ifndef Z_SOLO +# define gz_error z_gz_error +# define gz_intmax z_gz_intmax +# define gz_strwinerror z_gz_strwinerror +# define gzbuffer z_gzbuffer +# define gzclearerr z_gzclearerr +# define gzclose z_gzclose +# define gzclose_r z_gzclose_r +# define gzclose_w z_gzclose_w +# define gzdirect z_gzdirect +# define gzdopen z_gzdopen +# define gzeof z_gzeof +# define gzerror 
z_gzerror +# define gzflush z_gzflush +# define gzfread z_gzfread +# define gzfwrite z_gzfwrite +# define gzgetc z_gzgetc +# define gzgetc_ z_gzgetc_ +# define gzgets z_gzgets +# define gzoffset z_gzoffset +# define gzoffset64 z_gzoffset64 +# define gzopen z_gzopen +# define gzopen64 z_gzopen64 +# ifdef _WIN32 +# define gzopen_w z_gzopen_w +# endif +# define gzprintf z_gzprintf +# define gzputc z_gzputc +# define gzputs z_gzputs +# define gzread z_gzread +# define gzrewind z_gzrewind +# define gzseek z_gzseek +# define gzseek64 z_gzseek64 +# define gzsetparams z_gzsetparams +# define gztell z_gztell +# define gztell64 z_gztell64 +# define gzungetc z_gzungetc +# define gzvprintf z_gzvprintf +# define gzwrite z_gzwrite +# endif +# define inflate z_inflate +# define inflateBack z_inflateBack +# define inflateBackEnd z_inflateBackEnd +# define inflateBackInit z_inflateBackInit +# define inflateBackInit_ z_inflateBackInit_ +# define inflateCodesUsed z_inflateCodesUsed +# define inflateCopy z_inflateCopy +# define inflateEnd z_inflateEnd +# define inflateGetDictionary z_inflateGetDictionary +# define inflateGetHeader z_inflateGetHeader +# define inflateInit z_inflateInit +# define inflateInit2 z_inflateInit2 +# define inflateInit2_ z_inflateInit2_ +# define inflateInit_ z_inflateInit_ +# define inflateMark z_inflateMark +# define inflatePrime z_inflatePrime +# define inflateReset z_inflateReset +# define inflateReset2 z_inflateReset2 +# define inflateResetKeep z_inflateResetKeep +# define inflateSetDictionary z_inflateSetDictionary +# define inflateSync z_inflateSync +# define inflateSyncPoint z_inflateSyncPoint +# define inflateUndermine z_inflateUndermine +# define inflateValidate z_inflateValidate +# define inflate_copyright z_inflate_copyright +# define inflate_fast z_inflate_fast +# define inflate_table z_inflate_table +# ifndef Z_SOLO +# define uncompress z_uncompress +# define uncompress2 z_uncompress2 +# endif +# define zError z_zError +# ifndef Z_SOLO +# define 
zcalloc z_zcalloc +# define zcfree z_zcfree +# endif +# define zlibCompileFlags z_zlibCompileFlags +# define zlibVersion z_zlibVersion + +/* all zlib typedefs in zlib.h and zconf.h */ +# define Byte z_Byte +# define Bytef z_Bytef +# define alloc_func z_alloc_func +# define charf z_charf +# define free_func z_free_func +# ifndef Z_SOLO +# define gzFile z_gzFile +# endif +# define gz_header z_gz_header +# define gz_headerp z_gz_headerp +# define in_func z_in_func +# define intf z_intf +# define out_func z_out_func +# define uInt z_uInt +# define uIntf z_uIntf +# define uLong z_uLong +# define uLongf z_uLongf +# define voidp z_voidp +# define voidpc z_voidpc +# define voidpf z_voidpf + +/* all zlib structs in zlib.h and zconf.h */ +# define gz_header_s z_gz_header_s +# define internal_state z_internal_state + +#endif + +#if defined(__MSDOS__) && !defined(MSDOS) +# define MSDOS +#endif +#if (defined(OS_2) || defined(__OS2__)) && !defined(OS2) +# define OS2 +#endif +#if defined(_WINDOWS) && !defined(WINDOWS) +# define WINDOWS +#endif +#if defined(_WIN32) || defined(_WIN32_WCE) || defined(__WIN32__) +# ifndef WIN32 +# define WIN32 +# endif +#endif +#if (defined(MSDOS) || defined(OS2) || defined(WINDOWS)) && !defined(WIN32) +# if !defined(__GNUC__) && !defined(__FLAT__) && !defined(__386__) +# ifndef SYS16BIT +# define SYS16BIT +# endif +# endif +#endif + +/* + * Compile with -DMAXSEG_64K if the alloc function cannot allocate more + * than 64k bytes at a time (needed on systems with 16-bit int). 
+ */ +#ifdef SYS16BIT +# define MAXSEG_64K +#endif +#ifdef MSDOS +# define UNALIGNED_OK +#endif + +#ifdef __STDC_VERSION__ +# ifndef STDC +# define STDC +# endif +# if __STDC_VERSION__ >= 199901L +# ifndef STDC99 +# define STDC99 +# endif +# endif +#endif +#if !defined(STDC) && (defined(__STDC__) || defined(__cplusplus)) +# define STDC +#endif +#if !defined(STDC) && (defined(__GNUC__) || defined(__BORLANDC__)) +# define STDC +#endif +#if !defined(STDC) && (defined(MSDOS) || defined(WINDOWS) || defined(WIN32)) +# define STDC +#endif +#if !defined(STDC) && (defined(OS2) || defined(__HOS_AIX__)) +# define STDC +#endif + +#if defined(__OS400__) && !defined(STDC) /* iSeries (formerly AS/400). */ +# define STDC +#endif + +#ifndef STDC +# ifndef const /* cannot use !defined(STDC) && !defined(const) on Mac */ +# define const /* note: need a more gentle solution here */ +# endif +#endif + +#if defined(ZLIB_CONST) && !defined(z_const) +# define z_const const +#else +# define z_const +#endif + +#ifdef Z_SOLO + typedef unsigned long z_size_t; +#else +# define z_longlong long long +# if defined(NO_SIZE_T) + typedef unsigned NO_SIZE_T z_size_t; +# elif defined(STDC) +# include + typedef size_t z_size_t; +# else + typedef unsigned long z_size_t; +# endif +# undef z_longlong +#endif + +/* Maximum value for memLevel in deflateInit2 */ +#ifndef MAX_MEM_LEVEL +# ifdef MAXSEG_64K +# define MAX_MEM_LEVEL 8 +# else +# define MAX_MEM_LEVEL 9 +# endif +#endif + +/* Maximum value for windowBits in deflateInit2 and inflateInit2. + * WARNING: reducing MAX_WBITS makes minigzip unable to extract .gz files + * created by gzip. (Files created by minigzip can still be extracted by + * gzip.) + */ +#ifndef MAX_WBITS +# define MAX_WBITS 15 /* 32K LZ77 window */ +#endif + +/* The memory requirements for deflate are (in bytes): + (1 << (windowBits+2)) + (1 << (memLevel+9)) + that is: 128K for windowBits=15 + 128K for memLevel = 8 (default values) + plus a few kilobytes for small objects. 
For example, if you want to reduce + the default memory requirements from 256K to 128K, compile with + make CFLAGS="-O -DMAX_WBITS=14 -DMAX_MEM_LEVEL=7" + Of course this will generally degrade compression (there's no free lunch). + + The memory requirements for inflate are (in bytes) 1 << windowBits + that is, 32K for windowBits=15 (default value) plus about 7 kilobytes + for small objects. +*/ + + /* Type declarations */ + +#ifndef OF /* function prototypes */ +# ifdef STDC +# define OF(args) args +# else +# define OF(args) () +# endif +#endif + +#ifndef Z_ARG /* function prototypes for stdarg */ +# if defined(STDC) || defined(Z_HAVE_STDARG_H) +# define Z_ARG(args) args +# else +# define Z_ARG(args) () +# endif +#endif + +/* The following definitions for FAR are needed only for MSDOS mixed + * model programming (small or medium model with some far allocations). + * This was tested only with MSC; for other MSDOS compilers you may have + * to define NO_MEMCPY in zutil.h. If you don't need the mixed model, + * just define FAR to be empty. + */ +#ifdef SYS16BIT +# if defined(M_I86SM) || defined(M_I86MM) + /* MSC small or medium model */ +# define SMALL_MEDIUM +# ifdef _MSC_VER +# define FAR _far +# else +# define FAR far +# endif +# endif +# if (defined(__SMALL__) || defined(__MEDIUM__)) + /* Turbo C small or medium model */ +# define SMALL_MEDIUM +# ifdef __BORLANDC__ +# define FAR _far +# else +# define FAR far +# endif +# endif +#endif + +#if defined(WINDOWS) || defined(WIN32) + /* If building or using zlib as a DLL, define ZLIB_DLL. + * This is not mandatory, but it offers a little performance increase. + */ +# ifdef ZLIB_DLL +# if defined(WIN32) && (!defined(__BORLANDC__) || (__BORLANDC__ >= 0x500)) +# ifdef ZLIB_INTERNAL +# define ZEXTERN extern __declspec(dllexport) +# else +# define ZEXTERN extern __declspec(dllimport) +# endif +# endif +# endif /* ZLIB_DLL */ + /* If building or using zlib with the WINAPI/WINAPIV calling convention, + * define ZLIB_WINAPI. 
+ * Caution: the standard ZLIB1.DLL is NOT compiled using ZLIB_WINAPI. + */ +# ifdef ZLIB_WINAPI +# ifdef FAR +# undef FAR +# endif +# include + /* No need for _export, use ZLIB.DEF instead. */ + /* For complete Windows compatibility, use WINAPI, not __stdcall. */ +# define ZEXPORT WINAPI +# ifdef WIN32 +# define ZEXPORTVA WINAPIV +# else +# define ZEXPORTVA FAR CDECL +# endif +# endif +#endif + +#if defined (__BEOS__) +# ifdef ZLIB_DLL +# ifdef ZLIB_INTERNAL +# define ZEXPORT __declspec(dllexport) +# define ZEXPORTVA __declspec(dllexport) +# else +# define ZEXPORT __declspec(dllimport) +# define ZEXPORTVA __declspec(dllimport) +# endif +# endif +#endif + +#ifndef ZEXTERN +# define ZEXTERN extern +#endif +#ifndef ZEXPORT +# define ZEXPORT +#endif +#ifndef ZEXPORTVA +# define ZEXPORTVA +#endif + +#ifndef FAR +# define FAR +#endif + +#if !defined(__MACTYPES__) +typedef unsigned char Byte; /* 8 bits */ +#endif +typedef unsigned int uInt; /* 16 bits or more */ +typedef unsigned long uLong; /* 32 bits or more */ + +#ifdef SMALL_MEDIUM + /* Borland C/C++ and some old MSC versions ignore FAR inside typedef */ +# define Bytef Byte FAR +#else + typedef Byte FAR Bytef; +#endif +typedef char FAR charf; +typedef int FAR intf; +typedef uInt FAR uIntf; +typedef uLong FAR uLongf; + +#ifdef STDC + typedef void const *voidpc; + typedef void FAR *voidpf; + typedef void *voidp; +#else + typedef Byte const *voidpc; + typedef Byte FAR *voidpf; + typedef Byte *voidp; +#endif + +#if !defined(Z_U4) && !defined(Z_SOLO) && defined(STDC) +# include +# if (UINT_MAX == 0xffffffffUL) +# define Z_U4 unsigned +# elif (ULONG_MAX == 0xffffffffUL) +# define Z_U4 unsigned long +# elif (USHRT_MAX == 0xffffffffUL) +# define Z_U4 unsigned short +# endif +#endif + +#ifdef Z_U4 + typedef Z_U4 z_crc_t; +#else + typedef unsigned long z_crc_t; +#endif + +#ifdef HAVE_UNISTD_H /* may be set to #if 1 by ./configure */ +# define Z_HAVE_UNISTD_H +#endif + +#ifdef HAVE_STDARG_H /* may be set to #if 1 by 
./configure */ +# define Z_HAVE_STDARG_H +#endif + +#ifdef STDC +# ifndef Z_SOLO +# include /* for off_t */ +# endif +#endif + +#if defined(STDC) || defined(Z_HAVE_STDARG_H) +# ifndef Z_SOLO +# include /* for va_list */ +# endif +#endif + +#ifdef _WIN32 +# ifndef Z_SOLO +# include /* for wchar_t */ +# endif +#endif + +/* a little trick to accommodate both "#define _LARGEFILE64_SOURCE" and + * "#define _LARGEFILE64_SOURCE 1" as requesting 64-bit operations, (even + * though the former does not conform to the LFS document), but considering + * both "#undef _LARGEFILE64_SOURCE" and "#define _LARGEFILE64_SOURCE 0" as + * equivalently requesting no 64-bit operations + */ +#if defined(_LARGEFILE64_SOURCE) && -_LARGEFILE64_SOURCE - -1 == 1 +# undef _LARGEFILE64_SOURCE +#endif + +#if defined(__WATCOMC__) && !defined(Z_HAVE_UNISTD_H) +# define Z_HAVE_UNISTD_H +#endif +#ifndef Z_SOLO +# if defined(Z_HAVE_UNISTD_H) || defined(_LARGEFILE64_SOURCE) +# include /* for SEEK_*, off_t, and _LFS64_LARGEFILE */ +# ifdef VMS +# include /* for off_t */ +# endif +# ifndef z_off_t +# define z_off_t off_t +# endif +# endif +#endif + +#if defined(_LFS64_LARGEFILE) && _LFS64_LARGEFILE-0 +# define Z_LFS64 +#endif + +#if defined(_LARGEFILE64_SOURCE) && defined(Z_LFS64) +# define Z_LARGE64 +#endif + +#if defined(_FILE_OFFSET_BITS) && _FILE_OFFSET_BITS-0 == 64 && defined(Z_LFS64) +# define Z_WANT64 +#endif + +#if !defined(SEEK_SET) && !defined(Z_SOLO) +# define SEEK_SET 0 /* Seek from beginning of file. */ +# define SEEK_CUR 1 /* Seek from current position. 
*/ +# define SEEK_END 2 /* Set file pointer to EOF plus "offset" */ +#endif + +#ifndef z_off_t +# define z_off_t long +#endif + +#if !defined(_WIN32) && defined(Z_LARGE64) +# define z_off64_t off64_t +#else +# if defined(_WIN32) && !defined(__GNUC__) && !defined(Z_SOLO) +# define z_off64_t __int64 +# else +# define z_off64_t z_off_t +# endif +#endif + +/* MVS linker does not support external names larger than 8 bytes */ +#if defined(__MVS__) + #pragma map(deflateInit_,"DEIN") + #pragma map(deflateInit2_,"DEIN2") + #pragma map(deflateEnd,"DEEND") + #pragma map(deflateBound,"DEBND") + #pragma map(inflateInit_,"ININ") + #pragma map(inflateInit2_,"ININ2") + #pragma map(inflateEnd,"INEND") + #pragma map(inflateSync,"INSY") + #pragma map(inflateSetDictionary,"INSEDI") + #pragma map(compressBound,"CMBND") + #pragma map(inflate_table,"INTABL") + #pragma map(inflate_fast,"INFA") + #pragma map(inflate_copyright,"INCOPY") +#endif + +#endif /* ZCONF_H */ diff --git a/config/cmake/ZLIB/zlib-config-version.cmake.in b/config/cmake/ZLIB/zlib-config-version.cmake.in new file mode 100644 index 00000000000..38bcde858aa --- /dev/null +++ b/config/cmake/ZLIB/zlib-config-version.cmake.in @@ -0,0 +1,42 @@ +#----------------------------------------------------------------------------- +# ZLIB Version file for install directory +#----------------------------------------------------------------------------- + +set (PACKAGE_VERSION "@ZLIB_VERSION_STRING@") + +if("${PACKAGE_VERSION}" VERSION_LESS "${PACKAGE_FIND_VERSION}" ) + set(PACKAGE_VERSION_COMPATIBLE FALSE) +else() + if ("${PACKAGE_FIND_VERSION_MAJOR}" STREQUAL "@ZLIB_VERSION_MAJOR@") + + # exact match for version @ZLIB_VERSION_MAJOR@.@ZLIB_VERSION_MINOR@ + if ("${PACKAGE_FIND_VERSION_MINOR}" STREQUAL "@ZLIB_VERSION_MINOR@") + + # compatible with any version @ZLIB_VERSION_MAJOR@.@ZLIB_VERSION_MINOR@.x + set (PACKAGE_VERSION_COMPATIBLE TRUE) + + if ("${PACKAGE_FIND_VERSION_PATCH}" STREQUAL "@ZLIB_VERSION_RELEASE@") + set 
(PACKAGE_VERSION_EXACT TRUE) + + if ("${PACKAGE_FIND_VERSION_TWEAK}" STREQUAL "@ZLIB_VERSION_SUBRELEASE@") + # not using this yet + endif () + endif () + else () + set (PACKAGE_VERSION_COMPATIBLE FALSE) + endif () + endif () +endif () + +# if the installed or the using project don't have CMAKE_SIZEOF_VOID_P set, ignore it: +if("${CMAKE_SIZEOF_VOID_P}" STREQUAL "" OR "@CMAKE_SIZEOF_VOID_P@" STREQUAL "") + return() +endif() + +# check that the installed version has the same 32/64bit-ness as the one which is currently searching: +if(NOT "${CMAKE_SIZEOF_VOID_P}" STREQUAL "@CMAKE_SIZEOF_VOID_P@") + math(EXPR installedBits "@CMAKE_SIZEOF_VOID_P@ * 8") + set(PACKAGE_VERSION "${PACKAGE_VERSION} (${installedBits}bit)") + set(PACKAGE_VERSION_UNSUITABLE TRUE) +endif() + diff --git a/config/cmake/ZLIB/zlib-config.cmake.in b/config/cmake/ZLIB/zlib-config.cmake.in new file mode 100644 index 00000000000..307896b61a9 --- /dev/null +++ b/config/cmake/ZLIB/zlib-config.cmake.in @@ -0,0 +1,58 @@ +#----------------------------------------------------------------------------- +# ZLIB Config file for compiling against ZLIB build directory +#----------------------------------------------------------------------------- +@PACKAGE_INIT@ + +string(TOUPPER @ZLIB_PACKAGE@ ZLIB_PACKAGE_NAME) + +set (${ZLIB_PACKAGE_NAME}_VALID_COMPONENTS static shared) + +#----------------------------------------------------------------------------- +# User Options +#----------------------------------------------------------------------------- +set (${ZLIB_PACKAGE_NAME}_BUILD_SHARED_LIBS @BUILD_SHARED_LIBS@) +set (${ZLIB_PACKAGE_NAME}_EXPORT_LIBRARIES @ZLIB_LIBRARIES_TO_EXPORT@) + +#----------------------------------------------------------------------------- +# Directories +#----------------------------------------------------------------------------- +set (${ZLIB_PACKAGE_NAME}_INCLUDE_DIR "@PACKAGE_INCLUDE_INSTALL_DIR@") + +set (${ZLIB_PACKAGE_NAME}_SHARE_DIR "@PACKAGE_SHARE_INSTALL_DIR@") +set_and_check 
(${ZLIB_PACKAGE_NAME}_BUILD_DIR "@PACKAGE_CURRENT_BUILD_DIR@") + +#----------------------------------------------------------------------------- +# Version Strings +#----------------------------------------------------------------------------- +set (${ZLIB_PACKAGE_NAME}_VERSION_STRING @ZLIB_VERSION_STRING@) +set (${ZLIB_PACKAGE_NAME}_VERSION_MAJOR @ZLIB_VERSION_MAJOR@) +set (${ZLIB_PACKAGE_NAME}_VERSION_MINOR @ZLIB_VERSION_MINOR@) + +#----------------------------------------------------------------------------- +# Don't include targets if this file is being picked up by another +# project which has already build ZLIB as a subproject +#----------------------------------------------------------------------------- +if (NOT TARGET "@ZLIB_PACKAGE@") + include (@PACKAGE_SHARE_INSTALL_DIR@/@ZLIB_PACKAGE@@ZLIB_PACKAGE_EXT@-targets.cmake) +endif () + +# Handle default component(static) : +if (NOT ${ZLIB_PACKAGE_NAME}_FIND_COMPONENTS) + set (${ZLIB_PACKAGE_NAME}_FIND_COMPONENTS static) + set (${ZLIB_PACKAGE_NAME}_FIND_REQUIRED_static true) +endif () + +# Handle requested components: +list (REMOVE_DUPLICATES ${ZLIB_PACKAGE_NAME}_FIND_COMPONENTS) +foreach (comp IN LISTS ${ZLIB_PACKAGE_NAME}_FIND_COMPONENTS) + list (FIND ${ZLIB_PACKAGE_NAME}_EXPORT_LIBRARIES "@ZLIB_LIB_CORENAME@-${comp}" HAVE_COMP) + if (${HAVE_COMP} LESS 0) + set (${ZLIB_PACKAGE_NAME}_${comp}_FOUND 0) + else () + set (${ZLIB_PACKAGE_NAME}_${comp}_FOUND 1) + string(TOUPPER ${ZLIB_PACKAGE_NAME}_${comp}_LIBRARY COMP_LIBRARY) + set (${COMP_LIBRARY} ${${COMP_LIBRARY}} @ZLIB_LIB_CORENAME@-${comp}) + endif () +endforeach () + +check_required_components (${ZLIB_PACKAGE_NAME}) diff --git a/config/cmake/cacheinit.cmake b/config/cmake/cacheinit.cmake index faa05339260..41293ef723f 100644 --- a/config/cmake/cacheinit.cmake +++ b/config/cmake/cacheinit.cmake @@ -47,14 +47,21 @@ set (HDF5_MINGW_STATIC_GCC_LIBS ON CACHE BOOL "Statically link libgcc/libstdc++" set (HDF5_ALLOW_EXTERNAL_SUPPORT "NO" CACHE STRING "Allow External 
Library Building (NO GIT TGZ)" FORCE) set_property (CACHE HDF5_ALLOW_EXTERNAL_SUPPORT PROPERTY STRINGS NO GIT TGZ) + +set (ZLIB_PACKAGE_NAME "zlib" CACHE STRING "Name of ZLIB package" FORCE) set (ZLIB_TGZ_NAME "ZLib.tar.gz" CACHE STRING "Use HDF5_ZLib from compressed file" FORCE) +set (ZLIB_TGZ_ORIGPATH "https://github.com/madler/zlib/releases/download/v1.2.13" CACHE STRING "Use ZLIB from original location" FORCE) +set (ZLIB_TGZ_ORIGNAME "zlib-1.2.13.tar.gz" CACHE STRING "Use ZLIB from original compressed file" FORCE) +set (ZLIB_USE_LOCALCONTENT OFF CACHE BOOL "Use local file for ZLIB FetchContent" FORCE) + +set (SZIP_PACKAGE_NAME "szip" CACHE STRING "Name of SZIP package" FORCE) +set (LIBAEC_PACKAGE_NAME "libaec" CACHE STRING "Name of AEC SZIP package" FORCE) set (SZIP_TGZ_NAME "SZip.tar.gz" CACHE STRING "Use SZip from compressed file" FORCE) set (SZAEC_TGZ_NAME "LIBAEC.tar.gz" CACHE STRING "Use SZip AEC from compressed file" FORCE) set (USE_LIBAEC ON CACHE BOOL "Use libaec szip replacement" FORCE) - -set (ZLIB_PACKAGE_NAME "zlib" CACHE STRING "Name of HDF5_ZLIB package" FORCE) -set (LIBAEC_PACKAGE_NAME "libaec" CACHE STRING "Name of AEC SZIP package" FORCE) -set (SZIP_PACKAGE_NAME "szip" CACHE STRING "Name of SZIP package" FORCE) +set (LIBAEC_TGZ_ORIGPATH "https://gitlab.dkrz.de/k202009/libaec/-/archive/v1.0.6" CACHE STRING "Use LIBAEC from original location" FORCE) +set (LIBAEC_TGZ_ORIGNAME "libaec-v1.0.6.tar.gz" CACHE STRING "Use LIBAEC from original compressed file" FORCE) +set (LIBAEC_USE_LOCALCONTENT OFF CACHE BOOL "Use local file for LIBAEC FetchContent" FORCE) ######################## # filter plugin options diff --git a/config/cmake_ext_mod/HDFLibMacros.cmake b/config/cmake_ext_mod/HDFLibMacros.cmake index 740ddae7eaa..d2f2660d0a2 100644 --- a/config/cmake_ext_mod/HDFLibMacros.cmake +++ b/config/cmake_ext_mod/HDFLibMacros.cmake @@ -10,81 +10,72 @@ # help@hdfgroup.org. 
# #------------------------------------------------------------------------------- -macro (EXTERNAL_JPEG_LIBRARY compress_type jpeg_pic) - # May need to build JPEG with PIC on x64 machines with gcc - # Need to use CMAKE_ANSI_CFLAGS define so that compiler test works - +macro (ORIGINAL_ZLIB_LIBRARY compress_type) if (${compress_type} MATCHES "GIT") - EXTERNALPROJECT_ADD (JPEG - GIT_REPOSITORY ${JPEG_URL} - GIT_TAG ${JPEG_BRANCH} - INSTALL_COMMAND "" - CMAKE_ARGS - -DBUILD_SHARED_LIBS:BOOL=OFF - -DJPEG_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT} - -DJPEG_EXTERNALLY_CONFIGURED:BOOL=OFF - -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE} - -DCMAKE_DEBUG_POSTFIX:STRING=${CMAKE_DEBUG_POSTFIX} - -DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX} - -DCMAKE_RUNTIME_OUTPUT_DIRECTORY:PATH=${CMAKE_RUNTIME_OUTPUT_DIRECTORY} - -DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY} - -DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:PATH=${CMAKE_ARCHIVE_OUTPUT_DIRECTORY} - -DCMAKE_PDB_OUTPUT_DIRECTORY:PATH=${CMAKE_PDB_OUTPUT_DIRECTORY} - -DCMAKE_ANSI_CFLAGS:STRING=${jpeg_pic} - -DHDF_USE_GNU_DIRS:STRING=${HDF5_USE_GNU_DIRS} - -DCMAKE_OSX_ARCHITECTURES:STRING=${CMAKE_OSX_ARCHITECTURES} - -DCMAKE_TOOLCHAIN_FILE:STRING=${CMAKE_TOOLCHAIN_FILE} - -DPACKAGE_NAMESPACE=${HDF_PACKAGE_NAMESPACE} + FetchContent_Declare (HDF5_ZLIB + GIT_REPOSITORY ${ZLIB_URL} + GIT_TAG ${ZLIB_BRANCH} ) elseif (${compress_type} MATCHES "TGZ") - EXTERNALPROJECT_ADD (JPEG - URL ${JPEG_URL} - URL_MD5 "" - INSTALL_COMMAND "" - CMAKE_ARGS - -DBUILD_SHARED_LIBS:BOOL=OFF - -DJPEG_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT} - -DJPEG_EXTERNALLY_CONFIGURED:BOOL=OFF - -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE} - -DCMAKE_DEBUG_POSTFIX:STRING=${CMAKE_DEBUG_POSTFIX} - -DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX} - -DCMAKE_RUNTIME_OUTPUT_DIRECTORY:PATH=${CMAKE_RUNTIME_OUTPUT_DIRECTORY} - -DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY} - 
-DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:PATH=${CMAKE_ARCHIVE_OUTPUT_DIRECTORY} - -DCMAKE_PDB_OUTPUT_DIRECTORY:PATH=${CMAKE_PDB_OUTPUT_DIRECTORY} - -DCMAKE_ANSI_CFLAGS:STRING=${jpeg_pic} - -DHDF_USE_GNU_DIRS:STRING=${HDF5_USE_GNU_DIRS} - -DCMAKE_OSX_ARCHITECTURES:STRING=${CMAKE_OSX_ARCHITECTURES} - -DCMAKE_TOOLCHAIN_FILE:STRING=${CMAKE_TOOLCHAIN_FILE} - -DPACKAGE_NAMESPACE=${HDF_PACKAGE_NAMESPACE} + FetchContent_Declare (HDF5_ZLIB + URL ${ZLIB_URL} + URL_HASH "" ) endif () - externalproject_get_property (JPEG BINARY_DIR SOURCE_DIR) + FetchContent_GetProperties(HDF5_ZLIB) + if(NOT zlib_POPULATED) + FetchContent_Populate(HDF5_ZLIB) -##include (${BINARY_DIR}/${JPEG_PACKAGE_NAME}${HDF_PACKAGE_EXT}-targets.cmake) -# Create imported target jpeg-static - add_library(${HDF_PACKAGE_NAMESPACE}jpeg-static STATIC IMPORTED) - HDF_IMPORT_SET_LIB_OPTIONS (${HDF_PACKAGE_NAMESPACE}jpeg-static "jpeg" STATIC "") - add_dependencies (${HDF_PACKAGE_NAMESPACE}jpeg-static JPEG) - set (JPEG_STATIC_LIBRARY "${HDF_PACKAGE_NAMESPACE}jpeg-static") - set (JPEG_LIBRARIES ${JPEG_STATIC_LIBRARY}) + # Copy an additional/replacement files into the populated source + file(COPY ${HDF_RESOURCES_DIR}/ZLIB/CMakeLists.txt DESTINATION ${hdf5_zlib_SOURCE_DIR}) + + add_subdirectory(${hdf5_zlib_SOURCE_DIR} ${hdf5_zlib_BINARY_DIR}) + endif() + + add_library(${HDF_PACKAGE_NAMESPACE}zlib-static ALIAS zlib-static) + set (ZLIB_STATIC_LIBRARY "${HDF_PACKAGE_NAMESPACE}zlib-static") + set (ZLIB_LIBRARIES ${ZLIB_STATIC_LIBRARY}) - set (JPEG_INCLUDE_DIR_GEN "${BINARY_DIR}") - set (JPEG_INCLUDE_DIR "${SOURCE_DIR}/src") - set (JPEG_FOUND 1) - set (JPEG_INCLUDE_DIRS ${JPEG_INCLUDE_DIR_GEN} ${JPEG_INCLUDE_DIR}) + set (ZLIB_INCLUDE_DIR_GEN "${hdf5_zlib_BINARY_DIR}") + set (ZLIB_INCLUDE_DIR "${hdf5_zlib_SOURCE_DIR}") + set (ZLIB_FOUND 1) + set (ZLIB_INCLUDE_DIRS ${ZLIB_INCLUDE_DIR_GEN} ${ZLIB_INCLUDE_DIR}) endmacro () #------------------------------------------------------------------------------- -macro (PACKAGE_JPEG_LIBRARY 
compress_type) - add_custom_target (JPEG-GenHeader-Copy ALL - COMMAND ${CMAKE_COMMAND} -E copy_if_different ${JPEG_INCLUDE_DIR_GEN}/jconfig.h ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ - COMMENT "Copying ${JPEG_INCLUDE_DIR_GEN}/jconfig.h to ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/" - ) - set (EXTERNAL_HEADER_LIST ${EXTERNAL_HEADER_LIST} ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/jconfig.h) - if (${compress_type} MATCHES "GIT" OR ${compress_type} MATCHES "TGZ") - add_dependencies (JPEG-GenHeader-Copy JPEG) +macro (ORIGINAL_SZIP_LIBRARY compress_type encoding) + # Only libaec library is usable + if (${compress_type} MATCHES "GIT") + FetchContent_Declare (SZIP + GIT_REPOSITORY ${SZIP_URL} + GIT_TAG ${SZIP_BRANCH} + ) + elseif (${compress_type} MATCHES "TGZ") + FetchContent_Declare (SZIP + URL ${SZIP_URL} + URL_HASH "" + ) endif () + FetchContent_GetProperties(SZIP) + if(NOT szip_POPULATED) + FetchContent_Populate(SZIP) + + # Copy an additional/replacement files into the populated source + file(COPY ${HDF_RESOURCES_DIR}/LIBAEC/CMakeLists.txt DESTINATION ${szip_SOURCE_DIR}) + + add_subdirectory(${szip_SOURCE_DIR} ${szip_BINARY_DIR}) + endif() + + set (USE_LIBAEC ON CACHE BOOL "Use libaec szip replacement" FORCE) + add_library (${HDF_PACKAGE_NAMESPACE}szaec-static ALIAS szaec-static) + add_library (${HDF_PACKAGE_NAMESPACE}aec-static ALIAS aec-static) + set (SZIP_STATIC_LIBRARY "${HDF_PACKAGE_NAMESPACE}szaec-static;${HDF_PACKAGE_NAMESPACE}aec-static") + set (SZIP_LIBRARIES ${SZIP_STATIC_LIBRARY}) + + set (SZIP_INCLUDE_DIR_GEN "${szip_BINARY_DIR}") + set (SZIP_INCLUDE_DIR "${szip_SOURCE_DIR}/include") + set (SZIP_FOUND 1) + set (SZIP_INCLUDE_DIRS ${SZIP_INCLUDE_DIR_GEN} ${SZIP_INCLUDE_DIR}) endmacro () #------------------------------------------------------------------------------- @@ -105,7 +96,6 @@ macro (EXTERNAL_SZIP_LIBRARY compress_type encoding) -DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY} -DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:PATH=${CMAKE_ARCHIVE_OUTPUT_DIRECTORY} 
-DCMAKE_PDB_OUTPUT_DIRECTORY:PATH=${CMAKE_PDB_OUTPUT_DIRECTORY} - -DCMAKE_ANSI_CFLAGS:STRING=${CMAKE_ANSI_CFLAGS} -DSZIP_ENABLE_ENCODING:BOOL=${encoding} -DHDF_USE_GNU_DIRS:STRING=${HDF5_USE_GNU_DIRS} -DCMAKE_OSX_ARCHITECTURES:STRING=${CMAKE_OSX_ARCHITECTURES} @@ -128,7 +118,6 @@ macro (EXTERNAL_SZIP_LIBRARY compress_type encoding) -DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY} -DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:PATH=${CMAKE_ARCHIVE_OUTPUT_DIRECTORY} -DCMAKE_PDB_OUTPUT_DIRECTORY:PATH=${CMAKE_PDB_OUTPUT_DIRECTORY} - -DCMAKE_ANSI_CFLAGS:STRING=${CMAKE_ANSI_CFLAGS} -DSZIP_ENABLE_ENCODING:BOOL=${encoding} -DHDF_USE_GNU_DIRS:STRING=${HDF5_USE_GNU_DIRS} -DCMAKE_OSX_ARCHITECTURES:STRING=${CMAKE_OSX_ARCHITECTURES} @@ -137,7 +126,7 @@ macro (EXTERNAL_SZIP_LIBRARY compress_type encoding) ) endif () externalproject_get_property (SZIP BINARY_DIR SOURCE_DIR) - +# ##include (${BINARY_DIR}/${SZIP_PACKAGE_NAME}${HDF_PACKAGE_EXT}-targets.cmake) # Create imported target szip-static if (USE_LIBAEC) @@ -202,7 +191,6 @@ macro (EXTERNAL_ZLIB_LIBRARY compress_type) -DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY} -DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:PATH=${CMAKE_ARCHIVE_OUTPUT_DIRECTORY} -DCMAKE_PDB_OUTPUT_DIRECTORY:PATH=${CMAKE_PDB_OUTPUT_DIRECTORY} - -DCMAKE_ANSI_CFLAGS:STRING=${CMAKE_ANSI_CFLAGS} -DHDF_USE_GNU_DIRS:STRING=${HDF5_USE_GNU_DIRS} -DCMAKE_OSX_ARCHITECTURES:STRING=${CMAKE_OSX_ARCHITECTURES} -DCMAKE_TOOLCHAIN_FILE:STRING=${CMAKE_TOOLCHAIN_FILE} @@ -224,7 +212,6 @@ macro (EXTERNAL_ZLIB_LIBRARY compress_type) -DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY} -DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:PATH=${CMAKE_ARCHIVE_OUTPUT_DIRECTORY} -DCMAKE_PDB_OUTPUT_DIRECTORY:PATH=${CMAKE_PDB_OUTPUT_DIRECTORY} - -DCMAKE_ANSI_CFLAGS:STRING=${CMAKE_ANSI_CFLAGS} -DHDF_USE_GNU_DIRS:STRING=${HDF5_USE_GNU_DIRS} -DCMAKE_OSX_ARCHITECTURES:STRING=${CMAKE_OSX_ARCHITECTURES} -DCMAKE_TOOLCHAIN_FILE:STRING=${CMAKE_TOOLCHAIN_FILE} @@ -239,7 
+226,6 @@ macro (EXTERNAL_ZLIB_LIBRARY compress_type) ##include (${BINARY_DIR}/${ZLIB_PACKAGE_NAME}${HDF_PACKAGE_EXT}-targets.cmake) # Create imported target zlib-static add_library(${HDF_PACKAGE_NAMESPACE}zlib-static STATIC IMPORTED) -# add_library(${HDF_PACKAGE_NAMESPACE}zlib-static ALIAS zlib-static) HDF_IMPORT_SET_LIB_OPTIONS (${HDF_PACKAGE_NAMESPACE}zlib-static ${ZLIB_LIB_NAME} STATIC "") add_dependencies (${HDF_PACKAGE_NAMESPACE}zlib-static HDF5_ZLIB) set (ZLIB_STATIC_LIBRARY "${HDF_PACKAGE_NAMESPACE}zlib-static") diff --git a/config/cmake_ext_mod/HDFMacros.cmake b/config/cmake_ext_mod/HDFMacros.cmake index 863b7ba2a68..7483f3cad8b 100644 --- a/config/cmake_ext_mod/HDFMacros.cmake +++ b/config/cmake_ext_mod/HDFMacros.cmake @@ -479,8 +479,10 @@ macro (HDF_DIR_PATHS package_prefix) CACHE PATH "Install path prefix, prepended onto install directories." FORCE) set (CMAKE_GENERIC_PROGRAM_FILES) endif () + set (CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT 0 CACHE PATH "" FORCE) endif () + include (FetchContent) #----------------------------------------------------------------------------- # Setup pre-3.14 FetchContent #----------------------------------------------------------------------------- diff --git a/config/sanitizer/tools.cmake b/config/sanitizer/tools.cmake index 242e33f9d3b..3a41ad448c1 100644 --- a/config/sanitizer/tools.cmake +++ b/config/sanitizer/tools.cmake @@ -90,7 +90,7 @@ find_program(CPPCHECK_EXE NAMES "cppcheck") mark_as_advanced(FORCE CPPCHECK_EXE) if(CPPCHECK_EXE) message(STATUS "cppcheck found: ${CPPCHECK_EXE}") - if(CPPECHECK) + if(CPPCHECK) set(CMAKE_CXX_CPPCHECK "${CPPCHECK_EXE};--enable=warning,performance,portability,missingInclude;--template=\"[{severity}][{id}] {message} {callstack} \(On {file}:{line}\)\";--suppress=missingIncludeSystem;--quiet;--verbose;--force" ) diff --git a/config/toolchain/GCC.cmake b/config/toolchain/GCC.cmake deleted file mode 100644 index c41d0cadb31..00000000000 --- a/config/toolchain/GCC.cmake +++ /dev/null @@ 
-1,11 +0,0 @@ -# Uncomment the following line and the correct system name to use cross-compiling -#set(CMAKE_SYSTEM_NAME Linux) - -set(CMAKE_COMPILER_VENDOR "GCC") - -set(CMAKE_C_COMPILER cc) -set(CMAKE_CXX_COMPILER c++) -set(CMAKE_Fortran_COMPILER gfortran) - -# the following is used if cross-compiling -set(CMAKE_CROSSCOMPILING_EMULATOR "") diff --git a/config/toolchain/aarch64.cmake b/config/toolchain/aarch64.cmake index adb86390e46..aa84a742654 100644 --- a/config/toolchain/aarch64.cmake +++ b/config/toolchain/aarch64.cmake @@ -1,5 +1,5 @@ set(TOOLCHAIN_PREFIX aarch64-linux-gnu) -set(ANDROID_NDK /opt/android-ndk-r25b-linux/android-ndk-r25b) +set(ANDROID_NDK /opt/android-ndk-linux) set (CMAKE_SYSTEM_NAME Android) set (CMAKE_ANDROID_ARCH_ABI x86_64) #set (CMAKE_ANDROID_STANDALONE_TOOLCHAIN ${ANDROID_NDK}/build/cmake/andriod.toolchain.cmake) diff --git a/release_docs/INSTALL_CMake.txt b/release_docs/INSTALL_CMake.txt index 44d32038413..31b7e46945b 100644 --- a/release_docs/INSTALL_CMake.txt +++ b/release_docs/INSTALL_CMake.txt @@ -297,8 +297,8 @@ IV. Further considerations B. Use source packages from an GIT server by adding the following CMake options: HDF5_ALLOW_EXTERNAL_SUPPORT:STRING="GIT" - ZLIB_GIT_URL:STRING="http://some_location/zlib" - SZIP_GIT_URL:STRING="http://some_location/szip" + ZLIB_GIT_URL:STRING="https://some_location/zlib" + SZIP_GIT_URL:STRING="https://some_location/szip" where "some_location" is the URL to the GIT repository. Also set CMAKE_BUILD_TYPE to the configuration type. @@ -313,6 +313,29 @@ IV. Further considerations to the configuration type during configuration. See the settings in the config/cmake/cacheinit.cmake file HDF uses for testing. + D. 
Use original source packages from a compressed file by adding the following + CMake options: + BUILD_SZIP_WITH_FETCHCONTENT:BOOL=ON + LIBAEC_TGZ_ORIGNAME:STRING="szip_src.ext" + LIBAEC_TGZ_ORIGPATH:STRING="some_location" + + BUILD_ZLIB_WITH_FETCHCONTENT:BOOL=ON + ZLIB_TGZ_ORIGNAME:STRING="zlib_src.ext" + ZLIB_TGZ_ORIGPATH:STRING="some_location" + + HDF5_ALLOW_EXTERNAL_SUPPORT:STRING="TGZ" + where "some_location" is the URL or full path to the compressed + file and ext is the type of compression file. The individual filters are + enabled by setting the BUILD__WITH_FETCHCONTENT CMake variable to ON. + Also set CMAKE_BUILD_TYPE to the configuration type during configuration. + See the settings in the config/cmake/cacheinit.cmake file HDF uses for testing. + + The files can also be retrieved from a local path if necessary + TGZPATH:STRING="some_location" + by setting + ZLIB_USE_LOCALCONTENT:BOOL=ON + LIBAEC_USE_LOCALCONTENT:BOOL=ON + 3. If you plan to use compression plugins: A. Use source packages from an GIT server by adding the following CMake options: @@ -473,6 +496,7 @@ These five steps are described in detail below. * Visual Studio 15 2017 * Visual Studio 15 2017 Win64 * Visual Studio 16 2019 + * Visual Studio 17 2022 is: * SZIP_INCLUDE_DIR:PATH= @@ -497,13 +521,19 @@ These five steps are described in detail below. 
set (HDF_TEST_EXPRESS "2" CACHE STRING "Control testing framework (0-3)" FORCE) set (HDF5_ALLOW_EXTERNAL_SUPPORT "NO" CACHE STRING "Allow External Library Building (NO GIT TGZ)" FORCE) set_property (CACHE HDF5_ALLOW_EXTERNAL_SUPPORT PROPERTY STRINGS NO GIT TGZ) + set (ZLIB_PACKAGE_NAME "zlib" CACHE STRING "Name of ZLIB package" FORCE) set (ZLIB_TGZ_NAME "ZLib.tar.gz" CACHE STRING "Use ZLib from compressed file" FORCE) + set (ZLIB_TGZ_ORIGPATH "https://github.com/madler/zlib/releases/download/v1.2.13" CACHE STRING "Use ZLIB from original location" FORCE) + set (ZLIB_TGZ_ORIGNAME "zlib-1.2.13.tar.gz" CACHE STRING "Use ZLIB from original compressed file" FORCE) + set (ZLIB_USE_LOCALCONTENT OFF CACHE BOOL "Use local file for ZLIB FetchContent" FORCE) + set (SZIP_PACKAGE_NAME "szip" CACHE STRING "Name of SZIP package" FORCE) + set (LIBAEC_PACKAGE_NAME "libaec" CACHE STRING "Name of AEC SZIP package" FORCE) set (SZIP_TGZ_NAME "SZip.tar.gz" CACHE STRING "Use SZip from compressed file" FORCE) set (SZAEC_TGZ_NAME "LIBAEC.tar.gz" CACHE STRING "Use SZip AEC from compressed file" FORCE) set (USE_LIBAEC ON CACHE BOOL "Use libaec szip replacement" FORCE) - set (ZLIB_PACKAGE_NAME "zlib" CACHE STRING "Name of ZLIB package" FORCE) - set (LIBAEC_PACKAGE_NAME "libaec" CACHE STRING "Name of AEC SZIP package" FORCE) - set (SZIP_PACKAGE_NAME "szip" CACHE STRING "Name of SZIP package" FORCE) + set (LIBAEC_TGZ_ORIGPATH "https://gitlab.dkrz.de/k202009/libaec/-/archive/v1.0.6" CACHE STRING "Use LIBAEC from original location" FORCE) + set (LIBAEC_TGZ_ORIGNAME "libaec-v1.0.6.tar.gz" CACHE STRING "Use LIBAEC from original compressed file" FORCE) + set (LIBAEC_USE_LOCALCONTENT OFF CACHE BOOL "Use local file for LIBAEC FetchContent" FORCE) ####################### # filter plugin options ####################### @@ -838,6 +868,14 @@ if (WINDOWS) else () H5_DEFAULT_PLUGINDIR "/usr/local/hdf5/lib/plugin" endif () +if (BUILD_SZIP_WITH_FETCHCONTENT) + LIBAEC_TGZ_ORIGPATH "Use LIBAEC from original 
location" "https://gitlab.dkrz.de/k202009/libaec/-/archive/v1.0.6" + LIBAEC_TGZ_ORIGNAME "Use LIBAEC from original compressed file" "libaec-v1.0.6.tar.gz" + LIBAEC_USE_LOCALCONTENT "Use local file for LIBAEC FetchContent" OFF +if (BUILD_ZLIB_WITH_FETCHCONTENT) + ZLIB_TGZ_ORIGPATH "Use ZLIB from original location" "https://github.com/madler/zlib/releases/download/v1.2.13" + ZLIB_TGZ_ORIGNAME "Use ZLIB from original compressed file" "zlib-1.2.13.tar.gz" + ZLIB_USE_LOCALCONTENT "Use local file for ZLIB FetchContent" OFF NOTE: The BUILD_STATIC_EXECS ("Build Static Executables") option is only valid diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index acbde91243b..05160fa52c0 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -47,6 +47,31 @@ New Features Configuration: ------------- + - Added new option to build libaec and zlib inline with CMake. + + Using the CMake FetchContent module, the external filters can populate + content at configure time via any method supported by the ExternalProject + module. Whereas ExternalProject_Add() downloads at build time, the + FetchContent module makes content available immediately, allowing the + configure step to use the content in commands like add_subdirectory(), + include() or file() operations. + + The HDF options (and defaults) for using this are: + BUILD_SZIP_WITH_FETCHCONTENT:BOOL=OFF + LIBAEC_USE_LOCALCONTENT:BOOL=OFF + BUILD_ZLIB_WITH_FETCHCONTENT:BOOL=OFF + ZLIB_USE_LOCALCONTENT:BOOL=OFF + + The CMake variables to control the path and file names: + LIBAEC_TGZ_ORIGPATH:STRING + LIBAEC_TGZ_ORIGNAME:STRING + ZLIB_TGZ_ORIGPATH:STRING + ZLIB_TGZ_ORIGNAME:STRING + + See the CMakeFilters.cmake and config/cmake/cacheinit.cmake files for usage. 
+ + (ADB - 2023/02/21) + - Add new CMake configuration variable HDF5_USE_GNU_DIRS HDF5_USE_GNU_DIRS (default OFF) selects the use of GNU Coding Standard install From 91cf10156a65a96860cea965df74a5adecf3e4e8 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Tue, 28 Feb 2023 19:08:47 -0600 Subject: [PATCH 024/108] 1.12 Merge doxygen plist tables changes #2470 from develop (#2505) * Merge doxygen plist tables changes #2470 from develop * Add new/moved files * More add new/moved files * Doxy corrections --- doc/branches-explained.md | 31 +- doxygen/Doxyfile.in | 51 +- doxygen/dox/MetadataCachingInHDF5.dox | 2 +- doxygen/dox/Overview.dox | 2 +- doxygen/dox/PredefinedDatatypeTables.dox | 22 + doxygen/dox/ReferenceManual.dox | 152 +- doxygen/dox/ViewTools.dox | 8 +- doxygen/dox/cookbook/Accessibility.dox | 2 +- doxygen/dox/cookbook/Attributes.dox | 2 +- doxygen/dox/cookbook/Files.dox | 2 +- doxygen/dox/cookbook/Performance.dox | 2 +- doxygen/dox/high_level/extension.dox | 11 +- doxygen/examples/H5.format.2.0.html | 26718 ++++++++-------- doxygen/examples/H5.format.html | 17 +- doxygen/examples/H5R_examples.c | 171 + doxygen/examples/VFL.html | 11 +- doxygen/examples/{ => menus}/core_menu.md | 4 + doxygen/examples/{ => menus}/fortran_menu.md | 0 .../examples/{ => menus}/high_level_menu.md | 0 doxygen/examples/{ => menus}/java_menu.md | 0 doxygen/examples/tables/fileDriverLists.dox | 139 + .../examples/tables/predefinedDatatypes.dox | 629 + doxygen/examples/tables/propertyLists.dox | 955 + doxygen/examples/tables/volAPIs.dox | 637 + doxygen/hdf5_header.html | 2 +- doxygen/hdf5doxy_layout.xml | 9 +- src/H5Amodule.h | 2 +- src/H5Dmodule.h | 315 +- src/H5Dpublic.h | 6 +- src/H5ESpublic.h | 14 +- src/H5FDmpio.h | 6 +- src/H5FDpublic.h | 30 +- src/H5Fmodule.h | 298 +- src/H5Gmodule.h | 97 +- src/H5Gpublic.h | 20 +- src/H5Lpublic.h | 16 +- src/H5MMpublic.h | 2 - src/H5Mpublic.h | 3 + src/H5Opublic.h | 2 +- src/H5PLextern.h | 2 +- 
src/H5PLmodule.h | 2 +- src/H5PLpublic.h | 2 +- src/H5Pmodule.h | 155 +- src/H5Ppublic.h | 13 +- src/H5Tmodule.h | 34 +- src/H5Tpublic.h | 2 +- src/H5VLmodule.h | 24 +- 47 files changed, 16510 insertions(+), 14114 deletions(-) create mode 100644 doxygen/dox/PredefinedDatatypeTables.dox create mode 100644 doxygen/examples/H5R_examples.c rename doxygen/examples/{ => menus}/core_menu.md (91%) rename doxygen/examples/{ => menus}/fortran_menu.md (100%) rename doxygen/examples/{ => menus}/high_level_menu.md (100%) rename doxygen/examples/{ => menus}/java_menu.md (100%) create mode 100644 doxygen/examples/tables/fileDriverLists.dox create mode 100644 doxygen/examples/tables/predefinedDatatypes.dox create mode 100644 doxygen/examples/tables/propertyLists.dox create mode 100644 doxygen/examples/tables/volAPIs.dox diff --git a/doc/branches-explained.md b/doc/branches-explained.md index 22b9c8f34ca..5b55ec74125 100644 --- a/doc/branches-explained.md +++ b/doc/branches-explained.md @@ -8,34 +8,33 @@ We encourage code contributors to check the status of their commits. If you have ## `develop` Develop is the main branch whose source code always reflects a state with the latest delivered development changes for the next major release of HDF5. -This is also considered the integration branch, as **all** new features are integrated into this branch from respective feature branches. +This is also considered the integration branch, as **all** new features are integrated into this branch from respective feature branches. Although +develop is considered an integration branch, it is not an unstable branch. All code merged to develop is expected to pass all GitHub actions and daily tests. ## `Maintenance branches` - -Each currently supported release-line of HDF5 (e.g. 1.8.x, 1.10.x, 1.12.x) has a support branch with the name 1_8, 1_10, 1_12. +Each currently supported release line of HDF5 (e.g. 1.8.x, 1.10.x, 1.12.x) has an associated branch with the name hdf5\_1\_10, etc.. 
Maintenance branches are similar to the develop branch, except the source code in a maintenance branch always reflects a state with the latest delivered development changes for the next **maintenance** release of that particular supported release-line of HDF5. **Some** new features will be integrated into a release maintenance branch, depending on whether or not those features can be introduced in minor releases. Maintenance branches are removed when a release-line is retired from support. +## `Release branches` +Release branches are used to prepare a new production release. They are primarily used to allow for last minute dotting of i's and crossing of t's +(things like setting the release version, finalizing release notes, and generating Autotools files) and do not include new development. +They are created from the maintenance branch at the time of the maintenance release and have +names like hdf5\_1\_10\_N, where N is the minor release number. Once the release is done it is tagged, with a slightly different format: hdf5-1\_\10\_N. +Release branches are deleted after the tag has been created. If we have to create a patch version of a release (which is rare), we create a branch off of the tag. + ## `feature/*` Feature branches are temporary branches used to develop new features in HDF5. Feature branches branch off of develop and exist as long as the feature is under development. When the feature is complete, the branch is merged back into develop, as well as into any support branches in which the change will be included, and then the feature branch is removed. -## `release/*` -Release branches are used to prepare a new production release. They are primarily used to allow for last minute dotting of i's and crossing of t's -(things like setting the release version, finalizing release notes, et cetera) and do not include new development. 
-They are created from the maintenance branch at the time of the maintenance release and have -names 1_8_N, 1_10_N, 1_12_N, where N is the minor release number. Once the release is done it is tagged. -Patches can be applied to the release branch for patch releases that are treated as "scaled down" maintenance releases as defined by Release coordinator. - -## `1.X/master/*` where X is 8, 10 or 12 -These branches are used to tag 1.X.* maintenance releases. +Ideally, all feature branches should contain a BRANCH.md file in the root directory that explains the purpose of the branch, contact information for the person responsible, and, if possible, some clues about the branch's life cycle (so we have an idea about when it can be deleted, merged, or declared inactive). -## `inactive//*` -These branches are for experimental features that were developed in the past and have not been merged to develop, and are not under active development. The features -can be out of sync with the develop branch. +Minor bug fixes and refactoring work usually takes place on personal forks, not feature branches. -This document was last updated on March 16, 2021 +## `inactive/*` +These branches are for experimental features that were developed in the past, have not been merged to develop, and are not under active development. The exception to this is that some feature branches are labeled inactive and preserved for a short time after merging to develop. Integration branches are usually not kept in sync with the develop branch. +As for feature branches, inactive branches should have a BRANCH.md file as described above. diff --git a/doxygen/Doxyfile.in b/doxygen/Doxyfile.in index ce08db29da5..d9b0fe9aa55 100644 --- a/doxygen/Doxyfile.in +++ b/doxygen/Doxyfile.in @@ -1,4 +1,4 @@ -# Doxyfile 1.8.18 +# Doxyfile # This file describes the settings to be used by the documentation system # doxygen (www.doxygen.org) for a project. 
@@ -853,31 +853,20 @@ INPUT_ENCODING = UTF-8 # C comment), *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, *.f18, *.f, *.for, *.vhd, # *.vhdl, *.ucf, *.qsf and *.ice. -FILE_PATTERNS = H5*public.h \ - H5*module.h \ - H5FDcore.h \ - H5FDdirect.h \ - H5FDfamily.h \ - H5FDhdfs.h \ - H5FDlog.h \ - H5FDmirror.h \ - H5FDmpi.h \ - H5FDmpio.h \ - H5FDmulti.h \ - H5FDros3.h \ - H5FDsec2.h \ - H5FDsplitter.h \ - H5FDstdio.h \ - H5FDwindows.h \ - H5VLconnector.h \ - H5VLconnector_passthru.h \ - H5VLnative.h \ - H5Zdevelop.h \ - H5version.h \ - H5*.java \ - HDF*.java \ - *.F90 \ - *.dox +FILE_PATTERNS = H5*public.h H5*module.h H5*develop.h H5FD*.h \ + H5VLconnector.h H5VLconnector_passthru.h H5VLnative.h H5PLextern.h \ + H5Zdevelop.h \ + H5version.h \ + H5*.java \ + HDF*.java \ + *.F90 \ + *.dox \ + H5Cpp.h H5AbstractDs.h H5AtomType.h H5Attribute.h H5CommonFG.h H5CompType.h \ + H5DataSet.h H5DataSpace.h H5DataType.h H5OcreatProp.h H5DaccProp.h H5DcreatProp.h \ + H5DxferProp.h H5EnumType.h H5Exception.h H5FaccProp.h H5FcreatProp.h H5File.h \ + H5FloatType.h H5Group.h H5IdComponent.h H5Include.h H5IntType.h H5LcreatProp.h \ + H5LaccProp.h H5Library.h H5Location.h H5Object.h H5PredType.h H5PropList.h H5StrType.h \ + H5ArrayType.h H5VarLenType.h # The RECURSIVE tag can be used to specify whether or not subdirectories should # be searched for input files as well. 
@@ -908,7 +897,15 @@ EXCLUDE_SYMLINKS = NO # Note that the wildcards are matched against the file with absolute path, so to # exclude all test directories for example use the pattern */test/* -EXCLUDE_PATTERNS = +EXCLUDE_PATTERNS = */fortran/test/* +EXCLUDE_PATTERNS += */fortran/testpar/* +EXCLUDE_PATTERNS += */fortran/examples/* +EXCLUDE_PATTERNS += */fortran/src/*.c +EXCLUDE_PATTERNS += */fortran/src/*.h +EXCLUDE_PATTERNS += */hl/fortran/examples/* +EXCLUDE_PATTERNS += */hl/fortran/test/* +EXCLUDE_PATTERNS += */hl/fortran/src/*.c +EXCLUDE_PATTERNS += */hl/fortran/src/*.h # The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names # (namespaces, classes, functions, etc.) that should be excluded from the diff --git a/doxygen/dox/MetadataCachingInHDF5.dox b/doxygen/dox/MetadataCachingInHDF5.dox index b84ddeaff26..ce7f0dfb789 100644 --- a/doxygen/dox/MetadataCachingInHDF5.dox +++ b/doxygen/dox/MetadataCachingInHDF5.dox @@ -724,7 +724,7 @@ is allowed to write to file, and then only after entering a sync point with the other caches. After it writes entries to file, it sends the base addresses of the now clean entries to the other caches, so they can mark these entries clean as well, and then leaves the sync point. The other caches mark the specified -entries as clean before they leave the synch point as well. (Observe, that since +entries as clean before they leave the sync point as well. (Observe, that since all caches see the same stream of dirty metadata, they will all have the same set of dirty entries upon sync point entry and exit.) 
diff --git a/doxygen/dox/Overview.dox b/doxygen/dox/Overview.dox index 64e80c70429..eaa942e219c 100644 --- a/doxygen/dox/Overview.dox +++ b/doxygen/dox/Overview.dox @@ -24,7 +24,7 @@ documents cover a mix of tasks, concepts, and reference, to help a specific Version-specific documentation (see the version in the title area) can be found here: - HDF5 1.12 branch (this site) - - HDF5 1.12.x + - HDF5 1.14.x - HDF5 1.10.x - HDF5 1.8.x diff --git a/doxygen/dox/PredefinedDatatypeTables.dox b/doxygen/dox/PredefinedDatatypeTables.dox new file mode 100644 index 00000000000..fbafa9443e0 --- /dev/null +++ b/doxygen/dox/PredefinedDatatypeTables.dox @@ -0,0 +1,22 @@ +/** \page predefined_datatypes_tables HDF5 Predefined Datatypes + * + * The following datatypes are predefined in HDF5. + * + * \snippet{doc} tables/predefinedDatatypes.dox predefined_ieee_datatypes_table + * + * \snippet{doc} tables/predefinedDatatypes.dox predefined_std_datatypes_table + * + * \snippet{doc} tables/predefinedDatatypes.dox predefined_unix_datatypes_table + * + * \snippet{doc} tables/predefinedDatatypes.dox predefined_string_datatypes_table + * + * \snippet{doc} tables/predefinedDatatypes.dox predefined_intel_datatypes_table + * + * \snippet{doc} tables/predefinedDatatypes.dox predefined_dec_datatypes_table + * + * \snippet{doc} tables/predefinedDatatypes.dox predefined_mips_datatypes_table + * + * \snippet{doc} tables/predefinedDatatypes.dox predefined_native_datatypes_table + * + * \snippet{doc} tables/predefinedDatatypes.dox predefined_c9x_datatypes_table + */ diff --git a/doxygen/dox/ReferenceManual.dox b/doxygen/dox/ReferenceManual.dox index 790092530a9..b9bcd498357 100644 --- a/doxygen/dox/ReferenceManual.dox +++ b/doxygen/dox/ReferenceManual.dox @@ -8,49 +8,156 @@ The functions provided by the HDF5 API are grouped into the following - + + + + + + + + + - + - + - + - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
-\include{doc} core_menu.md +
Core Reference Manual Modules
ModuleLanguageDescription
Attributes (H5A)@ref H5A "C"@ref H5::Attribute "C++"@ref FH5A "Fortran"@ref JH5A "Java"HDF5 attribute is a small metadata object describing the nature and/or intended usage of a primary data object. +
Datasets (H5D)@ref H5D "C"@ref H5::DataSet "C++"@ref FH5D "Fortran"@ref JH5D "Java"Manage HDF5 datasets, including the transfer of data between memory and disk and the description of dataset properties. +
Dataspaces (H5S)@ref H5S "C"@ref H5::DataSpace "C++"@ref FH5S "Fortran"@ref JH5S "Java"HDF5 dataspaces describe the shape of datasets in memory or in HDF5 files.
- -\include{doc} high_level_menu.md +
Datatypes (H5T)@ref H5T "C"@ref H5::DataType "C++"@ref FH5T "Fortran"@ref JH5T "Java"HDF5 datatypes describe the element type of HDF5 datasets and attributes.
- -\include{doc} fortran_menu.md +
Error Handling (H5E)@ref H5E "C"@ref H5::Exception "C++"@ref FH5E "Fortran"@ref JH5E "Java"HDF5 library error reporting.
- -\include{doc} java_menu.md +
Event Set (H5ES)@ref H5ES "C""C++""Fortran""Java"HDF5 event set life cycle used with HDF5 VOL connectors that enable the asynchronous feature in HDF5.
Deprecated functionsFunctions with \ref ASYNC\ref api-compat-macrosFiles (H5F)@ref H5F "C"@ref H5::H5File "C++"@ref FH5F "Fortran"@ref JH5F "Java"Manage HDF5 files. +
Filters (H5Z)@ref H5Z "C""C++"@ref FH5Z "Fortran"@ref JH5Z "Java"Manage HDF5 user-defined filters +
Groups (H5G)@ref H5G "C"@ref H5::Group "C++"@ref FH5G "Fortran"@ref JH5G "Java"Manage HDF5 groups. +
Identifiers (H5I)@ref H5I "C"@ref H5::IdComponent "C++"@ref FH5I "Fortran"@ref JH5I "Java"Manage identifiers defined by the HDF5 library. +
Library General (%H5)@ref H5 "C"@ref H5::H5Library "C++"@ref FH5 "Fortran"@ref JH5 "Java"Manage the life cycle of HDF5 library instances. +
Links (H5L)@ref H5L "C""C++"@ref FH5L "Fortran"@ref JH5L "Java"Manage HDF5 links and link types. +
Objects (H5O)@ref H5O "C""C++"@ref FH5O "Fortran"@ref JH5O "Java"Manage HDF5 objects (groups, datasets, datatype objects). +
Property Lists (H5P)@ref H5P "C"@ref H5::PropList "C++"@ref FH5P "Fortran"@ref JH5P "Java"HDF5 property lists are the main vehicle to configure the behavior of HDF5 API functions. +
Dynamically-loaded Plugins (H5PL)@ref H5PL "C""C++""Fortran"@ref JH5PL "Java"Manage the loading behavior of HDF5 plugins. +
References (H5R)@ref H5R "C""C++"@ref FH5R "Fortran"@ref JH5R "Java"Manage HDF5 references (HDF5 objects, attributes, and selections on datasets a.k.a. dataset regions). +
VOL Connector (H5VL)@ref H5VL "C""C++"@ref FH5VL "Fortran"@ref JH5VL "Java"Manage HDF5 VOL connector plugins. +
- - -Mind the gap + + +Language + + + + + + + + + + + + + + + + + + + + + +
High-level Reference Manual Modules
ModuleDescription
HDF5 Lite APIs (H5LT,H5LD)@ref H5LT "C""C++"@ref FH5LT "Fortran""Java"Functions to simplify creating and manipulating datasets, attributes and other features. +
HDF5 Images API (H5IM)@ref H5IM "C""C++"@ref FH5IM "Fortran""Java"Creating and manipulating HDF5 datasets intended to be interpreted as images. +
HDF5 Table APIs (H5TB)@ref H5TB "C""C++"@ref FH5TB "Fortran""Java"Creating and manipulating HDF5 datasets intended to be interpreted as tables. +
HDF5 Packet Table APIs (H5PT)@ref H5PT "C""C++""Fortran""Java"Creating and manipulating HDF5 datasets to support append- and read-only operations on table data. +
HDF5 Dimension Scales APIs (H5DS)@ref H5DS "C""C++"@ref FH5DS "Fortran""Java"Creating and manipulating HDF5 datasets that are associated with the dimension of another HDF5 dataset. +
HDF5 Optimizations APIs (H5DO)@ref H5DO "C""C++""Fortran""Java"Bypassing default HDF5 behavior in order to optimize for specific use cases. +
Extensions (H5LR, H5LT)@ref H5LR "C""C++""Fortran""Java" +
+ + + + + + + + + + + + + + + + +
Additional Java Reference Manual Modules
@ref HDF5CONSTThis class contains C constants and enumerated types of HDF5 library. +
@ref HDFNATIVEThis class encapsulates native methods to deal with arrays of numbers, converting from numbers to bytes and bytes to numbers. +
@ref HDFARRAYThis is a class for handling multidimensional arrays for HDF. +
@ref ERRORSThe class HDF5Exception returns errors from the Java HDF5 Interface. +
+ + + + +\ref predefined_datatypes_tables
+Deprecated functions
+Functions with \ref ASYNC
+\ref api-compat-macros + + + + Follow these simple rules and stay out of trouble: -\li \Bold{Handle discipline:} The HDF5 C-API is rife with handles or +\li \Bold{Handle discipline:} The HDF5 API is rife with handles or identifiers, which you typically obtain by creating new HDF5 items, copying - items, or retrieving facets of items. \Emph{You acquire a handle, you own it!} - (Colin Powell) In other words, you are responsible for releasing the underlying + items, or retrieving facets of items. Consequently, \Bold{and most importantly}, you are + responsible for releasing the underlying resources via the matching \Code{H5*close()} call, or deal with the consequences of resource leakage. \li \Bold{Closed means closed:} Do not pass identifiers that were previously \Code{H5*close()}-d to other API functions! It will generate an error. \li \Bold{Dynamic memory allocation:} The API contains a few functions in which the HDF5 library dynamically allocates memory on the caller's behalf. The caller owns - this memory and eventually must free it by calling H5free_memory(). (\Bold{Not} - the `free` function \Emph{du jour}!) + this memory and eventually must free it by calling H5free_memory() and not language-explicit memory functions. \li \Bold{Be careful with that saw:} Do not modify the underlying collection when an iteration is in progress! \li \Bold{Use of locations:} Certain API functions, typically called \Code{H5***_by_name} @@ -58,7 +165,6 @@ Follow these simple rules and stay out of trouble: If the identifier fully specifies the object in question, pass \Code{'.'} (a dot) for the name! -Break a leg! diff --git a/doxygen/dox/ViewTools.dox b/doxygen/dox/ViewTools.dox index 0b685a0bb3d..2212d4ba3f2 100644 --- a/doxygen/dox/ViewTools.dox +++ b/doxygen/dox/ViewTools.dox @@ -829,6 +829,7 @@ by simply viewing the specified dataset with the -d option must be specified - -before -\par subsetting options (if not using the shorthand method). 
+Where, the -d option must be specified +before subsetting options (if not using the shorthand method). The -A 0 option suppresses the printing of attributes. diff --git a/doxygen/dox/cookbook/Accessibility.dox b/doxygen/dox/cookbook/Accessibility.dox index f10028367e0..28009be71d2 100644 --- a/doxygen/dox/cookbook/Accessibility.dox +++ b/doxygen/dox/cookbook/Accessibility.dox @@ -1,6 +1,6 @@ /** \page Accessibility -\section Accessibility +\section secAccessibility Accessibility \subsection CB_MaintainCompat Maintaining Compatibility with other HDF5 Library Versions diff --git a/doxygen/dox/cookbook/Attributes.dox b/doxygen/dox/cookbook/Attributes.dox index 68fd15906d9..59149099579 100644 --- a/doxygen/dox/cookbook/Attributes.dox +++ b/doxygen/dox/cookbook/Attributes.dox @@ -1,6 +1,6 @@ /** \page Attributes -\section Attributes +\section secAttributes Attributes \subsection CB_LargeAttributes Creating "Large" HDF5 Attributes diff --git a/doxygen/dox/cookbook/Files.dox b/doxygen/dox/cookbook/Files.dox index 169d6387251..489377153a0 100644 --- a/doxygen/dox/cookbook/Files.dox +++ b/doxygen/dox/cookbook/Files.dox @@ -1,6 +1,6 @@ /** \page Files -\section Files +\section secFiles Files \subsection CB_FreeSpace Tracking Free Space in HDF5 Files diff --git a/doxygen/dox/cookbook/Performance.dox b/doxygen/dox/cookbook/Performance.dox index 7ac3a182ad0..5e945b232c4 100644 --- a/doxygen/dox/cookbook/Performance.dox +++ b/doxygen/dox/cookbook/Performance.dox @@ -1,6 +1,6 @@ /** \page Performance -\section Performance +\section secPerformance Performance \subsection CB_MDCPerf Assessing HDF5 Metadata Cache Performance diff --git a/doxygen/dox/high_level/extension.dox b/doxygen/dox/high_level/extension.dox index e8471b9d8b4..d754b96bf11 100644 --- a/doxygen/dox/high_level/extension.dox +++ b/doxygen/dox/high_level/extension.dox @@ -8,13 +8,16 @@ * These functions were created as part of a project supporting * NPP/NPOESS Data Production and Exploitation ( * - * project, 
software). + * project, + * software ). * While they were written to facilitate access to NPP, NPOESS, and JPSS * data in the HDF5 format, these functions may be useful to anyone working * with region references, hyperslab selections, or bit-fields. * - * Note that these functions are not part of the standard HDF5 distribution; - * the software must be separately downloaded and installed. + * Note that these functions are not part of the standard HDF5 distribution; + * the + * software + * must be separately downloaded and installed. * * A comprehensive guide to this library, * @@ -28,7 +31,7 @@ * \n Copies data from a referenced region to a region in a destination dataset. * - \ref H5LRcreate_ref_to_all * \n Creates a dataset with the region references to the data in all datasets located under a - * specified group in a file or creates a dataset with object references to all objects (groups or datasets) + * specified group in a file or creates a dataset with object references to all objects (groups or datasets) * located under a specified group in a file. * - \ref H5LRcreate_region_references * \n Creates an array of region references using an array of paths to diff --git a/doxygen/examples/H5.format.2.0.html b/doxygen/examples/H5.format.2.0.html index 242fdeaf041..d2979e18ba1 100644 --- a/doxygen/examples/H5.format.2.0.html +++ b/doxygen/examples/H5.format.2.0.html @@ -1,289 +1,392 @@ - - - HDF5 File Format Specification Version 2.0 - - - -
+
- - + - - - - + + + + -
-
    -
  1. Introduction
  2. - -
      -
    1. This Document
    2. -
    3. Changes for HDF5 1.10
    4. -
    -
    - -
  3. Disk Format: Level 0 - File Metadata
  4. - -
      -
    1. Disk Format: Level 0A - Format Signature and Superblock
    2. -
    3. Disk Format: Level 0B - File Driver Info
    4. -
    5. Disk Format: Level 0C - Superblock Extension
    6. -
    -
    -
  5. Disk Format: Level 1 - File Infrastructure
  6. - -
      -
    1. Disk Format: Level 1A - B-trees and B-tree - Nodes
    2. -
        -
      1. Disk Format: Level 1A1 - Version 1 - B-trees (B-link Trees)
      2. -
      3. Disk Format: Level 1A2 - Version 2 - B-trees
      4. -
      -
    3. Disk Format: Level 1B - Group Symbol Table Nodes
    4. -
    5. Disk Format: Level 1C - Symbol Table Entry
    6. -
    7. Disk Format: Level 1D - Local Heaps
    8. -
    9. Disk Format: Level 1E - Global Heap
    10. -
    11. Disk Format: Level 1F - Fractal Heap
    12. -
    13. Disk Format: Level 1G - Free-space Manager
    14. -
    15. Disk Format: Level 1H - Shared Object Header Message Table
    16. -
    -
    -
  7. Disk Format: Level 2 - Data Objects
  8. - -
      -
    1. Disk Format: Level 2A - Data Object Headers
    2. -
        -
      1. Disk Format: Level 2A1 - Data Object Header Prefix
      2. -
          -
        1. Version 1 Data Object Header Prefix
        2. -
        3. Version 2 Data Object Header Prefix
        4. -
        -
      3. Disk Format: Level 2A2 - Data Object Header Messages
      4. -
          -
        1. The NIL Message
        2. -
        3. The Dataspace Message
        4. -
        5. The Link Info Message
        6. +
+
    +
  1. Introduction
  2. + +
      +
    1. This Document
    2. +
    3. Changes for HDF5 1.10
    4. +
    +
    + +
  3. Disk Format: Level 0 - File + Metadata
  4. + +
      +
    1. Disk Format: Level 0A - Format + Signature and Superblock
    2. +
    3. Disk Format: Level 0B - File + Driver Info
    4. +
    5. Disk Format: Level 0C - + Superblock Extension
    6. +
    +
    +
  5. Disk Format: Level 1 - File + Infrastructure
  6. + +
      +
    1. Disk Format: Level 1A - B-trees + and B-tree Nodes
    2. +
        +
      1. Disk Format: Level 1A1 - + Version 1 B-trees (B-link Trees)
      2. +
      3. Disk Format: Level 1A2 - + Version 2 B-trees
      4. +
      +
    3. Disk Format: Level 1B - Group + Symbol Table Nodes
    4. +
    5. Disk Format: Level 1C - + Symbol Table Entry
    6. +
    7. Disk Format: Level 1D - Local + Heaps
    8. +
    9. Disk Format: Level 1E - Global + Heap
    10. +
    11. Disk Format: Level 1F - + Fractal Heap
    12. +
    13. Disk Format: Level 1G - + Free-space Manager
    14. +
    15. Disk Format: Level 1H - Shared + Object Header Message Table
    16. +
    +
    +
  7. Disk Format: Level 2 - Data + Objects
  8. + +
      +
    1. Disk Format: Level 2A - Data + Object Headers
    2. +
        +
      1. Disk Format: Level + 2A1 - Data Object Header Prefix
      2. +
          +
        1. Version 1 Data + Object Header Prefix
        2. +
        3. Version 2 Data + Object Header Prefix
        4. +
        +
      3. Disk Format: Level + 2A2 - Data Object Header Messages
      4. +
          +
        1. The NIL Message
        2. + +
        3. The Dataspace Message
        4. + +
        5. The Link Info Message
        6. + +
        +
      +
    +
- - - - -
  -
    -
  1. Disk Format: Level 2 - Data - Objects (Continued)
  2. -
      -
    1. Disk Format: Level 2A - Data Object - Headers (Continued)
    2. -
        -
      1. Disk Format: Level 2A2 - - Data Object Header Messages (Continued)
      2. -
          -
        1. The Datatype Message
        2. -
        3. The Data Storage - - Fill Value (Old) Message
        4. -
        5. The Data Storage - - Fill Value Message
        6. -
        7. The Link Message
        8. -
        9. The Data Storage - - External Data Files Message
        10. -
        11. The Data Storage - - Layout Message
        12. -
        13. The Bogus Message
        14. -
        15. The Group Info - Message
        16. -
        17. The Data Storage - - Filter Pipeline Message
        18. -
        19. The Attribute - Message
        20. -
        21. The Object Comment - Message
        22. -
        23. The Object - Modification Time (Old) Message
        24. -
        25. The Shared Message - Table Message
        26. -
        27. The Object Header - Continuation Message
        28. -
        29. The Symbol - Table Message
        30. -
        31. The Object - Modification Time Message
        32. -
        33. The B-tree - ‘K’ Values Message
        34. -
        35. The Driver Info - Message
        36. -
        37. The Attribute Info - Message
        38. -
        39. The Object Reference - Count Message
        40. -
        41. The File Space Info - Message
        42. +
  +
    +
  1. Disk Format: Level 2 - Data + Objects (Continued)
  2. +
      +
    1. Disk Format: Level 2A - Data + Object Headers (Continued)
    2. +
        +
      1. Disk Format: Level + 2A2 - Data Object Header Messages (Continued)
      2. +
          +
        1. The Datatype Message
        2. + +
        3. The Data Storage - + Fill Value (Old) Message
        4. + +
        5. The Data Storage - Fill + Value Message
        6. + +
        7. The Link Message
        8. + +
        9. The Data Storage + - External Data Files Message
        10. + +
        11. The Data Storage - Layout + Message
        12. + +
        13. The Bogus Message
        14. + +
        15. The Group Info Message
        16. + +
        17. The Data Storage - Filter + Pipeline Message
        18. + +
        19. The Attribute Message
        20. + +
        21. The Object Comment + Message
        22. + +
        23. The Object + Modification Time (Old) Message
        24. + +
        25. The Shared Message + Table Message
        26. + +
        27. The Object Header + Continuation Message
        28. + +
        29. The Symbol Table + Message
        30. + +
        31. The Object + Modification Time Message
        32. + +
        33. The B-tree + ‘K’ Values Message
        34. + +
        35. The Driver Info Message
        36. + +
        37. The Attribute Info Message
        38. + +
        39. The Object Reference + Count Message
        40. + +
        41. The File Space Info + Message
        42. + +
        +
      +
    3. Disk Format: Level 2B - Data + Object Data Storage
    4. +
    + +
  3. Appendix A: Definitions
  4. +
  5. Appendix B: File Memory + Allocation Types
- -
  • Disk Format: Level 2B - Data Object Data Storage
  • - - -
  • Appendix A: Definitions
  • -
  • Appendix B: File Memory Allocation Types
  • - -
    + + +
    @@ -293,14610 +396,14857 @@

    I. Introduction

    - - - - - - -
      -
    - HDF5 Groups -
     
      - Figure 1: Relationships among the HDF5 root group, other groups, and objects -
    -
     
      - HDF5 Objects -  
      - Figure 2: HDF5 objects -- datasets, datatypes, or dataspaces -
    -
     
    - - -

    The format of an HDF5 file on disk encompasses several - key ideas of the HDF4 and AIO file formats as well as - addressing some shortcomings therein. The new format is - more self-describing than the HDF4 format and is more - uniformly applied to data objects in the file.

    - -

    An HDF5 file appears to the user as a directed graph. - The nodes of this graph are the higher-level HDF5 objects - that are exposed by the HDF5 APIs:

    - -
      -
    • Groups
    • -
    • Datasets
    • -
    • Committed (formerly Named) datatypes
    • -
    - -

    At the lowest level, as information is actually written to the disk, - an HDF5 file is made up of the following objects:

    -
      -
    • A superblock
    • -
    • B-tree nodes
    • -
    • Heap blocks
    • -
    • Object headers
    • -
    • Object data
    • -
    • Free space
    • -
    - -

    The HDF5 Library uses these low-level objects to represent the - higher-level objects that are then presented to the user or - to applications through the APIs. For instance, a group is an - object header that contains a message that points to a local - heap (for storing the links to objects in the group) and to a - B-tree (which indexes the links). A dataset is an object header - that contains messages that describe datatype, dataspace, layout, - filters, external files, fill value, and other elements with the - layout message pointing to either a raw data chunk or to a - B-tree that points to raw data chunks.

    + + + + + + + + + + + + + + + + + + + + + + +
      +
    HDF5 Groups +
     
     Figure 1: Relationships among + the HDF5 root group, other groups, and objects +
     
     HDF5 Objects 
     Figure 2: HDF5 objects -- + datasets, datatypes, or dataspaces +
     
    + + +

    The format of an HDF5 file on disk encompasses several key ideas + of the HDF4 and AIO file formats as well as addressing some + shortcomings therein. The new format is more self-describing than the + HDF4 format and is more uniformly applied to data objects in the file.

    + +

    An HDF5 file appears to the user as a directed graph. The nodes + of this graph are the higher-level HDF5 objects that are exposed by the + HDF5 APIs:

    + +
      +
    • Groups
    • +
    • Datasets
    • +
    • Committed (formerly Named) datatypes
    • +
    + +

    At the lowest level, as information is actually written to the + disk, an HDF5 file is made up of the following objects:

    +
      +
    • A superblock
    • +
    • B-tree nodes
    • +
    • Heap blocks
    • +
    • Object headers
    • +
    • Object data
    • +
    • Free space
    • +
    + +

    The HDF5 Library uses these low-level objects to represent the + higher-level objects that are then presented to the user or to + applications through the APIs. For instance, a group is an object + header that contains a message that points to a local heap (for storing + the links to objects in the group) and to a B-tree (which indexes the + links). A dataset is an object header that contains messages that + describe datatype, dataspace, layout, filters, external files, fill + value, and other elements with the layout message pointing to either a + raw data chunk or to a B-tree that points to raw data chunks.


    I.A. This Document

    -

    This document describes the lower-level data objects; - the higher-level objects and their properties are described - in the HDF5 User Guide.

    - -

    Three levels of information comprise the file format. - Level 0 contains basic information for identifying and - defining information about the file. Level 1 information contains - the information about the pieces of a file shared by many objects - in the file (such as a B-trees and heaps). Level 2 is the rest - of the file and contains all of the data objects, with each object - partitioned into header information, also known as - metadata, and data.

    - -

    The sizes of various fields in the following layout tables are - determined by looking at the number of columns the field spans - in the table. There are three exceptions: (1) The size may be - overridden by specifying a size in parentheses, (2) the size of - addresses is determined by the Size of Offsets field - in the superblock and is indicated in this document with a - superscripted ‘O’, and (3) the size of length fields is determined - by the Size of Lengths field in the superblock and is - indicated in this document with a superscripted ‘L’.

    - -

    Values for all fields in this document should be treated as unsigned - integers, unless otherwise noted in the description of a field. - Additionally, all metadata fields are stored in little-endian byte - order. -

    - -

    All checksums used in the format are computed with the - Jenkins’ - lookup3 algorithm. -

    - -

    Whenever a bit flag or field is mentioned for an entry, bits are - numbered from the lowest bit position in the entry. -

    - -

    Various tables in this document aligned with “This space inserted - only to align table nicely”. These entries in the table are just - to make the table presentation nicer and do not represent any values - or padding in the file. -

    +

    + This document describes the lower-level data objects; the higher-level + objects and their properties are described in the HDF5 + User Guide. +

    + +

    + Three levels of information comprise the file format. Level 0 contains + basic information for identifying and defining information about the + file. Level 1 information contains the information about the pieces of + a file shared by many objects in the file (such as B-trees and + heaps). Level 2 is the rest of the file and contains all of the data + objects, with each object partitioned into header information, also + known as metadata, and data. +

    + +

    + The sizes of various fields in the following layout tables are + determined by looking at the number of columns the field spans in the + table. There are three exceptions: (1) The size may be overridden by + specifying a size in parentheses, (2) the size of addresses is + determined by the Size of Offsets field in the superblock and + is indicated in this document with a superscripted ‘O’, and + (3) the size of length fields is determined by the Size of + Lengths field in the superblock and is indicated in this document with + a superscripted ‘L’. +

    + +

    Values for all fields in this document should be treated as + unsigned integers, unless otherwise noted in the description of a + field. Additionally, all metadata fields are stored in little-endian + byte order.

    + +

    + All checksums used in the format are computed with the Jenkins’ + lookup3 algorithm. +

    + +

    Whenever a bit flag or field is mentioned for an entry, bits are + numbered from the lowest bit position in the entry.

    + +

    Various tables in this document are aligned with “This space + inserted only to align table nicely”. These entries in the table + are just to make the table presentation nicer and do not represent any + values or padding in the file.


    I.B. Changes for HDF5 1.10

    -

    As of October 2015, changes in the file format for HDF5 1.10 - have not yet been finalized.

    +

    As of October 2015, changes in the file format for HDF5 1.10 have + not yet been finalized.




    -

    -II. Disk Format: Level 0 - File Metadata

    - -
    -

    -II.A. Disk Format: Level 0A - Format Signature and Superblock

    - -

    The superblock may begin at certain predefined offsets within - the HDF5 file, allowing a block of unspecified content for - users to place additional information at the beginning (and - end) of the HDF5 file without limiting the HDF5 Library’s - ability to manage the objects within the file itself. This - feature was designed to accommodate wrapping an HDF5 file in - another file format or adding descriptive information to an HDF5 - file without requiring the modification of the actual file’s - information. The superblock is located by searching for the - HDF5 format signature at byte offset 0, byte offset 512, and at - successive locations in the file, each a multiple of two of - the previous location; in other words, at these byte offsets: - 0, 512, 1024, 2048, and so on.

    - -

    The superblock is composed of the format signature, followed by a - superblock version number and information that is specific to each - version of the superblock. - Currently, there are three versions of the superblock format. - Version 0 is the default format, while version 1 is basically the same - as version 0 with additional information when a non-default B-tree ‘K’ - value is stored. Version 2 is the latest format, with some fields - eliminated or compressed and with superblock extension and checksum - support.

    - -

    Version 0 and 1 of the superblock are described below:

    - - -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - Superblock (Versions 0 and 1) -
    bytebytebytebyte

    Format Signature (8 bytes)

    Version # of SuperblockVersion # of File’s Free Space StorageVersion # of Root Group Symbol Table EntryReserved (zero)
    Version # of Shared Header Message FormatSize of OffsetsSize of LengthsReserved (zero)
    Group Leaf Node KGroup Internal Node K
    File Consistency Flags
    Indexed Storage Internal Node K1Reserved (zero)1

    Base AddressO


    Address of File Free space InfoO


    End of File AddressO


    Driver Information Block AddressO

    Root Group Symbol Table Entry
    - - - - - - - - -
      - (Items marked with an ‘O’ in the above table are - of the size specified in “Size of Offsets.”) -
      - (Items marked with a ‘1’ in the above table are - new in version 1 of the superblock) -
    -
    - -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Field NameDescription

    Format Signature

    This field contains a constant value and can be used to - quickly identify a file as being an HDF5 file. The - constant value is designed to allow easy identification of - an HDF5 file and to allow certain types of data corruption - to be detected. The file signature of an HDF5 file always - contains the following values:

    -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Decimal:13772687013102610
    Hexadecimal:894844460d0a1a0a
    ASCII C Notation:\211HDF\r\n\032\n
    -
    -

    This signature both identifies the file as an HDF5 file - and provides for immediate detection of common - file-transfer problems. The first two bytes distinguish - HDF5 files on systems that expect the first two bytes to - identify the file type uniquely. The first byte is - chosen as a non-ASCII value to reduce the probability - that a text file may be misrecognized as an HDF5 file; - also, it catches bad file transfers that clear bit - 7. Bytes two through four name the format. The CR-LF - sequence catches bad file transfers that alter newline - sequences. The control-Z character stops file display - under MS-DOS. The final line feed checks for the inverse - of the CR-LF translation problem. (This is a direct - descendent of the - PNG file - signature.)

    -

    This field is present in version 0+ of the superblock. -

    Version Number of the Superblock

    This value is used to determine the format of the - information in the superblock. When the format of the - information in the superblock is changed, the version number - is incremented to the next integer and can be used to - determine how the information in the superblock is - formatted.

    - -

    Values of 0, 1 and 2 are defined for this field. (The format - of version 2 is described below, not here) -

    - -

    This field is present in version 0+ of the superblock. -

    -

    Version Number of the File’s Free Space - Information

    -

    This value is used to determine the format of the - file’s free space information. -

    -

    The only value currently valid in this field is ‘0’, which - indicates that the file’s free space is as described - below. -

    - -

    This field is present in version 0 and 1 of the superblock. -

    -

    Version Number of the Root Group Symbol Table - Entry

    This value is used to determine the format of the - information in the Root Group Symbol Table Entry. When the - format of the information in that field is changed, the - version number is incremented to the next integer and can be - used to determine how the information in the field - is formatted.

    -

    The only value currently valid in this field is ‘0’, - which indicates that the root group symbol table entry is - formatted as described below.

    -

    This field is present in version 0 and 1 of the - superblock.

    -

    Version Number of the Shared Header Message Format

    This value is used to determine the format of the - information in a shared object header message. Since the format - of the shared header messages differs from the other private - header messages, a version number is used to identify changes - in the format. -

    -

    The only value currently valid in this field is ‘0’, which - indicates that shared header messages are formatted as - described below. -

    - -

    This field is present in version 0 and 1 of the superblock. -

    -

    Size of Offsets

    This value contains the number of bytes used to store - addresses in the file. The values for the addresses of - objects in the file are offsets relative to a base address, - usually the address of the superblock signature. This - allows a wrapper to be added after the file is created - without invalidating the internal offset locations. -

    - -

    This field is present in version 0+ of the superblock. -

    -

    Size of Lengths

    This value contains the number of bytes used to store - the size of an object. -

    -

    This field is present in version 0+ of the superblock. -

    -

    Group Leaf Node K

    -

    Each leaf node of a group B-tree will have at - least this many entries but not more than twice this - many. If a group has a single leaf node then it - may have fewer entries. -

    -

    This value must be greater than zero. -

    -

    See the description of B-trees below. -

    - -

    This field is present in version 0 and 1 of the superblock. -

    -

    Group Internal Node K

    -

    Each internal node of a group B-tree will have at - least this many entries but not more than twice this - many. If the group has only one internal - node then it might have fewer entries. -

    -

    This value must be greater than zero. -

    -

    See the description of B-trees below. -

    - -

    This field is present in version 0 and 1 of the superblock. -

    -

    File Consistency Flags

    -

    This value contains flags to indicate information - about the consistency of the information contained - within the file. Currently, the following bit flags are - defined: -

      -
    • Bit 0 set indicates that the file is opened for - write-access.
    • -
    • Bit 1 set indicates that the file has - been verified for consistency and is guaranteed to be - consistent with the format defined in this document.
    • -
    • Bits 2-31 are reserved for future use.
    • -
    - Bit 0 should be - set as the first action when a file is opened for write - access and should be cleared only as the final action - when closing a file. Bit 1 should be cleared during - normal access to a file and only set after the file’s - consistency is guaranteed by the library or a - consistency utility. -

    - -

    This field is present in version 0+ of the superblock. -

    -

    Indexed Storage Internal Node K

    -

    Each internal node of an indexed storage B-tree will have at - least this many entries but not more than twice this - many. If the index storage B-tree has only one internal - node then it might have fewer entries. -

    -

    This value must be greater than zero. -

    -

    See the description of B-trees below. -

    - -

    This field is present in version 1 of the superblock. -

    -

    Base Address

    -

    This is the absolute file address of the first byte of - the HDF5 data within the file. The library currently - constrains this value to be the absolute file address - of the superblock itself when creating new files; - future versions of the library may provide greater - flexibility. When opening an existing file and this address does - not match the offset of the superblock, the library assumes - that the entire contents of the HDF5 file have been adjusted in - the file and adjusts the base address and end of file address to - reflect their new positions in the file. Unless otherwise noted, - all other file addresses are relative to this base - address. -

    - -

    This field is present in version 0+ of the superblock. -

    -

    Address of Global Free-space Index

    -

    The file’s free space is not persistent for version 0 and 1 of - the superblock. - Currently this field always contains the - undefined address. -

    - -

    This field is present in version 0 and 1 of the superblock. -

    -

    End of File Address

    -

    This is the absolute file address of the first byte past - the end of all HDF5 data. It is used to determine whether a - file has been accidentally truncated and as an address where - file data allocation can occur if space from the free list is - not used. -

    - -

    This field is present in version 0+ of the superblock. -

    -

    Driver Information Block Address

    -

    This is the relative file address of the file driver - information block which contains driver-specific - information needed to reopen the file. If there is no - driver information block then this entry should be the - undefined address. -

    - -

    This field is present in version 0 and 1 of the superblock. -

    -

    Root Group Symbol Table Entry

    -

    This is the symbol table entry - of the root group, which serves as the entry point into - the group graph for the file. -

    - -

    This field is present in version 0 and 1 of the superblock. -

    -
    -
    +

    + II. Disk Format: Level 0 - File Metadata +

    -
    -

    Version 2 of the superblock is described below:

    - -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - Superblock (Version 2) -
    bytebytebytebyte

    Format Signature (8 bytes)

    Version # of SuperblockSize of OffsetsSize of LengthsFile Consistency Flags

    Base AddressO


    Superblock Extension AddressO


    End of File AddressO


    Root Group Object Header AddressO

    Superblock Checksum
    +
    +

    + II.A. Disk Format: Level 0A - Format + Signature and Superblock +

    - - - - -
      - (Items marked with an ‘O’ in the above table are - of the size specified in “Size of Offsets.”) -
    +

    The superblock may begin at certain predefined offsets within the + HDF5 file, allowing a block of unspecified content for users to place + additional information at the beginning (and end) of the HDF5 file + without limiting the HDF5 Library’s ability to manage the objects + within the file itself. This feature was designed to accommodate + wrapping an HDF5 file in another file format or adding descriptive + information to an HDF5 file without requiring the modification of the + actual file’s information. The superblock is located by searching + for the HDF5 format signature at byte offset 0, byte offset 512, and at + successive locations in the file, each a multiple of two of the + previous location; in other words, at these byte offsets: 0, 512, 1024, + 2048, and so on.

    -
    +

    The superblock is composed of the format signature, followed by a + superblock version number and information that is specific to each + version of the superblock. Currently, there are three versions of the + superblock format. Version 0 is the default format, while version 1 is + basically the same as version 0 with additional information when a + non-default B-tree ‘K’ value is stored. Version 2 is the + latest format, with some fields eliminated or compressed and with + superblock extension and checksum support.

    -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +

    Version 0 and 1 of the superblock are described below:

    -
    Field NameDescription

    Format Signature

    -

    This field is the same as described for versions 0 and 1 of the - superblock. -

    Version Number of the Superblock

    -

    This field has a value of 2 and has the same meaning as for - versions 0 and 1. -

    -

    Size of Offsets

    -

    This field is the same as described for versions 0 and 1 of the - superblock. -

    -

    Size of Lengths

    -

    This field is the same as described for versions 0 and 1 of the - superblock. -

    -

    File Consistency Flags

    -

    This field is the same as described for versions 0 and 1 except - that it is smaller (the number of reserved bits has been reduced - from 30 to 6). -

    -

    Base Address

    -

    This field is the same as described for versions 0 and 1 of the - superblock. -

    -

    Superblock Extension Address

    -

    The field is the address of the object header for the - superblock extension. - If there is no extension then this entry should be the - undefined address. -

    -

    End of File Address

    -

    This field is the same as described for versions 0 and 1 of the - superblock. -

    -

    Root Group Object Header Address

    -

    This is the address of - the root group object header, - which serves as the entry point into the group graph for the file. -

    -

    Superblock Checksum

    -

    The checksum for the superblock. -

    -
    -
    -
    -

    -II.B. Disk Format: Level 0B - File Driver Info

    +
    + + -

    The driver information block is an optional region of the - file which contains information needed by the file driver - to reopen a file. The format is described below:

    + + + + + + + + + -
    -
    Superblock (Versions 0 and 1)
    bytebytebytebyte

    Format Signature (8 bytes)
    +
    - + + + + + + - - - - - + + + + + - - + + - + - + + - + -
    - Driver Information Block -
    Version # of SuperblockVersion # of File’s Free Space StorageVersion # of Root Group Symbol Table EntryReserved (zero)
    bytebytebytebyte
    Version # of Shared Header Message FormatSize of OffsetsSize of LengthsReserved (zero)
    VersionReservedGroup Leaf Node KGroup Internal Node K
    Driver Information SizeFile Consistency Flags

    Driver Identification (8 bytes)

    Indexed Storage Internal + Node K1 + Reserved (zero)1


    Driver Information (variable size)



    Base AddressO
    +
    -
    -
    -
    - - - - + + - - + - - + - - + +
    Field NameDescription

    Address of File Free space InfoO
    +

    Version

    -

    The version number of the Driver Information Block. - This document describes version 0. -

    -

    End of File AddressO
    +

    Driver Information Size

    -

    The size in bytes of the Driver Information field. -

    -

    Driver Information Block AddressO
    +

    Driver Identification

    -

    This is an eight-byte ASCII string without null - termination which identifies the driver and/or version number - of the Driver Information Block. The predefined driver encoded - in this field by the HDF5 Library is identified by the - letters NCSA followed by the first four characters of - the driver name. If the Driver Information block is not - the original version then the last letter(s) of the - identification will be replaced by a version number in - ASCII, starting with 0. -

    -

    - Identification for user-defined drivers is also eight-byte long. - It can be arbitrary but should be unique to avoid - the four character prefix “NCSA”. -

    -
    Root Group Symbol Table Entry
    - -

    Driver Information

    - Driver information is stored in a format defined by the - file driver (see description below). + + + + -
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets.”)
    -
    + +   + (Items marked with a ‘1’ in the above table are + new in version 1 of the superblock) + + +
    -
    - The two drivers encoded in the Driver Identification field are as follows: -
      -
    • - Multi driver: -

      - The identifier for this driver is “NCSAmulti”. - This driver provides a mechanism for segregating raw data and different types of metadata - into multiple files. - These files are viewed by the library as a single virtual HDF5 file with a single file address. - A maximum of 6 files will be created for the following data: - superblock, B-tree, raw data, global heap, local heap, and object header. - More than one type of data can be written to the same file. -

    • -
    • - Family driver -

      - The identifier for this driver is “NCSAfami” and is encoded in this field for library version 1.8 and after. - This driver is designed for systems that do not support files larger than 2 gigabytes - by splitting the HDF5 file address space across several smaller files. - It does nothing to segregate metadata and raw data; - they are mixed in the address space just as they would be in a single contiguous file. -

    • -
    -

    The format of the Driver Information field for the - above two drivers are described below:

    - -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +
    +
    +
    - Multi Driver Information -
    bytebytebytebyte
    Member MappingMember MappingMember MappingMember Mapping
    Member MappingMember MappingReservedReserved

    Address of Member File 1


    End of Address for Member File 1


    Address of Member File 2


    End of Address for Member File 2


    ... ...


    Address of Member File N


    End of Address for Member File N


    Name of Member File 1 (variable size)


    Name of Member File 2 (variable size)


    ... ...


    Name of Member File N (variable size)

    + + + + + + + + + + + + + + + + + + -
    -
    -
    Field NameDescription

    Format Signature

    This field contains a constant value and can be used + to quickly identify a file as being an HDF5 file. The constant + value is designed to allow easy identification of an HDF5 file and + to allow certain types of data corruption to be detected. The file + signature of an HDF5 file always contains the following values:

    +
    + + + + + + + + + + + + -
    Decimal:13772687013102610
    - +
    Hexadecimal:894844460d0a1a0a
    - - - - + + + + + + + + + + + +
    Field NameDescription
    ASCII C Notation:\211HDF\r\n\032\n
    + +

    + This signature both identifies the file as an HDF5 file and + provides for immediate detection of common file-transfer problems. + The first two bytes distinguish HDF5 files on systems that expect + the first two bytes to identify the file type uniquely. The first + byte is chosen as a non-ASCII value to reduce the probability that + a text file may be misrecognized as an HDF5 file; also, it catches + bad file transfers that clear bit 7. Bytes two through four name + the format. The CR-LF sequence catches bad file transfers that + alter newline sequences. The control-Z character stops file display + under MS-DOS. The final line feed checks for the inverse of the + CR-LF translation problem. (This is a direct descendent of the PNG + file signature.) +

    +

    + This field is present in version 0+ of the superblock. +

    + - -

    Member Mapping

    -

    These fields are integer values from 1 to 6 - indicating how the data can be mapped to or merged with another type of - data. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Member MappingDescription
    1The superblock data.
    2The B-tree data.
    3The raw data.
    4The global heap data.
    5The local heap data.
    6The object header data.

    -

    For example, if the third field has the value 3 and all the rest have the - value 1, it means there are two files: one for raw data, and one for superblock, - B-tree, global heap, local heap, and object header.

    - - - - -

    Reserved

    -

    These fields are reserved and should always be zero.

    - - - -

    Address of Member File N

    -

    This field specifies the virtual address at which the member file starts.

    -

    N is the number of member files.

    - - - - -

    End of Address for Member File N

    -

    This field is the end of the allocated address for the member file. -

    - - - -

    Name of Member File N

    -

    This field is the null-terminated name of the member file and - its length should be a multiple of 8 bytes. - Additional bytes will be padded with NULLs. The default naming - convention is %s-X.h5, where X is one of the letters - s (for superblock), b (for B-tree), r (for raw data), - g (for global heap), l (for local heap), and o (for - object header). The name of the whole HDF5 file will substitute the %s - in the string. -

    - - - -
    + +

    Version Number of the Superblock

    +

    This value is used to determine the format of the + information in the superblock. When the format of the information + in the superblock is changed, the version number is incremented to + the next integer and can be used to determine how the information + in the superblock is formatted.

    -
    -
    - - - - - - - - - - - - - +

    Values of 0, 1 and 2 are defined for this field. (The format + of version 2 is described below, not here)

    -
    - Family Driver Information -
    bytebytebytebyte

    Size of Member File

    -
    +

    + This field is present in version 0+ of the superblock. +

    + -
    -
    - - - - - - - - - - -
    Field NameDescription

    Size of Member File

    This field is the size of the member file in the family of files.

    -
    + +

    Version Number of the File’s Free Space + Information

    + +

    This value is used to determine the format of the + file’s free space information.

    +

    + The only value currently valid in this field is ‘0’, + which indicates that the file’s free space is as described below. +

    -
    -

    -II.C. Disk Format: Level 0C - Superblock Extension

    +

    + This field is present in version 0 and 1 of the + superblock. +

    + + -

    The superblock extension is used to store superblock metadata - which is either optional, or added after the version of the superblock - was defined. Superblock extensions may only exist when version 2+ of - superblock is used. A superblock extension is an object header which may - hold the following messages:

    - + +

    Version Number of the Root Group Symbol Table Entry

    +

    This value is used to determine the format of the + information in the Root Group Symbol Table Entry. When the format + of the information in that field is changed, the version number is + incremented to the next integer and can be used to determine how + the information in the field is formatted.

    +

    + The only value currently valid in this field is ‘0’, + which indicates that the root group symbol table entry is formatted + as described below. +

    +

    + This field is present in version 0 and 1 of the + superblock. +

    + + + +

    Version Number of the Shared Header Message Format

    +

    This value is used to determine the format of the + information in a shared object header message. Since the format of + the shared header messages differs from the other private header + messages, a version number is used to identify changes in the + format.

    +

    + The only value currently valid in this field is ‘0’, + which indicates that shared header messages are formatted as + described below. +

    +

    + This field is present in version 0 and 1 of the + superblock. +

    + + +

    Size of Offsets

    +

    This value contains the number of bytes used to store + addresses in the file. The values for the addresses of objects in + the file are offsets relative to a base address, usually the + address of the superblock signature. This allows a wrapper to be + added after the file is created without invalidating the internal + offset locations.

    -
    -
    -
    -

    -III. Disk Format: Level 1 - File Infrastructure

    - -
    -

    -III.A. Disk Format: Level 1A - B-trees and B-tree Nodes

    - -

    B-trees allow flexible storage for objects which tend to grow - in ways that cause the object to be stored discontiguously. B-trees - are described in various algorithms books including “Introduction to - Algorithms” by Thomas H. Cormen, Charles E. Leiserson, and Ronald - L. Rivest. B-trees are used in several places in the HDF5 file format, - when an index is needed for another data structure.

    - -

    The version 1 B-tree structure described below is the original index - structure, but is limited by some bugs in our implementation (mainly in - how it handles deleting records). The version 1 B-trees are being phased - out in favor of the version 2 B-trees described below, although both - types of structures may be found in the same file, depending on - application settings when creating the file.

    - -
    -

    -III.A.1. Disk Format: Level 1A1 - Version 1 B-trees (B-link Trees)

    - -

    Version 1 B-trees in HDF5 files are an implementation of the B-link tree, - in which the sibling nodes at a particular level in the tree are stored - in a doubly-linked list, as described in the “Efficient Locking for - Concurrent Operations on B-trees” paper by Phillip Lehman and S. Bing Yao - as published in the ACM Transactions on Database Systems, - Vol. 6, No. 4, December 1981.

    - -

    The B-link trees implemented by the file format contain one more - key than the number of children. In other words, each child - pointer out of a B-tree node has a left key and a right key. - The pointers out of internal nodes point to sub-trees while - the pointers out of leaf nodes point to symbol nodes and - raw data chunks. - Aside from that difference, internal nodes and leaf nodes - are identical.

    - -
    - - +

    + This field is present in version 0+ of the superblock. +

    + - - - - + + - + + - - - + + - + + - + + - + + - + + - + + - + + - + + +
    - B-link Tree Nodes -
    bytebytebytebyte

    Size of Lengths

    This value contains the number of bytes used to store + the size of an object.

    +

    + This field is present in version 0+ of the superblock. +

    Signature

    Group Leaf Node K

    +

    Each leaf node of a group B-tree will have at least this many + entries but not more than twice this many. If a group has a single + leaf node then it may have fewer entries.

    +

    This value must be greater than zero.

    +

    + See the description of B-trees below. +

    + +

    + This field is present in version 0 and 1 of the + superblock. +

    +
    Node TypeNode LevelEntries Used

    Group Internal Node K

    +

    Each internal node of a group B-tree will have at least this + many entries but not more than twice this many. If the group has + only one internal node then it might have fewer entries.

    +

    This value must be greater than zero.

    +

    + See the description of B-trees below. +

    + +

    + This field is present in version 0 and 1 of the + superblock. +

    +

    Address of Left SiblingO

    File Consistency Flags

    +

    This value contains flags to indicate information about the + consistency of the information contained within the file. + Currently, the following bit flags are defined:

    +
      +
    • Bit 0 set indicates that the file is opened for + write-access.
    • +
    • Bit 1 set indicates that the file has been verified for + consistency and is guaranteed to be consistent with the format + defined in this document.
    • +
    • Bits 2-31 are reserved for future use.
    • +
    Bit 0 should be set as the first action when a file is opened for + write access and should be cleared only as the final action when + closing a file. Bit 1 should be cleared during normal access to a + file and only set after the file’s consistency is guaranteed + by the library or a consistency utility. +

    + +

    + This field is present in version 0+ of the superblock. +

    +

    Address of Right SiblingO

    Indexed Storage Internal Node K

    +

    Each internal node of an indexed storage B-tree will have at + least this many entries but not more than twice this many. If the + index storage B-tree has only one internal node then it might have + fewer entries.

    +

    This value must be greater than zero.

    +

    + See the description of B-trees below. +

    + +

    + This field is present in version 1 of the superblock. +

    +
    Key 0 (variable size)

    Base Address

    +

    This is the absolute file address of the first byte of the + HDF5 data within the file. The library currently constrains this + value to be the absolute file address of the superblock itself when + creating new files; future versions of the library may provide + greater flexibility. When opening an existing file and this address + does not match the offset of the superblock, the library assumes + that the entire contents of the HDF5 file have been adjusted in the + file and adjusts the base address and end of file address to + reflect their new positions in the file. Unless otherwise noted, + all other file addresses are relative to this base address.

    + +

    + This field is present in version 0+ of the superblock. +

    +

    Address of Child 0O

    Address of Global Free-space Index

    +

    + The file’s free space is not persistent for version 0 and 1 + of the superblock. Currently this field always contains the undefined address. +

    + +

    + This field is present in version 0 and 1 of the + superblock. +

    +
    Key 1 (variable size)

    End of File Address

    +

    This is the absolute file address of the first byte past the + end of all HDF5 data. It is used to determine whether a file has + been accidentally truncated and as an address where file data + allocation can occur if space from the free list is not used.

    + +

    + This field is present in version 0+ of the superblock. +

    +

    Address of Child 1O

    Driver Information Block Address

    +

    + This is the relative file address of the file driver information + block which contains driver-specific information needed to reopen + the file. If there is no driver information block then this entry + should be the undefined address. +

    + +

    + This field is present in version 0 and 1 of the + superblock. +

    +
    ...

    Root Group Symbol Table Entry

    +

    + This is the symbol table entry of + the root group, which serves as the entry point into the group + graph for the file. +

    + +

    + This field is present in version 0 and 1 of the + superblock. +

    +
    +
    + +
    +

    Version 2 of the superblock is described below:

    + +
    + + - + + + + - + - + + + + -
    Superblock (Version 2)
    Key 2K (variable size)bytebytebytebyte

    Address of Child 2KO


    Format Signature (8 bytes)
    +
    Key 2K+1 (variable size)Version # of SuperblockSize of OffsetsSize of LengthsFile Consistency Flags
    - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
    - -
    - -
    -
    - - - - - - - - - - - - - - - - - - - + - - - + + - - - + + - - - + + - - - + + +
    Field NameDescription

    Signature

    -

    The ASCII character string “TREE” is - used to indicate the - beginning of a B-link tree node. This gives file - consistency checking utilities a better chance of - reconstructing a damaged file. -

    -

    Node Type

    -

    Each B-link tree points to a particular type of data. - This field indicates the type of data as well as - implying the maximum degree K of the tree and - the size of each Key field. - - - - - - - - - - - - - - - -
    Node TypeDescription
    0This tree points to group nodes.
    1This tree points to raw data chunk nodes.

    -

    Node Level

    -

    The node level indicates the level at which this node - appears in the tree (leaf nodes are at level zero). Not - only does the level indicate whether child pointers - point to sub-trees or to data, but it can also be used - to help file consistency checking utilities reconstruct - damaged trees. -

    -

    Base AddressO
    +

    Entries Used

    -

    This determines the number of children to which this - node points. All nodes of a particular type of tree - have the same maximum degree, but most nodes will point - to less than that number of children. The valid child - pointers and keys appear at the beginning of the node - and the unused pointers and keys appear at the end of - the node. The unused pointers and keys have undefined - values. -

    -

    Superblock Extension AddressO
    +

    Address of Left Sibling

    -

    This is the relative file address of the left sibling of - the current node. If the current - node is the left-most node at this level then this field - is the undefined address. -

    -

    End of File AddressO
    +

    Address of Right Sibling

    -

    This is the relative file address of the right sibling of - the current node. If the current - node is the right-most node at this level then this - field is the undefined address. -

    -

    Root Group Object Header AddressO
    +

    Keys and Child Pointers

    -

    Each tree has 2K+1 keys with 2K - child pointers interleaved between the keys. The number - of keys and child pointers actually containing valid - values is determined by the node’s Entries Used field. - If that field is N then the B-link tree contains - N child pointers and N+1 keys. -

    -
    Superblock Checksum
    - -

    Key

    - -

    The format and size of the key values is determined by - the type of data to which this tree points. The keys are - ordered and are boundaries for the contents of the child - pointer; that is, the key values represented by child - N fall between Key N and Key - N+1. Whether the interval is open or closed on - each end is determined by the type of data to which the - tree points. -

    - -

    - The format of the key depends on the node type. - For nodes of node type 0 (group nodes), the key is formatted as - follows: - - - - - - -
    A single field of Size of Lengths - bytes:Indicates the byte offset into the local heap - for the first object name in the subtree which - that key describes. -
    -

    - - -

    - For nodes of node type 1 (chunked raw data nodes), the key is - formatted as follows: - - - - - - - - - - - - - - -
    Bytes 1-4:Size of chunk in bytes.
    Bytes 4-8:Filter mask, a 32-bit bit field indicating which - filters have been skipped for this chunk. Each filter - has an index number in the pipeline (starting at 0, with - the first filter to apply) and if that filter is skipped, - the bit corresponding to its index is set.
    (D + 1) 64-bit fields:The offset of the - chunk within the dataset where D is the number - of dimensions of the dataset, and the last value is the - offset within the dataset’s datatype and should always be - zero. For example, if - a chunk in a 3-dimensional dataset begins at the - position [5,5,5], there will be three - such 64-bit values, each with the value of - 5, followed by a 0 value.
    -

    - - + + + + +
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets.”)
    - -

    Child Pointer

    - -

    The tree node contains file addresses of subtrees or - data depending on the node level. Nodes at Level 0 point - to data addresses, either raw data chunks or group nodes. - Nodes at non-zero levels point to other nodes of the - same B-tree. -

    -

    For raw data chunk nodes, the child pointer is the address - of a single raw data chunk. For group nodes, the child pointer - points to a symbol table, which contains - information for multiple symbol table entries. -

    - - - -
    - -

    - Conceptually, each B-tree node looks like this:

    -
    - - - - - - - - - - - - - -
    key[0] child[0] key[1] child[1] key[2] ... ... key[N-1] child[N-1] key[N]
    -
    -
    - - where child[i] is a pointer to a sub-tree (at a level - above Level 0) or to data (at Level 0). - Each key[i] describes an item stored by the B-tree - (a chunk or an object of a group node). The range of values - represented by child[i] is indicated by key[i] - and key[i+1]. - - -

    The following question must next be answered: - “Is the value described by key[i] contained in - child[i-1] or in child[i]?” - The answer depends on the type of tree. - In trees for groups (node type 0) the object described by - key[i] is the greatest object contained in - child[i-1] while in chunk trees (node type 1) the - chunk described by key[i] is the least chunk in - child[i].

    - -

    That means that key[0] for group trees is sometimes unused; - it points to offset zero in the heap, which is always the - empty string and compares as “less-than” any valid object name.

    - -

    And key[N] for chunk trees is sometimes unused; - it contains a chunk offset which compares as “greater-than” - any other chunk offset and has a chunk byte size of zero - to indicate that it is not actually allocated.

    - -
    -

    -III.A.2. Disk Format: Level 1A2 - Version 2 B-trees

    - -

    Version 2 B-trees are “traditional” B-trees, with one major difference. - Instead of just using a simple pointer (or address in the file) to a - child of an internal node, the pointer to the child node contains two - additional pieces of information: the number of records in the child - node itself, and the total number of records in the child node and - all its descendants. Storing this additional information allows fast - array-like indexing to locate the nth record in the B-tree.

    - -

    The entry into a version 2 B-tree is a header which contains global - information about the structure of the B-tree. The root node - address - field in the header points to the B-tree root node, which is either an - internal or leaf node, depending on the value in the header’s - depth field. An internal node consists of records plus - pointers to further leaf or internal nodes in the tree. A leaf node - consists solely of records. The format of the records depends on - the B-tree type (stored in the header).

    - -
    - - + +
    +
    +
    - Version 2 B-tree Header -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    bytebytebytebyte
    Signature
    VersionTypeThis space inserted only to align table nicely
    Node Size
    Record SizeDepth
    Split PercentMerge PercentThis space inserted only to align table nicely

    Root Node AddressO

    Number of Records in Root NodeThis space inserted only to align table nicely

    Total Number of Records in B-treeL

    Checksum
    + Field Name + Description + - - - - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
      - (Items marked with an ‘L’ in the above table are of the size - specified in “Size of Lengths” field in the superblock.) -
    - -
    - -
    -
    - - - - - - - - - - - - - - - - - - - + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - - - - - - - - - - - - -
    Field NameDescription

    Signature

    -

    The ASCII character string “BTHD” is - used to indicate the header of a version 2 B-link tree node. -

    -

    Version

    -

    The version number for this B-tree header. This document - describes version 0. -

    -

    Type

    -

    This field indicates the type of B-tree: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    ValueDescription
    0A “testing” B-tree, this value should not be - used for storing records in actual HDF5 files. -
    1This B-tree is used for indexing indirectly accessed, - non-filtered ‘huge’ fractal heap objects. -
    2This B-tree is used for indexing indirectly accessed, - filtered ‘huge’ fractal heap objects. -
    3This B-tree is used for indexing directly accessed, - non-filtered ‘huge’ fractal heap objects. -
    4This B-tree is used for indexing directly accessed, - filtered ‘huge’ fractal heap objects. -
    5This B-tree is used for indexing the ‘name’ field for - links in indexed groups. -
    6This B-tree is used for indexing the ‘creation order’ - field for links in indexed groups. -
    7This B-tree is used for indexing shared object header - messages. -
    8This B-tree is used for indexing the ‘name’ field for - indexed attributes. -
    9This B-tree is used for indexing the ‘creation order’ - field for indexed attributes. -

    -

    The format of records for each type is described below.

    -

    Format Signature

    +

    This field is the same as described for versions 0 and 1 of + the superblock.

    +

    Node Size

    -

    This is the size in bytes of all B-tree nodes. -

    -

    Version Number of the Superblock

    +

    This field has a value of 2 and has the same meaning as for + versions 0 and 1.

    +

    Record Size

    -

    This field is the size in bytes of the B-tree record. -

    -

    Size of Offsets

    +

    This field is the same as described for versions 0 and 1 of + the superblock.

    +

    Depth

    -

    This is the depth of the B-tree. -

    -

    Size of Lengths

    +

    This field is the same as described for versions 0 and 1 of + the superblock.

    +

    Split Percent

    -

    The percent full that a node needs to increase above before it - is split. -

    -

    File Consistency Flags

    +

    This field is the same as described for versions 0 and 1 + except that it is smaller (the number of reserved bits has been + reduced from 30 to 6).

    +

    Merge Percent

    -

    The percent full that a node needs to be decreased below before it - is merged. -

    -

    Base Address

    +

    This field is the same as described for versions 0 and 1 of + the superblock.

    +

    Root Node Address

    -

    This is the address of the root B-tree node. A B-tree with - no records will have the undefined - address in this field. -

    -

    Superblock Extension Address

    +

    + The field is the address of the object header for the superblock extension. If there is no + extension then this entry should be the undefined + address. +

    +

    Number of Records in Root Node

    -

    This is the number of records in the root node. -

    -

    Total Number of Records in B-tree

    -

    This is the total number of records in the entire B-tree. -

    -

    Checksum

    -

    This is the checksum for the B-tree header. -

    -
    -
    - -
    -
    -
    - - - - - - - + + - + + + - - - + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - Version 2 B-tree Internal Node -
    bytebytebytebyte

    End of File Address

    +

    This field is the same as described for versions 0 and 1 of + the superblock.

    +
    Signature

    Root Group Object Header Address

    +

    + This is the address of the root group + object header, which serves as the entry point into the group + graph for the file. +

    +
    VersionTypeRecords 0, 1, 2...N-1 (variable size)

    Superblock Checksum

    +

    The checksum for the superblock.

    +

    Child Node Pointer 0O


    Number of Records N0 for Child Node 0 (variable size)

    Total Number of Records for Child Node 0 (optional, variable size)

    Child Node Pointer 1O


    Number of Records N1 for Child Node 1 (variable size)

    Total Number of Records for Child Node 1 (optional, variable size)
    ...

    Child Node Pointer NO


    Number of Records Nn for Child Node N (variable size)

    Total Number of Records for Child Node N (optional, variable size)
    Checksum
    - - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
    -
    + + +
    +

    + II.B. Disk Format: Level 0B - File Driver + Info +

    + +

    + The driver information block is an optional region of the file + which contains information needed by the file driver to reopen a file. + The format is described below: +

    -
    -
    - - - - - - - - - +
    +
    Field NameDescription

    Signature

    -

    The ASCII character string “BTIN” is - used to indicate the internal node of a B-link tree. -

    -
    + - - + + + + - - + + - - + - - + - - - - - - - - - - - - + +
    Driver Information Block

    Version

    -

    The version number for this B-tree internal node. - This document describes version 0. -

    -
    bytebytebytebyte

    Type

    -

    This field is the type of the B-tree node. It should always - be the same as the B-tree type in the header. -

    -
    VersionReserved

    Records

    -

    The size of this field is determined by the number of records - for this node and the record size (from the header). The format - of records depends on the type of B-tree. -

    -
    Driver Information Size

    Child Node Pointer

    -

    This field is the address of the child node pointed to by the - internal node. -

    -

    Driver Identification (8 bytes)
    +

    Number of Records in Child Node

    -

    This is the number of records in the child node pointed to by - the corresponding Node Pointer. -

    -

    The number of bytes used to store this field is determined by - the maximum possible number of records able to be stored in the - child node. -

    -

    - The maximum number of records in a child node is computed - in the following way: - -

      -
    • Subtract the fixed size overhead for - the child node (for example, its signature, version, - checksum, and so on and one pointer triplet - of information for the child node (because there is one - more pointer triplet than records in each internal node)) - from the size of nodes for the B-tree.
    • -
    • Divide that result by the size of a record plus the - pointer triplet of information stored to reach each - child node from this node. -
    - -

    -

    - Note that leaf nodes do not encode any - child pointer triplets, so the maximum number of records in a - leaf node is just the node size minus the leaf node overhead, - divided by the record size. -

    -

    - Also note that the first level of internal nodes above the - leaf nodes do not encode the Total Number of Records in Child - Node value in the child pointer triplets (since it is the - same as the Number of Records in Child Node), so the - maximum number of records in these nodes is computed with the - equation above, but using (Child Pointer, Number of - Records in Child Node) pairs instead of triplets. -

    -

    - The number of - bytes used to encode this field is the least number of bytes - required to encode the maximum number of records in a child - node value for the child nodes below this level - in the B-tree. -

    -

    - For example, if the maximum number of child records is - 123, one byte will be used to encode these values in this - node; if the maximum number of child records is - 20000, two bytes will be used to encode these values in this - node; and so on. The maximum number of bytes used to - encode these values is 8 (in other words, an unsigned - 64-bit integer). -

    -

    Total Number of Records in Child Node

    -

    This is the total number of records for the node pointed to by - the corresponding Node Pointer and all its children. - This field exists only in nodes whose depth in the B-tree node - is greater than 1 (in other words, the “twig” - internal nodes, just above leaf nodes, do not store this - field in their child node pointers). -

    -

    The number of bytes used to store this field is determined by - the maximum possible number of records able to be stored in the - child node and its descendants. -

    -

    - The maximum possible number of records able to be stored in a - child node and its descendants is computed iteratively, in the - following way: The maximum number of records in a leaf node - is computed, then that value is used to compute the maximum - possible number of records in the first level of internal nodes - above the leaf nodes. Multiplying these two values together - determines the maximum possible number of records in child node - pointers for the level of nodes two levels above leaf nodes. - This process is continued up to any level in the B-tree. -

    -

    - The number of bytes used to encode this value is computed in - the same way as for the Number of Records in Child Node - field. -

    -

    Checksum

    -

    This is the checksum for this node. -

    -

    +
    Driver Information (variable size)
    +
    +
    +
    - - - -
    -
    -
    - - - +
    +
    +
    - Version 2 B-tree Leaf Node -
    - - - - + + - + + - - - - - - - - -
    bytebytebytebyteField NameDescription
    Signature

    Version

    +

    The version number of the Driver Information Block. This + document describes version 0.

    +
    VersionTypeRecord 0, 1, 2...N-1 (variable size)
    Checksum
    -
    -
    -
    - - - - + + + - - + + - - - + + + +
    Field NameDescription

    Driver Information Size

    +

    + The size in bytes of the Driver Information field. +

    +

    Signature

    -

    The ASCII character string “BTLF“ is - used to indicate the leaf node of a version 2 B-link tree. -

    -

    Driver Identification

    +

    + This is an eight-byte ASCII string without null termination which + identifies the driver and/or version number of the Driver + Information Block. The predefined driver encoded in this field by + the HDF5 Library is identified by the letters + NCSA + followed by the first four characters of the driver name. If the + Driver Information block is not the original version then the last + letter(s) of the identification will be replaced by a version + number in ASCII, starting with 0. +

    +

    Identification for user-defined drivers is also eight bytes + long. It can be arbitrary but should be unique to avoid conflicting with the four + character prefix “NCSA”.

    +

    Version

    -

    The version number for this B-tree leaf node. - This document describes version 0. -

    -

    Driver Information

    Driver information is stored in a format defined by the file + driver (see description below).
    +
    + +
    The two drivers encoded in the +Driver Identification field are as follows: +
      +
    • Multi driver: +

      The identifier for this driver is “NCSAmulti”. This + driver provides a mechanism for segregating raw data and different + types of metadata into multiple files. These files are viewed by the + library as a single virtual HDF5 file with a single file address. A + maximum of 6 files will be created for the following data: + superblock, B-tree, raw data, global heap, local heap, and object + header. More than one type of data can be written to the same file.

      +
    • +
    • Family driver +

      The identifier for this driver is “NCSAfami” and is + encoded in this field for library version 1.8 and after. This driver + is designed for systems that do not support files larger than 2 + gigabytes by splitting the HDF5 file address space across several + smaller files. It does nothing to segregate metadata and raw data; + they are mixed in the address space just as they would be in a single + contiguous file.

      +
    • +
    +

    + The format of the Driver Information field for the above two + drivers are described below: +

    - -

    Type

    - -

    This field is the type of the B-tree node. It should always - be the same as the B-tree type in the header. -

    - - +
    + + - - + + + + - - + + + + -
    Multi Driver Information

    Records

    -

    The size of this field is determined by the number of records - for this node and the record size (from the header). The format - of records depends on the type of B-tree. -

    -
    bytebytebytebyte

    Checksum

    -

    This is the checksum for this node. -

    -
    Member MappingMember MappingMember MappingMember Mapping
    -
    - -
    -

    The record layout for each stored (in other words, non-testing) - B-tree type is as follows:

    - -
    - - - - - - - - - - - - - - - - - + + + + -
    - Version 2 B-tree, Type 1 Record Layout - Indirectly Accessed, Non-Filtered, - ‘Huge’ Fractal Heap Objects -
    bytebytebytebyte

    Huge Object AddressO


    Huge Object LengthL


    Huge Object IDL

    Member MappingMember MappingReservedReserved
    - - - - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
      - (Items marked with an ‘L’ in the above table are of the size - specified in “Size of Lengths” field in the superblock.) -
    - -
    - -
    -
    - - - - + - - + - - + - - + -
    Field NameDescription
    Address of Member File 1
    +

    Huge Object Address

    -

    The address of the huge object in the file. -

    -

    End of Address for Member File 1
    +

    Huge Object Length

    -

    The length of the huge object in the file. -

    -

    Address of Member File 2
    +

    Huge Object ID

    -

    The heap ID for the huge object. -

    -

    End of Address for Member File 2
    +
    -
    - -
    -
    -
    - - - - - - - + - + + - + + - + + - + + - + -
    - Version 2 B-tree, Type 2 Record Layout - Indirectly Accessed, Filtered, - ‘Huge’ Fractal Heap Objects -
    bytebytebytebyte
    ... ...
    +

    Filtered Huge Object AddressO


    Address of Member File N
    +

    Filtered Huge Object LengthL


    End of Address for Member File N
    +
    Filter Mask
    Name of Member File 1 (variable + size)
    +

    Filtered Huge Object Memory SizeL


    Name of Member File 2 (variable + size)
    +

    Huge Object IDL


    ... ...
    +
    - - - - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
      - (Items marked with an ‘L’ in the above table are of the size - specified in “Size of Lengths” field in the superblock.) -
    +
    Name of Member File N (variable + size)
    +
    + -
    + + -
    -
    - - - - +
    +
    +
    Field NameDescription
    + + + - - + + - - + + - - + + - - + + - - + + +
    Field NameDescription

    Filtered Huge Object Address

    -

    The address of the filtered huge object in the file. -

    -

    Member Mapping

    These fields are integer values from 1 to 6 + indicating how the data can be mapped to or merged with another + type of data.

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Member MappingDescription
    1The superblock data.
    2The B-tree data.
    3The raw data.
    4The global heap data.
    5The local heap data.
    6The object header data.
    +

    +

    For example, if the third field has the value 3 and all the + rest have the value 1, it means there are two files: one for raw + data, and one for superblock, B-tree, global heap, local heap, and + object header.

    Filtered Huge Object Length

    -

    The length of the filtered huge object in the file. -

    -

    Reserved

    These fields are reserved and should always be zero.

    Filter Mask

    -

    A 32-bit bit field indicating which filters have been skipped for - this chunk. Each filter has an index number in the pipeline - (starting at 0, with the first filter to apply) and if that - filter is skipped, the bit corresponding to its index is set. -

    -

    Address of Member File N

    This field specifies the virtual address at which the + member file starts.

    +

    N is the number of member files.

    Filtered Huge Object Memory Size

    -

    The size of the de-filtered huge object in memory. -

    -

    End of Address for Member File N

    This field is the end of the allocated address for + the member file.

    Huge Object ID

    -

    The heap ID for the huge object. -

    -

    Name of Member File N

    + This field is the null-terminated name of the member file and its + length should be a multiple of 8 bytes. Additional bytes will be + padded with NULLs. The default naming convention is %s-X.h5, + where X is one of the letters s (for superblock), + b (for B-tree), r (for raw data), g (for + global heap), l (for local heap), and o (for + object header). The name of the whole HDF5 file will substitute the + %s in the string. +

    +
    - - - -
    -
    -
    - - - - - - - - - +
    +
    +
    - Version 2 B-tree, Type 3 Record Layout - Directly Accessed, Non-Filtered, - ‘Huge’ Fractal Heap Objects -
    bytebytebytebyte
    + - - - - + + + + -
    Family Driver Information

    Huge Object AddressO


    Huge Object LengthL

    bytebytebytebyte
    - - - - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
      - (Items marked with an ‘L’ in the above table are of the size - specified in “Size of Lengths” field in the superblock.) -
    +
    Size of Member File
    +
    + -
    + + -
    -
    - +
    +
    +
    - - + + - - + + +
    Field NameDescriptionField NameDescription

    Huge Object Address

    -

    The address of the huge object in the file. -

    -

    Size of Member File

    This field is the size of the member file in the + family of files.

    +
    - -

    Huge Object Length

    - -

    The length of the huge object in the file. -

    - - +
    +

    + II.C. Disk Format: Level 0C - Superblock + Extension +

    + +

    + The superblock extension is used to store superblock metadata + which is either optional, or added after the version of the superblock + was defined. Superblock extensions may only exist when version 2+ of + the superblock is used. A superblock extension is an object header which + may hold the following messages: +

    + - - -
    -
    -
    - - - - - - - - +
    +
    +
    +

    + III. Disk Format: Level 1 - File + Infrastructure +

    - - - - - - - - - - - - -
    - Version 2 B-tree, Type 4 Record Layout - Directly Accessed, Filtered, - ‘Huge’ Fractal Heap Objects -
    bytebytebytebyte

    Filtered Huge Object AddressO


    Filtered Huge Object LengthL

    Filter Mask

    Filtered Huge Object Memory SizeL

    +
    +

    + III.A. Disk Format: Level 1A - B-trees and B-tree + Nodes +

    + +

    B-trees allow flexible storage for objects which tend to grow in + ways that cause the object to be stored discontiguously. B-trees are + described in various algorithms books including “Introduction to + Algorithms” by Thomas H. Cormen, Charles E. Leiserson, and Ronald + L. Rivest. B-trees are used in several places in the HDF5 file format, + when an index is needed for another data structure.

    + +

    The version 1 B-tree structure described below is the original + index structure, but is limited by some bugs in our implementation + (mainly in how it handles deleting records). The version 1 B-trees are + being phased out in favor of the version 2 B-trees described below, + although both types of structures may be found in the same file, + depending on application settings when creating the file.

    - - - - - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
      - (Items marked with an ‘L’ in the above table are of the size - specified in “Size of Lengths” field in the superblock.) -
    +
    +

    + III.A.1. Disk Format: Level 1A1 - Version 1 + B-trees (B-link Trees) +

    + +

    + Version 1 B-trees in HDF5 files are an implementation of the B-link tree, + in which the sibling nodes at a particular level in the tree are stored + in a doubly-linked list, as described in the “Efficient Locking + for Concurrent Operations on B-trees” paper by Phillip Lehman and + S. Bing Yao as published in the ACM Transactions on + Database Systems, Vol. 6, No. 4, December 1981. +

    -
    +

    The B-link trees implemented by the file format contain one more + key than the number of children. In other words, each child pointer out + of a B-tree node has a left key and a right key. The pointers out of + internal nodes point to sub-trees while the pointers out of leaf nodes + point to symbol nodes and raw data chunks. Aside from that difference, + internal nodes and leaf nodes are identical.

    -
    -
    - - - - - +
    +
    Field NameDescription
    + - - + + + + - - + - - + + + - - + -
    B-link Tree Nodes

    Filtered Huge Object Address

    -

    The address of the filtered huge object in the file. -

    -
    bytebytebytebyte

    Filtered Huge Object Length

    -

    The length of the filtered huge object in the file. -

    -
    Signature

    Filter Mask

    -

    A 32-bit bit field indicating which filters have been skipped for - this chunk. Each filter has an index number in the pipeline - (starting at 0, with the first filter to apply) and if that - filter is skipped, the bit corresponding to its index is set. -

    -
    Node TypeNode LevelEntries Used

    Filtered Huge Object Memory Size

    -

    The size of the de-filtered huge object in memory. -

    -

    Address of Left SiblingO
    +
    -
    - -
    -
    -
    - - - - - - - - - - - + - - - - - - -
    - Version 2 B-tree, Type 5 Record Layout - Link Name for Indexed Group -
    bytebytebytebyte
    Hash of Name
    Address of Right SiblingO
    +
    ID (bytes 1-4)
    ID (bytes 5-7)
    -
    - -
    -
    - - - - + - - + - - + -
    Field NameDescriptionKey 0 (variable size)

    Hash

    -

    This field is hash value of the name for the link. The hash - value is the Jenkins’ lookup3 checksum algorithm applied to - the link’s name. -

    -

    Address of Child 0O
    +

    ID

    -

    This is a 7-byte sequence of bytes and is the heap ID for the - link record in the group’s fractal heap.

    -
    Key 1 (variable size)
    -
    - -
    -
    -
    - - - - - - - + - - - - + + - + -
    - Version 2 B-tree, Type 6 Record Layout - Creation Order for Indexed Group -
    bytebytebytebyte
    Address of Child 1O
    +

    Creation Order (8 bytes)

    ID (bytes 1-4)...
    ID (bytes 5-7)Key 2K (variable size) +
    -
    -
    -
    - - - + - - + +
    Field NameDescription
    Address of Child 2KO
    +

    Creation Order

    -

    This field is the creation order value for the link. -

    -
    Key 2K+1 (variable size) +
    + - - + + +

    ID

    -

    This is a 7-byte sequence of bytes and is the heap ID for the - link record in the group’s fractal heap.

    -
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
    - -
    - -
    -
    -
    - - + +
    +
    +
    - Version 2 B-tree, Type 7 Record Layout - Shared Object Header Messages (Sub-Type 0 - Message in Heap) -
    - - - - + + - - - - - - - - - - - + + -
    bytebytebytebyteField NameDescription
    Message LocationThis space inserted only to align table nicely
    Hash
    Reference Count

    Heap ID (8 bytes)

    Signature

    +

    + The ASCII character string “ + TREE + ” is used to indicate the beginning of a B-link tree node. + This gives file consistency checking utilities a better chance of + reconstructing a damaged file. +

    +
    -
    -
    -
    - - - + + - - + + - - - + + + - - - + + + - - - + + + -
    Field NameDescription

    Node Type

    +

    + Each B-link tree points to a particular type of data. This field + indicates the type of data as well as implying the maximum degree K + of the tree and the size of each Key field. + + +

    + + + + + + + + + + + + + +
    Node TypeDescription
    0This tree points to group nodes.
    1This tree points to raw data chunk nodes.
    +

    +

    Message Location

    -

    This field Indicates the location where the message is stored: - - - - - - - - - - - - - -
    ValueDescription
    0Shared message is stored in shared message index heap. -
    1Shared message is stored in object header. -

    -

    Node Level

    +

    The node level indicates the level at which this node appears + in the tree (leaf nodes are at level zero). Not only does the level + indicate whether child pointers point to sub-trees or to data, but + it can also be used to help file consistency checking utilities + reconstruct damaged trees.

    +

    Hash

    -

    This field is hash value of the shared message. The hash - value is the Jenkins’ lookup3 checksum algorithm applied to - the shared message.

    -

    Entries Used

    +

    This determines the number of children to which this node + points. All nodes of a particular type of tree have the same + maximum degree, but most nodes will point to less than that number + of children. The valid child pointers and keys appear at the + beginning of the node and the unused pointers and keys appear at + the end of the node. The unused pointers and keys have undefined + values.

    +

    Reference Count

    -

    The number of objects which reference this message.

    -

    Address of Left Sibling

    +

    + This is the relative file address of the left sibling of the + current node. If the current node is the left-most node at this + level then this field is the undefined + address. +

    +

    Heap ID

    -

    This is an 8-byte sequence of bytes and is the heap ID for the - shared message in the shared message index’s fractal heap.

    -

    Address of Right Sibling

    +

    + This is the relative file address of the right sibling of the + current node. If the current node is the right-most node at this + level then this field is the undefined + address. +

    +
    -
    + +

    Keys and Child Pointers

    + +

    + Each tree has 2K+1 keys with 2K child pointers + interleaved between the keys. The number of keys and child pointers + actually containing valid values is determined by the node’s + Entries Used field. If that field is N then the + B-link tree contains N child pointers and N+1 + keys. +

    + + -
    -
    -
    - - + + + - - - - - +

    The format of the key depends on the node type. For nodes of + node type 0 (group nodes), the key is formatted as follows:

    +
    - Version 2 B-tree, Type 7 Record Layout - Shared Object Header Messages (Sub-Type 1 - Message in Object Header) -

    Key

    +

    + The format and size of the key values is determined by the type of + data to which this tree points. The keys are ordered and are + boundaries for the contents of the child pointer; that is, the key + values represented by child N fall between Key N + and Key N+1. Whether the interval is open or closed on + each end is determined by the type of data to which the tree + points. +

    -
    bytebytebytebyte
    + + + + +
    A single field of Size of Lengths + bytes: + Indicates the byte offset into the local heap + for the first object name in the subtree which that key + describes.
    +

    - - Message Location - This space inserted only to align table nicely - - - Hash + +

    For nodes of node type 1 (chunked raw data nodes), the key is + formatted as follows:

    + + + + + + + + + + + + + +
    Bytes 1-4:Size of chunk in bytes.
    Bytes 4-8:Filter mask, a 32-bit bit field indicating which filters + have been skipped for this chunk. Each filter has an index number + in the pipeline (starting at 0, with the first filter to apply) + and if that filter is skipped, the bit corresponding to its index + is set.
    (D + 1) 64-bit fields: + The offset of the chunk within the dataset where D + is the number of dimensions of the dataset, and the last value is + the offset within the dataset’s datatype and should always + be zero. For example, if a chunk in a 3-dimensional dataset + begins at the position [5,5,5], there will be three + such 64-bit values, each with the value of 5, + followed by a 0 value. +
    +

    + + - - Reserved (zero) - Message Type - Object Header Index + + +

    Child Pointer

    + +

    The tree node contains file addresses of subtrees or data + depending on the node level. Nodes at Level 0 point to data + addresses, either raw data chunks or group nodes. Nodes at non-zero + levels point to other nodes of the same B-tree.

    +

    + For raw data chunk nodes, the child pointer is the address of a + single raw data chunk. For group nodes, the child pointer points to + a symbol table, which contains + information for multiple symbol table entries. +

    + - -
    Object Header AddressO

    + +
    + +

    Conceptually, each B-tree node looks like this:

    +
    + + + + + + + + + + + + + + + + + + + + + -
    key[0] child[0] key[1] child[1] key[2] ... ... key[N-1] +  child[N-1] +  key[N] +
    + +
    +
    where child[ +i] is a pointer to a sub-tree (at a level above Level 0) or to +data (at Level 0). Each key[ +i] describes an +item stored by the B-tree (a chunk or an object of a group node). +The range of values represented by child[ +i] is indicated by key[ +i] and key[ +i+1]. + + +

    + The following question must next be answered: “Is the value + described by key[i] contained in child[i-1] or in child[i]?” + The answer depends on the type of tree. In trees for groups (node type + 0) the object described by key[i] is the greatest object + contained in child[i-1] while in chunk trees (node type 1) the + chunk described by key[i] is the least chunk in child[i]. +

    - - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
    +

    That means that key[0] for group trees is sometimes unused; it + points to offset zero in the heap, which is always the empty string and + compares as “less-than” any valid object name.

    - +

    + And key[N] for chunk trees is sometimes unused; it contains a + chunk offset which compares as “greater-than” any other + chunk offset and has a chunk byte size of zero to indicate that it is + not actually allocated. +

    -
    -
    - - - - - +
    +

    + III.A.2. Disk Format: Level 1A2 - Version 2 + B-trees +

    + +

    + Version 2 B-trees are “traditional” B-trees, with one major + difference. Instead of just using a simple pointer (or address in the + file) to a child of an internal node, the pointer to the child node + contains two additional pieces of information: the number of records in + the child node itself, and the total number of records in the child + node and all its descendants. Storing this additional information + allows fast array-like indexing to locate the nth record in + the B-tree. +

    - - - - +

    + The entry into a version 2 B-tree is a header which contains global + information about the structure of the B-tree. The root node + address field in the header points to the B-tree root node, which is + either an internal or leaf node, depending on the value in the + header’s depth field. An internal node consists of + records plus pointers to further leaf or internal nodes in the tree. A + leaf node consists of solely of records. The format of the records + depends on the B-tree type (stored in the header). +

    - - - - +
    +
    Field NameDescription

    Message Location

    -

    This field Indicates the location where the message is stored: - - - - - - - - - - - - - -
    ValueDescription
    0Shared message is stored in shared message index heap. -
    1Shared message is stored in object header. -

    -

    Hash

    -

    This field is hash value of the shared message. The hash - value is the Jenkins’ lookup3 checksum algorithm applied to - the shared message.

    -
    + - - - + + + + - - - - - - + - -
    Version 2 B-tree Header

    Message Type

    -

    The object header message type of the shared message.

    -
    bytebytebytebyte

    Object Header Index

    -

    This field indicates that the shared message is the nth message - of its type in the specified object header.

    -

    Object Header Address

    -

    The address of the object header containing the shared message.

    -
    Signature
    -
    - -
    -
    -
    - - - - - - - + + + - - + - - + + - + + + - + -
    - Version 2 B-tree, Type 8 Record Layout - Attribute Name for Indexed Attributes -
    bytebytebytebyteVersionTypeThis space inserted + only to align table nicely

    Heap ID (8 bytes)

    Node Size
    Message FlagsThis space inserted only to align table nicelyRecord SizeDepth
    Creation OrderSplit PercentMerge PercentThis space inserted + only to align table nicely
    Hash of Name
    Root Node AddressO
    +
    -
    - -
    -
    - - - + + - - - + - - - + +
    Field NameDescriptionNumber of Records in Root NodeThis space inserted + only to align table nicely

    Heap ID

    -

    This is an 8-byte sequence of bytes and is the heap ID for the - attribute in the object’s attribute fractal heap.

    -

    Total Number of Records in B-treeL
    +

    Message Flags

    The object header message flags for the attribute message.

    -
    Checksum
    + - - + + - - - + + +

    Creation Order

    -

    This field is the creation order value for the attribute. -

    -
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)

    Hash

    -

    This field is hash value of the name for the attribute. The hash - value is the Jenkins’ lookup3 checksum algorithm applied to - the attribute’s name. -

    -
     (Items marked with an ‘L’ in the above table are + of the size specified in “Size of Lengths” field in the + superblock.)
    - -
    - -
    -
    -
    - - + +
    +
    +
    - Version 2 B-tree, Type 9 Record Layout- Creation Order for Indexed Attributes -
    - - - - + + - - - - - + + + - + + -
    bytebytebytebyteField NameDescription

    Heap ID (8 bytes)

    Message FlagsThis space inserted only to align table nicely

    Signature

    +

    + The ASCII character string “ + BTHD + ” is used to indicate the header of a version 2 B-link tree + node. +

    +
    Creation Order

    Version

    +

    The version number for this B-tree header. This document + describes version 0.

    +
    -
    -
    -
    - - - + + - - - + + + - - - + + + - - - + + + -
    Field NameDescription

    Type

    +

    This field indicates the type of B-tree:

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    ValueDescription
    0A “testing” B-tree, this value should not + be used for storing records in actual HDF5 files. +
    1This B-tree is used for indexing indirectly accessed, + non-filtered ‘huge’ fractal heap objects.
    2This B-tree is used for indexing indirectly accessed, + filtered ‘huge’ fractal heap objects.
    3This B-tree is used for indexing directly accessed, + non-filtered ‘huge’ fractal heap objects.
    4This B-tree is used for indexing directly accessed, + filtered ‘huge’ fractal heap objects.
    5This B-tree is used for indexing the ‘name’ + field for links in indexed groups.
    6This B-tree is used for indexing the ‘creation + order’ field for links in indexed groups.
    7This B-tree is used for indexing shared object header + messages.
    8This B-tree is used for indexing the ‘name’ + field for indexed attributes.
    9This B-tree is used for indexing the ‘creation + order’ field for indexed attributes.
    +

    +

    The format of records for each type is described below.

    +

    Heap ID

    -

    This is an 8-byte sequence of bytes and is the heap ID for the - attribute in the object’s attribute fractal heap.

    -

    Node Size

    +

    This is the size in bytes of all B-tree nodes.

    +

    Message Flags

    -

    The object header message flags for the attribute message.

    -

    Record Size

    +

    This field is the size in bytes of the B-tree record.

    +

    Creation Order

    -

    This field is the creation order value for the attribute. -

    -

    Depth

    +

    This is the depth of the B-tree.

    +
    -
    + +

    Split Percent

    + +

    The percent full that a node needs to increase above before + it is split.

    + + + +

    Merge Percent

    + +

    The percent full that a node needs to decrease below + before it is merged.

    + + -
    -

    -III.B. Disk Format: Level 1B - Group Symbol Table Nodes

    + +

    Root Node Address

    + +

    + This is the address of the root B-tree node. A B-tree with no + records will have the undefined + address in this field. +

    + + -

    A group is an object internal to the file that allows - arbitrary nesting of objects within the file (including other groups). - A group maps a set of link names in the group to a set of relative - file addresses of objects in the file. Certain metadata for an object to - which the group points can be cached in the group’s symbol table entry in - addition to being in the object’s header.

    + +

    Number of Records in Root Node

    + +

    This is the number of records in the root node.

    + + -

    An HDF5 object name space can be stored hierarchically by - partitioning the name into components and storing each - component as a link in a group. The link for a - non-ultimate component points to the group containing - the next component. The link for the last - component points to the object being named.

    + +

    Total Number of Records in B-tree

    + +

    This is the total number of records in the entire B-tree.

    + + -

    One implementation of a group is a collection of symbol table nodes - indexed by a B-link tree. Each symbol table node contains entries - for one or more links. If an attempt is made to add a link to an already - full symbol table node containing 2K entries, then the node is - split and one node contains K symbols and the other contains - K+1 symbols.

    + +

    Checksum

    + +

    This is the checksum for the B-tree header.

    + + + + -
    - - +
    +
    +
    +
    - Symbol Table Node (A Leaf of a B-link tree) -
    + - - - - + + + + - + - - - - + + + - - + -
    Version 2 B-tree Internal Node
    bytebytebytebytebytebytebytebyte
    SignatureSignature
    Version NumberReserved (zero)Number of SymbolsVersionTypeRecords 0, 1, 2...N-1 (variable size)


    Group Entries



    Child Node Pointer 0O
    +
    -
    - -
    -
    - - - + - - - + - - - + - + + - - + - - - + -
    Field NameDescription
    Number of Records N0 for Child + Node 0 (variable size)

    Signature

    -

    The ASCII character string “SNOD” is - used to indicate the - beginning of a symbol table node. This gives file - consistency checking utilities a better chance of - reconstructing a damaged file. -

    -

    Total Number of Records for Child Node 0 + (optional, variable size)

    Version Number

    -

    The version number for the symbol table node. This - document describes version 1. (There is no version ‘0’ - of the symbol table node) -

    -

    Child Node Pointer 1O
    +

    Number of Records N1 for Child + Node 1 (variable size)

    Number of Entries

    -

    Although all symbol table nodes have the same length, - most contain fewer than the maximum possible number of - link entries. This field indicates how many entries - contain valid data. The valid entries are packed at the - beginning of the symbol table node while the remaining - entries contain undefined values. -

    -

    Total Number of Records for Child Node 1 + (optional, variable size)

    Symbol Table Entries

    -

    Each link has an entry in the symbol table node. - The format of the entry is described below. - There are 2K entries in each group node, where - K is the “Group Leaf Node K” value from the - superblock. -

    -
    ...
    -
    - -
    -

    -III.C. Disk Format: Level 1C - Symbol Table Entry

    - -

    Each symbol table entry in a symbol table node is designed - to allow for very fast browsing of stored objects. - Toward that design goal, the symbol table entries - include space for caching certain constant metadata from the - object header.

    - -
    - - - - - - - + - - + - - + - - + +
    - Symbol Table Entry -
    bytebytebytebyte
    Child Node Pointer NO
    +

    Link Name OffsetO


    Number of Records Nn for Child + Node N (variable size)

    Object Header AddressO


    Total Number of Records for Child Node N + (optional, variable size)
    Cache TypeChecksum
    + - + + +
    Reserved (zero) (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
    +
    + +
    +
    + - + + -


    Scratch-pad Space (16 bytes)


    Field NameDescription
    - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
    - -
    - -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + -
    Field NameDescription

    Link Name Offset

    -

    This is the byte offset into the group’s local - heap for the name of the link. The name is null - terminated. -

    -

    Object Header Address

    -

    Every object has an object header which serves as a - permanent location for the object’s metadata. In addition - to appearing in the object header, some of the object’s metadata - can be cached in the scratch-pad space. -

    -

    Cache Type

    -

    The cache type is determined from the object header. - It also determines the format for the scratch-pad space: - - - - - - - - - - - - - - - - - - -
    TypeDescription
    0No data is cached by the group entry. This - is guaranteed to be the case when an object header - has a link count greater than one. -
    1Group object header metadata is cached in the - scratch-pad space. This implies that the symbol table - entry refers to another group. -
    2The entry is a symbolic link. The first four bytes - of the scratch-pad space are the offset into the local - heap for the link value. The object header address - will be undefined. -

    - -

    Reserved

    -

    These four bytes are present so that the scratch-pad - space is aligned on an eight-byte boundary. They are - always set to zero. -

    -

    Scratch-pad Space

    -

    This space is used for different purposes, depending - on the value of the Cache Type field. Any metadata - about an object represented in the scratch-pad - space is duplicated in the object header for that - object. -

    -

    - Furthermore, no data is cached in the group - entry scratch-pad space if the object header for - the object has a link count greater than one. -

    -

    Signature

    +

    + The ASCII character string “ + BTIN + ” is used to indicate the internal node of a B-link tree. +

    +
    -
    - -
    -

    Format of the Scratch-pad Space

    - -

    The symbol table entry scratch-pad space is formatted - according to the value in the Cache Type field.

    - -

    If the Cache Type field contains the value zero - (0) then no information is - stored in the scratch-pad space.

    - -

    If the Cache Type field contains the value one - (1), then the scratch-pad space - contains cached metadata for another object header - in the following format:

    - -
    - - - - - - + + - + + - + + -
    - Object Header Scratch-pad Format -
    bytebytebytebyte

    Version

    +

    The version number for this B-tree internal node. This + document describes version 0.

    +

    Address of B-treeO

    Type

    +

    This field is the type of the B-tree node. It should always + be the same as the B-tree type in the header.

    +

    Address of Name HeapO

    Records

    +

    The size of this field is determined by the number of records + for this node and the record size (from the header). The format of + records depends on the type of B-tree.

    +
    - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
    - -
    +

    Child Node Pointer

    + +

    This field is the address of the child node pointed to by the + internal node.

    + + -
    -
    - - - + + - - + + - - + + -
    Field NameDescription

    Number of Records in Child Node

    +

    + This is the number of records in the child node pointed to by the + corresponding Node Pointer. +

    +

    The number of bytes used to store this field is determined by + the maximum possible number of records able to be stored in the + child node.

    +

    The maximum number of records in a child node is computed in + the following way:

    +
      +
    • Subtract the fixed size overhead for the child node (for + example, its signature, version, checksum, and so on and one + pointer triplet of information for the child node (because there + is one more pointer triplet than records in each internal node)) + from the size of nodes for the B-tree. +
    • +
    • Divide that result by the size of a record plus the + pointer triplet of information stored to reach each child node + from this node.
    • +
    + +

    +

    Note that leaf nodes do not encode any child pointer + triplets, so the maximum number of records in a leaf node is just + the node size minus the leaf node overhead, divided by the record + size.

    +

    + Also note that the first level of internal nodes above the leaf + nodes do not encode the Total Number of Records in Child + Node value in the child pointer triplets (since it is the same as + the Number of Records in Child Node), so the maximum + number of records in these nodes is computed with the equation + above, but using (Child Pointer, Number of + Records in Child Node) pairs instead of triplets. +

    +

    The number of bytes used to encode this field is the least + number of bytes required to encode the maximum number of records in + a child node value for the child nodes below this level in the + B-tree.

    +

    For example, if the maximum number of child records is 123, + one byte will be used to encode these values in this node; if the + maximum number of child records is 20000, two bytes will be used to + encode these values in this node; and so on. The maximum number of + bytes used to encode these values is 8 (in other words, an unsigned + 64-bit integer).

    +

    Address of B-tree

    -

    This is the file address for the root of the - group’s B-tree. -

    -

    Total Number of Records in Child Node

    +

    + This is the total number of records for the node pointed to by the + corresponding Node Pointer and all its children. This + field exists only in nodes whose depth in the B-tree node is + greater than 1 (in other words, the “twig” internal + nodes, just above leaf nodes, do not store this field in their + child node pointers). +

    +

    The number of bytes used to store this field is determined by + the maximum possible number of records able to be stored in the + child node and its descendants.

    +

    The maximum possible number of records able to be stored in a + child node and its descendants is computed iteratively, in the + following way: The maximum number of records in a leaf node is + computed, then that value is used to compute the maximum possible + number of records in the first level of internal nodes above the + leaf nodes. Multiplying these two values together determines the + maximum possible number of records in child node pointers for the + level of nodes two levels above leaf nodes. This process is + continued up to any level in the B-tree.

    +

    + The number of bytes used to encode this value is computed in the + same way as for the Number of Records in Child Node field. +

    +

    Address of Name Heap

    -

    This is the file address for the group’s local - heap, in which are stored the group’s symbol names. -

    -

    Checksum

    +

    This is the checksum for this node.

    +
    -
    + + -
    -

    If the Cache Type field contains the value two - (2), then the scratch-pad space - contains cached metadata for a symbolic link - in the following format:

    - -
    - - +
    +
    +
    +
    - Symbolic Link Scratch-pad Format -
    + - - - - + + + + - + -
    Version 2 B-tree Leaf Node
    bytebytebytebytebytebytebytebyte
    Offset to Link ValueSignature
    -
    - -
    -
    - - - + + + - - - + -
    Field NameDescriptionVersionTypeRecord 0, 1, 2...N-1 (variable size)

    Offset to Link Value

    -

    The value of a symbolic link (that is, the name of the - thing to which it points) is stored in the local heap. - This field is the 4-byte offset into the local heap for - the start of the link value, which is null terminated. -

    -
    Checksum
    -
    + +
    -

    -III.D. Disk Format: Level 1D - Local Heaps

    - -

    A local heap is a collection of small pieces of data that are particular - to a single object in the HDF5 file. Objects can be - inserted and removed from the heap at any time. - The address of a heap does not change once the heap is created. - For example, a group stores addresses of objects in symbol table nodes - with the names of links stored in the group’s local heap. -

    - -
    - - - +
    +
    - Local Heap -
    - - - - + + - + + - - + + - + + - + + - + + -
    bytebytebytebyteField NameDescription
    Signature

    Signature

    +

    + The ASCII character string “ + BTLF + ” is used to indicate the leaf node of a version 2 B-link + tree. +

    +
    VersionReserved (zero)

    Version

    +

    The version number for this B-tree leaf node. This document + describes version 0.

    +

    Data Segment SizeL

    Type

    +

    This field is the type of the B-tree node. It should always + be the same as the B-tree type in the header.

    +

    Offset to Head of Free-listL

    Records

    +

    The size of this field is determined by the number of records + for this node and the record size (from the header). The format of + records depends on the type of B-tree.

    +

    Address of Data SegmentO

    Checksum

    +

    This is the checksum for this node.

    +
    - - - - - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
      - (Items marked with an ‘L’ in the above table are of the size - specified in “Size of Lengths” field in the superblock.) -
    + +
    + +
    +

    The record layout for each stored (in other words, non-testing) + B-tree type is as follows:

    - +
    + + -
    -
    -
    Version 2 B-tree, Type 1 Record Layout - Indirectly + Accessed, Non-Filtered, ‘Huge’ Fractal Heap Objects
    - - + + + + - - + - - - + - - - + +
    Field NameDescriptionbytebytebytebyte

    Signature

    -

    The ASCII character string “HEAP” - is used to indicate the - beginning of a heap. This gives file consistency - checking utilities a better chance of reconstructing a - damaged file. -

    -

    Huge Object AddressO
    +

    Version

    -

    Each local heap has its own version number so that new - heaps can be added to old files. This document - describes version zero (0) of the local heap. -

    -

    Huge Object LengthL
    +

    Data Segment Size

    -

    The total amount of disk memory allocated for the heap - data. This may be larger than the amount of space - required by the objects stored in the heap. The extra - unused space in the heap holds a linked list of free blocks. -

    -

    Huge Object IDL
    +
    + - - + + - - - + + -

    Offset to Head of Free-list

    -

    This is the offset within the heap data segment of the - first free block (or the - undefined address if there is no - free block). The free block contains “Size of Lengths” bytes that - are the offset of the next free block (or the - value ‘1’ if this is the - last free block) followed by “Size of Lengths” bytes that store - the size of this free block. The size of the free block includes - the space used to store the offset of the next free block and - the size of the current block, making the minimum size of a free - block 2 * “Size of Lengths”. -

    -
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)

    Address of Data Segment

    -

    The data segment originally starts immediately after - the heap header, but if the data segment must grow as a - result of adding more objects, then the data segment may - be relocated, in its entirety, to another part of the - file. -

    -
     (Items marked with an ‘L’ in the above table are + of the size specified in “Size of Lengths” field in the + superblock.)
    -
    - -

    Objects within a local heap should be aligned on an 8-byte boundary.

    - -
    -

    -III.E. Disk Format: Level 1E - Global Heap

    - -

    Each HDF5 file has a global heap which stores various types of - information which is typically shared between datasets. The - global heap was designed to satisfy these goals:

    - -
      -
    1. Repeated access to a heap object must be efficient without - resulting in repeated file I/O requests. Since global heap - objects will typically be shared among several datasets, it is - probable that the object will be accessed repeatedly.
    2. -
    3. Collections of related global heap objects should result in - fewer and larger I/O requests. For instance, a dataset of - object references will have a global heap object for each - reference. Reading the entire set of object references - should result in a few large I/O requests instead of one small - I/O request for each reference.
    4. -
    5. It should be possible to remove objects from the global heap - and the resulting file hole should be eligible to be reclaimed - for other uses.
    6. -
    - - -

    The implementation of the heap makes use of the memory management - already available at the file level and combines that with a new - object called a collection to achieve goal B. The global heap - is the set of all collections. Each global heap object belongs to - exactly one collection and each collection contains one or more global - heap objects. For the purposes of disk I/O and caching, a collection is - treated as an atomic object, addressing goal A. -

    - -

    When a global heap object is deleted from a collection (which occurs - when its reference count falls to zero), objects located after the - deleted object in the collection are packed down toward the beginning - of the collection and the collection’s global heap object 0 is created - (if possible) or its size is increased to account for the recently - freed space. There are no gaps between objects in each collection, - with the possible exception of the final space in the collection, if - it is not large enough to hold the header for the collection’s global - heap object 0. These features address goal C. -

    - -

    The HDF5 Library creates global heap collections as needed, so there may - be multiple collections throughout the file. The set of all of them is - abstractly called the “global heap”, although they do not actually link - to each other, and there is no global place in the file where you can - discover all of the collections. The collections are found simply by - finding a reference to one through another object in the file. For - example, data of variable-length datatype elements is stored in the - global heap and is accessed via a global heap ID. The format for - global heap IDs is described at the end of this section. -

    - -
    - - +
    - A Global Heap Collection -
    +
    + +
    +
    + - - - - + + - + + - - + + - + + - - - +
    bytebytebytebyteField NameDescription
    Signature

    Huge Object Address

    +

    The address of the huge object in the file.

    +
    VersionReserved (zero)

    Huge Object Length

    +

    The length of the huge object in the file.

    +

    Collection SizeL

    Huge Object ID

    +

    The heap ID for the huge object.

    +

    Global Heap Object 1

    +
    - -
    Global Heap Object 2

    - +
    +
    +
    + + - + + + + - + - - + -
    Version 2 B-tree, Type 2 Record Layout - Indirectly + Accessed, Filtered, ‘Huge’ Fractal Heap Objects

    ...

    bytebytebytebyte

    Global Heap Object N


    Filtered Huge Object AddressO
    +

    Global Heap Object 0 (free space)


    Filtered Huge Object LengthL
    +
    - - - - - -
      - (Items marked with an ‘L’ in the above table are of the size - specified in “Size of Lengths” field in the superblock.) -
    - -
    - -
    -
    - - - + - - - + - - - + +
    Field NameDescriptionFilter Mask

    Signature

    -

    The ASCII character string “GCOL” - is used to indicate the - beginning of a collection. This gives file consistency - checking utilities a better chance of reconstructing a - damaged file. -

    -

    Filtered Huge Object Memory SizeL
    +

    Version

    -

    Each collection has its own version number so that new - collections can be added to old files. This document - describes version one (1) of the collections (there is no - version zero (0)). -

    -

    Huge Object IDL
    +
    + - - + + - - - + + +

    Collection Size

    -

    This is the size in bytes of the entire collection - including this field. The default (and minimum) - collection size is 4096 bytes which is a typical file - system block size. This allows for 127 16-byte heap - objects plus their overhead (the collection header of 16 bytes - and the 16 bytes of information about each heap object). -

    -
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)

    Global Heap Object 1 through N

    -

    The objects are stored in any order with no - intervening unused space. -

    -
     (Items marked with an ‘L’ in the above table are + of the size specified in “Size of Lengths” field in the + superblock.)
    + +
    +
    +
    + - - + + -

    Global Heap Object 0

    -

    Global Heap Object 0 (zero), when present, represents the free - space in the collection. Free space always appears at the end of - the collection. If the free space is too small to store the header - for Object 0 (described below) then the header is implied and the - collection contains no free space. -

    -
    Field NameDescription
    -
    - -
    -
    -
    - - - - - - + + - - + + - + + - + + - + + -
    - Global Heap Object -
    bytebytebytebyte

    Filtered Huge Object Address

    +

    The address of the filtered huge object in the file.

    +
    Heap Object IndexReference Count

    Filtered Huge Object Length

    +

    The length of the filtered huge object in the file.

    +
    Reserved (zero)

    Filter Mask

    +

    A 32-bit bit field indicating which filters have been skipped + for this chunk. Each filter has an index number in the pipeline + (starting at 0, with the first filter to apply) and if that filter + is skipped, the bit corresponding to its index is set.

    +

    Object SizeL

    Filtered Huge Object Memory Size

    +

    The size of the de-filtered huge object in memory.

    +

    Object Data

    Huge Object ID

    +

    The heap ID for the huge object.

    +
    - - - - -
      - (Items marked with an ‘L’ in the above table are of the size - specified in “Size of Lengths” field in the superblock.) -
    + +
    - +
    +
    +
    + + -
    -
    -
    Version 2 B-tree, Type 3 Record Layout - Directly + Accessed, Non-Filtered, ‘Huge’ Fractal Heap Objects
    - - + + + + - - + - - - + +
    Field NameDescriptionbytebytebytebyte

    Heap Object Index

    -

    Each object has a unique identification number within a - collection. The identification numbers are chosen so that - new objects have the smallest value possible with the - exception that the identifier 0 always refers to the - object which represents all free space within the - collection. -

    -

    Huge Object AddressO
    +

    Reference Count

    -

    All heap objects have a reference count field. An - object which is referenced from some other part of the - file will have a positive reference count. The reference - count for Object 0 is always zero. -

    -

    Huge Object LengthL
    +
    + + + + + - - + + +
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)

    Reserved

    -

    Zero padding to align next field on an 8-byte boundary. -

    -
     (Items marked with an ‘L’ in the above table are + of the size specified in “Size of Lengths” field in the + superblock.)
    +
    + +
    +
    + - - + + - - + + -

    Object Size

    -

    This is the size of the object data stored for the object. - The actual storage space allocated for the object data is rounded - up to a multiple of eight. -

    -
    Field NameDescription

    Object Data

    -

    The object data is treated as a one-dimensional array - of bytes to be interpreted by the caller. -

    -

    Huge Object Address

    +

    The address of the huge object in the file.

    +
    -
    + +

    Huge Object Length

    + +

    The length of the huge object in the file.

    + + -
    -

    - The format for the ID used to locate an object in the global heap is - described here:

    + + -
    - - +
    +
    +
    +
    - Global Heap ID -
    + - - - - + + + + - + - - + -
    Version 2 B-tree, Type 4 Record Layout - Directly + Accessed, Filtered, ‘Huge’ Fractal Heap Objects
    bytebytebytebytebytebytebytebyte

    Collection AddressO


    Filtered Huge Object AddressO
    +
    Object Index
    Filtered Huge Object LengthL
    +
    - - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
    - -
    - -
    -
    - + + - - + +
    Filter Mask
    Field NameDescription
    Filtered Huge Object Memory SizeL
    +
    + - - + + - - - + + +

    Collection Address

    -

    This field is the address of the global heap collection - where the data object is stored. -

    -
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)

    ID

    -

    This field is the index of the data object within the - global heap collection. -

    -
     (Items marked with an ‘L’ in the above table are + of the size specified in “Size of Lengths” field in the + superblock.)
    - -
    - - -
    -

    -III.F. Disk Format: Level 1F - Fractal Heap

    - -

    - Each fractal heap consists of a header and zero or more direct and - indirect blocks (described below). The header contains general - information as well as - initialization parameters for the doubling table. The Root - Block Address in the header points to the first direct or - indirect block in the heap. -

    - -

    - Fractal heaps are based on a data structure called a doubling - table. A doubling table provides a mechanism for quickly - extending an array-like data structure that minimizes the number of - empty blocks in the heap, while retaining very fast lookup of any - element within the array. More information on fractal heaps and - doubling tables can be found in the RFC - “Private - Heaps in HDF5.” -

    - -

    - The fractal heap implements the doubling table structure with - indirect and direct blocks. - Indirect blocks in the heap do not actually contain data for - objects in the heap, their “size” is abstract - - they represent the indexing structure for locating the - direct blocks in the doubling table. - Direct blocks - contain the actual data for objects stored in the heap. -

    - -

    - All indirect blocks have a constant number of block entries in each - row, called the width of the doubling table (stored in - the heap header). - - The number - of rows for each indirect block in the heap is determined by the - size of the block that the indirect block represents in the - doubling table (calculation of this is shown below) and is - constant, except for the “root” - indirect block, which expands and shrinks its number of rows as - needed. -

    - -

    - Blocks in the first two rows of an indirect block - are Starting Block Size number of bytes in size, - and the blocks in each subsequent row are twice the size of - the blocks in the previous row. In other words, blocks in - the third row are twice the Starting Block Size, - blocks in the fourth row are four times the - Starting Block Size, and so on. Entries for - blocks up to the Maximum Direct Block Size point to - direct blocks, and entries for blocks greater than that size - point to further indirect blocks (which have their own - entries for direct and indirect blocks). -

    - -

    - The number of rows of blocks, nrows, in an - indirect block of size iblock_size is given by the - following expression: -

    - nrows = (log2(iblock_size) - - log2(<Starting Block Size> * - <Width>)) + 1 -

    - -

    - The maximum number of rows of direct blocks, max_dblock_rows, - in any indirect block of a fractal heap is given by the - following expression: -

    - max_dblock_rows = - (log2(<Max. Direct Block Size>) - - log2(<Starting Block Size>)) + 2 -

    - -

    - Using the computed values for nrows and - max_dblock_rows, along with the Width of the - doubling table, the number of direct and indirect block entries - (K and N in the indirect block description, below) - in an indirect block can be computed: -

    - K = MIN(nrows, max_dblock_rows) * - Width - -

    - If nrows is less than or equal to max_dblock_rows, - N is 0. Otherwise, N is simply computed: -

    - N = K - (max_dblock_rows * - Width) -

    - -

    - The size indirect blocks on disk is determined by the number - of rows in the indirect block (computed above). The size of direct - blocks on disk is exactly the size of the block in the doubling - table. -

    - -
    - - + +
    +
    +
    - Fractal Heap Header -
    - - - - + + - + + - - + + - - + + - - + + +
    bytebytebytebyteField NameDescription
    Signature

    Filtered Huge Object Address

    +

    The address of the filtered huge object in the file.

    +
    VersionThis space inserted only to align table nicely

    Filtered Huge Object Length

    +

    The length of the filtered huge object in the file.

    +
    Heap ID LengthI/O Filters’ Encoded Length

    Filter Mask

    +

    A 32-bit bit field indicating which filters have been skipped + for this chunk. Each filter has an index number in the pipeline + (starting at 0, with the first filter to apply) and if that filter + is skipped, the bit corresponding to its index is set.

    +
    FlagsThis space inserted only to align table nicely

    Filtered Huge Object Memory Size

    +

    The size of the de-filtered huge object in memory.

    +
    +
    + +
    +
    +
    + + + - + + + + - + - - + - + +
    Version 2 B-tree, Type 5 Record Layout - Link Name + for Indexed Group
    Maximum Size of Managed Objectsbytebytebytebyte

    Next Huge Object IDL

    Hash of Name

    v2 B-tree Address of Huge ObjectsO

    ID (bytes 1-4)

    Amount of Free Space in Managed BlocksL

    ID (bytes 5-7)
    +
    + +
    +
    + - + + - + + - + + +

    Address of Managed Block Free Space ManagerO

    Field NameDescription

    Amount of Managed Space in HeapL

    Hash

    +

    This field is the hash value of the name for the link. The hash + value is the Jenkins’ lookup3 checksum algorithm applied to + the link’s name.</td>

    +

    Amount of Allocated Managed Space in HeapL

    ID

    +

    This is a 7-byte sequence of bytes and is the heap ID for the + link record in the group’s fractal heap.

    +
    +
    + +
    +
    +
    + + + - + + + + - + - - + - - + +
    Version 2 B-tree, Type 6 Record Layout - Creation + Order for Indexed Group

    Offset of Direct Block Allocation Iterator in Managed SpaceL

    bytebytebytebyte

    Number of Managed Objects in HeapL


    Creation Order (8 bytes)
    +

    Size of Huge Objects in HeapL

    ID (bytes 1-4)

    Number of Huge Objects in HeapL

    ID (bytes 5-7)
    +
    +
    +
    + - + + - + + - - + + +

    Size of Tiny Objects in HeapL

    Field NameDescription

    Number of Tiny Objects in HeapL

    Creation Order

    +

    This field is the creation order value for the link.

    +
    Table WidthThis space inserted only to align table nicely

    ID

    +

    This is a 7-byte sequence of bytes and is the heap ID for the + link record in the group’s fractal heap.

    +
    +
    + +
    +
    +
    + + + - + + + + - + + - - - + - - + - - - + +
    Version 2 B-tree, Type 7 Record Layout - Shared + Object Header Messages (Sub-Type 0 - Message in Heap)

    Starting Block SizeL

    bytebytebytebyte

    Maximum Direct Block SizeL

    Message LocationThis space inserted + only to align table nicely
    Maximum Heap SizeStarting # of Rows in Root Indirect BlockHash

    Address of Root BlockO

    Reference Count
    Current # of Rows in Root Indirect BlockThis space inserted only to align table nicely
    Heap ID (8 bytes)
    +
    +
    +
    +
    + - + + - + + - + + - + + -

    Size of Filtered Root Direct Block (optional)L

    Field NameDescription
    I/O Filter Mask (optional)

    Message Location

    +

    This field indicates the location where the message is + stored:</p>

    + + + + + + + + + + + + + +
    ValueDescription
    0Shared message is stored in shared message index heap.
    1Shared message is stored in object header.
    +

    +
    I/O Filter Information (optional, variable size)

    Hash

    +

    This field is the hash value of the shared message. The hash + value is the Jenkins’ lookup3 checksum algorithm applied to + the shared message.</td>

    +
    Checksum

    Reference Count

    +

    The number of objects which reference this message.

    +
    - - - - - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
      - (Items marked with an ‘L’ in the above table are of the size - specified in “Size of Lengths” field in the superblock.) -
    +

    Heap ID

    + +

    This is an 8-byte sequence of bytes and is the heap ID for + the shared message in the shared message index’s fractal + heap.

    + + -
    + + + +
    +
    +
    + + -
    -
    -
    Version 2 B-tree, Type 7 Record Layout - Shared + Object Header Messages (Sub-Type 1 - Message in Object Header)
    - - + + + + - - + + - - - + - - - + + + - - - + +
    Field NameDescriptionbytebytebytebyte

    Signature

    -

    The ASCII character string “FRHP” - is used to indicate the - beginning of a fractal heap header. This gives file consistency - checking utilities a better chance of reconstructing a - damaged file. -

    -
    Message LocationThis space inserted + only to align table nicely

    Version

    -

    This document describes version 0.

    -
    Hash

    Heap ID Length

    -

    This is the length in bytes of heap object IDs for this heap.

    -
    Reserved (zero)Message TypeObject Header Index

    I/O Filters’ Encoded Length

    -

    This is the size in bytes of the encoded I/O Filter Information. -

    -

    Object Header AddressO
    +
    + - - - - - - - + + +

    Flags

    -

    This field is the heap status flag and is a bit field - indicating additional information about the fractal heap. - - - - - - - - - - - - - - - - - - -
    Bit(s)Description
    0If set, the ID value to use for huge object has wrapped - around. If the value for the Next Huge Object ID - has wrapped around, each new huge object inserted into the - heap will require a search for an ID value. -
    1If set, the direct blocks in the heap are checksummed. -
    2-7Reserved

    - -

    Maximum Size of Managed Objects

    -

    This is the maximum size of managed objects allowed in the heap. - Objects greater than this this are ‘huge’ objects and will be - stored in the file directly, rather than in a direct block for - the heap. -

    -
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
    +
    + +
    +
    + - - + + - - + + - - + + - - + + - - + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + -

    Next Huge Object ID

    -

    This is the next ID value to use for a huge object in the heap. -

    -
    Field NameDescription

    v2 B-tree Address of Huge Objects

    -

    This is the address of the v2 B-tree - used to track huge objects in the heap. The type of records - stored in the v2 B-tree will - be determined by whether the address & length of a huge object - can fit into a heap ID (if yes, it is a “directly” accessed - huge object) and whether there is a filter used on objects - in the heap. -

    -

    Message Location

    +

    This field indicates the location where the message is + stored:</p>

    + + + + + + + + + + + + + +
    ValueDescription
    0Shared message is stored in shared message index heap.
    1Shared message is stored in object header.
    +

    +

    Amount of Free Space in Managed Blocks

    -

    This is the total amount of free space in managed direct blocks - (in bytes). -

    -

    Hash

    +

    This field is the hash value of the shared message. The hash + value is the Jenkins’ lookup3 checksum algorithm applied to + the shared message.</td>

    +

    Address of Managed Block Free Space Manager

    -

    This is the address of the - Free-space Manager for - managed blocks. -

    -

    Message Type

    +

    The object header message type of the shared message.

    +

    Amount of Managed Space in Heap

    -

    This is the total amount of managed space in the heap (in bytes), - essentially the upper bound of the heap’s linear address space. -

    -

    Object Header Index

    +

    + This field indicates that the shared message is the nth + message of its type in the specified object header. +

    +

    Amount of Allocated Managed Space in Heap

    -

    This is the total amount of managed space (in bytes) actually - allocated in - the heap. This can be less than the Amount of Managed Space - in Heap field, if some direct blocks in the heap’s linear - address space are not allocated. -

    -

    Offset of Direct Block Allocation Iterator in Managed Space

    -

    This is the linear heap offset where the next direct - block should be allocated at (in bytes). This may be less than - the Amount of Managed Space in Heap value because the - heap’s address space is increased by a “row” of direct blocks - at a time, rather than by single direct block increments. -

    -

    Number of Managed Objects in Heap

    -

    This is the number of managed objects in the heap. -

    -

    Size of Huge Objects in Heap

    -

    This is the total size of huge objects in the heap (in bytes). -

    -

    Number of Huge Objects in Heap

    -

    This is the number of huge objects in the heap. -

    -

    Size of Tiny Objects in Heap

    -

    This is the total size of tiny objects that are packed in heap - IDs (in bytes). -

    -

    Number of Tiny Objects in Heap

    -

    This is the number of tiny objects that are packed in heap IDs. -

    -

    Table Width

    -

    This is the number of columns in the doubling table for managed - blocks. This value must be a power of two. -

    -

    Starting Block Size

    -

    This is the starting block size to use in the doubling table for - managed blocks (in bytes). This value must be a power of two. -

    -

    Maximum Direct Block Size

    -

    This is the maximum size allowed for a managed direct block. - Objects inserted into the heap that are larger than this value - (less the # of bytes of direct block prefix/suffix) - are stored as ‘huge’ objects. This value must be a power of - two. -

    -

    Maximum Heap Size

    -

    This is the maximum size of the heap’s linear address space for - managed objects (in bytes). The value stored is the log2 of - the actual value, that is: the # of bits of the address space. - ‘Huge’ and ‘tiny’ objects are not counted in this value, since - they do not store objects in the linear address space of the - heap. -

    -

    Starting # of Rows in Root Indirect Block

    -

    This is the starting number of rows for the root indirect block. - A value of 0 indicates that the root indirect block will have - the maximum number of rows needed to address the heap’s Maximum - Heap Size. -

    -

    Address of Root Block

    -

    This is the address of the root block for the heap. It can - be the undefined address if - there is no data in the heap. It either points to a direct - block (if the Current # of Rows in the Root Indirect Block - value is 0), or an indirect block. -

    -

    Current # of Rows in Root Indirect Block

    -

    This is the current number of rows in the root indirect block. - A value of 0 indicates that Address of Root Block - points to direct block instead of indirect block. -

    -

    Size of Filtered Root Direct Block

    -

    This is the size of the root direct block, if filters are - applied to heap objects (in bytes). This field is only - stored in the header if the I/O Filters’ Encoded Length - is greater than 0. -

    -

    I/O Filter Mask

    -

    This is the filter mask for the root direct block, if filters - are applied to heap objects. This mask has the same format as - that used for the filter mask in chunked raw data records in a - v1 B-tree. - This field is only - stored in the header if the I/O Filters’ Encoded Length - is greater than 0. -

    -

    I/O Filter Information

    -

    This is the I/O filter information encoding direct blocks and - huge objects, if filters are applied to heap objects. This - field is encoded as a Filter Pipeline - message. - The size of this field is determined by I/O Filters’ - Encoded Length. -

    -

    Checksum

    -

    This is the checksum for the header.

    -

    Object Header Address

    +

    The address of the object header containing the shared + message.

    +
    -
    + + -
    -
    -
    - - +
    +
    +
    +
    - Fractal Heap Direct Block -
    + - - - - + + + + - + - - - + + - - + - - + +
    Version 2 B-tree, Type 8 Record Layout - Attribute + Name for Indexed Attributes
    bytebytebytebytebytebytebytebyte
    Signature
    Heap ID (8 bytes)
    +
    VersionThis space inserted only to align table nicelyMessage FlagsThis space inserted + only to align table nicely

    Heap Header AddressO

    Creation Order
    Block Offset (variable size)Hash of Name
    +
    +
    +
    + - + + - + + -
    Checksum (optional)Field NameDescription

    Object Data (variable size)

    Heap ID

    +

    This is an 8-byte sequence of bytes and is the heap ID for + the attribute in the object’s attribute fractal heap.

    +
    - - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
    - -
    - -
    -
    - - - - + + - - + + - - + + - - - - +
    Field NameDescription

    Message Flags

    The object header message flags for the attribute + message.

    Signature

    -

    The ASCII character string “FHDB” - is used to indicate the - beginning of a fractal heap direct block. This gives file consistency - checking utilities a better chance of reconstructing a - damaged file. -

    -

    Creation Order

    +

    This field is the creation order value for the attribute.

    +

    Version

    -

    This document describes version 0.

    -

    Hash

    +

    This field is the hash value of the name for the attribute. The + hash value is the Jenkins’ lookup3 checksum algorithm applied + to the attribute’s name.</td>

    +

    Heap Header Address

    -

    This is the address for the fractal heap header that this - block belongs to. This field is principally used for file - integrity checking. -

    -
    +
    + +
    +
    +
    + + - - + + + + - - + - - - + + - -
    Version 2 B-tree, Type 9 Record Layout- Creation + Order for Indexed Attributes

    Block Offset

    -

    This is the offset of the block within the fractal heap’s - address space (in bytes). The number of bytes used to encode - this field is the Maximum Heap Size (in the heap’s - header) divided by 8 and rounded up to the next highest integer, - for values that are not a multiple of 8. This value is - principally used for file integrity checking. -

    -
    bytebytebytebyte

    Checksum

    -

    This is the checksum for the direct block.

    -

    This field is only present if bit 1 of Flags in the - heap’s header is set.

    -

    Heap ID (8 bytes)
    +

    Object Data

    -

    This section of the direct block stores the actual data for - objects in the heap. The size of this section is determined by - the direct block’s size minus the size of the other fields - stored in the direct block (for example, the Signature, - Version, and others including the Checksum if it is - present). -

    -
    Message FlagsThis space inserted + only to align table nicely
    -
    - -
    -
    -
    - - - - - - - + +
    - Fractal Heap Indirect Block -
    bytebytebytebyteCreation Order
    +
    +
    +
    + - + + - - + + - + + - + + - - - - - - - - - +
    SignatureField NameDescription
    VersionThis space inserted only to align table nicely

    Heap ID

    +

    This is an 8-byte sequence of bytes and is the heap ID for + the attribute in the object’s attribute fractal heap.

    +

    Heap Header AddressO

    Message Flags

    +

    The object header message flags for the attribute message.

    +
    Block Offset (variable size)

    Creation Order

    +

    This field is the creation order value for the attribute.

    +

    Child Direct Block #0 AddressO


    Size of Filtered Direct Block #0 (optional) L

    Filter Mask for Direct Block #0 (optional)
    +
    - -
    Child Direct Block #1 AddressO

    - - -
    Size of Filtered Direct Block #1 (optional)L

    - - - Filter Mask for Direct Block #1 (optional) - - - ... - +
    +

    + III.B. Disk Format: Level 1B - Group Symbol + Table Nodes +

    + +

    A group is an object internal to the file that allows arbitrary + nesting of objects within the file (including other groups). A group + maps a set of link names in the group to a set of relative file + addresses of objects in the file. Certain metadata for an object to + which the group points can be cached in the group’s symbol table + entry in addition to being in the object’s header.

    + +

    An HDF5 object name space can be stored hierarchically by + partitioning the name into components and storing each component as a + link in a group. The link for a non-ultimate component points to the + group containing the next component. The link for the last component + points to the object being named.

    + +

    + One implementation of a group is a collection of symbol table nodes + indexed by a B-link tree. Each symbol table node contains entries for + one or more links. If an attempt is made to add a link to an already + full symbol table node containing 2K entries, then the node is + split and one node contains K symbols and the other contains K+1 + symbols. +

    - -
    Child Direct Block #K-1 AddressO

    - - -
    Size of Filtered Direct Block #K-1 (optional)L

    - - - Filter Mask for Direct Block #K-1 (optional) - +
    + + - - + + + + + - - - - - - + + - - + + + + - + -
    Symbol Table Node (A Leaf of a B-link tree)

    Child Indirect Block #0 AddressO

    bytebytebytebyte

    Child Indirect Block #1 AddressO

    ...
    Signature

    Child Indirect Block #N-1 AddressO

    Version NumberReserved (zero)Number of Symbols
    Checksum
    +
    Group Entries
    +
    +
    + +
    - - - - +
    +
    +
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
    - - -
      - (Items marked with an ‘L’ in the above table are of the size - specified in “Size of Lengths” field in the superblock.) -
    - - - -
    -
    - - - - + + - - + + - - + + - - + + - - + + +
    Field NameDescriptionField NameDescription

    Signature

    -

    The ASCII character string “FHIB” is used to - indicate the beginning of a fractal heap indirect block. This - gives file consistency checking utilities a better chance of - reconstructing a damaged file. -

    -

    Signature

    +

    + The ASCII character string “ + SNOD + ” is used to indicate the beginning of a symbol table node. + This gives file consistency checking utilities a better chance of + reconstructing a damaged file. +

    +

    Version

    -

    This document describes version 0.

    -

    Version Number

    +

    The version number for the symbol table node. This document + describes version 1. (There is no version ‘0’ of the + symbol table node)

    +

    Heap Header Address

    -

    This is the address for the fractal heap header that this - block belongs to. This field is principally used for file - integrity checking. -

    -

    Number of Entries

    +

    Although all symbol table nodes have the same length, most + contain fewer than the maximum possible number of link entries. + This field indicates how many entries contain valid data. The valid + entries are packed at the beginning of the symbol table node while + the remaining entries contain undefined values.

    +

    Block Offset

    -

    This is the offset of the block within the fractal heap’s - address space (in bytes). The number of bytes used to encode - this field is the Maximum Heap Size (in the heap’s - header) divided by 8 and rounded up to the next highest integer, - for values that are not a multiple of 8. This value is - principally used for file integrity checking. -

    -

    Symbol Table Entries

    +

    + Each link has an entry in the symbol table node. The format of the + entry is described below. There are 2K entries in each + group node, where K is the “Group Leaf Node K” + value from the superblock. +

    +
    +
    + +
    +

    + III.C. Disk Format: Level 1C - Symbol + Table Entry +

    + +

    Each symbol table entry in a symbol table node is designed to + allow for very fast browsing of stored objects. Toward that design + goal, the symbol table entries include space for caching certain + constant metadata from the object header.

    + +
    + + - - + + + + - - - - - - - + + - - + - - + -
    Symbol Table Entry

    Child Direct Block #K Address

    -

    This field is the address of the child direct block. - The size of the [uncompressed] direct block can be computed by - its offset in the heap’s linear address space. -

    -
    bytebytebytebyte

    Size of Filtered Direct Block #K

    -

    This is the size of the child direct block after passing through - the I/O filters defined for this heap (in bytes). If no I/O - filters are present for this heap, this field is not present. -

    -

    Filter Mask for Direct Block #K

    -

    This is the I/O filter mask for the filtered direct block. - This mask has the same format as that used for the filter mask - in chunked raw data records in a v1 B-tree. - If no I/O filters are present for this heap, this field is not - present. -

    -

    Link Name OffsetO
    +

    Child Indirect Block #N Address

    -

    This field is the address of the child indirect block. - The size of the indirect block can be computed by - its offset in the heap’s linear address space. -

    -

    Object Header AddressO
    +

    Checksum

    -

    This is the checksum for the indirect block.

    -
    Cache Type
    - -
    - -
    -

    An object in the fractal heap is identified by means of a fractal heap ID, - which encodes information to locate the object in the heap. - Currently, the fractal heap stores an object in one of three ways, - depending on the object’s size:

    - -
    - - - - - - - - - - - - - - - - - - - + + -
    TypeDescription
    Tiny -

    When an object is small enough to be encoded in the heap ID, the - object’s data is embedded in the fractal heap ID itself. There are - 2 sub-types for this type of object: normal and extended. The - sub-type for tiny heap IDs depends on whether the heap ID is large - enough to store objects greater than 16 bytes or not. If the - heap ID length is 18 bytes or smaller, the ‘normal’ tiny heap ID - form is used. If the heap ID length is greater than 18 bytes in - length, the “extended” form is used. See format description below - for both sub-types. -

    -
    Huge -

    When the size of an object is larger than Maximum Size of - Managed Objects in the Fractal Heap Header, the - object’s data is stored on its own in the file and the object - is tracked/indexed via a version 2 B-tree. All huge objects - for a particular fractal heap use the same v2 B-tree. All huge - objects for a particular fractal heap use the same format for - their huge object IDs. -

    - -

    Depending on whether the IDs for a heap are large enough to hold - the object’s retrieval information and whether I/O pipeline filters - are applied to the heap’s objects, 4 sub-types are derived for - huge object IDs for this heap:

    - -
    - - - - - - - - - - - - - - - - - - - - - - - - - -
    Sub-typeDescription
    Directly accessed, non-filtered -

    The object’s address and length are embedded in the - fractal heap ID itself and the object is directly accessed - from them. This allows the object to be accessed without - resorting to the B-tree. -

    -
    Directly accessed, filtered -

    The filtered object’s address, length, filter mask and - de-filtered size are embedded in the fractal heap ID itself - and the object is accessed directly with them. This allows - the object to be accessed without resorting to the B-tree. -

    -
    Indirectly accessed, non-filtered -

    The object is located by using a B-tree key embedded in - the fractal heap ID to retrieve the address and length from - the version 2 B-tree for huge objects. Then, the address - and length are used to access the object. -

    -
    Indirectly accessed, filtered -

    The object is located by using a B-tree key embedded in - the fractal heap ID to retrieve the filtered object’s - address, length, filter mask and de-filtered size from the - version 2 B-tree for huge objects. Then, this information - is used to access the object. -

    -
    -
    - -
    Managed -

    When the size of an object does not meet the above two - conditions, the object is stored and managed via the direct and - indirect blocks based on the doubling table. -

    -
    Reserved (zero)
    -
    + +
    +
    Scratch-pad Space (16 bytes)
    +
    +
    + + -

    The specific format for each type of heap ID is described below: -

    + + + + + +
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
    -
    - - + +
    +
    +
    Fractal Heap ID for Tiny Objects (sub-type 1 - ‘Normal’) -
    - - - - - + + + - - + + - + + -
    bytebytebytebyte
    Field NameDescription
    Version, Type & LengthThis space inserted only to align table nicely

    Link Name Offset

    +

    This is the byte offset into the group’s local heap for + the name of the link. The name is null terminated.

    +

    Data (variable size)

    Object Header Address

    +

    Every object has an object header which serves as a permanent + location for the object’s metadata. In addition to appearing + in the object header, some of the object’s metadata can be + cached in the scratch-pad space.

    +
    -
    - -
    -
    - - - - - - - - - - - - - - - - -
    Field NameDescription

    Version, Type & Length

    -

    This is a bit field with the following definition: - - - - - - - - - - - - - - - - - - -
    BitDescription
    6-7The current version of ID format. This document - describes version 0. -
    4-5The ID type. Tiny objects have a value of 2. -
    0-3The length of the tiny object. The value stored - is one less than the actual length (since zero-length - objects are not allowed to be stored in the heap). - For example, an object of actual length 1 has an - encoded length of 0, an object of actual length 2 - has an encoded length of 1, and so on. -

    - -

    Data

    -

    This is the data for the object. -

    -
    -
    - -
    -
    -
    - - - - - - - - + + + - - - + + - + + +
    Fractal Heap ID for Tiny Objects (sub-type 2 - ‘Extended’) -
    bytebytebytebyte

    Cache Type

    +

    The cache type is determined from the object header. It also + determines the format for the scratch-pad space:

    + + + + + + + + + + + + + + + + + +
    TypeDescription
    0No data is cached by the group entry. This is guaranteed + to be the case when an object header has a link count greater + than one.
    1Group object header metadata is cached in the scratch-pad + space. This implies that the symbol table entry refers to another + group.
    2The entry is a symbolic link. The first four bytes of the + scratch-pad space are the offset into the local heap for the link + value. The object header address will be undefined.
    +

    + +
    Version, Type & LengthExtended LengthThis space inserted only to align table nicely

    Reserved

    +

    These four bytes are present so that the scratch-pad space is + aligned on an eight-byte boundary. They are always set to zero.

    +
    Data (variable size)

    Scratch-pad Space

    +

    This space is used for different purposes, depending on the + value of the Cache Type field. Any metadata about an object + represented in the scratch-pad space is duplicated in the object + header for that object.

    +

    Furthermore, no data is cached in the group entry scratch-pad + space if the object header for the object has a link count greater + than one.

    +
    +
    - - +
    +

    Format of the Scratch-pad Space

    -
    -
    - - - - - - - - - - - - - - - - - - - - +

    The symbol table entry scratch-pad space is formatted according + to the value in the Cache Type field.

    -
    Field NameDescription

    Version, Type & Length

    -

    This is a bit field with the following definition: - - - - - - - - - - - - - - - - - - -
    BitDescription
    6-7The current version of ID format. This document - describes version 0. -
    4-5The ID type. Tiny objects have a value of 2. -
    0-3These 4 bits, together with the next byte, form an - unsigned 12-bit integer for holding the length of the - object. These 4-bits are bits 8-11 of the 12-bit integer. - See description for the Extended Length field below. -

    - -

    Extended Length

    -

    This byte, together with the 4 bits in the previous byte, - forms an unsigned 12-bit integer for holding the length of - the tiny object. These 8 bits are bits 0-7 of the 12-bit - integer formed. The value stored is one less than the actual - length (since zero-length objects are not allowed to be - stored in the heap). For example, an object of actual length - 1 has an encoded length of 0, an object of actual length - 2 has an encoded length of 1, and so on. -

    -

    Data

    -

    This is the data for the object. -

    -
    -
    +

    + If the Cache Type field contains the value zero + (0) + then no information is stored in the scratch-pad space. +

    +

    + If the Cache Type field contains the value one + (1) + , then the scratch-pad space contains cached metadata for another + object header in the following format: +

    -
    -
    -
    - - +
    +
    Fractal Heap ID for Huge Objects (sub-type 1 & 2): indirectly accessed, non-filtered/filtered -
    + - - - - + + + + - - + - + +
    Object Header Scratch-pad Format
    bytebytebytebytebytebytebytebyte
    Version & TypeThis space inserted only to align table nicely
    Address of B-treeO
    +

    v2 B-tree KeyL (variable size)


    Address of Name HeapO
    +
    - - - +
    - - -
      - (Items marked with an ‘L’ in the above table are of the size - specified in “Size of Lengths” field in the superblock.) -
    -
    +   + (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.) + + -
    -
    - + + +
    +
    +
    - - + + - - + + - - + +
    Field NameDescriptionField NameDescription

    Version & Type

    -

    This is a bit field with the following definition: - - - - - - - - - - - - - - - - - - -
    BitDescription
    6-7The current version of ID format. This document - describes version 0. -
    4-5The ID type. Huge objects have a value of 1. -
    0-3Reserved. -

    - -

    Address of B-tree

    +

    This is the file address for the root of the group’s + B-tree.

    +

    v2 B-tree Key

    This field is the B-tree key for retrieving the information - from the version 2 B-tree for huge objects needed to access the - object. See the description of v2 B-tree - records sub-type 1 & 2 for a description of the fields. New key - values are derived from Next Huge Object ID in the - Fractal Heap Header.

    +

    Address of Name Heap

    +

    This is the file address for the group’s local heap, in + which are stored the group’s symbol names.

    +
    - - -
    -
    -
    - - +
    +

    + If the Cache Type field contains the value two + (2) + , then the scratch-pad space contains cached metadata for a symbolic + link in the following format: +

    + +
    +
    Fractal Heap ID for Huge Objects (sub-type 3): directly accessed, non-filtered -
    + - - - - + + + + - - + +
    Symbolic Link Scratch-pad Format
    bytebytebytebytebytebytebytebyte
    Version & TypeThis space inserted only to align table nicelyOffset to Link Value
    +
    +
    +
    + - + + - + + +

    Address O

    Field NameDescription

    Length L

    Offset to Link Value

    +

    The value of a symbolic link (that is, the name of the thing + to which it points) is stored in the local heap. This field is the + 4-byte offset into the local heap for the start of the link value, + which is null terminated.

    +
    +
    - +
    +

    + III.D. Disk Format: Level 1D - Local Heaps +

    - - - - - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
      - (Items marked with an ‘L’ in the above table are of the size - specified in “Size of Lengths” field in the superblock.) -
    +

    A local heap is a collection of small pieces of data that are + particular to a single object in the HDF5 file. Objects can be inserted + and removed from the heap at any time. The address of a heap does not + change once the heap is created. For example, a group stores addresses + of objects in symbol table nodes with the names of links stored in the + group’s local heap.

    - +
    + + -
    -
    -
    Local Heap
    - - + + + + - - + - - + + - - + -
    Field NameDescriptionbytebytebytebyte

    Version & Type

    -

    This is a bit field with the following definition: - - - - - - - - - - - - - - - - - - -
    BitDescription
    6-7The current version of ID format. This document - describes version 0. -
    4-5The ID type. Huge objects have a value of 1. -
    0-3Reserved. -

    - -
    Signature

    Address

    This field is the address of the object in the file.

    -
    VersionReserved (zero)

    Length

    This field is the length of the object in the file.

    -

    Data Segment SizeL
    +
    -
    - -
    -
    -
    - - - - - - + - - + +
    Fractal Heap ID for Huge Objects (sub-type 4): directly accessed, filtered -
    bytebytebytebyte
    Offset to Head of Free-listL
    +
    Version & TypeThis space inserted only to align table nicely
    Address of Data SegmentO
    +
    + - + + - - + + +

    Address O

     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)

    Length L

     (Items marked with an ‘L’ in the above table are + of the size specified in “Size of Lengths” field in the + superblock.)
    + +
    +
    +
    + - + + - + + -
    Filter MaskField NameDescription

    De-filtered Size L

    Signature

    +

    + The ASCII character string “ + HEAP + ” is used to indicate the beginning of a heap. This gives + file consistency checking utilities a better chance of + reconstructing a damaged file. +

    +
    - - - - + + - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -

    Version

    +

    Each local heap has its own version number so that new heaps + can be added to old files. This document describes version zero (0) + of the local heap.

    +
     (Items marked with an ‘L’ in the above table are of the size - specified in “Size of Lengths” field in the superblock.) -
    -
    - -
    -
    - - - + + - - + + - - - - - - - - - - - - - - - - - - + + + +
    Field NameDescription

    Data Segment Size

    +

    The total amount of disk memory allocated for the heap data. + This may be larger than the amount of space required by the objects + stored in the heap. The extra unused space in the heap holds a + linked list of free blocks.

    +

    Version & Type

    -

    This is a bit field with the following definition: - - - - - - - - - - - - - - - - - - -
    BitDescription
    6-7The current version of ID format. This document - describes version 0. -
    4-5The ID type. Huge objects have a value of 1. -
    0-3Reserved. -

    - -

    Offset to Head of Free-list

    +

    + This is the offset within the heap data segment of the first free + block (or the undefined address if + there is no free block). The free block contains “Size of + Lengths” bytes that are the offset of the next free block (or + the value ‘1’ if this is the last free block) followed + by “Size of Lengths” bytes that store the size of this + free block. The size of the free block includes the space used to + store the offset of the next free block and the size of the current + block, making the minimum size of a free block 2 * “Size of + Lengths”. +

    +

    Address

    This field is the address of the filtered object in the file.

    -

    Length

    This field is the length of the filtered object in the file.

    -

    Filter Mask

    This field is the I/O pipeline filter mask for the - filtered object in the file.

    -

    Filtered Size

    This field is the size of the de-filtered object in the file.

    -

    Address of Data Segment

    +

    The data segment originally starts immediately after the heap + header, but if the data segment must grow as a result of adding + more objects, then the data segment may be relocated, in its + entirety, to another part of the file.

    +
    +
    - - +

    Objects within a local heap should be aligned on an 8-byte + boundary.

    -
    -
    -
    - - +
    +

    + III.E. Disk Format: Level 1E - Global Heap +

    + +

    Each HDF5 file has a global heap which stores various types of + information which is typically shared between datasets. The global heap + was designed to satisfy these goals:

    + +
      +
    1. Repeated access to a heap object must be efficient without + resulting in repeated file I/O requests. Since global heap objects + will typically be shared among several datasets, it is probable that + the object will be accessed repeatedly.
    2. +
    3. Collections of related global heap objects should result in + fewer and larger I/O requests. For instance, a dataset of object + references will have a global heap object for each reference. Reading + the entire set of object references should result in a few large I/O + requests instead of one small I/O request for each reference.
    4. +
    5. It should be possible to remove objects from the global heap + and the resulting file hole should be eligible to be reclaimed for + other uses.
    6. +
    + + +

    + The implementation of the heap makes use of the memory management + already available at the file level and combines that with a new object + called a collection to achieve goal B. The global heap is the + set of all collections. Each global heap object belongs to exactly one + collection and each collection contains one or more global heap + objects. For the purposes of disk I/O and caching, a collection is + treated as an atomic object, addressing goal A. +

    - - - - - - +

    When a global heap object is deleted from a collection (which + occurs when its reference count falls to zero), objects located after + the deleted object in the collection are packed down toward the + beginning of the collection and the collection’s global heap + object 0 is created (if possible) or its size is increased to account + for the recently freed space. There are no gaps between objects in each + collection, with the possible exception of the final space in the + collection, if it is not large enough to hold the header for the + collection’s global heap object 0. These features address goal C. +

    + +

    The HDF5 Library creates global heap collections as needed, so + there may be multiple collections throughout the file. The set of all + of them is abstractly called the “global heap”, although + they do not actually link to each other, and there is no global place + in the file where you can discover all of the collections. The + collections are found simply by finding a reference to one through + another object in the file. For example, data of variable-length + datatype elements is stored in the global heap and is accessed via a + global heap ID. The format for global heap IDs is described at the end + of this section.

    + +
    +
    Fractal Heap ID for Managed Objects -
    bytebytebytebyte
    + - - + + + + + - + - + + -
    A Global Heap Collection
    Version & TypeThis space inserted only to align table nicelybytebytebytebyte
    Offset (variable size)Signature
    Length (variable size)VersionReserved (zero)
    -
    -
    -
    - - - - - - - - - - - - - - - - - - - + + -
    Field NameDescription

    Version & Type

    This is a bit field with the following definition: - - - - - - - - - - - - - - - - - - -
    BitDescription
    6-7The current version of ID format. This document - describes version 0. -
    4-5The ID type. Managed objects have a value of 0. -
    0-3Reserved. -

    -

    Offset

    This field is the offset of the object in the heap. - This field’s size is the minimum number of bytes - necessary to encode the Maximum Heap Size value - (from the Fractal Heap Header). For example, if the - value of the Maximum Heap Size is less than 256 bytes, - this field is 1 byte in length, a Maximum Heap Size - of 256-65535 bytes uses a 2 byte length, and so on.

    Length

    This field is the length of the object in the heap. It - is determined by taking the minimum value of Maximum - Direct Block Size and Maximum Size of Managed - Objects in the Fractal Heap Header. Again, - the minimum number of bytes needed to encode that value is - used for the size of this field.


    Collection SizeL
    +
    -
    - -
    -

    -III.G. Disk Format: Level 1G - Free-space Manager

    - -

    - Free-space managers are used to describe space within a heap or - the entire HDF5 file that is not currently used for that heap or - file. -

    - -

    - The free-space manager header contains metadata information - about the space being tracked, along with the address of the list - of free space sections which actually describes the free - space. The header records information about free-space sections being - tracked, creation parameters for handling free-space sections of a - client, and section information used to locate the collection of - free-space sections. -

    - -

    - The free-space section list stores a collection of - free-space sections that is specific to each client of the - free-space manager. - - For example, the fractal heap is a client of the free space manager - and uses it to track unused space within the heap. There are 4 - types of section records for the fractal heap, each of which has - its own format, listed below. -

    - -
    - - - - - - + - + - - - + - + - + +
    - Free-space Manager Header -
    bytebytebytebyte
    Global Heap Object 1
    +
    Signature
    Global Heap Object 2
    +
    VersionClient IDThis space inserted only to align table nicely
    ...
    +

    Total Space TrackedL


    Global Heap Object N
    +

    Total Number of SectionsL


    Global Heap Object 0 (free space)
    +
    + - - - - - - + + + +

    Number of Serialized SectionsL


    Number of Un-Serialized SectionsL

     (Items marked with an ‘L’ in the + above table are of the size specified in “Size of + Lengths” field in the superblock.)
    - - Number of Section Classes - This space inserted only to align table nicely - +
    +
    +
    + - - + + - - - + + + - - + + + - + + - + + - - + + + +
    Shrink PercentExpand PercentField NameDescription
    Size of Address SpaceThis space inserted only to align table nicely

    Signature

    +

    + The ASCII character string “ + GCOL + ” is used to indicate the beginning of a collection. This + gives file consistency checking utilities a better chance of + reconstructing a damaged file. +

    +

    Maximum Section Size L

    Version

    +

    Each collection has its own version number so that new + collections can be added to old files. This document describes + version one (1) of the collections (there is no version zero (0)). +

    +

    Address of Serialized Section ListO

    Collection Size

    +

    This is the size in bytes of the entire collection including + this field. The default (and minimum) collection size is 4096 bytes + which is a typical file system block size. This allows for 127 + 16-byte heap objects plus their overhead (the collection header of + 16 bytes and the 16 bytes of information about each heap object).

    +

    Size of Serialized Section List UsedL

    + Global Heap Object 1 through N +

    +

    The objects are stored in any order with no intervening + unused space.

    +

    Allocated Size of Serialized Section ListL

    Global Heap Object 0

    +

    Global Heap Object 0 (zero), when present, represents the + free space in the collection. Free space always appears at the end + of the collection. If the free space is too small to store the + header for Object 0 (described below) then the header is implied + and the collection contains no free space.

    +
    +
    - - Checksum - - +
    +
    +
    + + -
    Global Heap Object
    - - - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
      - (Items marked with an ‘L’ in the above table are of the size - specified in “Size of Lengths” field in the superblock.) -
    - -
    - -
    -
    - - - - + + + + - - + + - - + - - + - - - - - - - - - - - - - - - - + +
    Field NameDescriptionbytebytebytebyte

    Signature

    -

    The ASCII character string “FSHD” is used to - indicate the beginning of the Free-space Manager Header. - This gives file consistency checking utilities a better chance of - reconstructing a damaged file. -

    -
    Heap Object IndexReference Count

    Version

    -

    This is the version number for the Free-space Manager Header - and this document describes version 0.

    -
    Reserved (zero)

    Client ID

    -

    This is the client ID for identifying the user of this - free-space manager: - - - - - - - - - - - - - - - - - - - -
    IDDescription
    0Fractal heap -
    1File -
    2+Reserved. -

    - -

    Object SizeL
    +

    Total Space Tracked

    -

    This is the total amount of free space being tracked, in bytes. -

    -

    Total Number of Sections

    -

    This is the total number of free-space sections being tracked. -

    -

    Number of Serialized Sections

    -

    This is the number of serialized free-space sections being - tracked. -

    -

    Number of Un-Serialized Sections

    -

    This is the number of un-serialized free-space sections being - managed. Un-serialized sections are created by the free-space - client when the list of sections is read in. -

    -

    Object Data
    +
    + - - + + +

    Number of Section Classes

    -

    This is the number of section classes handled by this free space - manager for the free-space client. -

    -
     (Items marked with an ‘L’ in the + above table are of the size specified in “Size of + Lengths” field in the superblock.)
    - -

    Shrink Percent

    - -

    This is the percent of current size to shrink the allocated - serialized free-space section list. -

    - - +
    +
    +
    + - - + + - - + + - - + + - - + + - - + + - - + + +

    Expand Percent

    -

    This is the percent of current size to expand the allocated - serialized free-space section list. -

    -
    Field NameDescription

    Size of Address Space

    -

    This is the size of the address space that free-space sections - are within. This is stored as the log2 of the - actual value (in other words, the number of bits required - to store values within that address space). -

    -

    Heap Object Index

    +

    + Each object has a unique identification number within a collection. + The identification numbers are chosen so that new objects have the + smallest value possible with the exception that the identifier + 0 + always refers to the object which represents all free space within + the collection. +

    +

    Maximum Section Size

    -

    This is the maximum size of a section to be tracked. -

    -

    Reference Count

    +

    All heap objects have a reference count field. An object + which is referenced from some other part of the file will have a + positive reference count. The reference count for Object 0 is + always zero.

    +

    Address of Serialized Section List

    -

    This is the address where the serialized free-space section - list is stored. -

    -

    Reserved

    +

    Zero padding to align next field on an 8-byte boundary.

    +

    Size of Serialized Section List Used

    -

    This is the size of the serialized free-space section - list used (in bytes). This value must be less than - or equal to the allocated size of serialized section - list, below. -

    -

    Object Size

    +

    This is the size of the object data stored for the object. + The actual storage space allocated for the object data is rounded + up to a multiple of eight.

    +

    Allocated Size of Serialized Section List

    -

    This is the size of serialized free-space section list - actually allocated (in bytes). -

    -

    Object Data

    +

    The object data is treated as a one-dimensional array of + bytes to be interpreted by the caller.

    +
    - -

    Checksum

    - -

    This is the checksum for the free-space manager header.

    - - +
    - - +
    +

    The format for the ID used to locate an object in the global heap + is described here:

    -
    -

    The free-space sections being managed are stored in a - free-space section list, described below. The sections - in the free-space section list are stored in the following way: - a count of the number of sections describing a particular size of - free space and the size of the free-space described (in bytes), - followed by a list of section description records; then another - section count and size, followed by the list of section - descriptions for that size; and so on.

    - - -
    - - +
    +
    - Free-space Section List -
    + - - - - + + + + - + - - + +
    Global Heap ID
    bytebytebytebytebytebytebytebyte
    Signature
    Collection AddressO
    +
    VersionThis space inserted only to align table nicelyObject Index
    + - + + +

    Free-space Manager Header AddressO

     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
    +
    + +
    +
    + - + + - - - - - - - - - - - + + + - - - + + + + - - - +
    Number of Section Records in Set #0 (variable size)Field NameDescription
    Size of Free-space Section Described in Record Set #0 (variable size)
    Record Set #0 Section Record #0 Offset(variable size)
    Record Set #0 Section Record #0 TypeThis space inserted only to align table nicely

    Collection Address

    +

    This field is the address of the global heap collection where + the data object is stored.

    +
    Record Set #0 Section Record #0 Data (variable size)

    ID

    +

    This field is the index of the data object within the global + heap collection.

    +
    ...
    +
    - - Record Set #0 Section Record #K-1 Offset(variable size) - - - Record Set #0 Section Record #K-1 Type - This space inserted only to align table nicely - +
    +

    + III.F. Disk Format: Level 1F - Fractal Heap +

    + +

    + Each fractal heap consists of a header and zero or more direct and + indirect blocks (described below). The header contains general + information as well as initialization parameters for the doubling + table. The Root Block Address in the header points to the + first direct or indirect block in the heap. +

    - - Record Set #0 Section Record #K-1 Data (variable size) - +

    + Fractal heaps are based on a data structure called a doubling + table. A doubling table provides a mechanism for quickly extending an + array-like data structure that minimizes the number of empty blocks in + the heap, while retaining very fast lookup of any element within the + array. More information on fractal heaps and doubling tables can be + found in the RFC “Private Heaps in + HDF5.” +

    - - Number of Section Records in Set #1 (variable size) - +

    The fractal heap implements the doubling table structure with + indirect and direct blocks. Indirect blocks in the heap do not actually + contain data for objects in the heap, their “size” is + abstract - they represent the indexing structure for locating the + direct blocks in the doubling table. Direct blocks contain the actual + data for objects stored in the heap.

    + +

    + All indirect blocks have a constant number of block entries in each + row, called the width of the doubling table (stored in the + heap header). The number of rows for each indirect block in the heap is + determined by the size of the block that the indirect block represents + in the doubling table (calculation of this is shown below) and is + constant, except for the “root” indirect block, which + expands and shrinks its number of rows as needed. +

    - - Size of Free-space Section Described in Record Set #1 (variable size) - +

    + Blocks in the first two rows of an indirect block are Starting + Block Size number of bytes in size, and the blocks in each subsequent + row are twice the size of the blocks in the previous row. In other + words, blocks in the third row are twice the Starting Block + Size, blocks in the fourth row are four times the Starting + Block Size, and so on. Entries for blocks up to the Maximum + Direct Block Size point to direct blocks, and entries for blocks + greater than that size point to further indirect blocks (which have + their own entries for direct and indirect blocks). +

    - - Record Set #1 Section Record #0 Offset(variable size) - +

    + The number of rows of blocks, nrows, in an indirect block of + size iblock_size is given by the following expression:
    +
    nrows = (log2(iblock_size) - log2(<Starting + Block Size> * <Width>)) + 1 +

    - - Record Set #1 Section Record #0 Type - This space inserted only to align table nicely - +

    + The maximum number of rows of direct blocks, max_dblock_rows, + in any indirect block of a fractal heap is given by the following + expression:

    max_dblock_rows = (log2(<Max. + Direct Block Size>) - log2(<Starting Block + Size>)) + 2 +

    - - Record Set #1 Section Record #0 Data (variable size) - +

    + Using the computed values for nrows and max_dblock_rows, + along with the Width of the doubling table, the number of + direct and indirect block entries (K and N in the + indirect block description, below) in an indirect block can be + computed:

    K = MIN(nrows, max_dblock_rows) + * Width

    If nrows is less than or + equal to max_dblock_rows, N is 0. Otherwise, N + is simply computed:

    N = K - (max_dblock_rows + * Width) +

    - - ... - +

    The size of indirect blocks on disk is determined by the number of + rows in the indirect block (computed above). The size of direct blocks + on disk is exactly the size of the block in the doubling table.

    - - Record Set #1 Section Record #K-1 Offset(variable size) - +
    + + - - - - + + + + + + - - - + + + - - - + + + + - - - + + + + - + + - - + + - - - + + + - - - - + + + - - - + + + - - - + + + - - - + + + - - - - + + + - - - + + + - - - -
    Fractal Heap Header
    Record Set #1 Section Record #K-1 TypeThis space inserted only to align table nicely
    bytebytebytebyte
    Record Set #1 Section Record #K-1 Data (variable size)
    Signature
    ...
    VersionThis space inserted + only to align table nicely
    ...
    Heap ID LengthI/O Filters’ Encoded Length
    Number of Section Records in Set #N-1 (variable size)FlagsThis space inserted + only to align table nicely
    Size of Free-space Section Described in Record Set #N-1 (variable size)
    Maximum Size of Managed Objects
    Record Set #N-1 Section Record #0 Offset(variable size)

    Next Huge Object IDL
    +
    Record Set #N-1 Section Record #0 TypeThis space inserted only to align table nicely

    v2 B-tree Address of Huge ObjectsO
    +
    Record Set #N-1 Section Record #0 Data (variable size)

    Amount of Free Space in Managed BlocksL
    +
    ...

    Address of Managed Block Free Space + ManagerO
    +
    Record Set #N-1 Section Record #K-1 Offset(variable size)

    Amount of Managed Space in HeapL
    +
    Record Set #N-1 Section Record #K-1 TypeThis space inserted only to align table nicely

    Amount of Allocated Managed Space in HeapL
    +
    Record Set #N-1 Section Record #K-1 Data (variable size)

    Offset of Direct Block Allocation + Iterator in Managed SpaceL
    +
    Checksum
    + +
    Number of Managed Objects in HeapL
    +
    + - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
    -
    +
    Size of Huge Objects in HeapL
    +
    + -
    -
    - - - - + + - - + - - + - - + + - - - + + -

    - The number of sets of free-space section records is - determined by the size of serialized section list in - the free-space manager header. -

    - - + + + - - + + -

    - The length of this field is the minimum number of bytes needed - to store the maximum section size (from the - free-space manager header). -

    - + + - - + + -

    - The length of this field is the minimum number of bytes needed - to store the size of address space (from the - free-space manager header). -

    - + + - - + -
    Field NameDescription

    Number of Huge Objects in HeapL
    +

    Signature

    -

    The ASCII character string “FSSE” is used to - indicate the beginning of the Free-space Section Information. - This gives file consistency checking utilities a better chance of - reconstructing a damaged file. -

    -

    Size of Tiny Objects in HeapL
    +

    Version

    -

    This is the version number for the Free-space Section List - and this document describes version 0.

    -

    Number of Tiny Objects in HeapL
    +

    Free-space Manager Header Address

    -

    This is the address of the Free-space Manager Header. - This field is principally used for file - integrity checking. -

    -
    Table WidthThis space inserted + only to align table nicely

    Number of Section Records for Set #N

    -

    This is the number of free-space section records for set #N. - The length of this field is the minimum number of bytes needed - to store the number of serialized sections (from the - free-space manager header). -

    +

    Starting Block SizeL
    +

    Maximum Direct Block SizeL
    +

    Section Size for Record Set #N

    -

    This is the size (in bytes) of the free-space section described - for all the section records in set #N. -

    +
    Maximum Heap SizeStarting # of Rows in Root Indirect Block

    Address of Root BlockO
    +

    Record Set #N Section #K Offset

    -

    This is the offset (in bytes) of the free-space section within - the client for the free-space manager. -

    +
    Current # of Rows in Root Indirect BlockThis space inserted + only to align table nicely

    Size of Filtered Root Direct Block (optional)L
    +

    Record Set #N Section #K Type

    -

    This is the type of the section record, used to decode the - record set #N section #K data information. The defined - record type for file client is: +

    I/O Filter Mask (optional)
    - - - - + + + - - - - - - - - -
    TypeDescription
    I/O Filter Information (optional, + variable size)
    0File’s section (a range of actual bytes in file) -
    1+Reserved. -

    + + Checksum + -

    The defined record types for a fractal heap client are: + - - - - - +
    TypeDescription
    + + + + + + + + +
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
     (Items marked with an ‘L’ in the above table are + of the size specified in “Size of Lengths” field in the + superblock.)
    - - 0 - Fractal heap “single” section - - +

    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Signature

    +

    + The ASCII character string “ + FRHP + ” is used to indicate the beginning of a fractal heap header. + This gives file consistency checking utilities a better chance of + reconstructing a damaged file. +

    +

    Version

    +

    This document describes version 0.

    +

    Heap ID Length

    +

    This is the length in bytes of heap object IDs for this heap.

    +

    I/O Filters’ Encoded Length

    +

    + This is the size in bytes of the encoded I/O Filter + Information. +

    +

    Flags

    +

    This field is the heap status flag and is a bit field + indicating additional information about the fractal heap.

    + - - + + - - + + - - - + + - - - + + -
    1Fractal heap “first row” section - Bit(s)Description
    2Fractal heap “normal row” section - 0If set, the ID value to use for huge object has wrapped + around. If the value for the Next Huge Object ID has + wrapped around, each new huge object inserted into the heap will + require a search for an ID value. +
    3Fractal heap “indirect” section - 1If set, the direct blocks in the heap are checksummed.
    4+Reserved. - 2-7Reserved

    +
    +

    - + -

    Record Set #N Section #K Data

    - -

    This is the section-type specific information for each record - in the record set, described below. -

    - +

    Maximum Size of Managed Objects

    + +

    This is the maximum size of managed objects allowed in the + heap. Objects greater than this are ‘huge’ objects + and will be stored in the file directly, rather than in a direct + block for the heap.

    + -

    Checksum

    - -

    This is the checksum for the Free-space Section List. -

    - +

    Next Huge Object ID

    + +

    This is the next ID value to use for a huge object in the + heap.

    + - -
    - -
    -

    - The section-type specific data for each free-space section record is - described below: -

    - -
    - - - - + + -
    - File’s Section Data Record -
    No additional record data stored

    v2 B-tree Address of Huge Objects

    +

    + This is the address of the v2 B-tree used + to track huge objects in the heap. The type of records stored in + the v2 B-tree will be determined by whether the address & + length of a huge object can fit into a heap ID (if yes, it is a + “directly” accessed huge object) and whether there is a + filter used on objects in the heap. +

    +
    -
    - -
    -
    -
    - - - + + -
    - Fractal Heap “Single” Section Data Record -
    No additional record data stored

    Amount of Free Space in Managed Blocks

    +

    This is the total amount of free space in managed direct + blocks (in bytes).

    +
    -
    - -
    -
    -
    - - - + + -
    - Fractal Heap “First Row” Section Data Record -
    Same format as “indirect” section data

    Address of Managed Block Free Space Manager

    +

    + This is the address of the Free-space + Manager for managed blocks. +

    +
    -
    - -
    -
    -
    - - - + + -
    - Fractal Heap “Normal Row” Section Data Record -
    No additional record data stored

    Amount of Managed Space in Heap

    +

    This is the total amount of managed space in the heap (in + bytes), essentially the upper bound of the heap’s linear + address space.

    +
    -
    - -
    -
    -
    - - - - - - + + - + + - - + + - - + + -
    - Fractal Heap “Indirect” Section Data Record -
    bytebytebytebyte

    Amount of Allocated Managed Space in Heap

    +

    + This is the total amount of managed space (in bytes) actually + allocated in the heap. This can be less than the Amount of + Managed Space in Heap field, if some direct blocks in the + heap’s linear address space are not allocated. +

    +
    Fractal Heap Indirect Block Offset (variable size)

    Offset of Direct Block Allocation Iterator in Managed + Space

    +

    + This is the linear heap offset where the next direct block should + be allocated at (in bytes). This may be less than the Amount + of Managed Space in Heap value because the heap’s address + space is increased by a “row” of direct blocks at a + time, rather than by single direct block increments. +

    +
    Block Start RowBlock Start Column

    Number of Managed Objects in Heap

    +

    This is the number of managed objects in the heap.

    +
    Number of BlocksThis space inserted only to align table nicely

    Size of Huge Objects in Heap

    +

    This is the total size of huge objects in the heap (in + bytes).

    +
    -
    -
    -
    - - - - + + + - - + + - - + + - - + + - - + + -
    Field NameDescription

    Number of Huge Objects in Heap

    +

    This is the number of huge objects in the heap.

    +

    Fractal Heap Block Offset

    -

    The offset of the indirect block in the fractal heap’s address - space containing the empty blocks. -

    -

    - The number of bytes used to encode this field is the minimum - number of bytes needed to encode values for the Maximum - Heap Size (in the fractal heap’s header). -

    -

    Size of Tiny Objects in Heap

    +

    This is the total size of tiny objects that are packed in + heap IDs (in bytes).

    +

    Block Start Row

    -

    This is the row that the empty blocks start in. -

    -

    Number of Tiny Objects in Heap

    +

    This is the number of tiny objects that are packed in heap + IDs.

    +

    Block Start Column

    -

    This is the column that the empty blocks start in. -

    -

    Table Width

    +

    This is the number of columns in the doubling table for + managed blocks. This value must be a power of two.

    +

    Number of Blocks

    -

    This is the number of empty blocks covered by the section. -

    -

    Starting Block Size

    +

    This is the starting block size to use in the doubling table + for managed blocks (in bytes). This value must be a power of two.

    +
    -
    - -
    -

    -III.H. Disk Format: Level 1H - Shared Object Header Message Table

    - -

    - The shared object header message table is used to locate - object - header messages that are shared between two or more object headers - in the file. Shared object header messages are stored and indexed - in the file in one of two ways: indexed sequentially in a - shared header message list or indexed with a v2 B-tree. - The shared messages themselves are either stored in a fractal - heap (when two or more objects share the message), or remain in an - object’s header (when only one object uses the message currently, - but the message can be shared in the future). -

    - -

    - The shared object header message table - contains a list of shared message index headers. Each index header - records information about the version of the index format, the index - storage type, flags for the message types indexed, the number of - messages in the index, the address where the index resides, - and the fractal heap address if shared messages are stored there. -

    - -

    - Each index can be either a list or a v2 B-tree and may transition - between those two forms as the number of messages in the index - varies. Each shared message record contains information used to - locate the shared message from either a fractal heap or an object - header. The types of messages that can be shared are: Dataspace, - Datatype, Fill Value, Filter Pipeline and Attribute. -

    - -

    - The shared object header message table is pointed to - from a shared message table message - in the superblock extension for a file. This message stores the - version of the table format, along with the number of index headers - in the table. -

    - -
    - - - - - - + + - + + - - - + + - + + - - + + - - - - - - - - - - - + + + - - + + + - - + + + - - - + + +
    - Shared Object Header Message Table -
    bytebytebytebyte

    Maximum Direct Block Size

    +

    This is the maximum size allowed for a managed direct block. + Objects inserted into the heap that are larger than this value + (less the # of bytes of direct block prefix/suffix) are stored as + ‘huge’ objects. This value must be a power of two.

    +
    Signature

    Maximum Heap Size

    +

    This is the maximum size of the heap’s linear address + space for managed objects (in bytes). The value stored is the log2 + of the actual value, that is: the # of bits of the address space. + ‘Huge’ and ‘tiny’ objects are not counted + in this value, since they do not store objects in the linear + address space of the heap.

    +
    Version for index #0Index Type for index #0Message Type Flags for index #0

    Starting # of Rows in Root Indirect Block

    +

    + This is the starting number of rows for the root indirect block. A + value of 0 indicates that the root indirect block will have the + maximum number of rows needed to address the heap’s Maximum + Heap Size. +

    +
    Minimum Message Size for index #0

    Address of Root Block

    +

    + This is the address of the root block for the heap. It can be the undefined address if there is no data + in the heap. It either points to a direct block (if the Current + # of Rows in the Root Indirect Block value is 0), or an indirect + block. +

    +
    List Cutoff for index #0v2 B-tree Cutoff for index #0

    Current # of Rows in Root Indirect Block

    +

    + This is the current number of rows in the root indirect block. A + value of 0 indicates that Address of Root Block points to + direct block instead of indirect block. +

    +
    Number of Messages for index #0This space inserted only to align table nicely

    Index AddressO for index #0


    Fractal Heap AddressO for index #0

    Size of Filtered Root Direct Block

    +

    + This is the size of the root direct block, if filters are applied + to heap objects (in bytes). This field is only stored in the header + if the I/O Filters’ Encoded Length is greater than + 0. +

    +
    ...

    I/O Filter Mask

    +

    + This is the filter mask for the root direct block, if filters are + applied to heap objects. This mask has the same format as that used + for the filter mask in chunked raw data records in a v1 B-tree. This field is only stored in the + header if the I/O Filters’ Encoded Length is greater + than 0. +

    +
    ...

    I/O Filter Information

    +

    + This is the I/O filter information encoding direct blocks and huge + objects, if filters are applied to heap objects. This field is + encoded as a Filter Pipeline message. + The size of this field is determined by I/O Filters’ + Encoded Length. +

    +
    Version for index #N-1Index Type for index #N-1Message Type Flags for index #N-1

    Checksum

    +

    This is the checksum for the header.

    +
    +
    + +
    +
    +
    + + + - + + + + - - + - - - - - - - - - - - - - - - -
    Fractal Heap Direct Block
    Minimum Message Size for index #N-1bytebytebytebyte
    List Cutoff for index #N-1v2 B-tree Cutoff for index #N-1Signature
    Number of Messages for index #N-1This space inserted only to align table nicely

    Index AddressO for index #N-1


    Fractal Heap AddressO for index #N-1

    Checksum
    + Version + This space inserted + only to align table nicely + - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
    - -
    - -
    -
    - - - - + - - + - - + - - + - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Field NameDescription
    Heap Header AddressO
    +

    Signature

    -

    The ASCII character string “SMTB” is used to - indicate the beginning of the Shared Object Header Message table. - This gives file consistency checking utilities a better chance of - reconstructing a damaged file. -

    -
    Block Offset (variable size)

    Version for index #N

    -

    This is the version number for the list of shared object header message - indexes; this document describes version 0.

    -
    Checksum (optional)

    Index Type for index #N

    -

    The type of index can be an unsorted list or a v2 B-tree. -

    -

    Object Data (variable size)
    +

    Message Type Flags for index #N

    -

    This field indicates the type of messages tracked in the index, - as follows: - - - - - +
    BitsDescription
    -

    0If set, the index tracks Dataspace Messages. -
    1If set, the index tracks Datatype Messages. -
    2If set, the index tracks Fill Value Messages. -
    3If set, the index tracks Filter Pipeline Messages. -
    4If set, the index tracks Attribute Messages. -
    5-15Reserved (zero). -

    + + + + + +
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
    +
    -

    - An index can track more than one type of message, but each type - of message can only be in one index. -

    - - +
    +
    + + + + + - - + + - - - - - - - + + + - - + + - - + + - - + + - - + + -
    Field NameDescription

    Minimum Message Size for index #N

    -

    This is the message size sharing threshold for the index. - If the encoded size of the message is less than this value, the - message is not shared. -

    -

    Signature

    +

    + The ASCII character string “ + FHDB + ” is used to indicate the beginning of a fractal heap direct + block. This gives file consistency checking utilities a better + chance of reconstructing a damaged file. +

    +

    List Cutoff for index #N

    -

    This is the cutoff value for the indexing of messages to - switch from a list to a v2 B-tree. If the number of messages - is greater than this value, the index should be a v2 B-tree. -

    -

    v2 B-tree Cutoff for index #N

    -

    This is the cutoff value for the indexing of messages to - switch from a v2 B-tree back to a list. If the number of - messages is less than this value, the index should be a list. -

    -

    Version

    +

    This document describes version 0.

    +

    Number of Messages for index #N

    -

    The number of shared messages being tracked for the index. -

    -

    Heap Header Address

    +

    This is the address for the fractal heap header that this + block belongs to. This field is principally used for file integrity + checking.

    +

    Index Address for index #N

    -

    This field is the address of the list or v2 B-tree where the - index nodes reside. -

    -

    Block Offset

    +

    + This is the offset of the block within the fractal heap’s + address space (in bytes). The number of bytes used to encode this + field is the Maximum Heap Size (in the heap’s + header) divided by 8 and rounded up to the next highest integer, + for values that are not a multiple of 8. This value is principally + used for file integrity checking. +

    +

    Fractal Heap Address for index #N

    -

    This field is the address of the fractal heap if shared messages - are stored there. -

    -

    Checksum

    +

    This is the checksum for the direct block.

    +

    + This field is only present if bit 1 of Flags in the + heap’s header is set. +

    +

    Checksum

    -

    This is the checksum for the table.

    -

    Object Data

    +

    + This section of the direct block stores the actual data for objects + in the heap. The size of this section is determined by the direct + block’s size minus the size of the other fields stored in the + direct block (for example, the Signature, Version, + and others including the Checksum if it is present). +

    +
    -
    + + -
    -

    - Shared messages are indexed either with a shared message record - list, described below, or using a v2 B-tree (using record type 7). - The number of records in the shared message record list is - determined in the index’s entry in the shared object header message - table. -

    - -
    - - +
    +
    +
    +
    - Shared Message Record List -
    + - - - - + + + + - + - + + - + - + - + - - + -
    Fractal Heap Indirect Block
    bytebytebytebytebytebytebytebyte
    SignatureSignature
    Shared Message Record #0VersionThis space inserted + only to align table nicely
    Shared Message Record #1
    Heap Header AddressO
    +
    ...Block Offset (variable size)
    Shared Message Record #N-1
    Child Direct Block #0 AddressO
    +
    Checksum
    Size of Filtered Direct Block #0 (optional) + L
    +
    -
    - -
    -
    - - - - + + - - + - - - + - - - + -
    Field NameDescription
    Filter Mask for Direct Block #0 (optional)

    Signature

    -

    The ASCII character string “SMLI” is used to - indicate the beginning of a list of index nodes. - This gives file consistency checking utilities a better chance of - reconstructing a damaged file. -

    -

    Child Direct Block #1 AddressO
    +

    Shared Message Record #N

    -

    The record for locating the shared message, either in the - fractal heap for the index, or an object header (see format for - index nodes below). -

    -

    Size of Filtered Direct Block #1 (optional)L
    +

    Checksum

    -

    This is the checksum for the list. -

    -
    Filter Mask for Direct Block #1 (optional)
    -
    - -
    -

    - The record for each shared message in an index is stored in one of the - following forms: -

    - -
    - - - - - - - + - - + - - + - - + - + -
    - Shared Message Record, for messages stored in a fractal heap -
    bytebytebytebyte...
    Message LocationThis space inserted only to align table nicely
    Child Direct Block #K-1 AddressO
    +
    Hash Value
    Size of Filtered Direct Block #K-1 (optional)L
    +
    Reference CountFilter Mask for Direct Block #K-1 (optional)

    Fractal Heap ID


    Child Indirect Block #0 AddressO
    +
    -
    -
    -
    - - - - + + - - + - - + - - + +
    Field NameDescription

    Child Indirect Block #1 AddressO
    +

    Message Location

    -

    This has a value of 0 indicating that the message is stored in - the heap. -

    -
    ...

    Hash Value

    -

    This is the hash value for the message. -

    -

    Child Indirect Block #N-1 AddressO
    +

    Reference Count

    -

    This is the number of times the message is used in the file. -

    -
    Checksum
    + - - + + -

    Fractal Heap ID

    -

    This is an 8-byte fractal heap ID for the message as stored in - the fractal heap for the index. -

    -
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
    -
    - -
    -
    -
    - - - - - - - + + +
    - Shared Message Record, for messages stored in an object header -
    bytebytebytebyte (Items marked with an ‘L’ in the above table are + of the size specified in “Size of Lengths” field in the + superblock.)
    + +
    +
    +
    + - - + + - + + - - - + + - + + -
    Message LocationThis space inserted only to align table nicelyField NameDescription
    Hash Value

    Signature

    +

    + The ASCII character string “ + FHIB + ” is used to indicate the beginning of a fractal heap + indirect block. This gives file consistency checking utilities a + better chance of reconstructing a damaged file. +

    +
    ReservedMessage TypeCreation Index

    Version

    +

    This document describes version 0.

    +

    Object Header AddressO

    Heap Header Address

    +

    This is the address for the fractal heap header that this + block belongs to. This field is principally used for file integrity + checking.

    +
    - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
    - -
    - -
    -
    - - - - + + - - + + - - + + - - - + + - - + + - - + + -
    Field NameDescription

    Block Offset

    +

    + This is the offset of the block within the fractal heap’s + address space (in bytes). The number of bytes used to encode this + field is the Maximum Heap Size (in the heap’s + header) divided by 8 and rounded up to the next highest integer, + for values that are not a multiple of 8. This value is principally + used for file integrity checking. +

    +

    Message Location

    -

    This has a value of 1 indicating that the message is stored in - an object header. -

    -

    Child Direct Block #K Address

    +

    This field is the address of the child direct block. The size + of the [uncompressed] direct block can be computed by its offset in + the heap’s linear address space.

    +

    Hash Value

    -

    This is the hash value for the message. -

    -

    Size of Filtered Direct Block #K

    +

    This is the size of the child direct block after passing + through the I/O filters defined for this heap (in bytes). If no I/O + filters are present for this heap, this field is not present.

    +

    Message Type

    -

    This is the message type in the object header. -

    -

    Filter Mask for Direct Block #K

    +

    + This is the I/O filter mask for the filtered direct block. This + mask has the same format as that used for the filter mask in + chunked raw data records in a v1 B-tree. If + no I/O filters are present for this heap, this field is not + present. +

    +

    Creation Index

    -

    This is the creation index of the message within the object - header. -

    -

    Child Indirect Block #N Address

    +

    This field is the address of the child indirect block. The + size of the indirect block can be computed by its offset in the + heap’s linear address space.

    +

    Object Header Address

    -

    This is the address of the object header where the message is - located. -

    -

    Checksum

    +

    This is the checksum for the indirect block.

    +
    -
    + +
    -
    -
    -

    -IV. Disk Format: Level 2 - Data Objects

    - -

    Data objects contain the “real” user-visible information in the file. - These objects compose the scientific data and other information which - are generally thought of as “data” by the end-user. All the - other information in the file is provided as a framework for - storing and accessing these data objects. -

    - -

    A data object is composed of header and data - information. The header information contains the information - needed to interpret the data information for the object as - well as additional “metadata” or pointers to additional - “metadata” used to describe or annotate each object. -

    - -
    -

    -IV.A. Disk Format: Level 2A - Data Object Headers

    - -

    The header information of an object is designed to encompass - all of the information about an object, except for the data itself. - This information includes the dataspace, the datatype, information - about how the data is stored on disk (in external files, compressed, - broken up in blocks, and so on), as well as other information used - by the library to speed up access to the data objects or maintain - a file’s integrity. Information stored by user applications - as attributes is also stored in the object’s header. The header - of each object is not necessarily located immediately prior to the - object’s data in the file and in fact may be located in any - position in the file. The order of the messages in an object header - is not significant.

    - -

    Object headers are composed of a prefix and a set of messages. The - prefix contains the information needed to interpret the messages and - a small amount of metadata about the object, and the messages contain - the majority of the metadata about the object. -

    - -
    -

    -IV.A.1. Disk Format: Level 2A1 - Data Object Header Prefix

    - -
    -

    -IV.A.1.a. Version 1 Data Object Header Prefix

    - -

    Header messages are aligned on 8-byte boundaries for version 1 - object headers. -

    - -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - Version 1 Object Header -
    bytebytebytebyte
    VersionReserved (zero)Total Number of Header Messages
    Object Reference Count
    Object Header Size
    Header Message Type #1Size of Header Message Data #1
    Header Message #1 FlagsReserved (zero)

    Header Message Data #1

    .
    .
    .
    Header Message Type #nSize of Header Message Data #n
    Header Message #n FlagsReserved (zero)

    Header Message Data #n

    -
    +

    An object in the fractal heap is identified by means of a fractal + heap ID, which encodes information to locate the object in the heap. + Currently, the fractal heap stores an object in one of three ways, + depending on the object’s size:

    + +
    + + + + + + + + + + -
    -
    -
    TypeDescription
    Tiny +

    When an object is small enough to be encoded in the heap ID, + the object’s data is embedded in the fractal heap ID itself. + There are 2 sub-types for this type of object: normal and extended. + The sub-type for tiny heap IDs depends on whether the heap ID is + large enough to store objects greater than 16 bytes or not. If the + heap ID length is 18 bytes or smaller, the ‘normal’ + tiny heap ID form is used. If the heap ID length is greater than 18 + bytes in length, the “extended” form is used. See + format description below for both sub-types.

    +
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Field NameDescription

    Version

    -

    This value is used to determine the format of the - information in the object header. When the format of the - object header is changed, the version number - is incremented and can be used to determine how the - information in the object header is formatted. This - is version one (1) (there was no version zero (0)) of the - object header. -

    -

    Total Number of Header Messages

    -

    This value determines the total number of messages listed in - object headers for this object. This value includes the messages - in continuation messages for this object. -

    -

    Object Reference Count

    -

    This value specifies the number of “hard links” to this object - within the current file. References to the object from external - files, “soft links” in this file and object references in this - file are not tracked. -

    -

    Object Header Size

    -

    This value specifies the number of bytes of header message data - following this length field that contain object header messages - for this object header. This value does not include the size of - object header continuation blocks for this object elsewhere in the - file. -

    -

    Header Message #n Type

    -

    This value specifies the type of information included in the - following header message data. The message types for - header messages are defined in sections below. -

    -

    Size of Header Message #n Data

    -

    This value specifies the number of bytes of header - message data following the header message type and length - information for the current message. The size includes - padding bytes to make the message a multiple of eight - bytes. -

    -

    Header Message #n Flags

    -

    This is a bit field with the following definition: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    BitDescription
    0If set, the message data is constant. This is used - for messages like the datatype message of a dataset. -
    1If set, the message is shared and stored - in another location than the object header. The Header - Message Data field contains a Shared Message - (described in the Data Object Header Messages - section below) - and the Size of Header Message Data field - contains the size of that Shared Message. -
    2If set, the message should not be shared. -
    3If set, the HDF5 decoder should fail to open this object - if it does not understand the message’s type and the file - is open with permissions allowing write access to the file. - (Normally, unknown messages can just be ignored by HDF5 - decoders) -
    4If set, the HDF5 decoder should set bit 5 of this - message’s flags (in other words, this bit field) - if it does not understand the message’s type - and the object is modified in any way. (Normally, - unknown messages can just be ignored by HDF5 - decoders) -
    5If set, this object was modified by software that did not - understand this message. - (Normally, unknown messages should just be ignored by HDF5 - decoders) (Can be used to invalidate an index or a similar - feature) -
    6If set, this message is shareable. -
    7If set, the HDF5 decoder should always fail to open this - object if it does not understand the message’s type (whether - it is open for read-only or read-write access). (Normally, - unknown messages can just be ignored by HDF5 decoders) -

    - -

    Header Message #n Data

    -

    The format and length of this field is determined by the - header message type and size respectively. Some header - message types do not require any data and this information - can be eliminated by setting the length of the message to - zero. The data is padded with enough zeroes to make the - size a multiple of eight. -

    -
    -
    - -
    -

    -IV.A.1.b. Version 2 Data Object Header Prefix

    - -

    Note that the “total number of messages” field has been dropped from - the data object header prefix in this version. The number of messages - in the data object header is just determined by the messages encountered - in all the object header blocks.

    - -

    Note also that the fields and messages in this version of data object - headers have no alignment or padding bytes inserted - they are - stored packed together.

    - -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - Version 2 Object Header -
    bytebytebytebyte
    Signature
    VersionFlagsThis space inserted only to align table nicely
    Access time (optional)
    Modification Time (optional)
    Change Time (optional)
    Birth Time (optional)
    Maximum # of compact attributes (optional)Minimum # of dense attributes (optional)
    Size of Chunk #0 (variable size)This space inserted only to align table nicely
    Header Message Type #1Size of Header Message Data #1Header Message #1 Flags
    Header Message #1 Creation Order (optional)This space inserted only to align table nicely

    Header Message Data #1

    .
    .
    .
    Header Message Type #nSize of Header Message Data #nHeader Message #n Flags
    Header Message #n Creation Order (optional)This space inserted only to align table nicely

    Header Message Data #n

    Gap (optional, variable size)
    Checksum
    -
    + + Huge + +

    + When the size of an object is larger than Maximum Size of + Managed Objects in the Fractal Heap Header, the + object’s data is stored on its own in the file and the object + is tracked/indexed via a version 2 B-tree. All huge objects for a + particular fractal heap use the same v2 B-tree. All huge objects + for a particular fractal heap use the same format for their huge + object IDs. +

    -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +
    Field NameDescription

    Signature

    -

    The ASCII character string “OHDR” - is used to indicate the - beginning of an object header. This gives file consistency - checking utilities a better chance of reconstructing a - damaged file. -

    -

    Version

    -

    This field has a value of 2 indicating version 2 of the object header. -

    -

    Flags

    -

    This field is a bit field indicating additional information - about the object header. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Bit(s)Description
    0-1This two bit field determines the size of the - Size of Chunk #0 field. The values are: +

    Depending on whether the IDs for a heap are large enough to + hold the object’s retrieval information and whether I/O + pipeline filters are applied to the heap’s objects, 4 + sub-types are derived for huge object IDs for this heap:

    + +
    - - + + - - + + + - - + + + - - + + + - - + + -
    ValueDescriptionSub-typeDescription
    0The Size of Chunk #0 field is 1 byte. - Directly accessed, non-filtered +

    The object’s address and length are embedded in the + fractal heap ID itself and the object is directly accessed from + them. This allows the object to be accessed without resorting + to the B-tree.

    +
    1The Size of Chunk #0 field is 2 bytes. - Directly accessed, filtered +

    The filtered object’s address, length, filter mask + and de-filtered size are embedded in the fractal heap ID itself + and the object is accessed directly with them. This allows the + object to be accessed without resorting to the B-tree.

    +
    2The Size of Chunk #0 field is 4 bytes. - Indirectly accessed, non-filtered +

    The object is located by using a B-tree key embedded in + the fractal heap ID to retrieve the address and length from the + version 2 B-tree for huge objects. Then, the address and length + are used to access the object.

    +
    3The Size of Chunk #0 field is 8 bytes. - Indirectly accessed, filtered +

    The object is located by using a B-tree key embedded in + the fractal heap ID to retrieve the filtered object’s + address, length, filter mask and de-filtered size from the + version 2 B-tree for huge objects. Then, this information is + used to access the object.

    +

    -
    2If set, attribute creation order is tracked.
    3If set, attribute creation order is indexed.
    4If set, non-default attribute storage phase change - values are stored.
    5If set, access, modification, change and birth times - are stored.
    6-7Reserved

    - -

    Access Time

    -

    This 32-bit value represents the number of seconds after the - UNIX epoch when the object’s raw data was last accessed - (in other words, read or written). -

    -

    This field is present if bit 5 of flags is set. -

    -

    Modification Time

    -

    This 32-bit value represents the number of seconds after - the UNIX epoch when the object’s raw data was last - modified (in other words, written). -

    -

    This field is present if bit 5 of flags is set. -

    -

    Change Time

    -

    This 32-bit value represents the number of seconds after the - UNIX epoch when the object’s metadata was last changed. -

    -

    This field is present if bit 5 of flags is set. -

    -

    Birth Time

    -

    This 32-bit value represents the number of seconds after the - UNIX epoch when the object was created. -

    -

    This field is present if bit 5 of flags is set. -

    -

    Maximum # of compact attributes

    -

    This is the maximum number of attributes to store in the compact - format before switching to the indexed format. -

    -

    This field is present if bit 4 of flags is set. -

    -

    Minimum # of dense attributes

    -

    This is the minimum number of attributes to store in the indexed - format before switching to the compact format. -

    -

    This field is present if bit 4 of flags is set. -

    -

    Size of Chunk #0

    -

    - This unsigned value specifies the number of bytes of header - message data following this field that contain object header - information. -

    -

    - This value does not include the size of object header - continuation blocks for this object elsewhere in the file. -

    -

    - The length of this field varies depending on bits 0 and 1 of - the flags field. -

    -

    Header Message #n Type

    -

    Same format as version 1 of the object header, described above. -

    -

    Size of Header Message #n Data

    -

    This value specifies the number of bytes of header - message data following the header message type and length - information for the current message. The size of messages - in this version does not include any padding bytes. -

    -

    Header Message #n Flags

    -

    Same format as version 1 of the object header, described above. -

    -

    Header Message #n Creation Order

    -

    This field stores the order that a message of a given type - was created in. -

    -

    This field is present if bit 2 of flags is set. -

    -

    Header Message #n Data

    -

    Same format as version 1 of the object header, described above. -

    -

    Gap

    -

    A gap in an object header chunk is inferred by the end of the - messages for the chunk before the beginning of the chunk’s - checksum. Gaps are always smaller than the size of an - object header message prefix (message type + message size + - message flags). -

    -

    Gaps are formed when a message (typically an attribute message) - in an earlier chunk is deleted and a message from a later - chunk that does not quite fit into the free space is moved - into the earlier chunk. -

    -

    Checksum

    -

    This is the checksum for the object header chunk. -

    -
    +
    + + - - - -

    The header message types and the message data associated with - them compose the critical “metadata” about each object. Some - header messages are required for each object while others are - optional. Some optional header messages may also be repeated - several times in the header itself, the requirements and number - of times allowed in the header will be noted in each header - message description below. -

    - - -
    -

    -IV.A.2. Disk Format: Level 2A2 - Data Object Header Messages

    - -

    Data object header messages are small pieces of metadata that are - stored in the data object header for each object in an HDF5 file. - Data object header messages provide the metadata required to describe - an object and its contents, as well as optional pieces of metadata - that annotate the meaning or purpose of the object. -

    - -

    Data object header messages are either stored directly in the data - object header for the object or are shared between multiple objects - in the file. When a message is shared, a flag in the Message Flags - indicates that the actual Message Data - portion of that message is stored in another location (such as another - data object header, or a heap in the file) and the Message Data - field contains the information needed to locate the actual information - for the message. -

    - -

    - The format of shared message data is described here:

    - -
    - - - - - - - - - - - - - - - - - - - - - - - -
    - Shared Message (Version 1) -
    bytebytebytebyte
    VersionTypeReserved (zero)
    Reserved (zero)

    AddressO

    - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
    + Managed + +

    When the size of an object does not meet the above two + conditions, the object is stored and managed via the direct and + indirect blocks based on the doubling table.

    + + + +
    - -
    -
    - - - - - - - - - - - - - - - - - - - - -
    Field NameDescription

    Version

    The version number is used when there are changes in the format - of a shared object message and is described here: - - - - - - - - - - - - - - - -
    VersionDescription
    0Never used.
    1Used by the library before version 1.6.1. -

    -

    Type

    The type of shared message location: - - - - - - - - - - -
    ValueDescription
    0Message stored in another object’s header (a committed - message). -

    -

    Address

    The address of the object header - containing the message to be shared.

    -
    -
    +

    The specific format for each type of heap ID is described below: +

    -
    -
    -
    - - - - - - - - - - - - - - - - - - - -
    - Shared Message (Version 2) -
    bytebytebytebyte
    VersionTypeThis space inserted only to align table nicely

    AddressO

    +
    + + -
    Fractal Heap ID for Tiny Objects (sub-type 1 - + ‘Normal’)
    - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
    -
    + byte + byte + byte + byte + -
    -
    - - - - - - - - - - - - - - - - - - - - -
    Field NameDescription

    Version

    The version number is used when there are changes in the format - of a shared object message and is described here: - - - - - - - - - - -
    VersionDescription
    2Used by the library of version 1.6.1 and after. -

    -

    Type

    The type of shared message location: - - - - - - - - - - -
    ValueDescription
    0Message stored in another object’s header (a committed - message). -

    -

    Address

    The address of the object header - containing the message to be shared.

    -
    + + Version, Type & Length + This space inserted + only to align table nicely + -
    -
    -
    - - - - - - - - - - - - - - - - - - - -
    - Shared Message (Version 3) -
    bytebytebytebyte
    VersionTypeThis space inserted only to align table nicely
    Location (variable size)
    -
    - -
    -
    - - - - - - - - - - - - - - - - - - - - -
    Field NameDescription

    Version

    The version number indicates changes in the format of shared - object message and is described here: - - - - - - - - - - -
    VersionDescription
    3Used by the library of version 1.8 and after. In this - version, the Type field can indicate that - the message is stored in the fractal heap. -

    -

    Type

    The type of shared message location: - - - - - - - - - - - - - - - - - - - - - - - - - - -
    ValueDescription
    0Message is not shared and is not shareable. -
    1Message stored in file’s shared object header message - heap (a shared message). -
    2Message stored in another object’s header (a committed - message). -
    3Message stored is not shared, but is shareable. -

    -

    Location

    This field contains either a Size of Offsets-bytes - address of the object header - containing the message to be shared, or an 8-byte fractal heap ID - for the message in the file’s shared object header message - heap. -

    -
    -
    - - -

    The following is a list of currently defined header messages: -

    - -
    -

    IV.A.2.a. The NIL Message

    - - -
    - - - - - - - - -
    Header Message Name: NIL
    Header Message Type: 0x0000
    Length: Varies
    Status: Optional; may be repeated.
    Description:The NIL message is used to indicate a message which is to be - ignored when reading the header messages for a data object. - [Possibly one which has been deleted for some reason.] -
    Format of Data: Unspecified
    - + +
    Data (variable size) + + +

    -

    IV.A.2.b. The Dataspace Message

    - - -
    - - - - - - - - - - -
    Header Message Name: Dataspace
    Header Message Type: 0x0001
    Length: Varies according to the number of - dimensions, as described in the following table.
    Status: Required for dataset objects; - may not be repeated.
    Description:The dataspace message describes the number of dimensions (in - other words, “rank”) and size of each dimension that - the data object has. This message is only used for datasets which - have a simple, rectilinear, array-like layout; datasets requiring - a more complex layout are not yet supported. -
    Format of Data: See the tables - below.
    - - -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - Dataspace Message - Version 1 -
    bytebytebytebyte
    VersionDimensionalityFlagsReserved
    Reserved

    Dimension #1 SizeL

    .
    .
    .

    Dimension #n SizeL


    Dimension #1 Maximum SizeL (optional)

    .
    .
    .

    Dimension #n Maximum SizeL (optional)


    Permutation Index #1L (optional)

    .
    .
    .

    Permutation Index #nL (optional)

    - - +
    +
    - - -
      - (Items marked with an ‘L’ in the above table are of the size - specified in “Size of Lengths” field in the superblock.) -
    -
    - -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Field NameDescription

    Version

    -

    This value is used to determine the format of the - Dataspace Message. When the format of the - information in the message is changed, the version number - is incremented and can be used to determine how the - information in the object header is formatted. This - document describes version one (1) (there was no version - zero (0)). -

    -

    Dimensionality

    -

    This value is the number of dimensions that the data - object has. -

    -

    Flags

    -

    This field is used to store flags to indicate the - presence of parts of this message. Bit 0 (the least - significant bit) is used to indicate that maximum - dimensions are present. Bit 1 is used to indicate that - permutation indices are present. -

    -

    Dimension #n Size

    -

    This value is the current size of the dimension of the - data as stored in the file. The first dimension stored in - the list of dimensions is the slowest changing dimension - and the last dimension stored is the fastest changing - dimension. -

    -

    Dimension #n Maximum Size

    -

    This value is the maximum size of the dimension of the - data as stored in the file. This value may be the special - “unlimited” size which indicates - that the data may expand along this dimension indefinitely. - If these values are not stored, the maximum size of each - dimension is assumed to be the dimension’s current size. -

    -

    Permutation Index #n

    -

    This value is the index permutation used to map - each dimension from the canonical representation to an - alternate axis for each dimension. If these values are - not stored, the first dimension stored in the list of - dimensions is the slowest changing dimension and the last - dimension stored is the fastest changing dimension. -

    -
    -
    - - - -
    -

    Version 2 of the dataspace message dropped the optional - permutation index value support, as it was never implemented in the - HDF5 Library:

    - -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - Dataspace Message - Version 2 -
    bytebytebytebyte
    VersionDimensionalityFlagsType

    Dimension #1 SizeL

    .
    .
    .

    Dimension #n SizeL


    Dimension #1 Maximum SizeL (optional)

    .
    .
    .

    Dimension #n Maximum SizeL (optional)

    + Field Name + Description + - - - -
      - (Items marked with an ‘L’ in the above table are of the size - specified in “Size of Lengths” field in the superblock.) -
    -
    +

    Version, Type & Length

    + +

    This is a bit field with the following definition:

    + + + + + -
    -
    -
    BitDescription
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + +
    Field NameDescription

    Version

    -

    This value is used to determine the format of the - Dataspace Message. This field should be ‘2’ for version 2 - format messages. -

    -

    Dimensionality

    -

    This value is the number of dimensions that the data object has. -

    -

    Flags

    -

    This field is used to store flags to indicate the - presence of parts of this message. Bit 0 (the least - significant bit) is used to indicate that maximum - dimensions are present. -

    -

    Type

    -

    This field indicates the type of the dataspace: - - - - - - - - - - - - - - - - - - -
    ValueDescription
    0A scalar dataspace; in other words, - a dataspace with a single, dimensionless element. -
    1A simple dataspace; in other words, - a dataspace with a rank > 0 and an appropriate # of - dimensions. -
    2A null dataspace; in other words, - a dataspace with no elements. -

    -

    Dimension #n Size

    -

    This value is the current size of the dimension of the - data as stored in the file. The first dimension stored in - the list of dimensions is the slowest changing dimension - and the last dimension stored is the fastest changing - dimension. -

    -

    Dimension #n Maximum Size

    -

    This value is the maximum size of the dimension of the - data as stored in the file. This value may be the special - “unlimited” size which indicates - that the data may expand along this dimension indefinitely. - If these values are not stored, the maximum size of each - dimension is assumed to be the dimension’s current size. -

    -
    6-7The current version of ID format. This document describes + version 0.
    4-5The ID type. Tiny objects have a value of 2. +
    0-3The length of the tiny object. The value stored is one + less than the actual length (since zero-length objects are not + allowed to be stored in the heap). For example, an object of + actual length 1 has an encoded length of 0, an object of actual + length 2 has an encoded length of 1, and so on.
    +

    - - + + + +

    Data

    + +

    This is the data for the object.

    + + + + - -
    -

    IV.A.2.c. The Link Info Message

    - - -
    - - - - - - - - -
    Header Message Name: Link Info
    Header Message Type: 0x002
    Length: Varies
    Status: Optional; may not be - repeated.
    Description:The link info message tracks variable information about the - current state of the links for a “new style” - group’s behavior. Variable information will be stored in - this message and constant information will be stored in the - Group Info message. -
    Format of Data: See the tables - below.
    - - -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +
    +
    - Link Info -
    bytebytebytebyte
    VersionFlagsThis space inserted only to align table nicely

    Maximum Creation Index (8 bytes, optional)


    Fractal Heap AddressO


    Address of v2 B-tree for Name IndexO


    Address of v2 B-tree for Creation Order IndexO (optional)

    + -
    Fractal Heap ID for Tiny Objects (sub-type 2 - + ‘Extended’)
    - - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
    -
    + byte + byte + byte + byte + -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + -
    Field NameDescription

    Version

    -

    The version number for this message. This document describes - version 0.

    -

    Flags

    This field determines various optional aspects of the link - info message: - - - - - - - - - - - - - - - - - - - -
    BitDescription
    0If set, creation order for the links is tracked. -
    1If set, creation order for the links is indexed. -
    2-7Reserved

    - -

    Maximum Creation Index

    This 64-bit value is the maximum creation order index value - stored for a link in this group.

    -

    This field is present if bit 0 of flags is set.

    -

    Fractal Heap Address

    -

    - This is the address of the fractal heap to store dense links. - Each link stored in the fractal heap is stored as a - Link Message. -

    -

    - If there are no links in the group, or the group’s links - are stored “compactly” (as object header messages), this - value will be the undefined - address. -

    -

    Address of v2 B-tree for Name Index

    This is the address of the version 2 B-tree to index names of links.

    -

    If there are no links in the group, or the group’s links - are stored “compactly” (as object header messages), this - value will be the undefined - address. -

    -

    Address of v2 B-tree for Creation Order Index

    This is the address of the version 2 B-tree to index creation order of links.

    -

    If there are no links in the group, or the group’s links - are stored “compactly” (as object header messages), this - value will be the undefined - address. -

    -

    This field exists if bit 1 of flags is set.

    -
    Version, Type & LengthExtended LengthThis space inserted + only to align table nicely
    -
    + + Data (variable size) + + +
    -

    IV.A.2.d. The Datatype Message

    - - -
    - - - - - - - - -
    Header Message Name: Datatype
    Header Message Type: 0x0003 -
    Length: Variable
    Status: Required for dataset or committed - datatype (formerly named datatype) objects; may not be repeated. -
    Description:

    The datatype message defines the datatype for each element - of a dataset or a common datatype for sharing between multiple - datasets. A datatype can describe an atomic type like a fixed- - or floating-point type or more complex types like a C struct - (compound datatype), array (array datatype) or C++ vector - (variable-length datatype).

    -

    Datatype messages that are part of a dataset object do not - describe how elements are related to one another; the dataspace - message is used for that purpose. Datatype messages that are part of - a committed datatype (formerly named datatype) message describe - a common datatype that can be shared by multiple datasets in the - file.

    -
    Format of Data: See the tables - below.
    - - -
    - - - - - - - - - - - - - - - - - - - - - - - - -
    - Datatype Message -
    bytebytebytebyte
    Class and VersionClass Bit Field, Bits 0-7Class Bit Field, Bits 8-15Class Bit Field, Bits 16-23
    Size


    Properties


    -
    +
    + + + + + -
    -
    -
    Field NameDescription
    - - - - - - - - + +
    Field NameDescription

    Class and Version

    -

    The version of the datatype message and the datatype’s class - information are packed together in this field. The version - number is packed in the top 4 bits of the field and the class - is contained in the bottom 4 bits. -

    -

    The version number information is used for changes in the - format of the datatype message and is described here: +

    Version, Type & Length

    +

    This is a bit field with the following definition:

    - - + + - - - - - - + + - - + + - - + + -
    VersionDescriptionBitDescription
    0Never used -
    1Used by early versions of the library to encode - compound datatypes with explicit array fields. - See the compound datatype description below for - further details. - 6-7The current version of ID format. This document describes + version 0.
    2Used when an array datatype needs to be encoded. - 4-5The ID type. Tiny objects have a value of 2. +
    3Used when a VAX byte-ordered type needs to be - encoded. Packs various other datatype classes more - efficiently also. - 0-3These 4 bits, together with the next byte, form an + unsigned 12-bit integer for holding the length of the object. + These 4-bits are bits 8-11 of the 12-bit integer. See description + for the Extended Length field below. +

    +
    +

    -

    The class of the datatype determines the format for the class - bit field and properties portion of the datatype message, which - are described below. The - following classes are currently defined: + + - - - - - + + + + - - - - + + + + - - - - +
    ValueDescription

    Extended Length

    +

    This byte, together with the 4 bits in the previous byte, + forms an unsigned 12-bit integer for holding the length of the tiny + object. These 8 bits are bits 0-7 of the 12-bit integer formed. The + value stored is one less than the actual length (since zero-length + objects are not allowed to be stored in the heap). For example, an + object of actual length 1 has an encoded length of 0, an object of + actual length 2 has an encoded length of 1, and so on.

    +
    0Fixed-Point

    Data

    +

    This is the data for the object.

    +
    1Floating-Point
    +

    - - 2 - Time - - - 3 - String - +
    +
    +
    + + - - - - + + + + + + - - - - + + + + - - - - + + + + +
    Fractal Heap ID for Huge Objects (sub-type 1 & 2): + indirectly accessed, non-filtered/filtered
    4Bit field
    bytebytebytebyte
    5Opaque
    Version & TypeThis space inserted + only to align table nicely
    6Compound

    v2 B-tree KeyL + (variable size)
    +
    + + + + + + +
     (Items marked with an ‘L’ in the + above table are of the size specified in “Size of + Lengths” field in the superblock.)
    +
    + +
    +
    + + + + + + + +
    Field NameDescription

    Version & Type

    +

    This is a bit field with the following definition:

    + - - + + - - + + - - - + + - - - + + -
    7ReferenceBitDescription
    8Enumerated6-7The current version of ID format. This document describes + version 0.
    9Variable-Length4-5The ID type. Huge objects have a value of 1. +
    10Array0-3Reserved.

    +
    +

    + + + - - + +

    v2 B-tree Key

    +

    + This field is the B-tree key for retrieving the information from + the version 2 B-tree for huge objects needed to access the object. + See the description of v2 B-tree records + sub-type 1 & 2 for a description of the fields. New key values are + derived from Next Huge Object ID in the Fractal + Heap Header. +

    + - -

    Class Bit Fields

    - -

    The information in these bit fields is specific to each datatype - class and is described below. All bits not defined for a - datatype class are set to zero. -

    - - + +
    - -

    Size

    - -

    The size of a datatype element in bytes. -

    - - +
    +
    +
    + + - - - - + + + + + + -
    Fractal Heap ID for Huge Objects (sub-type 3): + directly accessed, non-filtered

    Properties

    -

    This variable-sized sequence of bytes encodes information - specific to each datatype class and is described for each class - below. If there is no property information specified for a - datatype class, the size of this field is zero bytes. -

    -
    bytebytebytebyte
    -
    + + Version & Type + This space inserted + only to align table nicely + + +
    Address O
    +
    + -
    -

    Class specific information for Fixed-Point Numbers (Class 0):

    - -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - Fixed-point Bit Field Description -
    BitsMeaning

    0

    Byte Order. If zero, byte order is little-endian; - otherwise, byte order is big endian.

    1, 2

    Padding type. Bit 1 is the lo_pad bit and bit 2 - is the hi_pad bit. If a datum has unused bits at either - end, then the lo_pad or hi_pad bit is copied to those - locations.

    3

    Signed. If this bit is set then the fixed-point - number is in 2’s complement form.

    4-23

    Reserved (zero).

    -
    + +
    Length L
    +
    + -
    -
    - - - - - - - - - - - - - - -
    - Fixed-Point Property Description -
    ByteByteByteByte
    Bit OffsetBit Precision
    -
    + -
    -
    - - - - - - - - - - - - - - - +
    Field NameDescription

    Bit Offset

    -

    The bit offset of the first significant bit of the fixed-point - value within the datatype. The bit offset specifies the number - of bits “to the right of” the value (which are set to the - lo_pad bit value). -

    -

    Bit Precision

    -

    The number of bits of precision of the fixed-point value - within the datatype. This value, combined with the datatype - element’s size and the Bit Offset field specifies the number - of bits “to the left of” the value (which are set to the - hi_pad bit value). -

    -
    + + + + + + + + +
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
     (Items marked with an ‘L’ in the above table are + of the size specified in “Size of Lengths” field in the + superblock.)
    - -
    + +
    +
    + + + + + -
    -

    Class specific information for Floating-Point Numbers (Class 1):

    - -
    -
    Field NameDescription
    - - - - - - - - - - - - - - - - - - - - + +
    - Floating-Point Bit Field Description -
    BitsMeaning

    0, 6

    Byte Order. These two non-contiguous bits specify the - “endianness” of the bytes in the datatype element. - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Bit 6Bit 0Description
    00Byte order is little-endian -
    01Byte order is big-endian -
    10Reserved -
    11Byte order is VAX-endian -

    -

    1, 2, 3

    Padding type. Bit 1 is the low bits pad type, bit 2 - is the high bits pad type, and bit 3 is the internal bits - pad type. If a datum has unused bits at either end or between - the sign bit, exponent, or mantissa, then the value of bit - 1, 2, or 3 is copied to those locations.

    4-5

    Mantissa Normalization. This 2-bit bit field specifies - how the most significant bit of the mantissa is managed. +

    Version & Type

    +

    This is a bit field with the following definition:

    - - + + - - + + - - + + - - + + - - - - -
    ValueDescriptionBitDescription
    0No normalization - 6-7The current version of ID format. This document describes + version 0.
    1The most significant bit of the mantissa is always set - (except for 0.0). - 4-5The ID type. Huge objects have a value of 1. +
    2The most significant bit of the mantissa is not stored, - but is implied to be set. - 0-3Reserved.
    3Reserved. -

    +
    +

    + - + - -

    7

    -

    Reserved (zero).

    - + +

    Address

    +

    This field is the address of the object in the file.

    + + - -

    8-15

    -

    Sign Location. This is the bit position of the sign - bit. Bits are numbered with the least significant bit zero.

    - + +

    Length

    +

    This field is the length of the object in the file.

    + + +
    - -

    16-23

    -

    Reserved (zero).

    - +
    +
    +
    + + -
    Fractal Heap ID for Huge Objects (sub-type 4): + directly accessed, filtered
    -
    + + byte + byte + byte + byte + -
    -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - -
    - Floating-Point Property Description -
    ByteByteByteByte
    Bit OffsetBit Precision
    Exponent LocationExponent SizeMantissa LocationMantissa Size
    Exponent Bias
    -
    + + Version & Type + This space inserted + only to align table nicely + -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + -
    Field NameDescription

    Bit Offset

    -

    The bit offset of the first significant bit of the floating-point - value within the datatype. The bit offset specifies the number - of bits “to the right of” the value. -

    -

    Bit Precision

    -

    The number of bits of precision of the floating-point value - within the datatype. -

    -

    Exponent Location

    -

    The bit position of the exponent field. Bits are numbered with - the least significant bit number zero. -

    -

    Exponent Size

    -

    The size of the exponent field in bits. -

    -

    Mantissa Location

    -

    The bit position of the mantissa field. Bits are numbered with - the least significant bit number zero. -

    -

    Mantissa Size

    -

    The size of the mantissa field in bits. -

    -

    Exponent Bias

    -

    The bias of the exponent field. -

    -

    Address O
    +
    -
    + +
    Length L
    +
    + + + Filter Mask + -
    -

    Class specific information for Time (Class 2):

    - - -
    - - - - - - - - - - - - - - - - - -
    - Time Bit Field Description -
    BitsMeaning

    0

    Byte Order. If zero, byte order is little-endian; - otherwise, byte order is big endian.

    1-23

    Reserved (zero).

    -
    + +
    De-filtered Size L
    +
    + -
    -
    - - - - - - - - - - - -
    - Time Property Description -
    ByteByte
    Bit Precision
    -
    + -
    -
    - - - - - - - - - - +
    Field NameDescription

    Bit Precision

    -

    The number of bits of precision of the time value. -

    -
    + + + + + + + + +
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
     (Items marked with an ‘L’ in the above table are + of the size specified in “Size of Lengths” field in the + superblock.)
    - -
    + +
    +
    + + + + + -
    -

    Class specific information for Strings (Class 3):

    - - -
    -
    Field NameDescription
    - - - - - - - - - - - - - - - - - - - - - -
    - String Bit Field Description -
    BitsMeaning

    0-3

    Padding type. This four-bit value determines the - type of padding to use for the string. The values are: - - - - - - - - - - - - - - - - - - - - - - - - - - -
    ValueDescription
    0Null Terminate: A zero byte marks the end of the - string and is guaranteed to be present after - converting a long string to a short string. When - converting a short string to a long string the value is - padded with additional null characters as necessary. -
    1Null Pad: Null characters are added to the end of - the value during conversions from short values to long - values but conversion in the opposite direction simply - truncates the value. -
    2Space Pad: Space characters are added to the end of - the value during conversions from short values to long - values but conversion in the opposite direction simply - truncates the value. This is the Fortran - representation of the string. -
    3-15Reserved -

    -

    4-7

    Character Set. The character set used to - encode the string. - - - - - - - - - - - - - - - - - - - - -
    ValueDescription
    0ASCII character set encoding -
    1UTF-8 character set encoding -
    2-15Reserved -

    -

    8-23

    Reserved (zero).

    -
    - -

    There are no properties defined for the string class. -

    - - -

    Class specific information for bit fields (Class 4):

    - -
    - - - - - - - - - - - - - - - - - - - - - - -
    - Bitfield Bit Field Description -
    BitsMeaning

    0

    Byte Order. If zero, byte order is little-endian; - otherwise, byte order is big endian.

    1, 2

    Padding type. Bit 1 is the lo_pad type and bit 2 - is the hi_pad type. If a datum has unused bits at either - end, then the lo_pad or hi_pad bit is copied to those - locations.

    3-23

    Reserved (zero).

    -
    + +

    Version & Type

    + +

    This is a bit field with the following definition:

    + + + + + -
    -
    -
    BitDescription
    - - - - - - - - - - - - - -
    - Bit Field Property Description -
    ByteByteByteByte
    Bit OffsetBit Precision
    - + + 6-7 + The current version of ID format. This document describes + version 0. + + + 4-5 + The ID type. Huge objects have a value of 1. + + + + 0-3 + Reserved. + + +

    -
    -
    - - - - - - - - - - - - - - - -
    Field NameDescription

    Bit Offset

    -

    The bit offset of the first significant bit of the bit field - within the datatype. The bit offset specifies the number - of bits “to the right of” the value. -

    -

    Bit Precision

    -

    The number of bits of precision of the bit field - within the datatype. -

    -
    -
    + + + +

    Address

    +

    This field is the address of the filtered object in + the file.

    + -
    -

    Class specific information for Opaque (Class 5):

    - -
    - - - - - - - - - - - - - - - - - -
    - Opaque Bit Field Description -
    BitsMeaning

    0-7

    Length of ASCII tag in bytes.

    8-23

    Reserved (zero).

    -
    + +

    Length

    +

    This field is the length of the filtered object in + the file.

    + -
    -
    - - - - - - - - - - - - - -
    - Opaque Property Description -
    ByteByteByteByte

    ASCII Tag
    -
    -
    + +

    Filter Mask

    +

    This field is the I/O pipeline filter mask for the + filtered object in the file.

    + -
    -
    - - - - - - - - - - -
    Field NameDescription

    ASCII Tag

    -

    This NUL-terminated string provides a description for the - opaque type. It is NUL-padded to a multiple of 8 bytes. -

    -
    -
    + +

    Filtered Size

    +

    This field is the size of the de-filtered object in + the file.

    + + + -
    -

    Class specific information for Compound (Class 6):

    - -
    - - - - - - - - - - - - - - - - - -
    - Compound Bit Field Description -
    BitsMeaning

    0-15

    Number of Members. This field contains the number - of members defined for the compound datatype. The member - definitions are listed in the Properties field of the data - type message.

    16-23

    Reserved (zero).

    -
    +
    +
    +
    + + + + + + + + -

    The Properties field of a compound datatype is a list of the - member definitions of the compound datatype. The member - definitions appear one after another with no intervening bytes. - The member types are described with a (recursively) encoded datatype - message.

    + + + + + + + -

    Note that the property descriptions are different for different - versions of the datatype version. Additionally note that the version - 0 datatype encoding is deprecated and has been replaced with later - encodings in versions of the HDF5 Library from the 1.4 release - onward.

    + + + +
    Fractal Heap ID for Managed Objects
    bytebytebytebyte
    Version & TypeThis space inserted + only to align table nicely
    Offset (variable size)
    Length (variable size)
    +
    +
    +
    + + + + + -
    -
    Field NameDescription
    - + + + + - - - + + + + - - - + + + + +
    - Compound Properties Description for Datatype Version 1 -

    Version & Type

    This is a bit field with the following definition:

    + + + + + - - - - - - + + + + + + + + + + + + +
    BitDescription
    ByteByteByteByte
    6-7The current version of ID format. This document describes + version 0.
    4-5The ID type. Managed objects have a value of 0. +
    0-3Reserved.
    +


    Name

    Offset

    + This field is the offset of the object in the heap. This + field’s size is the minimum number of bytes necessary to + encode the Maximum Heap Size value (from the Fractal + Heap Header). For example, if the value of the Maximum + Heap Size is less than 256 bytes, this field is 1 byte in length, + a Maximum Heap Size of 256-65535 bytes uses a 2 byte + length, and so on. +

    Byte Offset of Member

    Length

    + This field is the length of the object in the heap. It is + determined by taking the minimum value of Maximum Direct + Block Size and Maximum Size of Managed Objects in the Fractal + Heap Header. Again, the minimum number of bytes needed to encode + that value is used for the size of this field. +

    +
    - - Dimensionality - Reserved (zero) - +
    +

    + III.G. Disk Format: Level 1G - + Free-space Manager +

    - - Dimension Permutation - +

    Free-space managers are used to describe space within a heap or + the entire HDF5 file that is not currently used for that heap or file. +

    - - Reserved (zero) - +

    + The free-space manager header contains metadata information + about the space being tracked, along with the address of the list of free + space sections which actually describes the free space. The header + records information about free-space sections being tracked, creation + parameters for handling free-space sections of a client, and section + information used to locate the collection of free-space sections. +

    - - Dimension #1 Size (required) - +

    + The free-space section list stores a collection of free-space + sections that is specific to each client of the free-space + manager. For example, the fractal heap is a client of the free space + manager and uses it to track unused space within the heap. There are 4 + types of section records for the fractal heap, each of which has its + own format, listed below. +

    - - Dimension #2 Size (required) - +
    + + - - - + + + + + + - - - + + + - - - + + + + + -
    Free-space Manager Header
    Dimension #3 Size (required)
    bytebytebytebyte
    Dimension #4 Size (required)
    Signature

    Member Type Message

    VersionClient IDThis space inserted + only to align table nicely
    -
    + +
    Total Space TrackedL
    +
    + -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + -
    Field NameDescription

    Name

    -

    This NUL-terminated string provides a description for the - opaque type. It is NUL-padded to a multiple of 8 bytes. -

    -

    Byte Offset of Member

    -

    This is the byte offset of the member within the datatype. -

    -

    Dimensionality

    -

    If set to zero, this field indicates a scalar member. If set - to a value greater than zero, this field indicates that the - member is an array of values. For array members, the size of - the array is indicated by the ‘Size of Dimension n’ field in - this message. -

    -

    Dimension Permutation

    -

    This field was intended to allow an array field to have - its dimensions permuted, but this was never implemented. - This field should always be set to zero. -

    -

    Dimension #n Size

    -

    This field is the size of a dimension of the array field as - stored in the file. The first dimension stored in the list of - dimensions is the slowest changing dimension and the last - dimension stored is the fastest changing dimension. -

    -

    Member Type Message

    -

    This field is a datatype message describing the datatype of - the member. -

    -

    Total Number of SectionsL
    +
    -
    + +
    Number of Serialized SectionsL
    +
    + -
    -
    -
    - - - - - - - - - - - - - - - - - - - - - + + + -
    - Compound Properties Description for Datatype Version 2 -
    ByteByteByteByte

    Name

    Byte Offset of Member

    Member Type Message


    Number of Un-Serialized SectionsL
    +
    -
    + + Number of Section Classes + This space inserted + only to align table nicely + -
    -
    - - - - - - - - - - - - - - - - - - - - + + + + -
    Field NameDescription

    Name

    -

    This NUL-terminated string provides a description for the - opaque type. It is NUL-padded to a multiple of 8 bytes. -

    -

    Byte Offset of Member

    -

    This is the byte offset of the member within the datatype. -

    -

    Member Type Message

    -

    This field is a datatype message describing the datatype of - the member. -

    -
    Shrink PercentExpand Percent
    -
    + + Size of Address Space + This space inserted + only to align table nicely + + +
    Maximum Section Size L
    +
    + -
    -
    -
    - - - - - - - - - - - - - - - - - - - - - + + + -
    - Compound Properties Description for Datatype Version 3 -
    ByteByteByteByte

    Name

    Byte Offset of Member (variable size)

    Member Type Message


    Address of Serialized Section ListO
    +
    -
    + +
    Size of Serialized Section List UsedL
    +
    + -
    -
    - - - - - - - - - - - - - - - - - - - - + + + -
    Field NameDescription

    Name

    This NUL-terminated string provides a description for the - opaque type. It is not NUL-padded to a multiple of 8 - bytes.

    Byte Offset of Member

    This is the byte offset of the member within the datatype. - The field size is the minimum number of bytes necessary, - based on the size of the datatype element. For example, a - datatype element size of less than 256 bytes uses a 1 byte - length, a datatype element size of 256-65535 bytes uses a - 2 byte length, and so on.

    Member Type Message

    This field is a datatype message describing the datatype of - the member.


    Allocated Size of Serialized Section ListL
    +
    -
    + + Checksum + + + + + + + + + + + +
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
     (Items marked with an ‘L’ in the above table are + of the size specified in “Size of Lengths” field in the + superblock.)
    -
    -

    Class specific information for Reference (Class 7):

    - -
    - - - - - - - - - - - - - - - - - -
    - Reference Bit Field Description -
    BitsMeaning

    0-3

    Type. This four-bit value contains the type of reference - described. The values defined are: - - - - - - - - - - - - - - - - - - - - - -
    ValueDescription
    0Object Reference: A reference to another object in this - HDF5 file. -
    1Dataset Region Reference: A reference to a region within - a dataset in this HDF5 file. -
    2-15Reserved -

    - -

    4-23

    Reserved (zero).

    -
    + -

    There are no properties defined for the reference class. -

    +
    +
    + + + + + + + + + -
    -

    Class specific information for Enumeration (Class 8):

    - -
    -
    Field NameDescription

    Signature

    +

    + The ASCII character string “ + FSHD + ” is used to indicate the beginning of the Free-space Manager + Header. This gives file consistency checking utilities a better + chance of reconstructing a damaged file. +

    +
    - - - - - - - - - - - - - - - - -
    - Enumeration Bit Field Description -
    BitsMeaning

    0-15

    Number of Members. The number of name/value - pairs defined for the enumeration type.

    16-23

    Reserved (zero).

    -
    + +

    Version

    + +

    This is the version number for the Free-space Manager Header + and this document describes version 0.

    + + -
    -
    -
    - - + + + - - + + - - - + + + + - - + + + -
    - Enumeration Property Description for Datatype Versions 1 & 2 -

    Client ID

    +

    This is the client ID for identifying the user of this + free-space manager:

    + + + + + - - - - - - + + + + + + + + + + + + +
    IDDescription
    ByteByteByteByte
    0Fractal heap
    1File
    2+Reserved.
    +

    -

    Base Type


    Names

    Total Space Tracked

    +

    This is the total amount of free space being tracked, in + bytes.

    +

    Values

    Total Number of Sections

    +

    This is the total number of free-space sections being + tracked.

    +
    -
    + +

    Number of Serialized Sections

    + +

    This is the number of serialized free-space sections being + tracked.

    + + + +

    Number of Un-Serialized Sections

    + +

    This is the number of un-serialized free-space sections being + managed. Un-serialized sections are created by the free-space + client when the list of sections is read in.

    + + -
    -
    - - - - - - - - - - - - - - - - - - - - + + + + -
    Field NameDescription

    Base Type

    -

    Each enumeration type is based on some parent type, usually an - integer. The information for that parent type is described - recursively by this field. -

    -

    Names

    -

    The name for each name/value pair. Each name is stored as a null - terminated ASCII string in a multiple of eight bytes. The names - are in no particular order. -

    -

    Values

    -

    The list of values in the same order as the names. The values - are packed (no inter-value padding) and the size of each value - is determined by the parent type. -

    -

    Number of Section Classes

    +

    This is the number of section classes handled by this free + space manager for the free-space client.

    +
    -
    + +

    Shrink Percent

    + +

    This is the percent of current size to shrink the allocated + serialized free-space section list.

    + + -
    -
    -
    - - + + + + - - - - - - + + + + - - - + + + + - - - + + + + - - + + + -
    - Enumeration Property Description for Datatype Version 3 -

    Expand Percent

    +

    This is the percent of current size to expand the allocated + serialized free-space section list.

    +
    ByteByteByteByte

    Size of Address Space

    +

    + This is the size of the address space that free-space sections are + within. This is stored as the log2 of the actual value + (in other words, the number of bits required to store values within + that address space). +

    +

    Base Type

    Maximum Section Size

    +

    This is the maximum size of a section to be tracked.

    +

    Names

    Address of Serialized Section List

    +

    This is the address where the serialized free-space section + list is stored.

    +

    Values

    Size of Serialized Section List Used

    +

    + This is the size of the serialized free-space section list used (in + bytes). This value must be less than or equal to the allocated + size of serialized section list, below. +

    +
    -
    + +

    Allocated Size of Serialized Section List

    + +

    This is the size of serialized free-space section list + actually allocated (in bytes).

    + + -
    -
    - - - - - - - - - - - - - - - - - - - - + + + + -
    Field NameDescription

    Base Type

    -

    Each enumeration type is based on some parent type, usually an - integer. The information for that parent type is described - recursively by this field. -

    -

    Names

    -

    The name for each name/value pair. Each name is stored as a null - terminated ASCII string, not padded to a multiple of - eight bytes. The names are in no particular order. -

    -

    Values

    -

    The list of values in the same order as the names. The values - are packed (no inter-value padding) and the size of each value - is determined by the parent type. -

    -

    Checksum

    +

    This is the checksum for the free-space manager header.

    +
    -
    + + +
    +

    + The free-space sections being managed are stored in a free-space + section list, described below. The sections in the free-space section + list are stored in the following way: a count of the number of sections + describing a particular size of free space and the size of the + free-space described (in bytes), followed by a list of section + description records; then another section count and size, followed by + the list of section descriptions for that size; and so on. +

    -
    -

    Class specific information for Variable-Length (Class 9):

    - -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - Variable-Length Bit Field Description -
    BitsMeaning

    0-3

    Type. This four-bit value contains the type of - variable-length datatype described. The values defined are: - - - - - - - - - - - - - - - - - - - - - -
    ValueDescription
    0Sequence: A variable-length sequence of any datatype. - Variable-length sequences do not have padding or - character set information. -
    1String: A variable-length sequence of characters. - Variable-length strings have padding and character set - information. -
    2-15Reserved -

    - -

    4-7

    Padding type. (variable-length string only) - This four-bit value determines the type of padding - used for variable-length strings. The values are the same - as for the string padding type, as follows: - - - - - - - - - - - - - - - - - - - - - - - - - -
    ValueDescription
    0Null terminate: A zero byte marks the end of a string - and is guaranteed to be present after converting a long - string to a short string. When converting a short string - to a long string, the value is padded with additional null - characters as necessary. -
    1Null pad: Null characters are added to the end of the - value during conversion from a short string to a longer - string. Conversion from a long string to a shorter string - simply truncates the value. -
    2Space pad: Space characters are added to the end of the - value during conversion from a short string to a longer - string. Conversion from a long string to a shorter string - simply truncates the value. This is the Fortran - representation of the string. -
    3-15Reserved -

    - -

    This value is set to zero for variable-length sequences.

    - -

    8-11

    Character Set. (variable-length string only) - This four-bit value specifies the character set - to be used for encoding the string: - - - - - - - - - - - - - - - - - - - - -
    ValueDescription
    0ASCII character set encoding -
    1UTF-8 character set encoding -
    2-15Reserved -

    - -

    This value is set to zero for variable-length sequences.

    - -

    12-23

    Reserved (zero).

    -
    +
    + + -
    -
    -
    -
    Free-space Section List
    - + + + + + + - - - - + - + + -
    - Variable-Length Property Description -
    bytebytebytebyte
    ByteByteByteByteSignature

    Base Type

    VersionThis space inserted + only to align table nicely
    -
    + +
    Free-space Manager Header AddressO
    +
    + -
    -
    - - - - - - - - - - + + + -
    Field NameDescription

    Base Type

    -

    Each variable-length type is based on some parent type. The - information for that parent type is described recursively by - this field. -

    -
    Number of Section Records in Set #0 (variable + size)
    -
    + + Size of Free-space Section Described in Record + Set #0 (variable size) + + + + + Record Set #0 Section Record #0 Offset(variable + size) + + + Record Set #0 Section Record #0 Type + This space inserted + only to align table nicely + -
    -

    Class specific information for Array (Class 10):

    + + Record Set #0 Section Record #0 Data (variable + size) + -

    There are no bit fields defined for the array class. -

    + + ... + -

    Note that the dimension information defined in the property for this - datatype class is independent of dataspace information for a dataset. - The dimension information here describes the dimensionality of the - information within a data element (or a component of an element, if the - array datatype is nested within another datatype) and the dataspace for a - dataset describes the size and locations of the elements in a dataset. -

    + + Record Set #0 Section Record #K-1 Offset(variable + size) + + + Record Set #0 Section Record #K-1 Type + This space inserted + only to align table nicely + -
    - - + + + - - - - + - - - - + + + - - - - - - - - - + + + - - - - - - - - - + + + + - + -
    - Array Property Description for Datatype Version 2 -
    Record Set #0 Section Record #K-1 Data (variable + size)
    ByteByteByteByteNumber of Section Records in Set #1 (variable + size)
    DimensionalityReserved (zero)
    Size of Free-space Section Described in Record + Set #1 (variable size) +
    Dimension #1 Size
    .
    .
    .
    Dimension #n Size
    Record Set #1 Section Record #0 Offset(variable + size)
    Permutation Index #1
    .
    .
    .
    Permutation Index #n
    Record Set #1 Section Record #0 TypeThis space inserted + only to align table nicely

    Base Type

    Record Set #1 Section Record #0 Data (variable + size)
    -
    + + ... + -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - + + + -
    Field NameDescription

    Dimensionality

    -

    This value is the number of dimensions that the array has. -

    -

    Dimension #n Size

    -

    This value is the size of the dimension of the array - as stored in the file. The first dimension stored in - the list of dimensions is the slowest changing dimension - and the last dimension stored is the fastest changing - dimension. -

    -

    Permutation Index #n

    -

    This value is the index permutation used to map - each dimension from the canonical representation to an - alternate axis for each dimension. Currently, dimension - permutations are not supported, and these indices should - be set to the index position minus one. In other words, - the first dimension should be set to 0, the second dimension - should be set to 1, and so on. -

    -

    Base Type

    -

    Each array type is based on some parent type. The - information for that parent type is described recursively by - this field. -

    -
    Record Set #1 Section Record #K-1 Offset(variable + size)
    -
    + + Record Set #1 Section Record #K-1 Type + This space inserted + only to align table nicely + -
    -
    - - + + + - - - - + - - - - + + + - - - - - - - - - + + + - + -
    - Array Property Description for Datatype Version 3 -
    Record Set #1 Section Record #K-1 Data (variable + size)
    ByteByteByteByte...
    DimensionalityThis space inserted only to align table nicely
    ...
    Dimension #1 Size
    .
    .
    .
    Dimension #n Size
    Number of Section Records in Set #N-1 (variable + size)

    Base Type

    Size of Free-space Section Described in Record + Set #N-1 (variable size) +
    -
    + + Record Set #N-1 Section Record #0 Offset(variable + size) + -
    -
    - - - - - - - - - - - - - - - - - - - - + + + + -
    Field NameDescription

    Dimensionality

    -

    This value is the number of dimensions that the array has. -

    -

    Dimension #n Size

    -

    This value is the size of the dimension of the array - as stored in the file. The first dimension stored in - the list of dimensions is the slowest changing dimension - and the last dimension stored is the fastest changing - dimension. -

    -

    Base Type

    -

    Each array type is based on some parent type. The - information for that parent type is described recursively by - this field. -

    -
    Record Set #N-1 Section Record #0 TypeThis space inserted + only to align table nicely
    -
    + + Record Set #N-1 Section Record #0 Data (variable + size) + + + ... + + + Record Set #N-1 Section Record #K-1 Offset(variable + size) + -
    -

    IV.A.2.e. The Data Storage - -Fill Value (Old) Message

    + + Record Set #N-1 Section Record #K-1 Type + This space inserted + only to align table nicely + - -
    - - - - - - - - -
    Header Message Name: Fill Value - (old)
    Header Message Type: 0x0004
    Length: Varies
    Status: Optional; may not be - repeated.
    Description:

    The fill value message stores a single data value which - is returned to the application when an uninitialized data element - is read from a dataset. The fill value is interpreted with the - same datatype as the dataset. If no fill value message is present - then a fill value of all zero bytes is assumed.

    -

    This fill value message is deprecated in favor of the - “new” fill value message (Message Type 0x0005) and - is only written to the file for forward compatibility with - versions of the HDF5 Library before the 1.6.0 version. - Additionally, it only appears for datasets with a user-defined - fill value (as opposed to the library default fill value or an - explicitly set “undefined” fill value).

    -
    Format of Data: See the tables - below.
    - - -
    - - - - - - - - - - - - - - - - - -
    - Fill Value Message (Old) -
    bytebytebytebyte
    Size

    Fill Value (optional, variable size)

    -
    + + Record Set #N-1 Section Record #K-1 Data (variable + size) + -
    -
    - - - - - - - - - - - - - - - -
    Field NameDescription

    Size

    -

    This is the size of the Fill Value field in bytes. -

    -

    Fill Value

    -

    The fill value. The bytes of the fill value are interpreted - using the same datatype as for the dataset. -

    -
    -
    + + Checksum + + + + + + + +
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
    +
    -

    IV.A.2.f. The Data Storage - -Fill Value Message

    +
    + + + + + - -
    -
    Field NameDescription
    - - - - - - - -
    Header Message Name: Fill - Value
    Header Message Type: 0x0005
    Length: Varies
    Status: Required for dataset objects; - may not be repeated.
    Description:The fill value message stores a single data value which is - returned to the application when an uninitialized data element - is read from a dataset. The fill value is interpreted with the - same datatype as the dataset.
    Format of Data: See the tables - below.
    - - -
    - - - - - - - - - - - - - - - - - - - - - - - - -
    - Fill Value Message - Versions 1 & 2 -
    bytebytebytebyte
    VersionSpace Allocation TimeFill Value Write TimeFill Value Defined
    Size (optional)

    Fill Value (optional, variable size)

    -
    + +

    Signature

    + +

    + The ASCII character string “ + FSSE + ” is used to indicate the beginning of the Free-space Section + Information. This gives file consistency checking utilities a + better chance of reconstructing a damaged file. +

    + + -
    -
    - - - - - - - - - - + + + + - - - + + + + + - +

    + The length of this field is the minimum number of bytes needed to + store the maximum section size (from the free-space + manager header). +

    + + - - - - - - - - - - - - - - - - - - - -
    Field NameDescription

    Version

    -

    The version number information is used for changes in the - format of the fill value message and is described here: - - - - - + + + + - - - - - - - - - - - - - - - - -
    VersionDescription

    Version

    +

    This is the version number for the Free-space Section List + and this document describes version 0.

    +
    0Never used -
    1Initial version of this message. -
    2In this version, the Size and Fill Value fields are - only present if the Fill Value Defined field is set - to 1. -
    3This version packs the other fields in the message - more efficiently than version 2. -

    -

    -

    Free-space Manager Header Address

    +

    + This is the address of the Free-space Manager Header. This + field is principally used for file integrity checking. +

    +

    Space Allocation Time

    -

    When the storage space for the dataset’s raw data will be - allocated. The allowed values are: - - - - - + + + - - - - - - - - - - - - - - - -
    ValueDescription

    Number of Section Records for Set #N

    +

    + This is the number of free-space section records for set #N. The + length of this field is the minimum number of bytes needed to store + the number of serialized sections (from the free-space + manager header). +

    -
    0Not used. -
    1Early allocation. Storage space for the entire dataset - should be allocated in the file when the dataset is - created. -
    2Late allocation. Storage space for the entire dataset - should not be allocated until the dataset is written - to. -
    3Incremental allocation. Storage space for the - dataset should not be allocated until the portion - of the dataset is written to. This is currently - used in conjunction with chunked data storage for - datasets. -

    +

    + The number of sets of free-space section records is determined by + the size of serialized section list in the free-space + manager header. +

    +

    Section Size for Record Set #N

    +

    + This is the size (in bytes) of the free-space section described for + all the section records in set #N. +

    -

    Fill Value Write Time

    -

    At the time that storage space for the dataset’s raw data is - allocated, this value indicates whether the fill value should - be written to the raw data storage elements. The allowed values - are: - - - - - - - - - - - - - - - - - - -
    ValueDescription
    0On allocation. The fill value is always written to - the raw data storage when the storage space is allocated. -
    1Never. The fill value should never be written to - the raw data storage. -
    2Fill value written if set by user. The fill value - will be written to the raw data storage when the storage - space is allocated only if the user explicitly set - the fill value. If the fill value is the library - default or is undefined, it will not be written to - the raw data storage. -

    - -

    Fill Value Defined

    -

    This value indicates if a fill value is defined for this - dataset. If this value is 0, the fill value is undefined. - If this value is 1, a fill value is defined for this dataset. - For version 2 or later of the fill value message, this value - controls the presence of the Size and Fill Value fields. -

    -

    Size

    -

    This is the size of the Fill Value field in bytes. This field - is not present if the Version field is greater than 1, - and the Fill Value Defined field is set to 0. -

    -

    Fill Value

    -

    The fill value. The bytes of the fill value are interpreted - using the same datatype as for the dataset. This field is - not present if the Version field is greater than 1, - and the Fill Value Defined field is set to 0. -

    -
    -
    + +

    Record Set #N Section #K Offset

    + +

    This is the offset (in bytes) of the free-space section + within the client for the free-space manager.

    -
    -
    - - - - - - - - - - - - - - - - - - - - - - - -
    - Fill Value Message - Version 3 -
    bytebytebytebyte
    VersionFlagsThis space inserted only to align table nicely
    Size (optional)

    Fill Value (optional, variable size)

    -
    +

    + The length of this field is the minimum number of bytes needed to + store the size of address space (from the free-space + manager header). +

    + + -
    -
    - - - - - - - - - + +
    Field NameDescription

    Version

    -

    The version number information is used for changes in the - format of the fill value message and is described here: +

    Record Set #N Section #K Type

    +

    + This is the type of the section record, used to decode the record + set #N section #K data information. The defined record type for a file + client is: + +

    - - + + - - - - - - + + - - + + - - - - -
    VersionDescriptionTypeDescription
    0Never used -
    1Initial version of this message. - 0File’s section (a range of actual bytes in file)
    2In this version, the Size and Fill Value fields are - only present if the Fill Value Defined field is set - to 1. - 1+Reserved.
    3This version packs the other fields in the message - more efficiently than version 2. -

    +
    +

    - - +

    + The defined record types for a fractal heap client are: - -

    Flags

    - -

    When the storage space for the dataset’s raw data will be - allocated. The allowed values are: +

    - - + + - - + + + - - + + + - - + + + - - + + + - - + + -
    BitsDescriptionTypeDescription
    0-1Space Allocation Time, with the same - values as versions 1 and 2 of the message. - 0Fractal heap “single” section
    2-3Fill Value Write Time, with the same - values as versions 1 and 2 of the message. - 1Fractal heap “first row” section
    4Fill Value Undefined, indicating that the fill - value has been marked as “undefined” for this dataset. - Bits 4 and 5 cannot both be set. - 2Fractal heap “normal row” section
    5Fill Value Defined, with the same values as - versions 1 and 2 of the message. - Bits 4 and 5 cannot both be set. - 3Fractal heap “indirect” section
    6-7Reserved (zero). - 4+Reserved.

    + +

    - - + + - -

    Size

    - -

    This is the size of the Fill Value field in bytes. This field - is not present if the Version field is greater than 1, - and the Fill Value Defined flag is set to 0. -

    - - + +

    Record Set #N Section #K Data

    + +

    This is the section-type specific information for each record + in the record set, described below.

    + + - -

    Fill Value

    - -

    The fill value. The bytes of the fill value are interpreted - using the same datatype as for the dataset. This field is - not present if the Version field is greater than 1, - and the Fill Value Defined flag is set to 0. -

    - - - -
    + +

    Checksum

    + +

    + This is the checksum for the Free-space Section List. +

    + + + +

    -

    IV.A.2.g. The Link Message

    +

    The section-type specific data for each free-space section record + is described below:

    - -
    - - - - - - - - -
    Header Message Name: Link
    Header Message Type: 0x0006
    Length: Varies
    Status: Optional; may be - repeated.
    Description:

    This message encodes the information for a link in a - group’s object header, when the group is storing its links - “compactly”, or in the group’s fractal heap, - when the group is storing its links “densely”.

    -

    A group is storing its links compactly when the fractal heap - address in the Link Info - Message is set to the “undefined address” - value.

    Format of Data: See the tables - below.
    - - -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - Link Message -
    bytebytebytebyte
    VersionFlagsLink type (optional)This space inserted only to align table nicely

    Creation Order (8 bytes, optional)

    Link Name Character Set (optional)Length of Link Name (variable size)This space inserted only to align table nicely
    Link Name (variable size)

    Link Information (variable size)

    -
    +
    + + -
    -
    -
    File’s Section Data Record
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + -
    Field NameDescription

    Version

    The version number for this message. This document describes version 1.

    -

    Flags

    This field contains information about the link and controls - the presence of other fields below. - - - - - - - - - - - - - - - - - - - - - - - - - - -
    BitsDescription
    0-1Determines the size of the Length of Link Name - field. - - - - - - - - - - - - - - - - - - - - - - -
    ValueDescription
    0The size of the Length of Link Name - field is 1 byte. -
    1The size of the Length of Link Name - field is 2 bytes. -
    2The size of the Length of Link Name - field is 4 bytes. -
    3The size of the Length of Link Name - field is 8 bytes. -
    -
    2Creation Order Field Present: if set, the Creation - Order field is present. If not set, creation order - information is not stored for links in this group. -
    3Link Type Field Present: if set, the link is not - a hard link and the Link Type field is present. - If not set, the link is a hard link. -
    4Link Name Character Set Field Present: if set, the - link name is not represented with the ASCII character - set and the Link Name Character Set field is - present. If not set, the link name is represented with - the ASCII character set. -
    5-7Reserved (zero). -

    - -

    Link type

    This is the link class type and can be one of the following - values: - - - - - - - - - - - - - - - - - - - - - - - - - - -
    ValueDescription
    0A hard link (should never be stored in the file) -
    1A soft link. -
    2-63Reserved for future HDF5 internal use. -
    64An external link. -
    65-255Reserved, but available for user-defined link types. -

    - -

    This field is present if bit 3 of Flags is set.

    -

    Creation Order

    This 64-bit value is an index of the link’s creation time within - the group. Values start at 0 when the group is created and increment - by one for each link added to the group. Removing a link from a - group does not change existing links’ creation order field. -

    -

    This field is present if bit 2 of Flags is set.

    -

    Link Name Character Set

    This is the character set for encoding the link’s name: - - - - - - - - - - - - - - - -
    ValueDescription
    0ASCII character set encoding (this should never be stored - in the file) -
    1UTF-8 character set encoding -

    - -

    This field is present if bit 4 of Flags is set.

    -

    Length of link name

    This is the length of the link’s name. The size of this field - depends on bits 0 and 1 of Flags.

    -

    Link name

    This is the name of the link, non-NULL terminated.

    -

    Link information

    The format of this field depends on the link type.

    -

    For hard links, the field is formatted as follows: - - - - - - -
    Size of Offsets bytes:The address of the object header for the object that the - link points to. -
    -

    - -

    - For soft links, the field is formatted as follows: - - - - - - - - - - -
    Bytes 1-2:Length of soft link value.
    Length of soft link value bytes:A non-NULL-terminated string storing the value of the - soft link. -
    -

    - -

    - For external links, the field is formatted as follows: - - - - - - - - - - -
    Bytes 1-2:Length of external link value.
    Length of external link value bytes:The first byte contains the version number in the - upper 4 bits and flags in the lower 4 bits for the external - link. Both version and flags are defined to be zero in - this document. The remaining bytes consist of two - NULL-terminated strings, with no padding between them. - The first string is the name of the HDF5 file containing - the object linked to and the second string is the full path - to the object linked to, within the HDF5 file’s - group hierarchy. -
    -

    - -

    - For user-defined links, the field is formatted as follows: - - - - - - - - - - -
    Bytes 1-2:Length of user-defined data.
    Length of user-defined link value bytes:The data supplied for the user-defined link type.
    -

    - -
    No additional record data stored
    -
    + +
    -

    IV.A.2.h. The Data Storage - -External Data Files Message

    +
    +
    + + - -
    -
    Fractal Heap “Single” Section Data + Record
    - - - - - - - -
    Header Message Name: External - Data Files
    Header Message Type: 0x0007
    Length: Varies
    Status: Optional; may not be - repeated.
    Description:The external data storage message indicates that the data - for an object is stored outside the HDF5 file. The filename of - the object is stored as a Universal Resource Location (URL) of - the actual filename containing the data. An external file list - record also contains the byte offset of the start of the data - within the file and the amount of space reserved in the file - for that data.
    Format of Data: See the tables - below.
    - - -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - External File List Message -
    bytebytebytebyte
    VersionReserved (zero)
    Allocated SlotsUsed Slots

    Heap AddressO


    Slot Definitions...

    + + No additional record data stored + + +
    + +
    +
    +
    + + -
    Fractal Heap “First Row” Section Data + Record
    - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
    + Same format as “indirect” + section data + + +
    -
    +
    +
    +
    + + -
    -
    -
    Fractal Heap “Normal Row” Section Data + Record
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Field NameDescription

    Version

    -

    The version number information is used for changes in the format of - External Data Storage Message and is described here: - - - - - - - - - - - - - -
    VersionDescription
    0Never used.
    1The current version used by the library.

    - -

    Allocated Slots

    -

    The total number of slots allocated in the message. Its value must be at least as - large as the value contained in the Used Slots field. (The current library simply - uses the number of Used Slots for this message)

    -

    Used Slots

    -

    The number of initial slots which contain valid information.

    -

    Heap Address

    -

    This is the address of a local heap which contains the names for the external - files (The local heap information can be found in Disk Format Level 1D in this - document). The name at offset zero in the heap is always the empty string.

    -

    Slot Definitions

    -

    The slot definitions are stored in order according to the array addresses they - represent.

    -
    -
    + + No additional record data stored + + + -
    -
    - - - - - - - - - - - - - - - - - - - - - -
    - External File List Slot -
    bytebytebytebyte

    Name Offset in Local HeapL


    Offset in External Data FileL


    Data Size in External FileL

    +
    +
    +
    + + -
    Fractal Heap “Indirect” Section Data + Record
    - - -
      - (Items marked with an ‘L’ in the above table are of the size - specified in “Size of Lengths” field in the superblock.) -
    + byte + byte + byte + byte + -
    + + Fractal Heap Indirect Block Offset (variable + size) + -
    -
    - - - - - - - - - - - - - - - - - - - - -
    Field NameDescription

    Name Offset in Local Heap

    -

    The byte offset within the local name heap for the name - of the file. File names are stored as a URL which has a - protocol name, a host name, a port number, and a file - name: - protocol:port//host/file. - If the protocol is omitted then “file:” is assumed. If - the port number is omitted then a default port for that - protocol is used. If both the protocol and the port - number are omitted then the colon can also be omitted. If - the double slash and host name are omitted then - “localhost” is assumed. The file name is the only - mandatory part, and if the leading slash is missing then - it is relative to the application’s current working - directory (the use of relative names is not - recommended). -

    -

    Offset in External Data File

    -

    This is the byte offset to the start of the data in the - specified file. For files that contain data for a single - dataset this will usually be zero.

    -

    Data Size in External File

    -

    This is the total number of bytes reserved in the - specified file for raw data storage. For a file that - contains exactly one complete dataset which is not - extendable, the size will usually be the exact size of the - dataset. However, by making the size larger one allows - HDF5 to extend the dataset. The size can be set to a value - larger than the entire file since HDF5 will read zeroes - past the end of the file without failing.

    -
    -
    - - -
    -

    IV.A.2.i. The Data Storage - Layout -Message

    - - -
    - - - - - - - - -
    Header Message Name: Data Storage - - Layout
    Header Message Type: 0x0008
    Length: Varies
    Status: Required for datasets; may not - be repeated.
    Description:Data layout describes how the elements of a multi-dimensional - array are stored in the HDF5 file. Three types of data layout - are supported: -
      -
    1. Contiguous: The array is stored in one contiguous area of - the file. This layout requires that the size of the array be - constant: data manipulations such as chunking, compression, - checksums, or encryption are not permitted. The message stores - the total storage size of the array. The offset of an element - from the beginning of the storage area is computed as in a C - array.
    2. -
    3. Chunked: The array domain is regularly decomposed into - chunks, and each chunk is allocated and stored separately. This - layout supports arbitrary element traversals, compression, - encryption, and checksums. (these features are described - in other messages). The message stores the size of a chunk - instead of the size of the entire array; the storage size of - the entire array can be calculated by traversing the B-tree - that stores the chunk addresses.
    4. -
    5. Compact: The array is stored in one contiguous block, as - part of this object header message.
    6. -
    Format of Data: See the tables - below.
    - - -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - Data Layout Message (Versions 1 and 2) -
    bytebytebytebyte
    VersionDimensionalityLayout ClassReserved (zero)
    Reserved (zero)

    Data AddressO (optional)

    Dimension 0 Size
    Dimension 1 Size
    ...
    Dimension #n Size
    Dataset Element Size (optional)
    Compact Data Size (optional)

    Compact Data... (variable size, optional)

    + + Block Start Row + Block Start Column + - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
    + Number of Blocks + This space inserted + only to align table nicely + + +
    -
    +
    +
    + + + + + -
    -
    -
    Field NameDescription
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + - - - - -
    Field NameDescription

    Version

    -

    The version number information is used for changes in the format of the data - layout message and is described here: - - - - - - - - - - - - - - - - - - - - -
    VersionDescription
    0Never used.
    1Used by version 1.4 and before of the library to encode layout information. - Data space is always allocated when the data set is created.
    2Used by version 1.6.x of the library to encode layout information. - Data space is allocated only when it is necessary.

    -

    Dimensionality

    An array has a fixed dimensionality. This field - specifies the number of dimension size fields later in the - message. The value stored for chunked storage is 1 greater than - the number of dimensions in the dataset’s dataspace. - For example, 2 is stored for a 1 dimensional dataset. -

    -

    Layout Class

    The layout class specifies the type of storage for the data - and how the other fields of the layout message are to be - interpreted. - - - - - - - - - - - - - - - - - - - - - -
    ValueDescription
    0Compact Storage -
    1Contiguous Storage -
    2Chunked Storage -
    -

    -

    Data Address

    For contiguous storage, this is the address of the raw - data in the file. For chunked storage this is the address - of the v1 B-tree that is used to look up the addresses of the - chunks. This field is not present for compact storage. - If the version for this message is greater than 1, the address - may have the “undefined address” value, to indicate that - storage has not yet been allocated for this array.

    -

    Dimension #n Size

    For contiguous and compact storage the dimensions define - the entire size of the array while for chunked storage they define - the size of a single chunk. In all cases, they are in units of - array elements (not bytes). The first dimension stored in the list - of dimensions is the slowest changing dimension and the last - dimension stored is the fastest changing dimension. -

    -

    Dataset Element Size

    The size of a dataset element, in bytes. This field is only - present for chunked storage. -

    -

    Compact Data Size

    This field is only present for compact data storage. - It contains the size of the raw data for the dataset array, in - bytes.

    -

    Fractal Heap Block Offset

    +

    The offset of the indirect block in the fractal heap’s + address space containing the empty blocks.

    +

    + The number of bytes used to encode this field is the minimum number + of bytes needed to encode values for the Maximum Heap Size + (in the fractal heap’s header). +

    +

    Compact Data

    This field is only present for compact data storage. - It contains the raw data for the dataset array.

    -
    -
    + +

    Block Start Row

    + +

    This is the row that the empty blocks start in.

    + + -
    -

    Version 3 of this message re-structured the format into specific - properties that are required for each layout class.

    - - -
    - - - - - - - - - - - - - - - - - - - -
    - Data Layout Message (Version 3) -
    bytebytebytebyte
    VersionLayout ClassThis space inserted only to align table nicely

    Properties (variable size)

    -
    + +

    Block Start Column

    + +

    This is the column that the empty blocks start in.

    + + -
    -
    - - - - - - - - - - - - - - - - - - - - -
    Field NameDescription

    Version

    -

    The version number information is used for changes in the format of layout message - and is described here: - - - - - - - - - - -
    VersionDescription
    3Used by the version 1.6.3 and later of the library to store properties - for each layout class.

    -

    Layout Class

    The layout class specifies the type of storage for the data - and how the other fields of the layout message are to be - interpreted. - - - - - - - - - - - - - - - - - - - - -
    ValueDescription
    0Compact Storage -
    1Contiguous Storage -
    2Chunked Storage -
    -

    -

    Properties

    This variable-sized field encodes information specific to each - layout class and is described below. If there is no property - information specified for a layout class, the size of this field - is zero bytes.

    -
    + +

    Number of Blocks

    + +

    This is the number of empty blocks covered by the section.

    + + + + -
    -

    Class-specific information for compact layout (Class 0): (Note: The dimensionality information - is in the Dataspace message)

    - - -
    - - - - - - - - - - - - - - - - - - -
    - Compact Storage Property Description -
    bytebytebytebyte
    SizeThis space inserted only to align table nicely

    Raw Data... (variable size)

    -
    +
    +

    + III.H. Disk Format: Level 1H - Shared Object + Header Message Table +

    + +

    + The shared object header message table is used to locate + object header messages that are shared between two or more object + headers in the file. Shared object header messages are stored and + indexed in the file in one of two ways: indexed sequentially in a shared + header message list or indexed with a v2 B-tree. The shared messages + themselves are either stored in a fractal heap (when two or more + objects share the message), or remain in an object’s header (when + only one object uses the message currently, but the message can be + shared in the future). +

    -
    -
    - - - - - - - - - - - - - - - -
    Field NameDescription

    Size

    This field contains the size of the raw data for the dataset - array, in bytes. -

    -

    Raw Data

    This field contains the raw data for the dataset array.

    -
    +

    + The shared object header message table contains a list of + shared message index headers. Each index header records information + about the version of the index format, the index storage type, flags + for the message types indexed, the number of messages in the index, the + address where the index resides, and the fractal heap address if shared + messages are stored there. +

    + +

    + Each index can be either a list or a v2 B-tree and may transition + between those two forms as the number of messages in the index varies. + Each shared message record contains information used to locate the + shared message from either a fractal heap or an object header. The + types of messages that can be shared are: Dataspace, Datatype, + Fill Value, Filter Pipeline and Attribute. +

    +

    + The shared object header message table is pointed to from a shared message table message in the + superblock extension for a file. This message stores the version of the + table format, along with the number of index headers in the table. +

    -
    -

    Class-specific information for contiguous layout (Class 1): (Note: The dimensionality information - is in the Dataspace message)

    - - -
    - - - - - - - - - - - - - - - - - -
    - Contiguous Storage Property Description -
    bytebytebytebyte

    AddressO


    SizeL

    +
    + + -
    Shared Object Header Message Table
    - - + + + + + + - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
    bytebytebytebyte
      - (Items marked with an ‘L’ in the above table are of the size - specified in “Size of Lengths” field in the superblock.) -
    + Signature + -
    + + Version for index #0 + Index Type for index #0 + Message Type Flags for index #0 + -
    -
    - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Address

    This is the address of the raw data in the file. - The address may have the “undefined address” value, to indicate - that storage has not yet been allocated for this array.

    Size

    This field contains the size allocated to store the raw data, - in bytes. -

    -
    Minimum Message Size for index #0
    List Cutoff for index #0v2 B-tree Cutoff for index #0
    Number of Messages for index #0This space inserted + only to align table nicely

    Index AddressO for index #0
    +

    Fractal Heap AddressO for + index #0
    +
    ...
    ...
    Version for index #N-1Index Type for index #N-1Message Type Flags for index #N-1
    Minimum Message Size for index #N-1
    List Cutoff for index #N-1v2 B-tree Cutoff for index #N-1
    Number of Messages for index #N-1This space inserted + only to align table nicely

    Index AddressO for index #N-1
    +

    Fractal Heap AddressO for + index #N-1
    +
    Checksum
    + + + + + + +
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
    + +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Signature

    +

    + The ASCII character string “ + SMTB + ” is used to indicate the beginning of the Shared Object + Header Message table. This gives file consistency checking + utilities a better chance of reconstructing a damaged file. +

    +

    Version for index #N

    +

    This is the version number for the list of shared object + header message indexes and this document describes version 0.

    +

    Index Type for index #N

    +

    The type of index can be an unsorted list or a v2 B-tree.

    +

    Message Type Flags for index #N

    +

    This field indicates the type of messages tracked in the + index, as follows:

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    BitsDescription
    0If set, the index tracks Dataspace Messages. +
    1If set, the message tracks Datatype Messages. +
    2If set, the message tracks Fill Value Messages. +
    3If set, the message tracks Filter Pipeline + Messages. +
    4If set, the message tracks Attribute Messages. +
    5-15Reserved (zero).
    +

    + + +

    An index can track more than one type of message, but each + type of message can only be in one index.

    +

    Minimum Message Size for index #N

    +

    This is the message size sharing threshold for the index. If + the encoded size of the message is less than this value, the + message is not shared.

    +

    List Cutoff for index #N

    +

    This is the cutoff value for the indexing of messages to + switch from a list to a v2 B-tree. If the number of messages is + greater than this value, the index should be a v2 B-tree.

    +

    v2 B-tree Cutoff for index #N

    +

    This is the cutoff value for the indexing of messages to + switch from a v2 B-tree back to a list. If the number of messages + is less than this value, the index should be a list.

    +

    Number of Messages for index #N

    +

    The number of shared messages being tracked for the index.

    +

    Index Address for index #N

    +

    This field is the address of the list or v2 B-tree where the + index nodes reside.

    +

    Fractal Heap Address for index #N

    +

    This field is the address of the fractal heap if shared + messages are stored there.

    +

    Checksum

    +

    This is the checksum for the table.

    +
    +
    + +
    +

    + Shared messages are indexed either with a shared message + record list, described below, or using a v2 B-tree (using record type + 7). The number of records in the shared message record list is + determined in the index’s entry in the shared object + header message table. +

    + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Shared Message Record List
    bytebytebytebyte
    Signature
    Shared Message Record #0
    Shared Message Record #1
    ...
    Shared Message Record #N-1
    Checksum
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Signature

    +

    + The ASCII character string “ + SMLI + ” is used to indicate the beginning of a list of index nodes. + This gives file consistency checking utilities a better chance of + reconstructing a damaged file. +

    +

    Shared Message Record #N

    +

    + The record for locating the shared message, either in the fractal + heap for the index, or an object header (see format for index + nodes below). +

    +

    Checksum

    +

    This is the checksum for the list.

    +
    +
    + +
    +

    The record for each shared message in an index is stored in one + of the following forms:

    + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Shared Message Record, for messages stored in a + fractal heap
    bytebytebytebyte
    Message LocationThis space inserted + only to align table nicely
    Hash Value
    Reference Count

    Fractal Heap ID
    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Message Location

    +

    This has a value of 0 indicating that the message is stored + in the heap.

    +

    Hash Value

    +

    This is the hash value for the message.

    +

    Reference Count

    +

    This is the number of times the message is used in the file. +

    +

    Fractal Heap ID

    +

    This is an 8-byte fractal heap ID for the message as stored + in the fractal heap for the index.

    +
    +
    + +
    +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Shared Message Record, for messages stored in an + object header
    bytebytebytebyte
    Message LocationThis space inserted + only to align table nicely
    Hash Value
    ReservedMessage TypeCreation Index

    Object Header AddressO
    +
    + + + + + + +
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
    + +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Message Location

    +

    This has a value of 1 indicating that the message is stored + in an object header.

    +

    Hash Value

    +

    This is the hash value for the message.

    +

    Message Type

    +

    This is the message type in the object header.

    +

    Creation Index

    +

    This is the creation index of the message within the object + header.

    +

    Object Header Address

    +

    This is the address of the object header where the message is + located.

    +
    +
    + + + +
    +
    +
    +

    + IV. Disk Format: Level 2 - Data Objects +

    + +

    Data objects contain the “real” user-visible + information in the file. These objects compose the scientific data and + other information which are generally thought of as “data” + by the end-user. All the other information in the file is provided as a + framework for storing and accessing these data objects.

    + +

    A data object is composed of header and data information. The + header information contains the information needed to interpret the + data information for the object as well as additional + “metadata” or pointers to additional “metadata” + used to describe or annotate each object.

    + +
    +

    + IV.A. Disk Format: Level 2A - Data Object + Headers +

    + +

    The header information of an object is designed to encompass all + of the information about an object, except for the data itself. This + information includes the dataspace, the datatype, information about how + the data is stored on disk (in external files, compressed, broken up in + blocks, and so on), as well as other information used by the library to + speed up access to the data objects or maintain a file’s + integrity. Information stored by user applications as attributes is + also stored in the object’s header. The header of each object is + not necessarily located immediately prior to the object’s data in + the file and in fact may be located in any position in the file. The + order of the messages in an object header is not significant.

    + +

    Object headers are composed of a prefix and a set of messages. + The prefix contains the information needed to interpret the messages + and a small amount of metadata about the object, and the messages + contain the majority of the metadata about the object.

    + +
    +

    + IV.A.1. Disk Format: Level 2A1 - Data + Object Header Prefix +

    + +
    +

    + IV.A.1.a. Version 1 Data Object + Header Prefix +

    + +

    Header messages are aligned on 8-byte boundaries for version 1 + object headers.

    + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Version 1 Object Header
    bytebytebytebyte
    VersionReserved (zero)Total Number of Header Messages
    Object Reference Count
    Object Header Size
    Header Message Type #1Size of Header Message Data #1
    Header Message #1 FlagsReserved (zero)

    Header Message Data #1
    +
    .
    .
    .
    Header Message Type #nSize of Header Message Data #n
    Header Message #n FlagsReserved (zero)

    Header Message Data #n
    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Version

    +

    This value is used to determine the format of the information + in the object header. When the format of the object header is + changed, the version number is incremented and can be used to + determine how the information in the object header is formatted. + This is version one (1) (there was no version zero (0)) of the + object header.

    +

    Total Number of Header Messages

    +

    This value determines the total number of messages listed in + object headers for this object. This value includes the messages in + continuation messages for this object.

    +

    Object Reference Count

    +

    This value specifies the number of “hard links” + to this object within the current file. References to the object + from external files, “soft links” in this file and + object references in this file are not tracked.

    +

    Object Header Size

    +

    This value specifies the number of bytes of header message + data following this length field that contain object header + messages for this object header. This value does not include the + size of object header continuation blocks for this object elsewhere + in the file.

    +

    Header Message #n Type

    +

    This value specifies the type of information included in the + following header message data. The message types for header + messages are defined in sections below.

    +

    Size of Header Message #n Data

    +

    This value specifies the number of bytes of header message + data following the header message type and length information for + the current message. The size includes padding bytes to make the + message a multiple of eight bytes.

    +

    Header Message #n Flags

    +

    This is a bit field with the following definition:

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    BitDescription
    0If set, the message data is constant. This is used for + messages like the datatype message of a dataset.
    1If set, the message is shared and stored in + another location than the object header. The Header Message Data + field contains a Shared Message (described in the Data Object Header Messages + section below) and the Size of Header Message Data field contains + the size of that Shared Message. +
    2If set, the message should not be shared.
    3If set, the HDF5 decoder should fail to open this object + if it does not understand the message’s type and the file + is open with permissions allowing write access to the file. + (Normally, unknown messages can just be ignored by HDF5 decoders) +
    4If set, the HDF5 decoder should set bit 5 of this + message’s flags (in other words, this bit field) if it does + not understand the message’s type and the object is + modified in any way. (Normally, unknown messages can just be + ignored by HDF5 decoders)
    5If set, this object was modified by software that did not + understand this message. (Normally, unknown messages should just + be ignored by HDF5 decoders) (Can be used to invalidate an index + or a similar feature)
    6If set, this message is shareable.
    7If set, the HDF5 decoder should always fail to open this + object if it does not understand the message’s type + (whether it is open for read-only or read-write access). + (Normally, unknown messages can just be ignored by HDF5 decoders) +
    +

    + +

    Header Message #n Data

    +

    The format and length of this field is determined by the + header message type and size respectively. Some header message + types do not require any data and this information can be + eliminated by setting the length of the message to zero. The data + is padded with enough zeroes to make the size a multiple of eight. +

    +
    +
    + +
    +

    + IV.A.1.b. Version 2 Data Object + Header Prefix +

    + +

    Note that the “total number of messages” field has + been dropped from the data object header prefix in this version. The + number of messages in the data object header is just determined by the + messages encountered in all the object header blocks.

    + +

    + Note also that the fields and messages in this version of data object + headers have no alignment or padding bytes inserted - they are + stored packed together. +

    + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Version 2 Object Header
    bytebytebytebyte
    Signature
    VersionFlagsThis space inserted + only to align table nicely
    Access time (optional)
    Modification Time (optional)
    Change Time (optional)
    Birth Time (optional)
    Maximum # of compact attributes (optional)Minimum # of dense attributes (optional)
    Size of Chunk #0 (variable size)This space inserted + only to align table nicely
    Header Message Type #1Size of Header Message Data #1Header Message #1 Flags
    Header Message #1 Creation Order (optional)This space inserted + only to align table nicely

    Header Message Data #1
    +
    .
    .
    .
    Header Message Type #nSize of Header Message Data #nHeader Message #n Flags
    Header Message #n Creation Order (optional)This space inserted + only to align table nicely

    Header Message Data #n
    +
    Gap (optional, variable size)
    Checksum
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Signature

    +

    + The ASCII character string “ + OHDR + ” is used to indicate the beginning of an object header. This + gives file consistency checking utilities a better chance of + reconstructing a damaged file. +

    +

    Version

    +

    This field has a value of 2 indicating version 2 of the + object header.

    +

    Flags

    +

    This field is a bit field indicating additional information + about the object header.

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Bit(s)Description
    0-1This two bit field determines the size of the Size + of Chunk #0 field. The values are: + + + + + + + + + + + + + + + + + + + + + + +
    ValueDescription
    0The Size of Chunk #0 field is 1 byte. +
    1The Size of Chunk #0 field is 2 bytes. +
    2The Size of Chunk #0 field is 4 bytes. +
    3The Size of Chunk #0 field is 8 bytes. +
    +

    +
    2If set, attribute creation order is tracked.
    3If set, attribute creation order is indexed.
    4If set, non-default attribute storage phase change values + are stored.
    5If set, access, modification, change and birth times are + stored.
    6-7Reserved
    +

    + +

    Access Time

    +

    This 32-bit value represents the number of seconds after the + UNIX epoch when the object’s raw data was last accessed (in + other words, read or written).

    +

    + This field is present if bit 5 of flags is set. +

    +

    Modification Time

    +

    This 32-bit value represents the number of seconds after the + UNIX epoch when the object’s raw data was last modified (in + other words, written).

    +

    + This field is present if bit 5 of flags is set. +

    +

    Change Time

    +

    This 32-bit value represents the number of seconds after the + UNIX epoch when the object’s metadata was last changed.

    +

    + This field is present if bit 5 of flags is set. +

    +

    Birth Time

    +

    This 32-bit value represents the number of seconds after the + UNIX epoch when the object was created.

    +

    + This field is present if bit 5 of flags is set. +

    +

    Maximum # of compact attributes

    +

    This is the maximum number of attributes to store in the + compact format before switching to the indexed format.

    +

    + This field is present if bit 4 of flags is set. +

    +

    Minimum # of dense attributes

    +

    This is the minimum number of attributes to store in the + indexed format before switching to the compact format.

    +

    + This field is present if bit 4 of flags is set. +

    +

    Size of Chunk #0

    +

    This unsigned value specifies the number of bytes of header + message data following this field that contain object header + information.

    +

    This value does not include the size of object header + continuation blocks for this object elsewhere in the file.

    +

    + The length of this field varies depending on bits 0 and 1 of the flags + field. +

    +

    Header Message #n Type

    +

    Same format as version 1 of the object header, described + above.

    +

    Size of Header Message #n Data

    +

    + This value specifies the number of bytes of header message data + following the header message type and length information for the + current message. The size of messages in this version does not + include any padding bytes. +

    +

    Header Message #n Flags

    +

    Same format as version 1 of the object header, described + above.

    +

    Header Message #n Creation Order

    +

    This field stores the order that a message of a given type + was created in.

    +

    + This field is present if bit 2 of flags is set. +

    +

    Header Message #n Data

    +

    Same format as version 1 of the object header, described + above.

    +

    Gap

    +

    A gap in an object header chunk is inferred by the end of the + messages for the chunk before the beginning of the chunk’s + checksum. Gaps are always smaller than the size of an object header + message prefix (message type + message size + message flags).

    +

    Gaps are formed when a message (typically an attribute + message) in an earlier chunk is deleted and a message from a later + chunk that does not quite fit into the free space is moved into the + earlier chunk.

    +

    Checksum

    +

    This is the checksum for the object header chunk.

    +
    +
    + +

    The header message types and the message data associated with + them compose the critical “metadata” about each object. + Some header messages are required for each object while others are + optional. Some optional header messages may also be repeated several + times in the header itself, the requirements and number of times + allowed in the header will be noted in each header message description + below.

    + + +
    +

    + IV.A.2. Disk Format: Level 2A2 - + Data Object Header Messages +

    + +

    Data object header messages are small pieces of metadata that are + stored in the data object header for each object in an HDF5 file. Data + object header messages provide the metadata required to describe an + object and its contents, as well as optional pieces of metadata that + annotate the meaning or purpose of the object.

    + +

    + Data object header messages are either stored directly in the data + object header for the object or are shared between multiple objects in + the file. When a message is shared, a flag in the Message + Flags indicates that the actual Message Data portion of that + message is stored in another location (such as another data object + header, or a heap in the file) and the Message Data field + contains the information needed to locate the actual information for + the message. +

    + +

    The format of shared message data is described here:

    + +
    + + + + + + + + + + + + + + + + + + + + + + + +
    Shared Message (Version 1)
    bytebytebytebyte
    VersionTypeReserved (zero)
    Reserved (zero)

    AddressO
    +
    + + + + + + +
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
    + +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Version

    The version number is used when there are changes in + the format of a shared object message and is described here:

    + + + + + + + + + + + + + + + +
    VersionDescription
    0Never used.
    1Used by the library before version 1.6.1.
    +

    Type

    The type of shared message location:

    + + + + + + + + + + +
    ValueDescription
    0Message stored in another object’s header (a committed + message). +
    +

    Address

    The address of the object header containing the + message to be shared.

    +
    + +
    +
    +
    + + + + + + + + + + + + + + + + + + + +
    Shared Message (Version 2)
    bytebytebytebyte
    VersionTypeThis space inserted + only to align table nicely

    AddressO
    +
    + + + + + + +
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Version

    The version number is used when there are changes in + the format of a shared object message and is described here:

    + + + + + + + + + + +
    VersionDescription
    2Used by the library in version 1.6.1 and after.
    +

    Type

    The type of shared message location:

    + + + + + + + + + + +
    ValueDescription
    0Message stored in another object’s header (a committed + message). +
    +

    Address

    The address of the object header containing the + message to be shared.

    +
    + +
    +
    +
    + + + + + + + + + + + + + + + + + + + +
    Shared Message (Version 3)
    bytebytebytebyte
    VersionTypeThis space inserted + only to align table nicely
    Location (variable size)
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Version

    The version number indicates changes in the format of + shared object message and is described here:

    + + + + + + + + + + +
    VersionDescription
    3Used by the library in version 1.8 and after. In this + version, the Type field can indicate that the message is + stored in the fractal heap. +
    +

    Type

    The type of shared message location:

    + + + + + + + + + + + + + + + + + + + + + + + + + + +
    ValueDescription
    0Message is not shared and is not shareable.
    1Message stored in file’s shared object + header message heap (a shared message). +
    2Message stored in another object’s header (a committed + message). +
    3Message is not shared, but is shareable.
    +

    Location

    + This field contains either a Size of Offsets-bytes address + of the object header containing the message to be shared, or an + 8-byte fractal heap ID for the message in the file’s shared + object header message heap. +

    +
    + + +

    The following is a list of currently defined header messages:

    + +
    +

    + IV.A.2.a. The NIL Message +

    + + +
    + + + + + + + + + + + + + + + + + + + + +
    Header Message Name: NIL
    Header Message Type: 0x0000
    Length: Varies
    Status: Optional; may be repeated.
    Description:The NIL message is used to indicate a message which is to be + ignored when reading the header messages for a data object. + [Possibly one which has been deleted for some reason.]
    Format of Data: Unspecified
    +
    + + + +
    +

    + IV.A.2.b. The Dataspace Message +

    + + +
    + + + + + + + + + + + + + + + + + + + + + +
    Header Message Name: Dataspace
    Header Message Type: 0x0001
    Length: Varies according to the number of + dimensions, as described in the following table.
    Status: Required for dataset objects; may + not be repeated.
    Description:The dataspace message describes the number of dimensions (in + other words, “rank”) and size of each dimension that the + data object has. This message is only used for datasets which have a + simple, rectilinear, array-like layout; datasets requiring a more + complex layout are not yet supported.
    Format of Data: See the tables below.
    +
    + + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Dataspace Message - Version 1
    bytebytebytebyte
    VersionDimensionalityFlagsReserved
    Reserved

    Dimension #1 SizeL
    +
    .
    .
    .

    Dimension #n SizeL
    +

    Dimension #1 Maximum SizeL (optional)
    +
    .
    .
    .

    Dimension #n Maximum SizeL (optional)
    +

    Permutation Index #1L (optional)
    +
    .
    .
    .

    Permutation Index #nL (optional)
    +
    + + + + + + +
     (Items marked with an ‘L’ in the + above table are of the size specified in “Size of + Lengths” field in the superblock.)
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Version

    +

    This value is used to determine the format of the Dataspace + Message. When the format of the information in the message is + changed, the version number is incremented and can be used to + determine how the information in the object header is formatted. + This document describes version one (1) (there was no version zero + (0)).

    +

    Dimensionality

    +

    This value is the number of dimensions that the data object + has.

    +

    Flags

    +

    This field is used to store flags to indicate the presence of + parts of this message. Bit 0 (the least significant bit) is used to + indicate that maximum dimensions are present. Bit 1 is used to + indicate that permutation indices are present.

    +

    Dimension #n Size

    +

    This value is the current size of the dimension of the data + as stored in the file. The first dimension stored in the list of + dimensions is the slowest changing dimension and the last dimension + stored is the fastest changing dimension.

    +

    Dimension #n Maximum Size

    +

    + This value is the maximum size of the dimension of the data as + stored in the file. This value may be the special “unlimited” size which indicates + that the data may expand along this dimension indefinitely. If + these values are not stored, the maximum size of each dimension is + assumed to be the dimension’s current size. +

    +

    Permutation Index #n

    +

    This value is the index permutation used to map each + dimension from the canonical representation to an alternate axis + for each dimension. If these values are not stored, the first + dimension stored in the list of dimensions is the slowest changing + dimension and the last dimension stored is the fastest changing + dimension.

    +
    +
    + + + +
    +

    Version 2 of the dataspace message dropped the optional + permutation index value support, as it was never implemented in the + HDF5 Library:

    + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Dataspace Message - Version 2
    bytebytebytebyte
    VersionDimensionalityFlagsType

    Dimension #1 SizeL
    +
    .
    .
    .

    Dimension #n SizeL
    +

    Dimension #1 Maximum SizeL (optional)
    +
    .
    .
    .

    Dimension #n Maximum SizeL (optional)
    +
    + + + + + + +
     (Items marked with an ‘L’ in the + above table are of the size specified in “Size of + Lengths” field in the superblock.)
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Version

    +

    This value is used to determine the format of the Dataspace + Message. This field should be ‘2’ for version 2 format + messages.

    +

    Dimensionality

    +

    This value is the number of dimensions that the data object + has.

    +

    Flags

    +

    This field is used to store flags to indicate the presence of + parts of this message. Bit 0 (the least significant bit) is used to + indicate that maximum dimensions are present.

    +

    Type

    +

    This field indicates the type of the dataspace:

    + + + + + + + + + + + + + + + + + + +
    ValueDescription
    0A scalar dataspace; in other words, a dataspace + with a single, dimensionless element. +
    1A simple dataspace; in other words, a dataspace + with a rank > 0 and an appropriate # of dimensions. +
    2A null dataspace; in other words, a dataspace + with no elements. +
    +

    +

    Dimension #n Size

    +

    This value is the current size of the dimension of the data + as stored in the file. The first dimension stored in the list of + dimensions is the slowest changing dimension and the last dimension + stored is the fastest changing dimension.

    +

    Dimension #n Maximum Size

    +

    + This value is the maximum size of the dimension of the data as + stored in the file. This value may be the special “unlimited” size which indicates + that the data may expand along this dimension indefinitely. If + these values are not stored, the maximum size of each dimension is + assumed to be the dimension’s current size. +

    +
    +
    + + + + + +
    +

    + IV.A.2.c. The Link Info Message +

    + + +
    + + + + + + + + + + + + + + + + + + + + +
    Header Message Name: Link Info
    Header Message Type: 0x0002
    Length: Varies
    Status: Optional; may not be repeated.
    Description:The link info message tracks variable information about the + current state of the links for a “new style” + group’s behavior. Variable information will be stored in this + message and constant information will be stored in the Group Info message. +
    Format of Data: See the tables below.
    +
    + + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Link Info
    bytebytebytebyte
    VersionFlagsThis space inserted + only to align table nicely

    Maximum Creation Index (8 bytes, + optional)
    +

    Fractal Heap AddressO
    +

    Address of v2 B-tree for Name IndexO
    +

    Address of v2 B-tree for Creation Order + IndexO (optional)
    +
    + + + + + + +
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Version

    +

    The version number for this message. This document describes + version 0.

    +

    Flags

    This field determines various optional aspects of the + link info message:

    + + + + + + + + + + + + + + + + + + +
    BitDescription
    0If set, creation order for the links is tracked.
    1If set, creation order for the links is indexed.
    2-7Reserved
    +

    Maximum Creation Index

    This 64-bit value is the maximum creation order index + value stored for a link in this group.

    +

    + This field is present if bit 0 of flags is set. +

    Fractal Heap Address

    +

    + This is the address of the fractal heap to store dense links. Each + link stored in the fractal heap is stored as a Link Message. +

    +

    + If there are no links in the group, or the group’s links are + stored “compactly” (as object header messages), this + value will be the undefined address. +

    +

    Address of v2 B-tree for Name Index

    This is the address of the version 2 B-tree to index + names of links.

    +

    + If there are no links in the group, or the group’s links are + stored “compactly” (as object header messages), this + value will be the undefined address. +

    Address of v2 B-tree for Creation Order Index

    This is the address of the version 2 B-tree to index + creation order of links.

    +

    + If there are no links in the group, or the group’s links are + stored “compactly” (as object header messages), this + value will be the undefined address. +

    +

    + This field exists if bit 1 of flags is set. +

    +
    + + +
    +

    + IV.A.2.d. The Datatype Message +

    + + +
    + + + + + + + + + + + + + + + + + + + + +
    Header Message Name: Datatype
    Header Message Type: 0x0003
    Length: Variable
    Status: Required for dataset or committed + datatype (formerly named datatype) objects; may not be repeated.
    Description:

    The datatype message defines the datatype for each + element of a dataset or a common datatype for sharing between + multiple datasets. A datatype can describe an atomic type like a + fixed- or floating-point type or more complex types like a C struct + (compound datatype), array (array datatype) or C++ vector + (variable-length datatype).

    +

    Datatype messages that are part of a dataset object do not + describe how elements are related to one another; the dataspace + message is used for that purpose. Datatype messages that are part + of a committed datatype (formerly named datatype) message describe + a common datatype that can be shared by multiple datasets in the + file.

    Format of Data: See the tables below.
    +
    + + +
    + + + + + + + + + + + + + + + + + + + + + + + + +
    Datatype Message
    bytebytebytebyte
    Class and VersionClass Bit Field, Bits 0-7Class Bit Field, Bits 8-15Class Bit Field, Bits 16-23
    Size

    +
    Properties
    +
    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Class and Version

    +

    The version of the datatype message and the datatype’s + class information are packed together in this field. The version + number is packed in the top 4 bits of the field and the class is + contained in the bottom 4 bits.

    +

    The version number information is used for changes in the + format of the datatype message and is described here:

    + + + + + + + + + + + + + + + + + + + + + + +
    VersionDescription
    0Never used
    1Used by early versions of the library to encode compound + datatypes with explicit array fields. See the compound datatype + description below for further details.
    2Used when an array datatype needs to be encoded.
    3Used when a VAX byte-ordered type needs to be encoded. + Packs various other datatype classes more efficiently also.
    +

    + +

    The class of the datatype determines the format for the class + bit field and properties portion of the datatype message, which are + described below. The following classes are currently defined:

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    ValueDescription
    0Fixed-Point
    1Floating-Point
    2Time
    3String
    4Bit field
    5Opaque
    6Compound
    7Reference
    8Enumerated
    9Variable-Length
    10Array
    +

    + +

    Class Bit Fields

    +

    The information in these bit fields is specific to each + datatype class and is described below. All bits not defined for a + datatype class are set to zero.

    +

    Size

    +

    The size of a datatype element in bytes.

    +

    Properties

    +

    This variable-sized sequence of bytes encodes information + specific to each datatype class and is described for each class + below. If there is no property information specified for a datatype + class, the size of this field is zero bytes.

    +
    +
    + + +
    +

    Class specific information for Fixed-Point Numbers (Class 0):

    + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Fixed-point Bit Field Description
    BitsMeaning

    0

    + Byte Order. If zero, byte order is little-endian; otherwise, + byte order is big endian. +

    1, 2

    + Padding type. Bit 1 is the lo_pad bit and bit 2 is the + hi_pad bit. If a datum has unused bits at either end, then the + lo_pad or hi_pad bit is copied to those locations. +

    3

    + Signed. If this bit is set then the fixed-point number is in + 2’s complement form. +

    4-23

    Reserved (zero).

    +
    + +
    +
    + + + + + + + + + + + + + + +
    Fixed-Point Property Description
    ByteByteByteByte
    Bit OffsetBit Precision
    +
    + +
    +
    + + + + + + + + + + + + + + + + +
    Field NameDescription

    Bit Offset

    +

    The bit offset of the first significant bit of the + fixed-point value within the datatype. The bit offset specifies the + number of bits “to the right of” the value (which are + set to the lo_pad bit value).

    +

    Bit Precision

    +

    The number of bits of precision of the fixed-point value + within the datatype. This value, combined with the datatype + element’s size and the Bit Offset field specifies the number + of bits “to the left of” the value (which are set to + the hi_pad bit value).

    +
    +
    + + +
    +

    Class specific information for Floating-Point Numbers (Class 1):

    + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Floating-Point Bit Field Description
    BitsMeaning

    0, 6

    + Byte Order. These two non-contiguous bits specify the + “endianness” of the bytes in the datatype element. +

    + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Bit 6Bit 0Description
    00Byte order is little-endian
    01Byte order is big-endian
    10Reserved
    11Byte order is VAX-endian
    +

    1, 2, 3

    + Padding type. Bit 1 is the low bits pad type, bit 2 is the + high bits pad type, and bit 3 is the internal bits pad type. If a + datum has unused bits at either end or between the sign bit, + exponent, or mantissa, then the value of bit 1, 2, or 3 is copied + to those locations. +

    4-5

    + Mantissa Normalization. This 2-bit bit field specifies how + the most significant bit of the mantissa is managed. +

    + + + + + + + + + + + + + + + + + + + + + + +
    ValueDescription
    0No normalization
    1The most significant bit of the mantissa is always set + (except for 0.0).
    2The most significant bit of the mantissa is not stored, + but is implied to be set.
    3Reserved.
    +

    7

    Reserved (zero).

    8-15

    + Sign Location. This is the bit position of the sign bit. + Bits are numbered with the least significant bit zero. +

    16-23

    Reserved (zero).

    +
    + +
    +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + +
    Floating-Point Property Description
    ByteByteByteByte
    Bit OffsetBit Precision
    Exponent LocationExponent SizeMantissa LocationMantissa Size
    Exponent Bias
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Bit Offset

    +

    The bit offset of the first significant bit of the + floating-point value within the datatype. The bit offset specifies + the number of bits “to the right of” the value.

    +

    Bit Precision

    +

    The number of bits of precision of the floating-point value + within the datatype.

    +

    Exponent Location

    +

    The bit position of the exponent field. Bits are numbered + with the least significant bit number zero.

    +

    Exponent Size

    +

    The size of the exponent field in bits.

    +

    Mantissa Location

    +

    The bit position of the mantissa field. Bits are numbered + with the least significant bit number zero.

    +

    Mantissa Size

    +

    The size of the mantissa field in bits.

    +

    Exponent Bias

    +

    The bias of the exponent field.

    +
    +
    + + +
    +

    Class specific information for Time (Class 2):

    + + +
    + + + + + + + + + + + + + + + + + +
    Time Bit Field Description
    BitsMeaning

    0

    + Byte Order. If zero, byte order is little-endian; otherwise, + byte order is big endian. +

    1-23

    Reserved (zero).

    +
    + +
    +
    + + + + + + + + + + + +
    Time Property Description
    ByteByte
    Bit Precision
    +
    + +
    +
    + + + + + + + + + + + +
    Field NameDescription

    Bit Precision

    +

    The number of bits of precision of the time value.

    +
    +
    + + +
    +

    Class specific information for Strings (Class 3):

    + + +
    + + + + + + + + + + + + + + + + + + + + + + +
    String Bit Field Description
    BitsMeaning

    0-3

    + Padding type. This four-bit value determines the type of + padding to use for the string. The values are: + +

    + + + + + + + + + + + + + + + + + + + + + + + + + +
    ValueDescription
    0Null Terminate: A zero byte marks the end of the string + and is guaranteed to be present after converting a long string to + a short string. When converting a short string to a long string + the value is padded with additional null characters as necessary. +
    1Null Pad: Null characters are added to the end of the + value during conversions from short values to long values but + conversion in the opposite direction simply truncates the value. +
    2Space Pad: Space characters are added to the end of the + value during conversions from short values to long values but + conversion in the opposite direction simply truncates the value. + This is the Fortran representation of the string.
    3-15Reserved
    +

    4-7

    + Character Set. The character set used to encode the string. +

    + + + + + + + + + + + + + + + + + + + + +
    ValueDescription
    0ASCII character set encoding
    1UTF-8 character set encoding
    2-15Reserved
    +

    8-23

    Reserved (zero).

    +
    + +

    There are no properties defined for the string class.

    + + +

    Class specific information for bit fields (Class 4):

    + +
    + + + + + + + + + + + + + + + + + + + + + + +
    Bitfield Bit Field Description
    BitsMeaning

    0

    + Byte Order. If zero, byte order is little-endian; otherwise, + byte order is big endian. +

    1, 2

    + Padding type. Bit 1 is the lo_pad type and bit 2 is the + hi_pad type. If a datum has unused bits at either end, then the + lo_pad or hi_pad bit is copied to those locations. +

    3-23

    Reserved (zero).

    +
    + +
    +
    + + + + + + + + + + + + + + +
    Bit Field Property Description
    ByteByteByteByte
    Bit OffsetBit Precision
    +
    + +
    +
    + + + + + + + + + + + + + + + +
    Field NameDescription

    Bit Offset

    +

    The bit offset of the first significant bit of the bit field + within the datatype. The bit offset specifies the number of bits + “to the right of” the value.

    +

    Bit Precision

    +

    The number of bits of precision of the bit field within the + datatype.

    +
    +
    + + +
    +

    Class specific information for Opaque (Class 5):

    + +
    + + + + + + + + + + + + + + + + + +
    Opaque Bit Field Description
    BitsMeaning

    0-7

    Length of ASCII tag in bytes.

    8-23

    Reserved (zero).

    +
    + +
    +
    + + + + + + + + + + + + + +
    Opaque Property Description
    ByteByteByteByte

    ASCII Tag

    +
    + +
    +
    + + + + + + + + + + +
    Field NameDescription

    ASCII Tag

    +

    This NUL-terminated string provides a description for the + opaque type. It is NUL-padded to a multiple of 8 bytes.

    +
    +
    + + +
    +

    Class specific information for Compound (Class 6):

    + +
    + + + + + + + + + + + + + + + + + +
    Compound Bit Field Description
    BitsMeaning

    0-15

    + Number of Members. This field contains the number of members + defined for the compound datatype. The member definitions are + listed in the Properties field of the datatype message. +

    16-23

    Reserved (zero).

    +
    + + +

    The Properties field of a compound datatype is a list of the + member definitions of the compound datatype. The member definitions + appear one after another with no intervening bytes. The member types + are described with a (recursively) encoded datatype message.

    + +

    Note that the property descriptions are different for different + versions of the datatype version. Additionally note that the version 0 + datatype encoding is deprecated and has been replaced with later + encodings in versions of the HDF5 Library from the 1.4 release onward.

    + + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Compound Properties Description for Datatype + Version 1
    ByteByteByteByte

    Name
    +
    Byte Offset of Member
    DimensionalityReserved (zero)
    Dimension Permutation
    Reserved (zero)
    Dimension #1 Size (required)
    Dimension #2 Size (required)
    Dimension #3 Size (required)
    Dimension #4 Size (required)

    Member Type Message
    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Name

    +

    This NUL-terminated string provides the name of the + compound datatype member. It is NUL-padded to a multiple of 8 bytes.

    +

    Byte Offset of Member

    +

    This is the byte offset of the member within the datatype.

    +

    Dimensionality

    +

    If set to zero, this field indicates a scalar member. If set + to a value greater than zero, this field indicates that the member + is an array of values. For array members, the size of the array is + indicated by the ‘Size of Dimension n’ field in this + message.

    +

    Dimension Permutation

    +

    This field was intended to allow an array field to have its + dimensions permuted, but this was never implemented. This field + should always be set to zero.

    +

    Dimension #n Size

    +

    This field is the size of a dimension of the array field as + stored in the file. The first dimension stored in the list of + dimensions is the slowest changing dimension and the last dimension + stored is the fastest changing dimension.

    +

    Member Type Message

    +

    This field is a datatype message describing the datatype of + the member.

    +
    +
    + +
    +
    +
    + + + + + + + + + + + + + + + + + + + + + + +
    Compound Properties Description for Datatype + Version 2
    ByteByteByteByte

    Name
    +
    Byte Offset of Member

    Member Type Message
    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Name

    +

    This NUL-terminated string provides the name of the compound + datatype member. It is NUL-padded to a multiple of 8 bytes.

    +

    Byte Offset of Member

    +

    This is the byte offset of the member within the datatype.

    +

    Member Type Message

    +

    This field is a datatype message describing the datatype of + the member.

    +
    +
    + + +
    +
    +
    + + + + + + + + + + + + + + + + + + + + + + +
    Compound Properties Description for Datatype + Version 3
    ByteByteByteByte

    Name
    +
    Byte Offset of Member (variable size)

    Member Type Message
    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Name

    + This NUL-terminated string provides the name of the compound + datatype member. It is not NUL-padded to a multiple of 8 bytes. +

    Byte Offset of Member

    This is the byte offset of the member within the + datatype. The field size is the minimum number of bytes necessary, + based on the size of the datatype element. For example, a datatype + element size of less than 256 bytes uses a 1 byte length, a + datatype element size of 256-65535 bytes uses a 2 byte length, and + so on.

    Member Type Message

    This field is a datatype message describing the + datatype of the member.

    +
    + + +
    +

    Class specific information for Reference (Class 7):

    + +
    + + + + + + + + + + + + + + + + + +
    Reference Bit Field Description
    BitsMeaning

    0-3

    + Type. This four-bit value contains the type of reference + described. The values defined are: + +

    + + + + + + + + + + + + + + + + + + + + +
    ValueDescription
    0Object Reference: A reference to another object in this + HDF5 file.
    1Dataset Region Reference: A reference to a region within + a dataset in this HDF5 file.
    2-15Reserved
    +

    4-23

    Reserved (zero).

    +
    + +

    There are no properties defined for the reference class.

    + + +
    +

    Class specific information for Enumeration (Class 8):

    + +
    + + + + + + + + + + + + + + + + + +
    Enumeration Bit Field Description
    BitsMeaning

    0-15

    + Number of Members. The number of name/value pairs defined + for the enumeration type. +

    16-23

    Reserved (zero).

    +
    + +
    +
    +
    + + + + + + + + + + + + + + + + + + + + + + +
    Enumeration Property Description for Datatype + Versions 1 & 2
    ByteByteByteByte

    Base Type
    +

    Names
    +

    Values
    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Base Type

    +

    Each enumeration type is based on some parent type, usually + an integer. The information for that parent type is described + recursively by this field.

    +

    Names

    +

    The name for each name/value pair. Each name is stored as a + null terminated ASCII string in a multiple of eight bytes. The + names are in no particular order.

    +

    Values

    +

    The list of values in the same order as the names. The values + are packed (no inter-value padding) and the size of each value is + determined by the parent type.

    +
    +
    + +
    +
    +
    + + + + + + + + + + + + + + + + + + + + + + +
    Enumeration Property Description for Datatype + Version 3
    ByteByteByteByte

    Base Type
    +

    Names
    +

    Values
    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Base Type

    +

    Each enumeration type is based on some parent type, usually + an integer. The information for that parent type is described + recursively by this field.

    +

    Names

    +

    + The name for each name/value pair. Each name is stored as a null + terminated ASCII string, not padded to a multiple of eight + bytes. The names are in no particular order. +

    +

    Values

    +

    The list of values in the same order as the names. The values + are packed (no inter-value padding) and the size of each value is + determined by the parent type.

    +
    +
    + + + +
    +

    Class specific information for Variable-Length (Class 9):

    + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Variable-Length Bit Field Description
    BitsMeaning

    0-3

    + Type. This four-bit value contains the type of + variable-length datatype described. The values defined are: + +

    + + + + + + + + + + + + + + + + + + + + +
    ValueDescription
    0Sequence: A variable-length sequence of any datatype. + Variable-length sequences do not have padding or character set + information.
    1String: A variable-length sequence of characters. + Variable-length strings have padding and character set + information.
    2-15Reserved
    +

    4-7

    + Padding type. (variable-length string only) This four-bit + value determines the type of padding used for variable-length + strings. The values are the same as for the string padding type, as + follows: +

    + + + + + + + + + + + + + + + + + + + + + + + + + +
    ValueDescription
    0Null terminate: A zero byte marks the end of a string and + is guaranteed to be present after converting a long string to a + short string. When converting a short string to a long string, + the value is padded with additional null characters as necessary. +
    1Null pad: Null characters are added to the end of the + value during conversion from a short string to a longer string. + Conversion from a long string to a shorter string simply + truncates the value.
    2Space pad: Space characters are added to the end of the + value during conversion from a short string to a longer string. + Conversion from a long string to a shorter string simply + truncates the value. This is the Fortran representation of the + string.
    3-15Reserved
    +

    + +

    This value is set to zero for variable-length sequences.

    8-11

    + Character Set. (variable-length string only) This four-bit + value specifies the character set to be used for encoding the + string: +

    + + + + + + + + + + + + + + + + + + + + +
    ValueDescription
    0ASCII character set encoding
    1UTF-8 character set encoding
    2-15Reserved
    +

    + +

    This value is set to zero for variable-length sequences.

    12-23

    Reserved (zero).

    +
    + +
    +
    +
    + + + + + + + + + + + + + + +
    Variable-Length Property Description
    ByteByteByteByte

    Base Type
    +
    +
    + +
    +
    + + + + + + + + + + + +
    Field NameDescription

    Base Type

    +

    Each variable-length type is based on some parent type. The + information for that parent type is described recursively by this + field.

    +
    +
    + + +
    +

    Class specific information for Array (Class 10):

    + +

    There are no bit fields defined for the array class.

    + +

    Note that the dimension information defined in the property for + this datatype class is independent of dataspace information for a + dataset. The dimension information here describes the dimensionality of + the information within a data element (or a component of an element, if + the array datatype is nested within another datatype) and the dataspace + for a dataset describes the size and locations of the elements in a + dataset.

    + + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Array Property Description for Datatype Version 2
    ByteByteByteByte
    DimensionalityReserved (zero)
    Dimension #1 Size
    .
    .
    .
    Dimension #n Size
    Permutation Index #1
    .
    .
    .
    Permutation Index #n

    Base Type
    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Dimensionality

    +

    This value is the number of dimensions that the array has.

    +

    Dimension #n Size

    +

    This value is the size of the dimension of the array as + stored in the file. The first dimension stored in the list of + dimensions is the slowest changing dimension and the last dimension + stored is the fastest changing dimension.

    +

    Permutation Index #n

    +

    This value is the index permutation used to map each + dimension from the canonical representation to an alternate axis + for each dimension. Currently, dimension permutations are not + supported, and these indices should be set to the index position + minus one. In other words, the first dimension should be set to 0, + the second dimension should be set to 1, and so on.

    +

    Base Type

    +

    Each array type is based on some parent type. The information + for that parent type is described recursively by this field.

    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Array Property Description for Datatype Version 3
    ByteByteByteByte
    DimensionalityThis space inserted + only to align table nicely
    Dimension #1 Size
    .
    .
    .
    Dimension #n Size

    Base Type
    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Dimensionality

    +

    This value is the number of dimensions that the array has.

    +

    Dimension #n Size

    +

    This value is the size of the dimension of the array as + stored in the file. The first dimension stored in the list of + dimensions is the slowest changing dimension and the last dimension + stored is the fastest changing dimension.

    +

    Base Type

    +

    Each array type is based on some parent type. The information + for that parent type is described recursively by this field.

    +
    +
    + + + +
    +

    + IV.A.2.e. The Data Storage - Fill + Value (Old) Message +

    + + +
    + + + + + + + + + + + + + + + + + + + + +
    Header Message Name: Fill Value (old)
    Header Message Type: 0x0004
    Length: Varies
    Status: Optional; may not be repeated.
    Description:

    The fill value message stores a single data value + which is returned to the application when an uninitialized data + element is read from a dataset. The fill value is interpreted with + the same datatype as the dataset. If no fill value message is + present then a fill value of all zero bytes is assumed.

    +

    This fill value message is deprecated in favor of the + “new” fill value message (Message Type 0x0005) and is + only written to the file for forward compatibility with versions of + the HDF5 Library before the 1.6.0 version. Additionally, it only + appears for datasets with a user-defined fill value (as opposed to + the library default fill value or an explicitly set + “undefined” fill value).

    Format of Data: See the tables below.
    +
    + + +
    + + + + + + + + + + + + + + + + + +
    Fill Value Message (Old)
    bytebytebytebyte
    Size

    Fill Value (optional, variable + size)
    +
    +
    + +
    +
    + + + + + + + + + + + + + + + +
    Field NameDescription

    Size

    +

    This is the size of the Fill Value field in bytes.

    +

    Fill Value

    +

    The fill value. The bytes of the fill value are interpreted + using the same datatype as for the dataset.

    +
    +
    + + +
    +

    + IV.A.2.f. The Data Storage - Fill Value + Message +

    + + +
    + + + + + + + + + + + + + + + + + + + + +
    Header Message Name: Fill Value
    Header Message Type: 0x0005
    Length: Varies
    Status: Required for dataset objects; may + not be repeated.
    Description:The fill value message stores a single data value which is + returned to the application when an uninitialized data element is + read from a dataset. The fill value is interpreted with the same + datatype as the dataset.
    Format of Data: See the tables below.
    +
    + + +
    + + + + + + + + + + + + + + + + + + + + + + + + +
    Fill Value Message - Versions 1 & 2
    bytebytebytebyte
    VersionSpace Allocation TimeFill Value Write TimeFill Value Defined
    Size (optional)

    Fill Value (optional, variable + size)
    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Version

    +

    The version number information is used for changes in the + format of the fill value message and is described here:

    + + + + + + + + + + + + + + + + + + + + + + +
    VersionDescription
    0Never used
    1Initial version of this message.
    2In this version, the Size and Fill Value fields are only + present if the Fill Value Defined field is set to 1.
    3This version packs the other fields in the message more + efficiently than version 2.
    +

    +

    +

    Space Allocation Time

    +

    When the storage space for the dataset’s raw data will + be allocated. The allowed values are:

    + + + + + + + + + + + + + + + + + + + + + + +
    ValueDescription
    0Not used.
    1Early allocation. Storage space for the entire dataset + should be allocated in the file when the dataset is created.
    2Late allocation. Storage space for the entire dataset + should not be allocated until the dataset is written to.
    3Incremental allocation. Storage space for the dataset + should not be allocated until the portion of the dataset is + written to. This is currently used in conjunction with chunked + data storage for datasets.
    +

    + +

    Fill Value Write Time

    +

    At the time that storage space for the dataset’s raw + data is allocated, this value indicates whether the fill value + should be written to the raw data storage elements. The allowed + values are:

    + + + + + + + + + + + + + + + + + + +
    ValueDescription
    0On allocation. The fill value is always written to the + raw data storage when the storage space is allocated.
    1Never. The fill value should never be written to the raw + data storage.
    2Fill value written if set by user. The fill value will be + written to the raw data storage when the storage space is + allocated only if the user explicitly set the fill value. If the + fill value is the library default or is undefined, it will not be + written to the raw data storage.
    +

    + +

    Fill Value Defined

    +

    This value indicates if a fill value is defined for this + dataset. If this value is 0, the fill value is undefined. If this + value is 1, a fill value is defined for this dataset. For version 2 + or later of the fill value message, this value controls the + presence of the Size and Fill Value fields.

    +

    Size

    +

    This is the size of the Fill Value field in bytes. This field + is not present if the Version field is greater than 1, and the Fill + Value Defined field is set to 0.

    +

    Fill Value

    +

    The fill value. The bytes of the fill value are interpreted + using the same datatype as for the dataset. This field is not + present if the Version field is greater than 1, and the Fill Value + Defined field is set to 0.

    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + +
    Fill Value Message - Version 3
    bytebytebytebyte
    VersionFlagsThis space inserted + only to align table nicely
    Size (optional)

    Fill Value (optional, variable + size)
    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Version

    +

    The version number information is used for changes in the + format of the fill value message and is described here:

    + + + + + + + + + + + + + + + + + + + + + + +
    VersionDescription
    0Never used
    1Initial version of this message.
    2In this version, the Size and Fill Value fields are only + present if the Fill Value Defined field is set to 1.
    3This version packs the other fields in the message more + efficiently than version 2.
    +

    + +

    Flags

    +

    This is a bit field that packs the Space Allocation Time, the + Fill Value Write Time, and the fill value defined status into a + single byte. The bits are defined as follows:

    + + + + + + + + + + + + + + + + + + + + + + + + + + +
    BitsDescription
    0-1Space Allocation Time, with the same values as versions 1 + and 2 of the message.
    2-3Fill Value Write Time, with the same values as versions 1 + and 2 of the message.
    4Fill Value Undefined, indicating that the fill value has + been marked as “undefined” for this dataset. Bits 4 + and 5 cannot both be set.
    5Fill Value Defined, with the same values as versions 1 + and 2 of the message. Bits 4 and 5 cannot both be set.
    6-7Reserved (zero).
    +

    + +

    Size

    +

    This is the size of the Fill Value field in bytes. This field + is not present if the Version field is greater than 1, and the Fill + Value Defined flag is set to 0.

    +

    Fill Value

    +

    The fill value. The bytes of the fill value are interpreted + using the same datatype as for the dataset. This field is not + present if the Version field is greater than 1, and the Fill Value + Defined flag is set to 0.

    +
    +
    + + +
    +

    + IV.A.2.g. The Link Message +

    + + +
    + + + + + + + + + + + + + + + + + + + + +
    Header Message Name: Link
    Header Message Type: 0x0006
    Length: Varies
    Status: Optional; may be repeated.
    Description:

    This message encodes the information for a link in a + group’s object header, when the group is storing its links + “compactly”, or in the group’s fractal heap, when + the group is storing its links “densely”.

    +

    + A group is storing its links compactly when the fractal heap + address in the Link Info + Message is set to the “undefined address” value. +

    Format of Data: See the tables below.
    +
    + + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Link Message
    bytebytebytebyte
    VersionFlagsLink type (optional)This space inserted only to align + table nicely

    Creation Order (8 bytes, + optional)
    +
    Link Name Character Set (optional)Length of Link Name (variable size)This space inserted + only to align table nicely
    Link Name (variable size)

    Link Information (variable size)
    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Version

    The version number for this message. This document + describes version 1.

    Flags

    This field contains information about the link and + controls the presence of other fields below.

    + + + + + + + + + + + + + + + + + + + + + + + + + + +
    BitsDescription
    0-1Determines the size of the Length of Link Name + field. + + + + + + + + + + + + + + + + + + + + + + +
    ValueDescription
    0The size of the Length of Link Name field is + 1 byte. +
    1The size of the Length of Link Name field is + 2 bytes. +
    2The size of the Length of Link Name field is + 4 bytes. +
    3The size of the Length of Link Name field is + 8 bytes. +
    +
    2Creation Order Field Present: if set, the Creation + Order field is present. If not set, creation order information + is not stored for links in this group. +
    3Link Type Field Present: if set, the link is not a hard + link and the Link Type field is present. If not set, the + link is a hard link. +
    4Link Name Character Set Field Present: if set, the link + name is not represented with the ASCII character set and the Link + Name Character Set field is present. If not set, the link name + is represented with the ASCII character set. +
    5-7Reserved (zero).
    +

    Link type

    This is the link class type and can be one of the + following values:

    + + + + + + + + + + + + + + + + + + + + + + + + + + +
    ValueDescription
    0A hard link (should never be stored in the file)
    1A soft link.
    2-63Reserved for future HDF5 internal use.
    64An external link.
    65-255Reserved, but available for user-defined link types.
    +

    + +

    + This field is present if bit 3 of Flags is set. +

    Creation Order

    This 64-bit value is an index of the link&rsquo;s + creation time within the group. Values start at 0 when the group is + created and increment by one for each link added to the group. + Removing a link from a group does not change existing links&rsquo; + creation order field.

    +

    + This field is present if bit 2 of Flags is set. +

    Link Name Character Set

    This is the character set for encoding the + link’s name:

    + + + + + + + + + + + + + + + +
    ValueDescription
    0ASCII character set encoding (this should never be stored + in the file)
    1UTF-8 character set encoding
    +

    + +

    + This field is present if bit 4 of Flags is set. +

    Length of link name

    + This is the length of the link’s name. The size of this field + depends on bits 0 and 1 of Flags. +

    Link name

    This is the name of the link, non-NULL terminated.

    Link information

    + The format of this field depends on the link type. +

    +

    + For hard links, the field is formatted as follows: + +

    + + + + + +
    Size of Offsets bytes:The address of the object header for the + object that the link points to.
    +

    + +

    + For soft links, the field is formatted as follows: + +

    + + + + + + + + + +
    Bytes 1-2:Length of soft link value.
    Length of soft link value bytes:A non-NULL-terminated string storing the value of the + soft link.
    +

    + +

    + For external links, the field is formatted as follows: + +

    + + + + + + + + + +
    Bytes 1-2:Length of external link value.
    Length of external link value bytes:The first byte contains the version number in the upper 4 + bits and flags in the lower 4 bits for the external link. Both + version and flags are defined to be zero in this document. The + remaining bytes consist of two NULL-terminated strings, with no + padding between them. The first string is the name of the HDF5 + file containing the object linked to and the second string is the + full path to the object linked to, within the HDF5 file’s + group hierarchy.
    +

    + +

    + For user-defined links, the field is formatted as follows: + +

    + + + + + + + + + +
    Bytes 1-2:Length of user-defined data.
    Length of user-defined link value bytes:The data supplied for the user-defined link type.
    +

    +
    + +
    +

    + IV.A.2.h. The Data Storage - + External Data Files Message +

    + + +
    + + + + + + + + + + + + + + + + + + + + +
    Header Message Name: External Data Files
    Header Message Type: 0x0007
    Length: Varies
    Status: Optional; may not be repeated.
    Description:The external data storage message indicates that the data + for an object is stored outside the HDF5 file. The filename of the + object is stored as a Universal Resource Location (URL) of the + actual filename containing the data. An external file list record + also contains the byte offset of the start of the data within the + file and the amount of space reserved in the file for that data.
    Format of Data: See the tables below.
    +
    + + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    External File List Message
    bytebytebytebyte
    VersionReserved (zero)
    Allocated SlotsUsed Slots

    Heap AddressO
    +

    Slot Definitions...
    +
    + + + + + + +
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
    + +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Version

    +

    The version number information is used for changes in the + format of External Data Storage Message and is described here:

    + + + + + + + + + + + + + +
    VersionDescription
    0Never used.
    1The current version used by the library.
    +

    + +

    Allocated Slots

    +

    The total number of slots allocated in the message. Its value + must be at least as large as the value contained in the Used Slots + field. (The current library simply uses the number of Used Slots + for this message)

    +

    Used Slots

    +

    The number of initial slots which contain valid information.

    +

    Heap Address

    +

    This is the address of a local heap which contains the names + for the external files (The local heap information can be found in + Disk Format Level 1D in this document). The name at offset zero in + the heap is always the empty string.

    +

    Slot Definitions

    +

    The slot definitions are stored in order according to the + array addresses they represent.

    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + +
    External File List Slot
    bytebytebytebyte

    Name Offset in Local HeapL
    +

    Offset in External Data FileL
    +

    Data Size in External FileL
    +
    + + + + + + +
     (Items marked with an ‘L’ in the + above table are of the size specified in “Size of + Lengths” field in the superblock.)
    + +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Name Offset in Local Heap

    +

    + The byte offset within the local name heap for the name of the + file. File names are stored as a URL which has a protocol name, a + host name, a port number, and a file name: + protocol://host:port/file + . If the protocol is omitted then &ldquo;file:&rdquo; is assumed. + If the port number is omitted then a default port for that protocol + is used. If both the protocol and the port number are omitted then + the colon can also be omitted. If the double slash and host name + are omitted then &ldquo;localhost&rdquo; is assumed. The file name + is the only mandatory part, and if the leading slash is missing + then it is relative to the application&rsquo;s current working + directory (the use of relative names is not recommended). +

    +

    Offset in External Data File

    +

    This is the byte offset to the start of the data in the + specified file. For files that contain data for a single dataset + this will usually be zero.

    +

    Data Size in External File

    +

    This is the total number of bytes reserved in the specified + file for raw data storage. For a file that contains exactly one + complete dataset which is not extendable, the size will usually be + the exact size of the dataset. However, by making the size larger + one allows HDF5 to extend the dataset. The size can be set to a + value larger than the entire file since HDF5 will read zeroes past + the end of the file without failing.

    +
    +
    + + +
    +

    + IV.A.2.i. The Data Storage - Layout Message +

    + + +
    + + + + + + + + + + + + + + + + + + + + +
    Header Message Name: Data Storage - + Layout
    Header Message Type: 0x0008
    Length: Varies
    Status: Required for datasets; may not be + repeated.
    Description:Data layout describes how the elements of a + multi-dimensional array are stored in the HDF5 file. Three types of + data layout are supported: +
      +
    1. Contiguous: The array is stored in one contiguous area of + the file. This layout requires that the size of the array be + constant: data manipulations such as chunking, compression, + checksums, or encryption are not permitted. The message stores the + total storage size of the array. The offset of an element from the + beginning of the storage area is computed as in a C array.
    2. Chunked: The array domain is regularly decomposed into + chunks, and each chunk is allocated and stored separately. This + layout supports arbitrary element traversals, compression, + encryption, and checksums. (these features are described in other + messages). The message stores the size of a chunk instead of the + size of the entire array; the storage size of the entire array can + be calculated by traversing the B-tree that stores the chunk + addresses.
    3. Compact: The array is stored in one contiguous block, as + part of this object header message.
    +
    Format of Data: See the tables below.
    +
    + + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Data Layout Message (Versions 1 and 2)
    bytebytebytebyte
    VersionDimensionalityLayout ClassReserved (zero)
    Reserved (zero)

    Data AddressO (optional)
    +
    Dimension 0 Size
    Dimension 1 Size
    ...
    Dimension #n Size
    Dataset Element Size (optional)
    Compact Data Size (optional)

    Compact Data... (variable size, + optional)
    +
    + + + + + + +
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
    + +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Version

    +

    The version number information is used for changes in the + format of the data layout message and is described here:

    + + + + + + + + + + + + + + + + + + + + +
    VersionDescription
    0Never used.
    1Used by version 1.4 and before of the library to encode + layout information. Data space is always allocated when the data + set is created.
    2Used by version 1.6.x of the library to encode layout + information. Data space is allocated only when it is necessary.
    +

    +

    Dimensionality

    An array has a fixed dimensionality. This field + specifies the number of dimension size fields later in the message. + The value stored for chunked storage is 1 greater than the number + of dimensions in the dataset’s dataspace. For example, 2 is + stored for a 1 dimensional dataset.

    Layout Class

    The layout class specifies the type of storage for + the data and how the other fields of the layout message are to be + interpreted.

    + + + + + + + + + + + + + + + + + + + + +
    ValueDescription
    0Compact Storage
    1Contiguous Storage
    2Chunked Storage
    +

    Data Address

    For contiguous storage, this is the address of the + raw data in the file. For chunked storage this is the address of + the v1 B-tree that is used to look up the addresses of the chunks. + This field is not present for compact storage. If the version for + this message is greater than 1, the address may have the + “undefined address” value, to indicate that storage has + not yet been allocated for this array.

    Dimension #n Size

    For contiguous and compact storage the dimensions + define the entire size of the array while for chunked storage they + define the size of a single chunk. In all cases, they are in units + of array elements (not bytes). The first dimension stored in the + list of dimensions is the slowest changing dimension and the last + dimension stored is the fastest changing dimension.

    Dataset Element Size

    The size of a dataset element, in bytes. This field + is only present for chunked storage.

    Compact Data Size

    This field is only present for compact data storage. + It contains the size of the raw data for the dataset array, in + bytes.

    Compact Data

    This field is only present for compact data storage. + It contains the raw data for the dataset array.

    +
    + +
    +

    Version 3 of this message re-structured the format into specific + properties that are required for each layout class.

    + + +
    + + + + + + + + + + + + + + + + + + + +
    + Data Layout Message (Version 3) +
    bytebytebytebyte
    VersionLayout ClassThis space inserted + only to align table nicely

    Properties (variable size)
    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Version

    +

    The version number information is used for changes in the + format of the layout message and is described here:

    + + + + + + + + + + +
    VersionDescription
    3Used by version 1.6.3 and later of the library to + store properties for each layout class.
    +

    +

    Layout Class

    The layout class specifies the type of storage for + the data and how the other fields of the layout message are to be + interpreted.

    + + + + + + + + + + + + + + + + + + + + +
    ValueDescription
    0Compact Storage
    1Contiguous Storage
    2Chunked Storage
    +

    Properties

    This variable-sized field encodes information + specific to each layout class and is described below. If there is + no property information specified for a layout class, the size of + this field is zero bytes.

    +
    + +
    +

    Class-specific information for compact layout (Class 0): (Note: + The dimensionality information is in the Dataspace message)

    + + +
    + + + + + + + + + + + + + + + + + + +
    Compact Storage Property Description
    bytebytebytebyte
    SizeThis space inserted + only to align table nicely

    Raw Data... (variable size)
    +
    +
    + +
    +
    + + + + + + + + + + + + + + + +
    Field NameDescription

    Size

    This field contains the size of the raw data for the + dataset array, in bytes.

    Raw Data

    This field contains the raw data for the dataset + array.

    +
    + + +
    +

    Class-specific information for contiguous layout (Class 1): + (Note: The dimensionality information is in the Dataspace message)

    + + +
    + + + + + + + + + + + + + + + + + +
    Contiguous Storage Property Description
    bytebytebytebyte

    AddressO
    +

    SizeL
    +
    + + + + + + + + + + +
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
     (Items marked with an ‘L’ in the above table are + of the size specified in “Size of Lengths” field in the + superblock.)
    + +
    + +
    +
    + + + + + + + + + + + + + + + +
    Field NameDescription

    Address

    This is the address of the raw data in the file. The + address may have the “undefined address” value, to + indicate that storage has not yet been allocated for this array.

    Size

    This field contains the size allocated to store the + raw data, in bytes.

    +
    + + +
    +

    Class-specific information for chunked layout (Class 2):

    + + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Chunked Storage Property Description
    bytebytebytebyte
    DimensionalityThis space inserted + only to align table nicely

    AddressO
    +
    Dimension 0 Size
    Dimension 1 Size
    ...
    Dimension #n Size
    Dataset Element Size
    + + + + + + +
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
    + +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Dimensionality

    A chunk has a fixed dimensionality. This field + specifies the number of dimension size fields later in the message.

    Address

    This is the address of the v1 B-tree that is used to + look up the addresses of the chunks that actually store portions of + the array data. The address may have the “undefined + address” value, to indicate that storage has not yet been + allocated for this array.

    Dimension #n Size

    These values define the dimension size of a single + chunk, in units of array elements (not bytes). The first dimension + stored in the list of dimensions is the slowest changing dimension + and the last dimension stored is the fastest changing dimension.

    Dataset Element Size

    The size of a dataset element, in bytes.

    +
    + +
    +

    + IV.A.2.j. The Bogus Message +

    + + +
    + + + + + + + + + + + + + + + + + + + + +
    Header Message Name: Bogus
    Header Message Type: 0x0009
    Length: 4 bytes
    Status: For testing only; should never be + stored in a valid file.
    Description:This message is used for testing the HDF5 Library’s + response to an “unknown” message type and should never + be encountered in a valid HDF5 file.
    Format of Data: See the tables below.
    +
    + + +
    + + + + + + + + + + + + + +
    Bogus Message
    bytebytebytebyte
    Bogus Value
    +
    + +
    +
    + + + + + + + + + + +
    Field NameDescription

    Bogus Value

    +

    + This value should always be: + 0xdeadbeef + . +

    +
    +
    + +
    +

    + IV.A.2.k. The Group Info Message +

    + + +
    + + + + + + + + + + + + + + + + + + + + +
    Header Message Name: Group Info
    Header Message Type: 0x000A
    Length: Varies
    Status: Optional; may not be repeated.
    Description:

    + This message stores information for the constants defining a + “new style” group’s behavior. Constant + information will be stored in this message and variable information + will be stored in the Link Info + message. +

    +

    Note: the “estimated entry” information below is + used when determining the size of the object header for the group + when it is created.

    Format of Data: See the tables below.
    +
    + + +
    + + + + + + + + + + + + + + + + + + + + + + + + +
    Group Info Message
    bytebytebytebyte
    VersionFlagsLink Phase Change: Maximum Compact Value (optional)
    Link Phase Change: Minimum Dense Value (optional)Estimated Number of Entries (optional)
    Estimated Link Name Length of Entries (optional)This space inserted + only to align table nicely
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Version

    The version number for this message. This document + describes version 0.

    Flags

    This is the group information flag with the following + definition:

    + + + + + + + + + + + + + + + + + + +
    BitDescription
    0If set, link phase change values are stored.
    1If set, the estimated entry information is non-default + and is stored.
    2-7Reserved
    +

    Link Phase Change: Maximum Compact Value

    This is the maximum number of links to store + “compactly” (in the group’s object header).

    +

    + This field is present if bit 0 of Flags is set. +

    Link Phase Change: Minimum Dense Value

    + This is the minimum number of links to store “densely” + (in the group’s fractal heap). The fractal heap’s + address is located in the Link Info + message. +

    +

    + This field is present if bit 0 of Flags is set. +

    Estimated Number of Entries

    This is the estimated number of entries in groups.

    +

    + If this field is not present, the default value of + 4 + will be used for the estimated number of group entries. +

    +

    + This field is present if bit 1 of Flags is set. +

    Estimated Link Name Length of Entries

    This is the estimated length of entry name.

    +

    + If this field is not present, the default value of + 8 + will be used for the estimated link name length of group entries. +

    +

    + This field is present if bit 1 of Flags is set. +

    +
    +

    + +
    +

    + IV.A.2.l. The Data Storage - Filter + Pipeline Message +

    + + +
    + + + + + + + + + + + + + + + + + + + + +
    Header Message Name: Data Storage - + Filter Pipeline
    Header Message Type: 0x000B
    Length: Varies
    Status: Optional; may not be repeated.
    Description:

    This message describes the filter pipeline which + should be applied to the data stream by providing filter + identification numbers, flags, a name, and client data.

    +

    This message may be present in the object headers of both + dataset and group objects. For datasets, it specifies the filters + to apply to raw data. For groups, it specifies the filters to apply + to the group’s fractal heap. Currently, only datasets using + chunked data storage use the filter pipeline on their raw data.

    Format of Data: See the tables below.
    +
    + + +
    + + + + + + + + + + + + + + + + + + + + + + + +
    Filter Pipeline Message - Version 1
    bytebytebytebyte
    VersionNumber of FiltersReserved (zero)
    Reserved (zero)

    Filter Description List (variable + size)
    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Version

    The version number for this message. This table + describes version 1.

    Number of Filters

    The total number of filters described in this + message. The maximum possible number of filters in a message is 32.

    Filter Description List

    A description of each filter. A filter description + appears in the next table.

    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Filter Description
    bytebytebytebyte
    Filter Identification ValueName Length
    FlagsNumber Client Data Values

    Name (variable size, optional)
    +

    Client Data (variable size, + optional)
    +
    Padding (variable size, optional)
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Filter Identification Value

    +

    + This value, often referred to as a filter identifier, is designed + to be a unique identifier for the filter. Values from zero through + 32,767 are reserved for filters supported by The HDF Group in the + HDF5 Library and for filters requested and supported by third + parties. Filters supported by The HDF Group are documented + immediately below. Information on 3rd-party filters can be found at + The HDF Group’s + Contributions page. +

    + +

    + To request a filter identifier, please contact The HDF + Group’s Help Desk at The HDF Group Help Desk. + You will be asked to provide the following information: +

    +
      +
    1. Contact information for the developer requesting the new + identifier
    2. +
    3. A short description of the new filter
    4. +
    5. Links to any relevant information, including licensing + information
    6. +
    +

    Values from 32768 to 65535 are reserved for non-distributed + uses (for example, internal company usage) or for application usage + when testing a feature. The HDF Group does not track or document + the use of the filters with identifiers from this range.

    + +

    The filters currently in library version 1.8.0 are listed + below:

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    IdentificationNameDescription
    0N/AReserved
    1deflateGZIP deflate compression
    2shuffleData element shuffling
    3fletcher32Fletcher32 checksum
    4szipSZIP compression
    5nbitN-bit packing
    6scaleoffsetScale and offset encoded values
    +

    +

    Name Length

    Each filter has an optional null-terminated ASCII + name and this field holds the length of the name including the null + termination padded with nulls to be a multiple of eight. If the + filter has no name then a value of zero is stored in this field.

    Flags

    The flags indicate certain properties for a filter. + The bit values defined so far are:

    + + + + + + + + + + + + + + + +
    BitDescription
    0If set then the filter is an optional filter. During + output, if an optional filter fails it will be silently skipped + in the pipeline.
    1-15Reserved (zero)
    +

    Number of Client Data Values

    + Each filter can store integer values to control how the filter + operates. The number of entries in the Client Data array + is stored in this field. +

    Name

    + If the Name Length field is non-zero then it will contain + the size of this field, padded to a multiple of eight. This field + contains a null-terminated, ASCII character string to serve as a + comment/name for the filter. +

    Client Data

    + This is an array of four-byte integers which will be passed to the + filter function. The Client Data Number of Values + determines the number of elements in the array. +

    Padding

    Four bytes of zeroes are added to the message at this + point if the Client Data Number of Values field contains an odd + number.

    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + +
    Filter Pipeline Message - Version 2
    bytebytebytebyte
    VersionNumber of FiltersThis space inserted + only to align table nicely

    Filter Description List (variable + size)
    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Version

    The version number for this message. This table + describes version 2.

    Number of Filters

    The total number of filters described in this + message. The maximum possible number of filters in a message is 32.

    Filter Description List

    A description of each filter. A filter description + appears in the next table.

    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Filter Description
    bytebytebytebyte
    Filter Identification ValueName Length (optional)
    FlagsNumber Client Data Values

    Name (variable size, optional)
    +

    Client Data (variable size, + optional)
    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Filter Identification Value

    +

    + This value, often referred to as a filter identifier, is designed + to be a unique identifier for the filter. Values from zero through + 32,767 are reserved for filters supported by The HDF Group in the + HDF5 Library and for filters requested and supported by third + parties. Filters supported by The HDF Group are documented + immediately below. Information on 3rd-party filters can be found at + The HDF Group’s + Contributions page. +

    + +

    + To request a filter identifier, please contact The HDF + Group’s Help Desk at The HDF Group Help Desk. + You will be asked to provide the following information: +

    +
      +
    1. Contact information for the developer requesting the new + identifier
    2. +
    3. A short description of the new filter
    4. +
    5. Links to any relevant information, including licensing + information
    6. +
    +

    Values from 32768 to 65535 are reserved for non-distributed + uses (for example, internal company usage) or for application usage + when testing a feature. The HDF Group does not track or document + the use of the filters with identifiers from this range.

    + +

    The filters currently in library version 1.8.0 are listed + below:

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    IdentificationNameDescription
    0N/AReserved
    1deflateGZIP deflate compression
    2shuffleData element shuffling
    3fletcher32Fletcher32 checksum
    4szipSZIP compression
    5nbitN-bit packing
    6scaleoffsetScale and offset encoded values
    +

    +

    Name Length

    Each filter has an optional null-terminated ASCII + name and this field holds the length of the name including the null + termination padded with nulls to be a multiple of eight. If the + filter has no name then a value of zero is stored in this field.

    +

    + Filters with IDs less than 256 (in other words, filters that are + defined in this format documentation) do not store the Name + Length or Name fields. +

    Flags

    The flags indicate certain properties for a filter. + The bit values defined so far are:

    + + + + + + + + + + + + + + + +
    BitDescription
    0If set then the filter is an optional filter. During + output, if an optional filter fails it will be silently skipped + in the pipeline.
    1-15Reserved (zero)
    +

    Number of Client Data Values

    + Each filter can store integer values to control how the filter + operates. The number of entries in the Client Data array + is stored in this field. +

    Name

    + If the Name Length field is non-zero then it will contain + the size of this field, not padded to a multiple of eight. + This field contains a non-null-terminated, ASCII character + string to serve as a comment/name for the filter. +

    +

    + Filters that are defined in this format documentation such as + deflate and shuffle do not store the Name Length or Name + fields. +

    Client Data

    + This is an array of four-byte integers which will be passed to the + filter function. The Client Data Number of Values + determines the number of elements in the array. +

    +
    + +
    +

    + IV.A.2.m. The Attribute Message +

    + + +
    + + + + + + + + + + + + + + + + + + + + +
    Header Message Name: Attribute
    Header Message Type: 0x000C
    Length: Varies
    Status: Optional; may be repeated.
    Description:

    + The Attribute message is used to store objects in the HDF5 + file which are used as attributes, or “metadata” about + the current object. An attribute is a small dataset; it has a name, + a datatype, a dataspace, and raw data. Since attributes are stored + in the object header, they should be relatively small (in other + words, less than 64KB). They can be associated with any type of + object which has an object header (groups, datasets, or committed + (named) datatypes). +

    +

    + In 1.8.x versions of the library, attributes can be larger than + 64KB. See the + “Special Issues” section of the Attributes chapter in + the HDF5 User Guide for more information. +

    +

    Note: Attributes on an object must have unique names: the + HDF5 Library currently enforces this by causing the creation of an + attribute with a duplicate name to fail. Attributes on different + objects may have the same name, however.

    Format of Data: See the tables below.
    +
    + + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Attribute Message (Version 1)
    bytebytebytebyte
    VersionReserved (zero)Name Size
    Datatype SizeDataspace Size

    Name (variable size)
    +

    Datatype (variable size)
    +

    Dataspace (variable size)
    +

    Data (variable size)
    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Version

    The version number information is used for changes in + the format of the attribute message and is described here:

    + + + + + + + + + + + + + + + +
    VersionDescription
    0Never used.
    1Used by the library before version 1.6 to encode + attribute messages. This version does not support shared + datatypes.
    +

    Name Size

    + The length of the attribute name in bytes including the null + terminator. Note that the Name field below may contain + additional padding not represented by this field. +

    Datatype Size

    + The length of the datatype description in the Datatype + field below. Note that the Datatype field may contain + additional padding not represented by this field. +

    Dataspace Size

    + The length of the dataspace description in the Dataspace + field below. Note that the Dataspace field may contain + additional padding not represented by this field. +

    Name

    The null-terminated attribute name. This field is + padded with additional null characters to make it a multiple of + eight bytes.

    Datatype

    The datatype description follows the same format as + described for the datatype object header message. This field is + padded with additional zero bytes to make it a multiple of eight + bytes.

    Dataspace

    The dataspace description follows the same format as + described for the dataspace object header message. This field is + padded with additional zero bytes to make it a multiple of eight + bytes.

    Data

    + The raw data for the attribute. The size is determined from the + datatype and dataspace descriptions. This field is not + padded with additional bytes. +

    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Attribute Message (Version 2)
    bytebytebytebyte
    VersionFlagsName Size
    Datatype SizeDataspace Size

    Name (variable size)
    +

    Datatype (variable size)
    +

    Dataspace (variable size)
    +

    Data (variable size)
    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Version

    The version number information is used for changes in + the format of the attribute message and is described here:

    + + + + + + + + + + +
    VersionDescription
    2Used by versions 1.6.x and later of the library to encode + attribute messages. This version supports shared datatypes. The + fields of name, datatype, and dataspace are not padded with + additional bytes of zero.
    +

    Flags

    This bit field contains extra information about + interpreting the attribute message:

    + + + + + + + + + + + + + + + +
    BitDescription
    0If set, datatype is shared.
    1If set, dataspace is shared.
    +

    Name Size

    The length of the attribute name in bytes including + the null terminator.

    Datatype Size

    + The length of the datatype description in the Datatype + field below. +

    Dataspace Size

    + The length of the dataspace description in the Dataspace + field below. +

    Name

    + The null-terminated attribute name. This field is not + padded with additional bytes. +

    Datatype

    The datatype description follows the same format as + described for the datatype object header message.

    +

    + If the Flag field indicates this attribute’s + datatype is shared, this field will contain a “shared + message” encoding instead of the datatype encoding. +

    +

    + This field is not padded with additional bytes. +

    Dataspace

    The dataspace description follows the same format as + described for the dataspace object header message.

    +

    + If the Flag field indicates this attribute’s + dataspace is shared, this field will contain a “shared + message” encoding instead of the dataspace encoding. +

    +

    + This field is not padded with additional bytes. +

    Data

    The raw data for the attribute. The size is + determined from the datatype and dataspace descriptions.

    +

    + This field is not padded with additional zero bytes. +

    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Attribute Message (Version 3)
    bytebytebytebyte
    VersionFlagsName Size
    Datatype SizeDataspace Size
    Name Character Set EncodingThis space inserted + only to align table nicely

    Name (variable size)
    +

    Datatype (variable size)
    +

    Dataspace (variable size)
    +

    Data (variable size)
    +
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Version

    The version number information is used for changes in + the format of the attribute message and is described here:

    + + + + + + + + + + +
    VersionDescription
    3Used by versions 1.8.x and later of the library to encode + attribute messages. This version supports attributes with + non-ASCII names.
    +

    Flags

    This bit field contains extra information about + interpreting the attribute message:

    + + + + + + + + + + + + + + + +
    BitDescription
    0If set, datatype is shared.
    1If set, dataspace is shared.
    +

    Name Size

    The length of the attribute name in bytes including + the null terminator.

    Datatype Size

    + The length of the datatype description in the Datatype + field below. +

    Dataspace Size

    + The length of the dataspace description in the Dataspace + field below. +

    Name Character Set Encoding

    The character set encoding for the attribute’s + name:

    + + + + + + + + + + + + + + + +
    ValueDescription
    0ASCII character set encoding
    1UTF-8 character set encoding
    +

    Name

    + The null-terminated attribute name. This field is not + padded with additional bytes. +

    Datatype

    The datatype description follows the same format as + described for the datatype object header message.

    +

    + If the Flag field indicates this attribute’s + datatype is shared, this field will contain a “shared + message” encoding instead of the datatype encoding. +

    +

    + This field is not padded with additional bytes. +

    Dataspace

    The dataspace description follows the same format as + described for the dataspace object header message.

    +

    + If the Flag field indicates this attribute’s + dataspace is shared, this field will contain a “shared + message” encoding instead of the dataspace encoding. +

    +

    + This field is not padded with additional bytes. +

    Data

    The raw data for the attribute. The size is + determined from the datatype and dataspace descriptions.

    +

    + This field is not padded with additional zero bytes. +

    +
    + +
    +

    + IV.A.2.n. The Object Comment Message +

    + + +
    + + + + + + + + + + + + + + + + + + + + +
    Header Message Name: Object Comment
    Header Message Type: 0x000D
    Length: Varies
    Status: Optional; may not be repeated.
    Description:The object comment is designed to be a short description of + an object. An object comment is a sequence of non-zero (\0) + ASCII characters with no other formatting included by the library. +
    Format of Data: See the tables below.
    +
    + + +
    + + + + + + + + + + + + + +
    Name Message
    bytebytebytebyte

    Comment (variable size)
    +
    +
    + +
    +
    + + + + + + + + + + +
    Field NameDescription

    Name

    A null terminated ASCII character string.

    +
    + +
    +

    + IV.A.2.o. The Object + Modification Time (Old) Message +

    + + +
    + + + + + + + + + + + + + + + + + + + + +
    Header Message Name: Object Modification + Time (Old)
    Header Message Type: 0x000E
    Length: Fixed
    Status: Optional; may not be repeated.
    Description:

    The object modification date and time is a timestamp + which indicates (using ISO-8601 date and time format) the last + modification of an object. The time is updated when any object + header message changes according to the system clock where the + change was posted. All fields of this message should be interpreted + as coordinated universal time (UTC).

    +

    + This modification time message is deprecated in favor of the + “new” Object + Modification Time message and is no longer written to the file in + versions of the HDF5 Library after the 1.6.0 version. +

    Format of Data: See the tables below.
    +
    + + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Modification Time Message
    bytebytebytebyte
    Year
    MonthDay of Month
    HourMinute
    SecondReserved
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Year

    + The four-digit year as an ASCII string. For example, + 1998 + . +

    Month

    + The month number as a two digit ASCII string where January is + 01 + and December is + 12 + . +

    Day of Month

    + The day number within the month as a two digit ASCII string. The + first day of the month is + 01 + . +

    Hour

    + The hour of the day as a two digit ASCII string where midnight is + 00 + and 11:00pm is + 23 + . +

    Minute

    + The minute of the hour as a two digit ASCII string where the first + minute of the hour is + 00 + and the last is + 59 + . +

    Second

    + The second of the minute as a two digit ASCII string where the + first second of the minute is + 00 + and the last is + 59 + . +

    Reserved

    This field is reserved and should always be zero.

    +
    + +
    +

    + IV.A.2.p. The Shared Message Table + Message +

    + + +
    + + + + + + + + + + + + + + + + + + + + +
    Header Message Name: Shared Message Table
    Header Message Type: 0x000F
    Length: Fixed
    Status: Optional; may not be repeated.
    Description:This message is used to locate the table of shared object + header message (SOHM) indexes. Each index consists of information to + find the shared messages from either the heap or object header. This + message is only found in the superblock extension. +
    Format of Data: See the tables below.
    +
    + + +
    + + + + + + + + + + + + + + + + + + + + + + + + +
    Shared Message Table Message
    bytebytebytebyte
    VersionThis space inserted + only to align table nicely

    Shared Object Header Message Table + AddressO
    +
    Number of IndicesThis space inserted + only to align table nicely
    + + + + + + +
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
    + +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Version

    The version number for this message. This document + describes version 0.

    Shared Object Header Message Table Address

    This field is the address of the master table for + shared object header message indexes.

    Number of Indices

    This field is the number of indices in the master + table.

    +
    + +
    +

    + IV.A.2.q. The Object Header + Continuation Message +

    + + +
    + + + + + + + + + + + + + + + + + + + + +
    Header Message Name: Object Header + Continuation
    Header Message Type: 0x0010
    Length: Fixed
    Status: Optional; may be repeated.
    Description:The object header continuation is the location in the file + of a block containing more header messages for the current data + object. This can be used when header blocks become too large or are + likely to change over time.
    Format of Data: See the tables below.
    +
    + + +
    + + + + + + + + + + + + + + + + + +
    Object Header Continuation Message
    bytebytebytebyte

    OffsetO
    +

    LengthL
    +
    + + + + + + + + + + +
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
     (Items marked with an ‘L’ in the above table are + of the size specified in “Size of Lengths” field in the + superblock.)
    + +
    + +
    +
    + + + + + + + + + + + + + + + +
    Field NameDescription

    Offset

    This value is the address in the file where the + header continuation block is located.

    Length

    This value is the length in bytes of the header + continuation block in the file.

    +
    +
    + +

    The format of the header continuation block that this message + points to depends on the version of the object header that the message + is contained within.

    + +

    + Continuation blocks for version 1 object headers have no special + formatting information; they are merely a list of object header message + info sequences (type, size, flags, reserved bytes and data for each + message sequence). See the description of Version 1 Data Object Header Prefix. +

    + +

    + Continuation blocks for version 2 object headers do have + special formatting information as described here (see also the + description of Version 2 Data + Object Header Prefix.): +

    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Version 2 Object Header Continuation Block
    bytebytebytebyte
    Signature
    Header Message Type #1Size of Header Message Data #1Header Message #1 Flags
    Header Message #1 Creation Order (optional)This space inserted + only to align table nicely

    Header Message Data #1
    +
    .
    .
    .
    Header Message Type #nSize of Header Message Data #nHeader Message #n Flags
    Header Message #n Creation Order (optional)This space inserted + only to align table nicely

    Header Message Data #n
    +
    Gap (optional, variable size)
    Checksum
    +
    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Signature

    +

    + The ASCII character string “ + OCHK + ” is used to indicate the beginning of an object header + continuation block. This gives file consistency checking utilities + a better chance of reconstructing a damaged file. +

    +

    Header Message #n Type

    +

    Same format as version 1 of the object header, described + above.

    +

    Size of Header Message #n Data

    +

    Same format as version 1 of the object header, described + above.

    +

    Header Message #n Flags

    +

    Same format as version 1 of the object header, described + above.

    +

    Header Message #n Creation Order

    +

    This field stores the order that a message of a given type + was created in.

    +

    + This field is present if bit 2 of flags is set. +

    +

    Header Message #n Data

    +

    Same format as version 1 of the object header, described + above.

    +

    Gap

    +

    A gap in an object header chunk is inferred by the end of the + messages for the chunk before the beginning of the chunk’s + checksum. Gaps are always smaller than the size of an object header + message prefix (message type + message size + message flags).

    +

    Gaps are formed when a message (typically an attribute + message) in an earlier chunk is deleted and a message from a later + chunk that does not quite fit into the free space is moved into the + earlier chunk.

    +

    Checksum

    +

    This is the checksum for the object header chunk.

    +
    +
    + +
    +

    + IV.A.2.r. The Symbol Table Message +

    + + +
    + + + + + + + + + + + + + + + + + + + + +
    Header Message Name: Symbol Table Message
    Header Message Type: 0x0011
    Length: Fixed
    Status: Required for “old + style” groups; may not be repeated.
    Description:Each “old style” group has a v1 B-tree and a + local heap for storing symbol table entries, which are located with + this message.
    Format of data: See the tables below.
    +
    + +
    + + -
    -

    Class-specific information for chunked layout (Class 2):

    - - -
    -
    + Symbol Table Message +
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - Chunked Storage Property Description -
    bytebytebytebyte
    DimensionalityThis space inserted only to align table nicely

    AddressO

    Dimension 0 Size
    Dimension 1 Size
    ...
    Dimension #n Size
    Dataset Element Size
    + + byte + byte + byte + byte + - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
    +
    v1 B-tree AddressO
    +
    + -
    + +
    Local Heap AddressO
    +
    + + -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - -
    Field NameDescription

    Dimensionality

    A chunk has a fixed dimensionality. This field specifies - the number of dimension size fields later in the message.

    Address

    This is the address of the v1 B-tree that is used to look up the - addresses of the chunks that actually store portions of the array - data. The address may have the “undefined address” value, to - indicate that storage has not yet been allocated for this array.

    Dimension #n Size

    These values define the dimension size of a single chunk, in - units of array elements (not bytes). The first dimension stored in - the list of dimensions is the slowest changing dimension and the - last dimension stored is the fastest changing dimension. -

    -

    Dataset Element Size

    The size of a dataset element, in bytes. -

    -
    -
    + + + + + +
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
    + +

    -

    IV.A.2.j. The Bogus Message

    +
    + + + + + - -
    +
    + + + + + + + + +
    Field NameDescription

    v1 B-tree Address

    This value is the address of the v1 B-tree containing + the symbol table entries for the group.

    Local Heap Address

    This value is the address of the local heap + containing the link names for the symbol table entries for the + group.

    +
    + +
    +

    + IV.A.2.s. The Object Modification + Time Message +

    + + +
    - - - - - - - -
    Header Message Name: Bogus
    Header Message Type: 0x0009
    Length: 4 bytes
    Status: For testing only; should never - be stored in a valid file.
    Description:This message is used for testing the HDF5 Library’s - response to an “unknown” message type and should - never be encountered in a valid HDF5 file.
    Format of Data: See the tables - below.
    - - -
    - - - - - - - - - - - - - -
    - Bogus Message -
    bytebytebytebyte
    Bogus Value
    -
    + + Header Message Name: Object Modification + Time + + + Header Message Type: 0x0012 + + + Length: Fixed + + + Status: Optional; may not be repeated. + + + Description: + The object modification time is a timestamp which indicates + the time of the last modification of an object. The time is updated + when any object header message changes according to the system clock + where the change was posted. + + + Format of Data: See the tables below. + + + + -
    -
    - - - - - - - - - - -
    Field NameDescription

    Bogus Value

    -

    This value should always be: 0xdeadbeef.

    -
    -
    +
    + + + + + + + + + + + + + + + + + + +
    Modification Time Message
    bytebytebytebyte
    VersionReserved (zero)
    Seconds After UNIX Epoch
    +

    -

    IV.A.2.k. The Group Info Message -

    +
    + + + + + - -
    +
    + + + + + + + + +
    Field NameDescription

    Version

    The version number is used for changes in the format + of Object Modification Time and is described here:

    + + + + + + + + + + + + + + + +
    VersionDescription
    0Never used.
    1Used by Version 1.6.1 and after of the library to encode + time. In this version, the time is the seconds after Epoch.
    +

    Seconds After UNIX Epoch

    A 32-bit unsigned integer value that stores the + number of seconds since 0 hours, 0 minutes, 0 seconds, January 1, + 1970, Coordinated Universal Time.

    +
    + +
    +

    + IV.A.2.t. The B-tree ‘K’ + Values Message +

    + + +
    - - - - - - - -
    Header Message Name: Group Info
    Header Message Type: 0x000A
    Length: Varies
    Status: Optional; may not be - repeated.
    Description:

    This message stores information for the constants defining - a “new style” group’s behavior. Constant - information will be stored in this message and variable - information will be stored in the - Link Info message.

    -

    Note: the “estimated entry” information below is - used when determining the size of the object header for the - group when it is created.

    Format of Data: See the tables - below.
    - - -
    - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + +
    - Group Info Message -
    bytebytebytebyte
    VersionFlagsLink Phase Change: Maximum Compact Value (optional)
    Link Phase Change: Minimum Dense Value (optional)Estimated Number of Entries (optional)
    Estimated Link Name Length of Entries (optional)This space inserted only to align table nicely
    Header Message Name: B-tree + ‘K’ Values
    Header Message Type: 0x0013
    Length: Fixed
    Status: Optional; may not be repeated.
    Description:This message retrieves non-default ‘K’ values + for internal and leaf nodes of a group or indexed storage v1 + B-trees. This message is only found in the superblock + extension. +
    Format of Data: See the tables below.
    + + - -
    +
    + + -
    -
    -
    B-tree ‘K’ Values Message
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + -
    Field NameDescription

    Version

    The version number for this message. This document describes version 0.

    -

    Flags

    This is the group information flag with the following definition: - - - - - - - - - - - - - - - - - - - -
    BitDescription
    0If set, link phase change values are stored. -
    1If set, the estimated entry information is non-default - and is stored. -
    2-7Reserved

    -

    Link Phase Change: Maximum Compact Value

    This is the maximum number of links to store “compactly” (in - the group’s object header).

    -

    This field is present if bit 0 of Flags is set.

    -

    Link Phase Change: Minimum Dense Value

    This is the minimum number of links to store “densely” (in - the group’s fractal heap). The fractal heap’s address is - located in the Link Info - message.

    -

    This field is present if bit 0 of Flags is set.

    -

    Estimated Number of Entries

    This is the estimated number of entries in groups.

    -

    If this field is not present, the default value of 4 - will be used for the estimated number of group entries.

    -

    This field is present if bit 1 of Flags is set.

    -

    Estimated Link Name Length of Entries

    This is the estimated length of entry name.

    -

    If this field is not present, the default value of 8 - will be used for the estimated link name length of group entries.

    -

    This field is present if bit 1 of Flags is set.

    -
    bytebytebytebyte
    -
    -

    + + Version + Indexed Storage Internal Node K + This space inserted only to align + table nicely + + + + Group Internal Node K + Group Leaf Node K + + +
    -

    IV.A.2.l. The Data Storage - Filter -Pipeline Message

    +
    + + + + + - -
    +
    + + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Version

    The version number for this message. This document + describes version 0.

    Indexed Storage Internal Node K

    This is the node ‘K’ value for each + internal node of an indexed storage v1 B-tree. See the description + of this field in version 0 and 1 of the superblock as well as the + section on v1 B-trees.

    Group Internal Node K

    This is the node ‘K’ value for each + internal node of a group v1 B-tree. See the description of this + field in version 0 and 1 of the superblock as well as the section + on v1 B-trees.

    Group Leaf Node K

    This is the node ‘K’ value for each leaf + node of a group v1 B-tree. See the description of this field in + version 0 and 1 of the superblock as well as the section on v1 + B-trees.

    +
    + +
    +

    + IV.A.2.u. The Driver Info Message +

    + + +
    - - - - - - - -
    Header Message Name: - Data Storage - Filter Pipeline
    Header Message Type: 0x000B
    Length: Varies
    Status: Optional; may not be - repeated.
    Description:

    This message describes the filter pipeline which should - be applied to the data stream by providing filter identification - numbers, flags, a name, and client data.

    -

    This message may be present in the object headers of both - dataset and group objects. For datasets, it specifies the - filters to apply to raw data. For groups, it specifies the - filters to apply to the group’s fractal heap. Currently, - only datasets using chunked data storage use the filter - pipeline on their raw data.

    Format of Data: See the tables - below.
    - - -
    - - - - - - - - - - - - - - - - - - - - - - - -
    - Filter Pipeline Message - Version 1 -
    bytebytebytebyte
    VersionNumber of FiltersReserved (zero)
    Reserved (zero)

    Filter Description List (variable size)

    -
    + + Header Message Name: Driver Info + + + Header Message Type: 0x0014 + + + Length: Varies + + + Status: Optional; may not be repeated. + -
    -
    - - - - - - - - - - - - - - - - - - - - -
    Field NameDescription

    Version

    The version number for this message. This table - describes version 1.

    Number of Filters

    The total number of filters described in this - message. The maximum possible number of filters in a - message is 32.

    Filter Description List

    A description of each filter. A filter description - appears in the next table.

    -
    + + Description: + This message contains information needed by the file driver + to reopen a file. This message is only found in the + superblock extension: see the + “Disk Format: Level 0C - Superblock Extension” section + for more information. For more information on the fields in the + driver info message, see the “Disk + Format : Level 0B - File Driver Info” section; those who use + the multi and family file drivers will find this section + particularly helpful. + + + + Format of Data: See the tables below. + + + + -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - Filter Description -
    bytebytebytebyte
    Filter Identification ValueName Length
    FlagsNumber of Client Data Values

    Name (variable size, optional)


    Client Data (variable size, optional)

    Padding (variable size, optional)
    -
    +
    + + -
    -
    -
    Driver Info Message
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Field NameDescription

    Filter Identification Value

    -

    - This value, often referred to as a filter identifier, - is designed to be a unique identifier for the filter. - Values from zero through 32,767 are reserved for filters - supported by The HDF Group in the HDF5 Library and for - filters requested and supported by third parties. - Filters supported by The HDF Group are documented immediately - below. Information on 3rd-party filters can be found at - The HDF Group’s - - Contributions page.

    - -

    - To request a filter identifier, please contact - The HDF Group’s Help Desk at - The HDF Group Help Desk. - You will be asked to provide the following information:

    -
      -
    1. Contact information for the developer requesting the - new identifier
    2. -
    3. A short description of the new filter
    4. -
    5. Links to any relevant information, including licensing - information
    6. -
    -

    - Values from 32768 to 65535 are reserved for non-distributed uses - (for example, internal company usage) or for application usage - when testing a feature. The HDF Group does not track or document - the use of the filters with identifiers from this range.

    - -

    - The filters currently in library version 1.8.0 are - listed below: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    IdentificationNameDescription
    0N/AReserved
    1deflateGZIP deflate compression
    2shuffleData element shuffling
    3fletcher32Fletcher32 checksum
    4szipSZIP compression
    5nbitN-bit packing
    6scaleoffsetScale and offset encoded values
    -

    Name Length

    Each filter has an optional null-terminated ASCII name - and this field holds the length of the name including the - null termination padded with nulls to be a multiple of - eight. If the filter has no name then a value of zero is - stored in this field.

    Flags

    The flags indicate certain properties for a filter. The - bit values defined so far are: - - - - - - - - - - - - - - - -
    BitDescription
    0If set then the filter is an optional filter. - During output, if an optional filter fails it will be - silently skipped in the pipeline.
    1-15Reserved (zero)

    -

    Number of Client Data Values

    Each filter can store integer values to control - how the filter operates. The number of entries in the - Client Data array is stored in this field.

    Name

    If the Name Length field is non-zero then it will - contain the size of this field, padded to a multiple of eight. This - field contains a null-terminated, ASCII character - string to serve as a comment/name for the filter.

    Client Data

    This is an array of four-byte integers which will be - passed to the filter function. The Client Data Number of - Values determines the number of elements in the array.

    Padding

    Four bytes of zeroes are added to the message at this - point if the Client Data Number of Values field contains - an odd number.

    -
    + + byte + byte + byte + byte + -
    -
    - - - - - - - - - - - - - - - - - - - -
    - Filter Pipeline Message - Version 2 -
    bytebytebytebyte
    VersionNumber of FiltersThis space inserted only to align table nicely

    Filter Description List (variable size)

    -
    + + Version + This space inserted + only to align table nicely + + +
    Driver Identification + -
    -
    - - - - - - - - - - - - - - - - - - - - -
    Field NameDescription

    Version

    The version number for this message. This table - describes version 2.

    Number of Filters

    The total number of filters described in this - message. The maximum possible number of filters in a - message is 32.

    Filter Description List

    A description of each filter. A filter description - appears in the next table.

    -
    + + Driver Information Size + This space inserted + only to align table nicely + -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - Filter Description -
    bytebytebytebyte
    Filter Identification ValueName Length (optional)
    FlagsNumber of Client Data Values

    Name (variable size, optional)


    Client Data (variable size, optional)

    -
    + +
    +
    Driver Information (variable size)
    +
    +
    + -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Field NameDescription

    Filter Identification Value

    -

    - This value, often referred to as a filter identifier, - is designed to be a unique identifier for the filter. - Values from zero through 32,767 are reserved for filters - supported by The HDF Group in the HDF5 Library and for - filters requested and supported by third parties. - Filters supported by The HDF Group are documented immediately - below. Information on 3rd-party filters can be found at - The HDF Group’s - - Contributions page.

    - -

    - To request a filter identifier, please contact - The HDF Group’s Help Desk at - The HDF Group Help Desk. - You will be asked to provide the following information:

    -
      -
    1. Contact information for the developer requesting the - new identifier
    2. -
    3. A short description of the new filter
    4. -
    5. Links to any relevant information, including licensing - information
    6. -
    -

    - Values from 32768 to 65535 are reserved for non-distributed uses - (for example, internal company usage) or for application usage - when testing a feature. The HDF Group does not track or document - the use of the filters with identifiers from this range.

    - -

    - The filters currently in library version 1.8.0 are - listed below: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    IdentificationNameDescription
    0N/AReserved
    1deflateGZIP deflate compression
    2shuffleData element shuffling
    3fletcher32Fletcher32 checksum
    4szipSZIP compression
    5nbitN-bit packing
    6scaleoffsetScale and offset encoded values
    -

    Name Length

    Each filter has an optional null-terminated ASCII name - and this field holds the length of the name including the - null termination padded with nulls to be a multiple of - eight. If the filter has no name then a value of zero is - stored in this field.

    -

    Filters with IDs less than 256 (in other words, filters - that are defined in this format documentation) do not store - the Name Length or Name fields. -

    -

    Flags

    The flags indicate certain properties for a filter. The - bit values defined so far are: - - - - - - - - - - - - - - - -
    BitDescription
    0If set then the filter is an optional filter. - During output, if an optional filter fails it will be - silently skipped in the pipeline.
    1-15Reserved (zero)

    -

    Number of Client Data Values

    Each filter can store integer values to control - how the filter operates. The number of entries in the - Client Data array is stored in this field.

    Name

    If the Name Length field is non-zero then it will - contain the size of this field, not padded to a multiple - of eight. This field contains a non-null-terminated, - ASCII character string to serve as a comment/name for the filter. -

    -

    Filters that are defined in this format documentation - such as deflate and shuffle do not store the Name - Length or Name fields. -

    -

    Client Data

    This is an array of four-byte integers which will be - passed to the filter function. The Client Data Number of - Values determines the number of elements in the array.

    -
    -
    + +
    -

    IV.A.2.m. The Attribute Message

    +
    + + + + + - -
    +
    + + + + + + + + + + + + + + + + + + +
    Field NameDescription

    Version

    The version number for this message. This document + describes version 0.

    Driver Identification

    This is an eight-byte ASCII string without null + termination which identifies the driver.

    Driver Information Size

    + The size in bytes of the Driver Information field of this + message. +

    Driver Information

    Driver information is stored in a format defined by + the file driver.

    +
    + +
    +

    + IV.A.2.v. The Attribute Info Message +

    + + +
    - - - - - - - -
    Header Message Name: Attribute
    Header Message Type: 0x000C
    Length: Varies
    Status: Optional; may be - repeated.
    Description:

    The Attribute message is used to store objects - in the HDF5 file which are used as attributes, or - “metadata” about the current object. An attribute - is a small dataset; it has a name, a datatype, a dataspace, and - raw data. Since attributes are stored in the object header, they - should be relatively small (in other words, less than 64KB). - They can be associated with any type of object which has an - object header (groups, datasets, or committed (named) - datatypes).

    -

    In 1.8.x versions of the library, attributes can be larger - than 64KB. See the - - “Special Issues” section of the Attributes chapter - in the HDF5 User Guide for more information.

    -

    Note: Attributes on an object must have unique names: - the HDF5 Library currently enforces this by causing the - creation of an attribute with a duplicate name to fail. - Attributes on different objects may have the same name, - however.

    Format of Data: See the tables - below.
    - - -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - Attribute Message (Version 1) -
    bytebytebytebyte
    VersionReserved (zero)Name Size
    Datatype SizeDataspace Size

    Name (variable size)


    Datatype (variable size)


    Dataspace (variable size)


    Data (variable size)

    -
    + + Header Message Name: Attribute Info + + + Header Message Type: 0x0015 + + + Length: Varies + + + Status: Optional; may not be repeated. + + + Description: + This message stores information about the attributes on an + object, such as the maximum creation index for the attributes + created and the location of the attribute storage when the + attributes are stored “densely”. + + + Format of Data: See the tables below. + + + + -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Field NameDescription

    Version

    The version number information is used for changes in the format of the - attribute message and is described here: - - - - - - - - - - - - - - - -
    VersionDescription
    0Never used.
    1Used by the library before version 1.6 to encode attribute message. - This version does not support shared datatypes.

    -

    Name Size

    The length of the attribute name in bytes including the - null terminator. Note that the Name field below may - contain additional padding not represented by this - field.

    Datatype Size

    The length of the datatype description in the Datatype - field below. Note that the Datatype field may contain - additional padding not represented by this field.

    Dataspace Size

    The length of the dataspace description in the Dataspace - field below. Note that the Dataspace field may contain - additional padding not represented by this field.

    Name

    The null-terminated attribute name. This field is - padded with additional null characters to make it a - multiple of eight bytes.

    Datatype

    The datatype description follows the same format as - described for the datatype object header message. This - field is padded with additional zero bytes to make it a - multiple of eight bytes.

    Dataspace

    The dataspace description follows the same format as - described for the dataspace object header message. This - field is padded with additional zero bytes to make it a - multiple of eight bytes.

    Data

    The raw data for the attribute. The size is determined - from the datatype and dataspace descriptions. This - field is not padded with additional bytes.

    -
    +
    + + -
    -
    -
    Attribute Info Message
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - Attribute Message (Version 2) -
    bytebytebytebyte
    VersionFlagsName Size
    Datatype SizeDataspace Size

    Name (variable size)


    Datatype (variable size)


    Dataspace (variable size)


    Data (variable size)

    -
    + + byte + byte + byte + byte + -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Field NameDescription

    Version

    The version number information is used for changes in the - format of the attribute message and is described here: - - - - - - - - - - -
    VersionDescription
    2Used by the library of version 1.6.x and after to encode - attribute messages. - This version supports shared datatypes. The fields of - name, datatype, and dataspace are not padded with - additional bytes of zero. -

    -

    Flags

    This bit field contains extra information about - interpreting the attribute message: - - - - - - - - - - - - - - - - -
    BitDescription
    0If set, datatype is shared.
    1If set, dataspace is shared.

    -

    Name Size

    The length of the attribute name in bytes including the - null terminator.

    Datatype Size

    The length of the datatype description in the Datatype - field below.

    Dataspace Size

    The length of the dataspace description in the Dataspace - field below.

    Name

    The null-terminated attribute name. This field is not - padded with additional bytes.

    Datatype

    The datatype description follows the same format as - described for the datatype object header message. -

    -

    If the - Flag field indicates this attribute’s datatype is - shared, this field will contain a “shared message” encoding - instead of the datatype encoding. -

    -

    This field is not padded with additional bytes. -

    -

    Dataspace

    The dataspace description follows the same format as - described for the dataspace object header message. -

    -

    If the - Flag field indicates this attribute’s dataspace is - shared, this field will contain a “shared message” encoding - instead of the dataspace encoding. -

    -

    This field is not padded with additional bytes.

    -

    Data

    The raw data for the attribute. The size is determined - from the datatype and dataspace descriptions. -

    -

    This field is not padded with additional zero bytes. -

    -
    -
    + + Version + Flags + Maximum Creation Index (optional) + + +
    Fractal Heap AddressO
    +
    + + +
    Attribute Name v2 B-tree AddressO
    +
    + + +
    Attribute Creation Order v2 B-tree + AddressO (optional)
    +
    + -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - Attribute Message (Version 3) -
    bytebytebytebyte
    VersionFlagsName Size
    Datatype SizeDataspace Size
    Name Character Set EncodingThis space inserted only to align table nicely

    Name (variable size)


    Datatype (variable size)


    Dataspace (variable size)


    Data (variable size)

    -
    + -
    -
    - - - - - - - - - + + + + + + + + + + + + + + + + +
    Field NameDescription

    Version

    The version number information is used for changes in the - format of the attribute message and is described here: + + + + + +
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
    + + + +
    +

    + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Field NameDescription

    Version

    The version number for this message. This document + describes version 0.

    Flags

    This is the attribute index information flag with the + following definition:

    - - - - - - - - - -
    VersionDescription
    3Used by the library of version 1.8.x and after to - encode attribute messages. - This version supports attributes with non-ASCII names. -

    -

    Flags

    This bit field contains extra information about - interpreting the attribute message: - - - - - - - - - - - - - - - - -
    BitDescription
    0If set, datatype is shared.
    1If set, dataspace is shared.

    -

    Name Size

    The length of the attribute name in bytes including the - null terminator.

    Datatype Size

    The length of the datatype description in the Datatype - field below.

    Dataspace Size

    The length of the dataspace description in the Dataspace - field below.

    Name Character Set Encoding

    The character set encoding for the attribute’s name: - - - - - - - - - - - - - - - -
    ValueDescription
    0ASCII character set encoding -
    1UTF-8 character set encoding -
    -

    -

    Name

    The null-terminated attribute name. This field is not - padded with additional bytes.

    Datatype

    The datatype description follows the same format as - described for the datatype object header message. -

    -

    If the - Flag field indicates this attribute’s datatype is - shared, this field will contain a “shared message” encoding - instead of the datatype encoding. -

    -

    This field is not padded with additional bytes. -

    -

    Dataspace

    The dataspace description follows the same format as - described for the dataspace object header message. -

    -

    If the - Flag field indicates this attribute’s dataspace is - shared, this field will contain a “shared message” encoding - instead of the dataspace encoding. -

    -

    This field is not padded with additional bytes.

    -

    Data

    The raw data for the attribute. The size is determined - from the datatype and dataspace descriptions. -

    -

    This field is not padded with additional zero bytes. -

    -
    -
    +
    BitDescription
    0If set, creation order for attributes is tracked.
    1If set, creation order for attributes is indexed.
    2-7Reserved
    +

    + + + +

    Maximum Creation Index

    +

    This is the maximum creation order index value for the + attributes on the object.

    +

    + This field is present if bit 0 of Flags is set. +

    + + + +

    Fractal Heap Address

    +

    This is the address of the fractal heap to store + dense attributes.

    + -
    -

    IV.A.2.n. The Object Comment -Message

    + +

    Attribute Name v2 B-tree Address

    +

    This is the address of the version 2 B-tree to index + the names of densely stored attributes.

    + - -
    - - - - - - - - -
    Header Message Name: Object - Comment
    Header Message Type: 0x000D
    Length: Varies
    Status: Optional; may not be - repeated.
    Description:The object comment is designed to be a short description of - an object. An object comment is a sequence of non-zero - (\0) ASCII characters with no other formatting - included by the library.
    Format of Data: See the tables - below.
    - - -
    - - - - - - - - - - - - - -
    - Name Message -
    bytebytebytebyte

    Comment (variable size)

    -
    + +

    Attribute Creation Order v2 B-tree Address

    +

    This is the address of the version 2 B-tree to index + the creation order of densely stored attributes.

    +

    + This field is present if bit 1 of Flags is set. +

    + -
    -
    - - - - - - - - - - -
    Field NameDescription

    Name

    A null terminated ASCII character string.

    -
    + +

    -

    IV.A.2.o. The Object -Modification Time (Old) Message

    +

    + IV.A.2.w. The Object Reference Count + Message +

    - -
    + +
    - - - - - - - -
    Header Message Name: Object - Modification Time (Old)
    Header Message Type: 0x000E
    Length: Fixed
    Status: Optional; may not be - repeated.
    Description:

    The object modification date and time is a timestamp - which indicates (using ISO-8601 date and time format) the last - modification of an object. The time is updated when any object - header message changes according to the system clock where the - change was posted. All fields of this message should be - interpreted as coordinated universal time (UTC).

    -

    This modification time message is deprecated in favor of - the “new” Object - Modification Time message and is no longer written to the - file in versions of the HDF5 Library after the 1.6.0 - version.

    Format of Data: See the tables - below.
    - - -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - Modification Time Message -
    bytebytebytebyte
    Year
    MonthDay of Month
    HourMinute
    SecondReserved
    -
    + + Header Message Name: Object Reference + Count + + + Header Message Type: 0x0016 + + + Length: Fixed + + + Status: Optional; may not be repeated. + + + Description: + This message stores the number of hard links (in groups or + objects) pointing to an object: in other words, its reference + count. + + + + Format of Data: See the tables below. + + +
    + -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Field NameDescription

    Year

    The four-digit year as an ASCII string. For example, - 1998. -

    Month

    The month number as a two digit ASCII string where - January is 01 and December is 12.

    Day of Month

    The day number within the month as a two digit ASCII - string. The first day of the month is 01.

    Hour

    The hour of the day as a two digit ASCII string where - midnight is 00 and 11:00pm is 23.

    Minute

    The minute of the hour as a two digit ASCII string where - the first minute of the hour is 00 and - the last is 59.

    Second

    The second of the minute as a two digit ASCII string - where the first second of the minute is 00 - and the last is 59.

    Reserved

    This field is reserved and should always be zero.

    -
    +
    + + -
    -

    IV.A.2.p. The Shared Message Table -Message

    + + + + + + - -
    -
    Object Reference Count
    bytebytebytebyte
    - - - - - - - -
    Header Message Name: Shared Message - Table
    Header Message Type: 0x000F
    Length: Fixed
    Status: Optional; may not be - repeated.
    Description:This message is used to locate the table of shared object - header message (SOHM) indexes. Each index consists of information - to find the shared messages from either the heap or object header. - This message is only found in the superblock - extension.
    Format of Data: See the tables - below.
    - - -
    - - - - - - - - - - - - - - - - - - - - - - - + + + + -
    - Shared Message Table Message -
    bytebytebytebyte
    VersionThis space inserted only to align table nicely

    Shared Object Header Message Table AddressO

    Number of IndicesThis space inserted only to align table nicely
    VersionThis space inserted + only to align table nicely
    + + Reference count + + +
    - +
    +
    +
    - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
    + Field Name + Description + -
    + +

    Version

    +

    The version number for this message. This document + describes version 0.

    + -
    -
    - - - - - - - - - - - - - - - - - - - - + + + + -
    Field NameDescription

    Version

    The version number for this message. This document describes version 0.

    Shared Object Header Message Table Address

    This field is the address of the master table for shared - object header message indexes.

    -

    Number of Indices

    This field is the number of indices in the master table. -

    Reference Count

    The unsigned 32-bit integer is the reference count + for the object. This message is only present in “version + 2” (or later) object headers, and if not present in those object + header versions, the reference count for the object is assumed to + be 1.

    -
    + +
    -

    IV.A.2.q. The Object Header -Continuation Message

    +

    + IV.A.2.x. The File Space Info Message +

    - -
    + +
    - - - - - - - -
    Header Message Name: Object Header - Continuation
    Header Message Type: 0x0010
    Length: Fixed
    Status: Optional; may be - repeated.
    Description:The object header continuation is the location in the file - of a block containing more header messages for the current data - object. This can be used when header blocks become too large or - are likely to change over time.
    Format of Data: See the tables - below.
    - - -
    - - - - - - - + - - + - - + -
    - Object Header Continuation Message -
    bytebytebytebyteHeader Message Name: File Space Info

    OffsetO

    Header Message Type: 0x0018

    LengthL

    Length: Fixed
    - - - - + + - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
    Status: Optional; may not be repeated.
      - (Items marked with an ‘L’ in the above table are of the size - specified in “Size of Lengths” field in the superblock.) -
    + Description: + This message stores the file space management strategy (see + description below) that the library uses in handling file space + requests for the file. It also contains the free-space section + threshold used by the library’s free-space managers for the + file. If the strategy is 1, this message also contains the addresses + of the file’s free-space managers which track free space for + each type of file space allocation. There are six basic types of + file space allocation: superblock, B-tree, raw data, global heap, + local heap, and object header. See the description of Free-space Manager as well as the + description of allocation types in Appendix + B. + + + + Format of Data: See the tables below. + + +
    + - +
    + + -
    -
    -
    File Space Info
    - - + + + + - - + + + - - - + -
    Field NameDescriptionbytebytebytebyte

    Offset

    This value is the address in the file where the - header continuation block is located.

    VersionStrategyThresholdL

    Length

    This value is the length in bytes of the header continuation - block in the file.

    Super-block Free-space Manager AddressO
    -
    -
    - -

    The format of the header continuation block that this message points - to depends on the version of the object header that the message is - contained within. -

    - -

    - Continuation blocks for version 1 object headers have no special - formatting information; they are merely a list of object header - message info sequences (type, size, flags, reserved bytes and data - for each message sequence). See the description - of Version 1 Data Object Header Prefix. -

    - -

    Continuation blocks for version 2 object headers do have - special formatting information as described here - (see also the description of - Version 2 Data Object Header Prefix.): -

    -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - Version 2 Object Header Continuation Block -
    bytebytebytebyte
    Signature
    Header Message Type #1Size of Header Message Data #1Header Message #1 Flags
    Header Message #1 Creation Order (optional)This space inserted only to align table nicely

    Header Message Data #1

    .
    .
    .
    Header Message Type #nSize of Header Message Data #nHeader Message #n Flags
    Header Message #n Creation Order (optional)This space inserted only to align table nicely

    Header Message Data #n

    Gap (optional, variable size)
    Checksum
    -
    - -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + -
    Field NameDescription

    Signature

    -

    The ASCII character string “OCHK” - is used to indicate the - beginning of an object header continuation block. This gives file - consistency checking utilities a better chance of reconstructing a - damaged file. -

    -

    Header Message #n Type

    -

    Same format as version 1 of the object header, described above. -

    Size of Header Message #n Data

    -

    Same format as version 1 of the object header, described above. -

    Header Message #n Flags

    -

    Same format as version 1 of the object header, described above. -

    Header Message #n Creation Order

    -

    This field stores the order that a message of a given type - was created in.

    -

    This field is present if bit 2 of flags is set.

    -

    Header Message #n Data

    -

    Same format as version 1 of the object header, described above. -

    Gap

    -

    A gap in an object header chunk is inferred by the end of the - messages for the chunk before the beginning of the chunk’s - checksum. Gaps are always smaller than the size of an - object header message prefix (message type + message size + - message flags).

    -

    Gaps are formed when a message (typically an attribute message) - in an earlier chunk is deleted and a message from a later - chunk that does not quite fit into the free space is moved - into the earlier chunk.

    -

    Checksum

    -

    This is the checksum for the object header chunk. -

    -
    B-tree Free-space Manager AddressO
    -
    + + Raw Data Free-space Manager AddressO + + + Global Heap Free-space Manager AddressO + + + Local Heap Free-space Manager AddressO + + + Object Header Free-space Manager AddressO + + -
    -

    IV.A.2.r. The Symbol Table -Message

    + + + + + + + + + +
     (Items marked with an ‘O’ in the + above table are of the size specified in “Size of + Offsets” field in the superblock.)
     (Items marked with an ‘L’ in the above table are + of the size specified in “Size of Lengths” field in the + superblock.)
    - -
    - - - - - - - - -
    Header Message Name: Symbol Table - Message
    Header Message Type: 0x0011
    Length: Fixed
    Status: Required for - “old style” groups; may not be repeated.
    Description:Each “old style” group has a v1 B-tree and a - local heap for storing symbol table entries, which are located - with this message.
    Format of data: See the tables - below.
    - - -
    - - + +
    +
    +
    - Symbol Table Message -
    - - - - + + - + + - + + -
    bytebytebytebyteField NameDescription

    v1 B-tree AddressO

    Version

    This is the version number of this message. This + document describes version 0.


    Local Heap AddressO

    Strategy

    This is the file space management strategy for the + file. There are four types of strategies:

    + + + + + + + + + + + + + + + + + + + + + + + + +
    ValueDescription
    1With this strategy, the HDF5 Library’s free-space + managers track the free space that results from the manipulation + of HDF5 objects in the HDF5 file. The free space information is + saved when the file is closed, and reloaded when the file is + reopened.
    When space is needed for file metadata or raw + data, the HDF5 Library first requests space from the + library’s free-space managers. If the request is not + satisfied, the library requests space from the aggregators. If + the request is still not satisfied, the library requests space + from the virtual file driver. That is, the library will use all + of the mechanisms for allocating space. +
    2This is the HDF5 Library’s default file space + management strategy. With this strategy, the library’s + free-space managers track the free space that results from the + manipulation of HDF5 objects in the HDF5 file. The free space + information is NOT saved when the file is closed and the free + space that exists upon file closing becomes unaccounted space in + the file.
    As with strategy #1, the library will try all + of the mechanisms for allocating space. When space is needed for + file metadata or raw data, the library first requests space from + the free-space managers. If the request is not satisfied, the + library requests space from the aggregators. If the request is + still not satisfied, the library requests space from the virtual + file driver. +
    3With this strategy, the HDF5 Library does not track free + space that results from the manipulation of HDF5 objects in the + HDF5 file and the free space becomes unaccounted space in the + file.
    When space is needed for file metadata or raw data, + the library first requests space from the aggregators. If the + request is not satisfied, the library requests space from the + virtual file driver. +
    4With this strategy, the HDF5 Library does not track free + space that results from the manipulation of HDF5 objects in the + HDF5 file and the free space becomes unaccounted space in the + file.
    When space is needed for file metadata or raw data, + the library requests space from the virtual file driver. +
    +

    - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
    +

    Threshold

    +

    + This is the free-space section threshold. The library’s + free-space managers will track only free-space sections with size + greater than or equal to threshold. The default is to + track free-space sections of all sizes. +

    + + +

    Superblock Free-space Manager Address

    +

    This is the address of the free-space manager for + H5FD_MEM_SUPER allocation type.

    + -
    + +

    B-tree Free-space Manager Address

    +

    This is the address of the free-space manager for + H5FD_MEM_BTREE allocation type.

    + -
    -
    - - - + + - - + + - - + + -
    Field NameDescription

    Raw Data Free-space Manager Address

    This is the address of the free-space manager for + H5FD_MEM_DRAW allocation type.

    v1 B-tree Address

    This value is the address of the v1 B-tree containing the - symbol table entries for the group.

    Global Heap Free-space Manager Address

    This is the address of the free-space manager for + H5FD_MEM_GHEAP allocation type.

    Local Heap Address

    This value is the address of the local heap containing - the link names for the symbol table entries for the group.

    Local Heap Free-space Manager Address

    This is the address of the free-space manager for + H5FD_MEM_LHEAP allocation type.

    -
    + +

    Object Header Free-space Manager Address

    +

    This is the address of the free-space manager for + H5FD_MEM_OHDR allocation type.

    + + +
    -

    IV.A.2.s. The Object -Modification Time Message

    - - -
    - - - - - - - - -
    Header Message Name: Object - Modification Time
    Header Message Type: 0x0012
    Length: Fixed
    Status: Optional; may not be - repeated.
    Description:The object modification time is a timestamp which indicates - the time of the last modification of an object. The time is - updated when any object header message changes according to - the system clock where the change was posted.
    Format of Data: See the tables - below.
    - - -
    - - - - - - - - - - - - - - - - - - -
    - Modification Time Message -
    bytebytebytebyte
    VersionReserved (zero)
    Seconds After UNIX Epoch
    -
    -
    -
    - - - - - - - - - - - - - - - -
    Field NameDescription

    Version

    The version number is used for changes in the format of Object Modification Time - and is described here: - - - - - - - - - - - - - - - -
    VersionDescription
    0Never used.
    1Used by Version 1.6.1 and after of the library to encode time. In - this version, the time is the seconds after Epoch.

    -

    Seconds After UNIX Epoch

    A 32-bit unsigned integer value that stores the number of - seconds since 0 hours, 0 minutes, 0 seconds, January 1, 1970, - Coordinated Universal Time.

    -

    -

    IV.A.2.t. The B-tree -‘K’ Values Message

    - - -
    - - - - - - - - -
    Header Message Name: B-tree - ‘K’ Values
    Header Message Type: 0x0013
    Length: Fixed
    Status: Optional; may not be - repeated.
    Description:This message retrieves non-default ‘K’ values - for internal and leaf nodes of a group or indexed storage v1 - B-trees. This message is only found in the superblock - extension.
    Format of Data: See the tables - below.
    - - -
    - - - - - - - - - - - - - - - - - - - - -
    - B-tree ‘K’ Values Message -
    bytebytebytebyte
    VersionIndexed Storage Internal Node KThis space inserted only to align table nicely
    Group Internal Node KGroup Leaf Node K
    -
    - -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - +

    + IV.B. Disk Format: Level 2B - Data Object + Data Storage +

    -
    Field NameDescription

    Version

    The version number for this message. This document describes - version 0.

    -

    Indexed Storage Internal Node K

    This is the node ‘K’ value for each internal node of an - indexed storage v1 B-tree. See the description of this field - in version 0 and 1 of the superblock as well the section on - v1 B-trees. -

    -

    Group Internal Node K

    This is the node ‘K’ value for each internal node of a group - v1 B-tree. See the description of this field in version 0 and - 1 of the superblock as well as the section on v1 B-trees. -

    -

    Group Leaf Node K

    This is the node ‘K’ value for each leaf node of a group v1 - B-tree. See the description of this field in version 0 and 1 - of the superblock as well as the section on v1 B-trees. -

    -
    -
    +

    The data for an object is stored separately from its header + information in the file and may not actually be located in the HDF5 + file itself if the header indicates that the data is stored externally. + The information for each record in the object is stored according to + the dimensionality of the object (indicated in the dataspace header + message). Multi-dimensional array data is stored in C order; in other + words, the “last” dimension changes fastest.

    + +

    Data whose elements are composed of atomic datatypes are stored + in IEEE format, unless they are specifically defined as being stored in + a different machine format with the architecture-type information from + the datatype header message. This means that each architecture will + need to [potentially] byte-swap data values into the internal + representation for that particular machine.

    + +

    Data with a variable-length datatype is stored in the global heap + of the HDF5 file. Global heap identifiers are stored in the data object + storage.

    + +

    Data whose elements are composed of reference datatypes are + stored in several different ways depending on the particular reference + type involved. Object pointers are just stored as the offset of the + object header being pointed to with the size of the pointer being the + same number of bytes as offsets in the file.

    -
    -

    IV.A.2.u. The Driver Info -Message

    +

    Dataset region references are stored as a heap-ID which points to + the following information within the file-heap: an offset of the object + pointed to, number-type information (same format as header message), + dimensionality information (same format as header message), sub-set + start and end information (in other words, a coordinate location for + each), and field start and end names (in other words, a [pointer to + the] string indicating the first field included and a [pointer to the] + string name for the last field).

    - -
    - - - - - - - - - -
    Header Message Name: Driver - Info
    Header Message Type: 0x0014
    Length: Varies
    Status: Optional; may not be - repeated.
    - Description:This message contains information needed by the file driver - to reopen a file. This message is only found in the - superblock extension: see the - “Disk Format: Level 0C - Superblock Extension” - section for more information. For more information on the fields - in the driver info message, see the - “Disk Format : Level 0B - File Driver Info” - section; those who use the multi and family file drivers will - find this section particularly helpful.
    Format of Data: See the tables - below.
    - - -
    - - - - - - - - - - - - - - - - - - - - - - - - - - +

    Data of a compound datatype is stored as a contiguous stream of + the items in the structure, with each item formatted according to its + datatype.

    -
    - Driver Info Message -
    bytebytebytebyte
    VersionThis space inserted only to align table nicely

    Driver Identification
    Driver Information SizeThis space inserted only to align table nicely


    Driver Information (variable size)


    -
    -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - -
    Field NameDescription

    Version

    The version number for this message. This document describes - version 0.

    -

    Driver Identification

    This is an eight-byte ASCII string without null termination which - identifies the driver. -

    -

    Driver Information Size

    The size in bytes of the Driver Information field of this - message.

    -

    Driver Information

    Driver information is stored in a format defined by the file driver.

    -
    -

    -

    IV.A.2.v. The Attribute Info -Message

    - - -
    - - - - - - - - -
    Header Message Name: Attribute - Info
    Header Message Type: 0x0015
    Length: Varies
    Status: Optional; may not be - repeated.
    Description:This message stores information about the attributes on an - object, such as the maximum creation index for the attributes - created and the location of the attribute storage when the - attributes are stored “densely”.
    Format of Data: See the tables - below.
    - - -
    - - - - - - - - - - - - - - - - - - - - - - - - +
    +
    +

    + V. Appendix A: Definitions +

    -
    - Attribute Info Message -
    bytebytebytebyte
    VersionFlagsMaximum Creation Index (optional)

    Fractal Heap AddressO


    Attribute Name v2 B-tree AddressO


    Attribute Creation Order v2 B-tree AddressO (optional)

    +

    Definitions of various terms used in this document are included + in this section.

    - +
    +
    - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
    - -
    - -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + -
    Field NameDescription

    Version

    The version number for this message. This document describes - version 0.

    -

    Flags

    This is the attribute index information flag with the - following definition: - - - - - - - - - - - - - - - - - - - -
    BitDescription
    0If set, creation order for attributes is tracked. -
    1If set, creation order for attributes is indexed. -
    2-7Reserved

    - -

    Maximum Creation Index

    The is the maximum creation order index value for the - attributes on the object.

    -

    This field is present if bit 0 of Flags is set.

    -

    Fractal Heap Address

    This is the address of the fractal heap to store dense - attributes.

    -

    Attribute Name v2 B-tree Address

    This is the address of the version 2 B-tree to index the - names of densely stored attributes.

    -

    Attribute Creation Order v2 B-tree Address

    This is the address of the version 2 B-tree to index the - creation order of densely stored attributes.

    -

    This field is present if bit 1 of Flags is set.

    -
    TermDefinition
    -
    + + Undefined Address + The undefined address for a + file is a file address with all bits set: in other words, 0xffff...ff. + + -
    -

    IV.A.2.w. The Object Reference -Count Message

    + + Unlimited Size + The unlimited size for a size is + a value with all bits set: in other words, 0xffff...ff. + + - -
    - - - - - - - - -
    Header Message Name: Object Reference - Count
    Header Message Type: 0x0016
    Length: Fixed
    Status: Optional; may not be - repeated.
    Description:This message stores the number of hard links (in groups or - objects) pointing to an object: in other words, its - reference count.
    Format of Data: See the tables - below.
    - - -
    - - - - - - - - - - - - - - - - - - -
    - Object Reference Count -
    bytebytebytebyte
    VersionThis space inserted only to align table nicely
    Reference count
    -
    + + -
    -
    - - - - - - - - - - - - - - - -
    Field NameDescription

    Version

    The version number for this message. This document describes - version 0.

    -

    Reference Count

    The unsigned 32-bit integer is the reference count for the - object. This message is only present in “version 2” - (or later) object headers, and if not present those object - header versions, the reference count for the object is assumed - to be 1.

    -
    -

    -

    IV.A.2.x. The File Space Info -Message

    - - -
    - - - - - - - - -
    Header Message Name: File Space - Info
    Header Message Type: 0x0018
    Length: Fixed
    Status: Optional; may not be - repeated.
    - Description:This message stores the file space management strategy (see - description below) that the library uses in handling file space - request for the file. It also contains the free-space section - threshold used by the library’s free-space managers for - the file. If the strategy is 1, this message also contains the - addresses of the file’s free-space managers which track - free space for each type of file space allocation. There are - six basic types of file space allocation: superblock, B-tree, - raw data, global heap, local heap, and object header. See the - description of Free-space - Manager as well the description of allocation types in - Appendix B.
    Format of Data: See the tables - below.
    - - -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - File Space Info -
    bytebytebytebyte
    VersionStrategyThresholdL
    Super-block Free-space Manager AddressO
    B-tree Free-space Manager AddressO
    Raw Data Free-space Manager AddressO
    Global Heap Free-space Manager AddressO
    Local Heap Free-space Manager AddressO
    Object Header Free-space Manager AddressO
    +
    +
    +

    + VI. Appendix B: File Memory Allocation Types +

    - +

    There are six basic types of file memory allocation as follows:

    +
    +
    - - - - - -
      - (Items marked with an ‘O’ in the above table are of the size - specified in “Size of Offsets” field in the superblock.) -
      - (Items marked with an ‘L’ in the above table are of the size - specified in “Size of Lengths” field in the superblock.) -
    - -
    + Basic Allocation Type + Description + -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Field NameDescription

    Version

    This is the version number of this message. This document describes - version 0.

    -

    Strategy

    This is the file space management strategy for the file. - There are four types of strategies: - - - - - - - - - - - - - - - - - - - - - - - - -
    ValueDescription
    1With this strategy, the HDF5 Library’s free-space managers track the - free space that results from the manipulation of HDF5 objects - in the HDF5 file. The free space information is saved when the - file is closed, and reloaded when the file is reopened. -
    - When space is needed for file metadata or raw data, - the HDF5 Library first requests space from the library’s free-space - managers. If the request is not satisfied, the library requests space - from the aggregators. If the request is still not satisfied, - the library requests space from the virtual file driver. - That is, the library will use all of the mechanisms for allocating - space. -
    2This is the HDF5 Library’s default file space management strategy. - With this strategy, the library’s free-space managers track the free space - that results from the manipulation of HDF5 objects in the HDF5 file. - The free space information is NOT saved when the file is closed and - the free space that exists upon file closing becomes unaccounted - space in the file. -
    - As with strategy #1, the library will try all of the mechanisms - for allocating space. When space is needed for file metadata or - raw data, the library first requests space from the free-space - managers. If the request is not satisfied, the library requests - space from the aggregators. If the request is still not satisfied, - the library requests space from the virtual file driver. -
    3With this strategy, the HDF5 Library does not track free space that results - from the manipulation of HDF5 objects in the HDF5 file and - the free space becomes unaccounted space in the file. -
    - When space is needed for file metadata or raw data, - the library first requests space from the aggregators. - If the request is not satisfied, the library requests space from - the virtual file driver. -
    4With this strategy, the HDF5 Library does not track free space that results - from the manipulation of HDF5 objects in the HDF5 file and - the free space becomes unaccounted space in the file. -
    - When space is needed for file metadata or raw data, - the library requests space from the virtual file driver. -

    -

    Threshold

    This is the free-space section threshold. - The library’s free-space managers will track only - free-space sections with size greater than or equal to - threshold. The default is to track free-space - sections of all sizes.

    -

    Superblock Free-space Manager Address

    This is the address of the free-space manager for - H5FD_MEM_SUPER allocation type. -

    -

    B-tree Free-space Manager Address

    This is the address of the free-space manager for - H5FD_MEM_BTREE allocation type. -

    -

    Raw Data Free-space Manager Address

    This is the address of the free-space manager for - H5FD_MEM_DRAW allocation type. -

    -

    Global Heap Free-space Manager Address

    This is the address of the free-space manager for - H5FD_MEM_GHEAP allocation type. -

    -

    Local Heap Free-space Manager Address

    This is the address of the free-space manager for - H5FD_MEM_LHEAP allocation type. -

    -

    Object Header Free-space Manager Address

    This is the address of the free-space manager for - H5FD_MEM_OHDR allocation type. -

    -
    -
    -
    + + H5FD_MEM_SUPER + File memory allocated for Superblock. + + + H5FD_MEM_BTREE + File memory allocated for B-tree. + -
    -

    -IV.B. Disk Format: Level 2B - Data Object Data Storage

    + + H5FD_MEM_DRAW + File memory allocated for raw data. + -

    The data for an object is stored separately from its header - information in the file and may not actually be located in the HDF5 file - itself if the header indicates that the data is stored externally. The - information for each record in the object is stored according to the - dimensionality of the object (indicated in the dataspace header message). - Multi-dimensional array data is stored in C order; in other words, the - “last” dimension changes fastest.

    - -

    Data whose elements are composed of atomic datatypes are stored in IEEE - format, unless they are specifically defined as being stored in a different - machine format with the architecture-type information from the datatype - header message. This means that each architecture will need to [potentially] - byte-swap data values into the internal representation for that particular - machine.

    - -

    Data with a variable-length datatype is stored in the global heap - of the HDF5 file. Global heap identifiers are stored in the - data object storage.

    - -

    Data whose elements are composed of reference datatypes are stored in - several different ways depending on the particular reference type involved. - Object pointers are just stored as the offset of the object header being - pointed to with the size of the pointer being the same number of bytes as - offsets in the file.

    + + H5FD_MEM_GHEAP + File memory allocated for Global Heap. + -

    Dataset region references are stored as a heap-ID which points to -the following information within the file-heap: an offset of the object -pointed to, number-type information (same format as header message), -dimensionality information (same format as header message), sub-set start -and end information (in other words, a coordinate location for each), -and field start and end names (in other words, a [pointer to the] string -indicating the first field included and a [pointer to the] string name -for the last field).

    + + H5FD_MEM_LHEAP + File memory allocated for Local Heap. + -

    Data of a compound datatype is stored as a contiguous stream of the items - in the structure, with each item formatted according to its datatype.

    + + H5FD_MEM_OHDR + File memory allocated for Object Header. + + + +

    There are other file memory allocation types that are mapped to + the above six basic allocation types because they are similar in + nature. The mapping is listed in the following table:

    +
    + + + + + -
    -
    -
    -

    -V. Appendix A: Definitions

    + + + + -

    Definitions of various terms used in this document are included in -this section.

    + + + + -
    -
    Basic Allocation TypeMapping of Allocation Types to Basic Allocation Types
    H5FD_MEM_SUPERnone
    H5FD_MEM_BTREEH5FD_MEM_SOHM_INDEX
    - - - - + + + + - - - - + + + + - - - - + + + + + + + +
    TermDefinition
    H5FD_MEM_DRAWH5FD_MEM_FHEAP_HUGE_OBJ
    Undefined AddressThe undefined - address for a file is a file address with all bits - set: in other words, 0xffff...ff.
    H5FD_MEM_GHEAPnone
    Unlimited SizeThe unlimited size - for a size is a value with all bits set: in other words, - 0xffff...ff.
    H5FD_MEM_LHEAPH5FD_MEM_FHEAP_DBLOCK, H5FD_MEM_FSPACE_SINFO
    H5FD_MEM_OHDRH5FD_MEM_FHEAP_HDR, H5FD_MEM_FHEAP_IBLOCK, + H5FD_MEM_FSPACE_HDR, H5FD_MEM_SOHM_TABLE
    -
    + +

    Allocation types that are mapped to basic allocation types are + described below:

    +
    + + + + + -
    -
    -
    -

    -VI. Appendix B: File Memory Allocation Types

    + + + + -

    There are six basic types of file memory allocation as follows: -

    -
    -
    Allocation TypeDescription
    H5FD_MEM_FHEAP_HDRFile memory allocated for Fractal Heap Header.
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Basic Allocation TypeDescription
    H5FD_MEM_SUPERFile memory allocated for Superblock.
    H5FD_MEM_BTREEFile memory allocated for B-tree.
    H5FD_MEM_DRAWFile memory allocated for raw data.
    H5FD_MEM_GHEAPFile memory allocated for Global Heap.
    H5FD_MEM_LHEAPFile memory allocated for Local Heap.
    H5FD_MEM_OHDRFile memory allocated for Object Header.
    -
    + + H5FD_MEM_FHEAP_DBLOCK + File memory allocated for Fractal Heap Direct + Blocks. + -

    There are other file memory allocation types that are mapped to the -above six basic allocation types because they are similar in nature. -The mapping is listed in the following table: -

    + + H5FD_MEM_FHEAP_IBLOCK + File memory allocated for Fractal Heap Indirect + Blocks. + -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Basic Allocation TypeMapping of Allocation Types to Basic Allocation Types
    H5FD_MEM_SUPERnone
    H5FD_MEM_BTREEH5FD_MEM_SOHM_INDEX
    H5FD_MEM_DRAWH5FD_MEM_FHEAP_HUGE_OBJ
    H5FD_MEM_GHEAPnone
    H5FD_MEM_LHEAPH5FD_MEM_FHEAP_DBLOCK, H5FD_MEM_FSPACE_SINFO
    H5FD_MEM_OHDRH5FD_MEM_FHEAP_HDR, H5FD_MEM_FHEAP_IBLOCK, H5FD_MEM_FSPACE_HDR, H5FD_MEM_SOHM_TABLE
    -
    + + H5FD_MEM_FHEAP_HUGE_OBJ + File memory allocated for huge objects in the fractal heap. + -

    Allocation types that are mapped to basic allocation types are described below: -

    + + H5FD_MEM_FSPACE_HDR + File memory allocated for Free-space Manager + Header. + -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Allocation TypeDescription
    H5FD_MEM_FHEAP_HDRFile memory allocated for Fractal Heap Header.
    H5FD_MEM_FHEAP_DBLOCKFile memory allocated for Fractal Heap Direct Blocks.
    H5FD_MEM_FHEAP_IBLOCKFile memory allocated for Fractal Heap Indirect Blocks.
    H5FD_MEM_FHEAP_HUGE_OBJFile memory allocated for huge objects in the fractal heap.
    H5FD_MEM_FSPACE_HDRFile memory allocated for Free-space Manager Header.
    H5FD_MEM_FSPACE_SINFOFile memory allocated for Free-space Section List of the free-space manager.
    H5FD_MEM_SOHM_TABLEFile memory allocated for Shared Object Header Message Table.
    H5FD_MEM_SOHM_INDEXFile memory allocated for Shared Message Record List.
    -
    - + + H5FD_MEM_FSPACE_SINFO + File memory allocated for Free-space Section List + of the free-space manager. + + + + H5FD_MEM_SOHM_TABLE + File memory allocated for Shared Object Header + Message Table. + + + H5FD_MEM_SOHM_INDEX + File memory allocated for Shared Message Record + List. + + + + + diff --git a/doxygen/examples/H5.format.html b/doxygen/examples/H5.format.html index 378f7a3c398..c52e8ea3b8c 100644 --- a/doxygen/examples/H5.format.html +++ b/doxygen/examples/H5.format.html @@ -170,17 +170,14 @@ @@ -14647,9 +14644,9 @@

    IV.A.2.m. The Attribute Message

    datatypes).

    In 1.8.x versions of the library, attributes can be larger than 64KB. See the - + “Special Issues” section of the Attributes chapter - in the HDF5 User Guide for more information.

    + in the HDF5 User’s Guide for more information.

    Note: Attributes on an object must have unique names: the HDF5 Library currently enforces this by causing the creation of an attribute with a duplicate name to fail. @@ -19601,7 +19598,7 @@

    VIII.A. Dataspace Encoding

    0 - If set, it a a regular hyperslab, otherwise, irregular. + If set, it is a regular hyperslab, otherwise, irregular. diff --git a/doxygen/examples/H5R_examples.c b/doxygen/examples/H5R_examples.c new file mode 100644 index 00000000000..b40b99270bf --- /dev/null +++ b/doxygen/examples/H5R_examples.c @@ -0,0 +1,171 @@ +/* -*- c-file-style: "stroustrup" -*- */ + +#include "hdf5.h" + +#include +#include +#include + +int +main(void) +{ + int ret_val = EXIT_SUCCESS; + + //! + { + __label__ fail_file, fail_fspace, fail_dset, fail_sel, fail_aspace, fail_attr, fail_awrite; + hid_t file, fspace, dset, aspace, attr; + H5R_ref_t ref; + + if ((file = H5Fcreate("reference.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) == H5I_INVALID_HID) { + ret_val = EXIT_FAILURE; + goto fail_file; + } + // create a region reference which selects all elements of the dataset at "/data" + if ((fspace = H5Screate_simple(2, (hsize_t[]){10, 20}, NULL)) == H5I_INVALID_HID) { + ret_val = EXIT_FAILURE; + goto fail_fspace; + } + if ((dset = H5Dcreate(file, "data", H5T_STD_I32LE, fspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) == + H5I_INVALID_HID) { + ret_val = EXIT_FAILURE; + goto fail_dset; + } + if (H5Sselect_all(fspace) < 0 || H5Rcreate_region(file, "data", fspace, H5P_DEFAULT, &ref) < 0) { + ret_val = EXIT_FAILURE; + goto fail_sel; + } + // store the region reference in a scalar attribute of the root group called "region" + if ((aspace = H5Screate(H5S_SCALAR)) == H5I_INVALID_HID) { + ret_val = EXIT_FAILURE; + goto fail_aspace; + } + if ((attr = H5Acreate(file, "region", H5T_STD_REF, aspace, H5P_DEFAULT, H5P_DEFAULT)) == + H5I_INVALID_HID) { + ret_val = EXIT_FAILURE; + goto fail_attr; + } + if (H5Awrite(attr, H5T_STD_REF, &ref) < 0) { + ret_val = EXIT_FAILURE; + goto fail_awrite; + } + +fail_awrite: + H5Aclose(attr); +fail_attr: + H5Sclose(aspace); +fail_aspace: + H5Rdestroy(&ref); +fail_sel: + H5Dclose(dset); +fail_dset: + H5Sclose(fspace); +fail_fspace: + H5Fclose(file); +fail_file:; + } + 
//! + + //! + { + __label__ fail_file, fail_attr, fail_aread; + hid_t file, attr; + H5R_ref_t ref; + + if ((file = H5Fopen("reference.h5", H5F_ACC_RDONLY, H5P_DEFAULT)) == H5I_INVALID_HID) { + ret_val = EXIT_FAILURE; + goto fail_file; + } + + // read the dataset region reference from the attribute + if ((attr = H5Aopen(file, "region", H5P_DEFAULT)) == H5I_INVALID_HID) { + ret_val = EXIT_FAILURE; + goto fail_attr; + } + if (H5Aread(attr, H5T_STD_REF, &ref) < 0) { + ret_val = EXIT_FAILURE; + goto fail_aread; + } + assert(H5Rget_type(&ref) == H5R_DATASET_REGION2); + + // get an HDF5 path name for the dataset of the region reference + { + char buf[255]; + if (H5Rget_obj_name(&ref, H5P_DEFAULT, buf, 255) < 0) { + ret_val = EXIT_FAILURE; + } + printf("Object name: \"%s\"\n", buf); + } + + H5Rdestroy(&ref); +fail_aread: + H5Aclose(attr); +fail_attr: + H5Fclose(file); +fail_file:; + } + //! + + //! + { + __label__ fail_file, fail_attr, fail_ref; + hid_t file, attr; + H5R_ref_t ref; + + if ((file = H5Fopen("reference.h5", H5F_ACC_RDWR, H5P_DEFAULT)) == H5I_INVALID_HID) { + ret_val = EXIT_FAILURE; + goto fail_file; + } + + // H5T_STD_REF is a generic reference type + // we can "update" the attribute value to refer to the attribute itself + if ((attr = H5Aopen(file, "region", H5P_DEFAULT)) == H5I_INVALID_HID) { + ret_val = EXIT_FAILURE; + goto fail_attr; + } + if (H5Rcreate_attr(file, "data", "region", H5P_DEFAULT, &ref) < 0) { + ret_val = EXIT_FAILURE; + goto fail_ref; + } + + assert(H5Rget_type(&ref) == H5R_ATTR); + + if (H5Awrite(attr, H5T_STD_REF, &ref) < 0) { + ret_val = EXIT_FAILURE; + } + + H5Rdestroy(&ref); +fail_ref: + H5Aclose(attr); +fail_attr: + H5Fclose(file); +fail_file:; + } + //! + + //! 
+ { + __label__ fail_file, fail_ref; + hid_t file; + H5R_ref_t ref; + + // create an HDF5 object reference to the root group + if ((file = H5Fopen("reference.h5", H5F_ACC_RDONLY, H5P_DEFAULT)) == H5I_INVALID_HID) { + ret_val = EXIT_FAILURE; + goto fail_file; + } + if (H5Rcreate_object(file, ".", H5P_DEFAULT, &ref) < 0) { + ret_val = EXIT_FAILURE; + goto fail_ref; + } + + // H5Rdestroy() releases all resources associated with an HDF5 reference + H5Rdestroy(&ref); +fail_ref: + H5Fclose(file); +fail_file:; + } + //! + + return ret_val; +} diff --git a/doxygen/examples/VFL.html b/doxygen/examples/VFL.html index 624d942d9f5..78d163208ad 100644 --- a/doxygen/examples/VFL.html +++ b/doxygen/examples/VFL.html @@ -10,17 +10,14 @@ diff --git a/doxygen/examples/core_menu.md b/doxygen/examples/menus/core_menu.md similarity index 91% rename from doxygen/examples/core_menu.md rename to doxygen/examples/menus/core_menu.md index 8c82cc55bd7..3fd7d11aa2a 100644 --- a/doxygen/examples/core_menu.md +++ b/doxygen/examples/menus/core_menu.md @@ -20,6 +20,10 @@ HDF5 datatypes describe the element type of HDF5 datasets and attributes.
    HDF5 library error reporting. +- @ref H5ES "Event Set (H5ES)" +
    +HDF5 event set life cycle used with HDF5 VOL connectors that enable the asynchronous feature in HDF5. + - @ref H5F "Files (H5F)"
    Manage HDF5 files. diff --git a/doxygen/examples/fortran_menu.md b/doxygen/examples/menus/fortran_menu.md similarity index 100% rename from doxygen/examples/fortran_menu.md rename to doxygen/examples/menus/fortran_menu.md diff --git a/doxygen/examples/high_level_menu.md b/doxygen/examples/menus/high_level_menu.md similarity index 100% rename from doxygen/examples/high_level_menu.md rename to doxygen/examples/menus/high_level_menu.md diff --git a/doxygen/examples/java_menu.md b/doxygen/examples/menus/java_menu.md similarity index 100% rename from doxygen/examples/java_menu.md rename to doxygen/examples/menus/java_menu.md diff --git a/doxygen/examples/tables/fileDriverLists.dox b/doxygen/examples/tables/fileDriverLists.dox new file mode 100644 index 00000000000..1aae3ce492a --- /dev/null +++ b/doxygen/examples/tables/fileDriverLists.dox @@ -0,0 +1,139 @@ +/** File Driver List + * +//! [file_driver_table] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    I/O file drivers
    File DriverDescription
    #H5FD_COREStore in memory (optional backing store to disk file).
    #H5FD_FAMILYStore in a set of files.
    #H5FD_LOGStore in logging file.
    #H5FD_MPIOStore using MPI/IO.
    #H5FD_MULTIStore in multiple files. There are several options to control layout.
    #H5FD_SEC2Serial I/O to file using Unix “section 2” functions.
    #H5FD_STDIOSerial I/O to file using Unix “stdio” functions.
    +//! [file_driver_table] + * + * +//! [supported_file_driver_table] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Supported file drivers
    Driver NameDriver IdentifierDescriptionRelated API
    POSIX#H5FD_SEC2This driver uses POSIX file-system functions like read and write to perform I/O to a single, +permanent file on local disk with no system buffering. This driver is POSIX-compliant and is +the default file driver for all systems.#H5Pset_fapl_sec2
    Direct#H5FD_DIRECTThis is the #H5FD_SEC2 driver except data is written to or read from the file +synchronously without being cached by the system.#H5Pset_fapl_direct
    Log#H5FD_LOGThis is the #H5FD_SEC2 driver with logging capabilities.#H5Pset_fapl_log
    Windows#H5FD_WINDOWSThis driver was modified in HDF5-1.8.8 to be a wrapper of the POSIX driver, +#H5FD_SEC2. This change should not affect user applications.#H5Pset_fapl_windows
    STDIO#H5FD_STDIOThis driver uses functions from the standard C stdio.h to perform I/O +to a single, permanent file on local disk with additional system buffering.#H5Pset_fapl_stdio
    Memory#H5FD_COREWith this driver, an application can work with a file in memory for faster reads and +writes. File contents are kept in memory until the file is closed. At closing, the memory +version of the file can be written back to disk or abandoned.#H5Pset_fapl_core
    Family#H5FD_FAMILYWith this driver, the HDF5 file’s address space is partitioned into pieces and sent to +separate storage files using an underlying driver of the user’s choice. This driver is for +systems that do not support files larger than 2 gigabytes.#H5Pset_fapl_family
    Multi#H5FD_MULTIWith this driver, data can be stored in multiple files according to the type of the data. +I/O might work better if data is stored in separate files based on the type of data. The Split +driver is a special case of this driver.#H5Pset_fapl_multi
    SplitH5FD_SPLITThis file driver splits a file into two parts. One part stores metadata, and the other part +stores raw data. This splitting a file into two parts is a limited case of the Multi driver.#H5Pset_fapl_split
    Parallel#H5FD_MPIOThis is the standard HDF5 file driver for parallel file systems. This driver uses the MPI +standard for both communication and file I/O.#H5Pset_fapl_mpio
    Parallel POSIXH5FD_MPIPOSIXThis driver is no longer available.
    StreamH5FD_STREAMThis driver is no longer available.
    +//! [supported_file_driver_table] + * + */ diff --git a/doxygen/examples/tables/predefinedDatatypes.dox b/doxygen/examples/tables/predefinedDatatypes.dox new file mode 100644 index 00000000000..2427d0c1782 --- /dev/null +++ b/doxygen/examples/tables/predefinedDatatypes.dox @@ -0,0 +1,629 @@ +/** Predefined Datatypes List + * +//! [predefined_ieee_datatypes_table] + + + + + + + + + + + + + + + + + + + + + + +
    Predefined IEEE Floating Point Datatypes
    DatatypeDescription
    #H5T_IEEE_F32BE32-bit big-endian IEEE floating point
    #H5T_IEEE_F32LE32-bit little-endian IEEE floating point
    #H5T_IEEE_F64BE64-bit big-endian IEEE floating point
    #H5T_IEEE_F64LE64-bit little-endian IEEE floating point
    +//! [predefined_ieee_datatypes_table] + * + * +//! [predefined_std_datatypes_table] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Predefined Standard Datatypes
    DatatypeDescription
    #H5T_STD_I8BE8-bit big-endian signed integer (2's complement)
    #H5T_STD_I8LE8-bit little-endian signed integer (2's complement)
    #H5T_STD_I16BE16-bit big-endian signed integer (2's complement)
    #H5T_STD_I16LE16-bit little-endian signed integer (2's complement)
    #H5T_STD_I32BE32-bit big-endian signed integer (2's complement)
    #H5T_STD_I32LE32-bit little-endian signed integer (2's complement)
    #H5T_STD_I64BE64-bit big-endian signed integer (2's complement)
    #H5T_STD_I64LE64-bit little-endian signed integer (2's complement)
    #H5T_STD_U8BE8-bit big-endian unsigned integer
    #H5T_STD_U8LE8-bit little-endian unsigned integer
    #H5T_STD_U16BE16-bit big-endian unsigned integer
    #H5T_STD_U16LE16-bit little-endian unsigned integer
    #H5T_STD_U32BE32-bit big-endian unsigned integer
    #H5T_STD_U32LE32-bit little-endian unsigned integer
    #H5T_STD_U64BE64-bit big-endian unsigned integer
    #H5T_STD_U64LE64-bit little-endian unsigned integer
    #H5T_STD_B8BE8-bit big-endian bitfield
    #H5T_STD_B8LE8-bit little-endian bitfield
    #H5T_STD_B16BE16-bit big-endian bitfield
    #H5T_STD_B16LE16-bit little-endian bitfield
    #H5T_STD_B32BE32-bit big-endian bitfield
    #H5T_STD_B32LE32-bit little-endian bitfield
    #H5T_STD_B64BE64-bit big-endian bitfield
    #H5T_STD_B64LE64-bit little-endian bitfield
    #H5T_STD_REF_OBJObject reference
    #H5T_STD_REF_DSETREGDataset region reference
    #H5T_STD_REFGeneric reference
    +//! [predefined_std_datatypes_table] + * + * +//! [predefined_unix_datatypes_table] + + + + + + + + + + + + + + + + + + + + + + +
    Predefined UNIX-specific Datatypes
    DatatypeDescription
    #H5T_UNIX_D32BE32-bit big-endian
    #H5T_UNIX_D32LE32-bit little-endian
    #H5T_UNIX_D64BE64-bit big-endian
    #H5T_UNIX_D64LE64-bit little-endian
    +//! [predefined_unix_datatypes_table] + * + * +//! [predefined_string_datatypes_table] + + + + + + + + + + + + + + + +
    Predefined String Datatypes
    DatatypeDescription
    #H5T_C_S1String datatype in C (size defined in bytes rather than in bits)
    #H5T_FORTRAN_S1String datatype in Fortran (as defined for the HDF5 C library)
    +//! [predefined_string_datatypes_table] + * + * +//! [predefined_intel_datatypes_table] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Predefined Intel-specific Datatypes
    DatatypeDescription
    #H5T_INTEL_I88-bit little-endian signed integer (2's complement)
    #H5T_INTEL_I1616-bit little-endian signed integer (2's complement)
    #H5T_INTEL_I3232-bit little-endian signed integer (2's complement)
    #H5T_INTEL_I6464-bit little-endian signed integer (2's complement)
    #H5T_INTEL_U88-bit little-endian unsigned integer
    #H5T_INTEL_U1616-bit little-endian unsigned integer
    #H5T_INTEL_U3232-bit little-endian unsigned integer
    #H5T_INTEL_U6464-bit little-endian unsigned integer
    #H5T_INTEL_B88-bit little-endian bitfield
    #H5T_INTEL_B1616-bit little-endian bitfield
    #H5T_INTEL_B3232-bit little-endian bitfield
    #H5T_INTEL_B6464-bit little-endian bitfield
    #H5T_INTEL_F3232-bit little-endian IEEE floating point
    #H5T_INTEL_F6464-bit little-endian IEEE floating point
    +//! [predefined_intel_datatypes_table] + * + * +//! [predefined_dec_datatypes_table] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Predefined DEC Alpha-specific Datatypes
    DatatypeDescription
    #H5T_ALPHA_I88-bit little-endian signed integer (2's complement)
    #H5T_ALPHA_I1616-bit little-endian signed integer (2's complement)
    #H5T_ALPHA_I3232-bit little-endian signed integer (2's complement)
    #H5T_ALPHA_I6464-bit little-endian signed integer (2's complement)
    #H5T_ALPHA_U88-bit little-endian unsigned integer
    #H5T_ALPHA_U1616-bit little-endian unsigned integer
    #H5T_ALPHA_U3232-bit little-endian unsigned integer
    #H5T_ALPHA_U6464-bit little-endian unsigned integer
    #H5T_ALPHA_B88-bit little-endian bitfield
    #H5T_ALPHA_B1616-bit little-endian bitfield
    #H5T_ALPHA_B3232-bit little-endian bitfield
    #H5T_ALPHA_B6464-bit little-endian bitfield
    #H5T_ALPHA_F3232-bit little-endian IEEE floating point
    #H5T_ALPHA_F6464-bit little-endian IEEE floating point
    +//! [predefined_dec_datatypes_table] + * + * +//! [predefined_openvms_datatypes_table] + + + + + + + + + + + + + + +
    Predefined OpenVMS DEC Alpha-specific Datatypes
    DatatypeDescription
    #H5T_VAX_F3232-bit floating point (Corresponds to F_Floating type)
    #H5T_VAX_F6464-bit floating point (Corresponds to G_Floating type)
    +//! [predefined_openvms_datatypes_table] + * + * +//! [predefined_mips_datatypes_table] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Predefined MIPS-specific Datatypes
    DatatypeDescription
    #H5T_MIPS_I88-bit big-endian signed integer (2's complement)
    #H5T_MIPS_I1616-bit big-endian signed integer (2's complement)
    #H5T_MIPS_I3232-bit big-endian signed integer (2's complement)
    #H5T_MIPS_I6464-bit big-endian signed integer (2's complement)
    #H5T_MIPS_U88-bit big-endian unsigned integer
    #H5T_MIPS_U1616-bit big-endian unsigned integer
    #H5T_MIPS_U3232-bit big-endian unsigned integer
    #H5T_MIPS_U6464-bit big-endian unsigned integer
    #H5T_MIPS_B88-bit big-endian bitfield
    #H5T_MIPS_B1616-bit big-endian bitfield
    #H5T_MIPS_B3232-bit big-endian bitfield
    #H5T_MIPS_B6464-bit big-endian bitfield
    #H5T_MIPS_F3232-bit big-endian IEEE floating point
    #H5T_MIPS_F6464-bit big-endian IEEE floating point
    +//! [predefined_mips_datatypes_table] + * + * +//! [predefined_native_datatypes_table] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Predefined Native Datatypes
    DatatypeDescription
    #H5T_NATIVE_CHARC-style char
    #H5T_NATIVE_SCHARC-style signed char
    #H5T_NATIVE_UCHARC-style unsigned char
    #H5T_NATIVE_SHORTC-style short
    #H5T_NATIVE_USHORTC-style unsigned short
    #H5T_NATIVE_INTC-style int
    #H5T_NATIVE_UINTC-style unsigned int
    #H5T_NATIVE_LONGC-style long
    #H5T_NATIVE_ULONGC-style unsigned long
    #H5T_NATIVE_LLONGC-style long long
    #H5T_NATIVE_ULLONGC-style unsigned long long
    #H5T_NATIVE_FLOATC-style float
    #H5T_NATIVE_DOUBLEC-style double
    #H5T_NATIVE_LDOUBLEC-style long double
    #H5T_NATIVE_B88-bit bitfield based on native types
    #H5T_NATIVE_B1616-bit bitfield based on native types
    #H5T_NATIVE_B3232-bit bitfield based on native types
    #H5T_NATIVE_B6464-bit bitfield based on native types
    #H5T_NATIVE_OPAQUEopaque unit based on native types
    #H5T_NATIVE_HADDRaddress type based on native types
    #H5T_NATIVE_HSIZEsize type based on native types
    #H5T_NATIVE_HSSIZEsigned size type based on native types
    #H5T_NATIVE_HERRerror code type based on native types
    #H5T_NATIVE_HBOOLBoolean type based on native types
    +//! [predefined_native_datatypes_table] + * + * +//! [predefined_c9x_datatypes_table] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Predefined ANSI C9x-specific Native Integer Datatypes
    DatatypeDescription
    #H5T_NATIVE_INT88-bit signed integer (2's complement)
    #H5T_NATIVE_UINT88-bit unsigned integer
    #H5T_NATIVE_INT_LEAST88-bit signed integer (2's complement) with storage to use least amount of space
    #H5T_NATIVE_UINT_LEAST88-bit unsigned integer with storage to use least amount of space
    #H5T_NATIVE_INT_FAST88-bit signed integer (2's complement) with storage to maximize performance
    #H5T_NATIVE_UINT_FAST88-bit unsigned integer with storage to maximize performance
    #H5T_NATIVE_INT1616-bit signed integer (2's complement)
    #H5T_NATIVE_UINT1616-bit unsigned integer
    #H5T_NATIVE_INT_LEAST1616-bit signed integer (2's complement) with storage to use least amount of space
    #H5T_NATIVE_UINT_LEAST1616-bit unsigned integer with storage to use least amount of space
    #H5T_NATIVE_INT_FAST1616-bit signed integer (2's complement) with storage to maximize performance
    #H5T_NATIVE_UINT_FAST1616-bit unsigned integer with storage to maximize performance
    #H5T_NATIVE_INT3232-bit signed integer (2's complement)
    #H5T_NATIVE_UINT3232-bit unsigned integer
    #H5T_NATIVE_INT_LEAST3232-bit signed integer (2's complement) with storage to use least amount of space
    #H5T_NATIVE_UINT_LEAST3232-bit unsigned integer with storage to use least amount of space
    #H5T_NATIVE_INT_FAST3232-bit signed integer (2's complement) with storage to maximize performance
    #H5T_NATIVE_UINT_FAST3232-bit unsigned integer with storage to maximize performance
    #H5T_NATIVE_INT6464-bit signed integer (2's complement)
    #H5T_NATIVE_UINT6464-bit unsigned integer
    #H5T_NATIVE_INT_LEAST6464-bit signed integer (2's complement) with storage to use least amount of space
    #H5T_NATIVE_UINT_LEAST6464-bit unsigned integer with storage to use least amount of space
    #H5T_NATIVE_INT_FAST6464-bit signed integer (2's complement) with storage to maximize performance
    #H5T_NATIVE_UINT_FAST6464-bit unsigned integer with storage to maximize performance
    +//! [predefined_c9x_datatypes_table] + * + */ diff --git a/doxygen/examples/tables/propertyLists.dox b/doxygen/examples/tables/propertyLists.dox new file mode 100644 index 00000000000..375fd509702 --- /dev/null +++ b/doxygen/examples/tables/propertyLists.dox @@ -0,0 +1,955 @@ +/** Property List Tables + * +//! [plcr_table] + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Property list class root functions (H5P)
    FunctionPurpose
    #H5PcloseTerminates access to a property list.
    #H5PcopyCopies an existing property list to create a new property list.
    #H5PcreateCreates a new property list as an instance of a property list class.
    #H5Pencode/#H5PdecodeEncodes/decodes property list into/from a binary object buffer.
    #H5Pget_classReturns the property list class identifier for a property list
    +//! [plcr_table] + * +//! [plcra_table] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Property list class root (Advanced) functions (H5P)
    FunctionPurpose
    #H5Pclose_classCloses an existing property list class.
    #H5Pcopy_propCopies a property from one list or class to another.
    #H5Pcreate_classCreates a new property list class.
    #H5PequalCompares two property lists or classes for equality.
    #H5PexistQueries whether a property name exists in a property list or class.
    #H5Pget_class_nameRetrieves the name of a class.
    #H5Pget_class_parentRetrieves the parent class of a property class.
    #H5Pget_npropsQueries the number of properties in a property list or class.
    #H5Pget_sizeQueries the size of a property value in bytes.
    #H5PinsertRegisters a temporary property with a property list.
    #H5Pisa_classDetermines whether a property list is a member of a class.
    #H5PiterateIterates over properties in a property class or list
    #H5Pregister/#H5PunregisterRegisters/removes a permanent property with/from a property list class
    #H5PremoveRemoves a property from a property list.
    #H5Pset/#H5PgetSets/queries a property list value
    +//! [plcra_table] + * +//! [fcpl_table] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    File creation property list functions (H5P)
    FunctionPurpose
    #H5Pset_userblock/#H5Pget_userblockSets/retrieves size of userblock.
    #H5Pset_sizes/#H5Pget_sizesSets/retrieves byte size of offsets and lengths used to address objects in HDF5 file.
    #H5Pset_sym_k/#H5Pget_sym_kSets/retrieves size of parameters used to control symbol table nodes.
    #H5Pset_istore_k/#H5Pget_istore_kSets/retrieves size of parameter used to control B-trees for indexing chunked datasets.
    #H5Pset_file_space_page_size/#H5Pget_file_space_page_sizeSets or retrieves the file space page size used in paged aggregation and paged buffering.
    #H5Pset_file_space_strategy/#H5Pget_file_space_strategySets or retrieves the file space handling strategy, the persisting free-space and the free-space section size.
    #H5Pset_shared_mesg_nindexes/#H5Pget_shared_mesg_nindexesSets or retrieves number of shared object header message indexes in file +creation property list.
    #H5Pset_shared_mesg_indexConfigures the specified shared object header message index.
    #H5Pget_shared_mesg_indexRetrieves the configuration settings for a shared message index.
    #H5Pset_shared_mesg_phase_change/#H5Pget_shared_mesg_phase_changeSets or retrieves shared object header message storage phase change thresholds.
    #H5Pget_version
    +//! [fcpl_table] + * +//! [fapl_table] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    File access property list functions (H5P)
    FunctionPurpose
    #H5Pset_alignment/#H5Pget_alignmentSets/retrieves alignment properties.
    #H5Pset_cache/#H5Pget_cacheSets/retrieves metadata cache and raw data chunk cache parameters.
    #H5Pset_core_write_tracking/#H5Pget_core_write_trackingSets/retrieves write tracking information for core driver.
    #H5Pset_elink_file_cache_size/#H5Pget_elink_file_cache_sizeSets/retrieves the size of the external link open file cache from the specified +file access property list.
    #H5Pset_evict_on_close/#H5Pget_evict_on_closeSet/get the file access property list setting that determines whether an HDF5 object will be evicted from the library's metadata cache when it is closed.
    #H5Pset_gc_references/#H5Pget_gc_referencesSets/retrieves garbage collecting references flag.
    #H5Pset_family_offsetSets offset property for low-level access to a file in a family of files.
    #H5Pget_family_offsetRetrieves a data offset from the file access property list.
    #H5Pset_fclose_degree/#H5Pget_fclose_degreeSets/retrieves file close degree property.
    #H5Pset_file_imageSets an initial file image in a memory buffer.
    #H5Pget_file_imageRetrieves a copy of the file image designated as the initial content and structure of a file.
    #H5Pset_file_image_callbacks/#H5Pget_file_image_callbacksSets/gets the callbacks for working with file images.
    #H5Pset_file_locking/#H5Pget_file_lockingSets/retrieves file locking property values.
    #H5Pset_meta_block_size/#H5Pget_meta_block_sizeSets the minimum metadata blocksize or retrieves the current metadata block size setting.
    #H5Pset_metadata_read_attempts/#H5Pget_metadata_read_attemptsSets/gets the number of read attempts from a file access property list.
    #H5Pset_mdc_config/#H5Pget_mdc_configSet/get the initial metadata cache configuration in the indicated file access property list.
    #H5Pset_mdc_image_config/#H5Pget_mdc_image_configSet/get the metadata cache image option for a file access property list.
    #H5Pset_mdc_log_options/#H5Pget_mdc_log_optionsSet/get the metadata cache logging options.
    #H5Pset_multi_type/#H5Pget_multi_typeSets/gets the type of data property for the MULTI driver.
    #H5Pset_object_flush_cb/#H5Pget_object_flush_cbSet/get the object flush property values from the file access property list.
    #H5Pset_page_buffer_size/#H5Pget_page_buffer_sizeSet/get the maximum size for the page buffer.
    #H5Pset_sieve_buf_size/#H5Pget_sieve_buf_sizeSets/retrieves maximum size of data sieve buffer.
    #H5Pset_libver_boundsSets bounds on library versions, and indirectly format versions, to be used +when creating objects.
    #H5Pget_libver_boundsRetrieves library version bounds settings that indirectly control the format +versions used when creating objects.
    #H5Pset_small_data_block_sizeSets the size of a contiguous block reserved for small data.
    #H5Pget_small_data_block_sizeRetrieves the current small data block size setting.
    #H5Pset_volSets the file VOL connector for a file access property list.
    #H5Pget_vol_cap_flagsRetrieves the capability flags for the VOL connector that will be used with a file access property list.
    #H5Pget_vol_idRetrieves the identifier of the current VOL connector.
    #H5Pget_vol_infoRetrieves a copy of the VOL information for a connector.
    #H5Pset_mpi_params/#H5Pget_mpi_paramsSets/retrieves the MPI communicator and info.
    #H5Pset_coll_metadata_write/#H5Pget_coll_metadata_writeSets/retrieves metadata write mode setting.
    +//! [fapl_table] + * +//! [fd_pl_table] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    File driver property list functions (H5P)
    FunctionPurpose
    #H5Pset_driverSets a file driver.
    #H5Pget_driverReturns the identifier for the driver used to create a file.
    #H5Pget_driver_infoReturns a pointer to file driver information.
    #H5Pset_driver_by_nameSets a file driver according to a given driver name.
    #H5Pset_driver_by_valueSets a file driver according to a given driver value.
    #H5Pget_driver_config_strRetrieves a string representation of the configuration for the driver.
    #H5Pset_fapl_core/#H5Pget_fapl_coreSets the driver for buffered memory files (in RAM) or retrieves information regarding +the driver.
    #H5Pset_fapl_direct/#H5Pget_fapl_directSets up use of the direct I/O driver or retrieves the direct I/O driver settings.
    #H5Pset_fapl_family/#H5Pget_fapl_familySets driver for file families, designed for systems that do not support files +larger than 2 gigabytes, or retrieves information regarding driver.
    #H5Pset_fapl_hdfs/#H5Pget_fapl_hdfsModifies/queries the file driver properties of the HDFS driver.
    #H5Pset_fapl_ioc/#H5Pget_fapl_iocModifies/queries the file driver properties of the I/O concentrator driver.
    #H5Pset_fapl_logSets logging driver.
    #H5Pset_fapl_mirror/#H5Pget_fapl_mirrorModifies/queries the file driver properties of the mirror driver.
    #H5Pset_fapl_mpio/#H5Pget_fapl_mpioSets driver for files on parallel file systems (MPI I/O) or retrieves information +regarding the driver.
    H5Pset_fapl_mpiposix/H5Pget_fapl_mpiposixNo longer available.
    #H5Pset_fapl_multi/#H5Pget_fapl_multiSets driver for multiple files, separating categories of metadata and raw data, +or retrieves information regarding driver.
    #H5Pset_fapl_onion/#H5Pget_fapl_onionModifies/queries the file driver properties of the onion driver.
    #H5Pset_fapl_sec2Sets driver for unbuffered permanent files or retrieves information regarding driver.
    #H5Pset_fapl_splitSets driver for split files, a limited case of multiple files with one metadata file +and one raw data file.
    #H5Pset_fapl_stdioSets driver for buffered permanent files.
    #H5Pset_fapl_subfiling/#H5Pget_fapl_subfilingModifies/queries the file driver properties of the subfiling driver.
    #H5Pset_fapl_windowsSets the Windows I/O driver.
    #H5Pset_multi_typeSpecifies type of data to be accessed via the MULTI driver enabling more direct access.
    #H5Pget_multi_typeRetrieves type of data property for MULTI driver.
    +//! [fd_pl_table] + * +//! [dcpl_table] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Dataset creation property list functions (H5P)
    FunctionPurpose
    #H5Pset_layoutSets the type of storage used to store the raw data for a dataset.
    #H5Pget_layoutReturns the layout of the raw data for a dataset.
    #H5Pset_chunkSets the size of the chunks used to store a chunked layout dataset.
    #H5Pget_chunkRetrieves the size of chunks for the raw data of a chunked layout dataset.
    #H5Pset_chunk_opts/#H5Pget_chunk_optsSets/gets the edge chunk option setting from a dataset creation property list.
    #H5Pset_deflateSets compression method and compression level.
    #H5Pset_fill_valueSets the fill value for a dataset.
    #H5Pget_fill_valueRetrieves a dataset fill value.
    #H5Pfill_value_definedDetermines whether the fill value is defined.
    #H5Pset_fill_timeSets the time when fill values are written to a dataset.
    #H5Pget_fill_timeRetrieves the time when fill values are written to a dataset.
    #H5Pset_alloc_timeSets the timing for storage space allocation.
    #H5Pget_alloc_timeRetrieves the timing for storage space allocation.
    #H5Pset_filterAdds a filter to the filter pipeline.
    #H5Pall_filters_availVerifies that all required filters are available.
    #H5Pget_nfiltersReturns the number of filters in the pipeline.
    #H5Pget_filterReturns information about a filter in a pipeline. +The C function is a macro: \see \ref api-compat-macros.
    #H5Pget_filter_by_idReturns information about the specified filter. +The C function is a macro: \see \ref api-compat-macros.
    #H5Pmodify_filterModifies a filter in the filter pipeline.
    #H5Premove_filterDeletes one or more filters in the filter pipeline.
    #H5Pset_fletcher32Sets up use of the Fletcher32 checksum filter.
    #H5Pset_nbitSets up use of the n-bit filter.
    #H5Pset_scaleoffsetSets up use of the scale-offset filter.
    #H5Pset_shuffleSets up use of the shuffle filter.
    #H5Pset_szipSets up use of the Szip compression filter.
    #H5Pset_externalAdds an external file to the list of external files.
    #H5Pget_external_countReturns the number of external files for a dataset.
    #H5Pget_externalReturns information about an external file.
    #H5Pset_char_encodingSets the character encoding used to encode a string. Use to set ASCII or UTF-8 character +encoding for object names.
    #H5Pget_char_encodingRetrieves the character encoding used to create a string.
    #H5Pset_virtualSets the mapping between virtual and source datasets.
    #H5Pget_virtual_countGets the number of mappings for the virtual dataset.
    #H5Pget_virtual_dsetnameGets the name of a source dataset used in the mapping.
    #H5Pget_virtual_filenameGets the filename of a source dataset used in the mapping.
    #H5Pget_virtual_srcspaceGets a dataspace identifier for the selection within the source dataset used in the mapping.
    #H5Pget_virtual_vspaceGets a dataspace identifier for the selection within the virtual dataset used in the mapping.
    #H5Pset_dset_no_attrs_hint/#H5Pget_dset_no_attrs_hintSets/gets the flag to create minimized dataset object headers.
    +//! [dcpl_table] + * +//! [dapl_table] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Dataset access property list functions (H5P)
    FunctionPurpose
    #H5Pset_bufferSets type conversion and background buffers.
    #H5Pget_bufferReads buffer settings.
    #H5Pset_append_flush/#H5Pget_append_flushSets/gets the values of the append property that is set up in the dataset access property list.
    #H5Pset_chunk_cache/#H5Pget_chunk_cacheSets/gets the raw data chunk cache parameters.
    #H5Pset_efile_prefix/#H5Pget_efile_prefixSets/gets the prefix for external raw data storage files as set in the dataset access property list.
    #H5Pset_virtual_prefix/#H5Pget_virtual_prefixSets/gets the prefix to be applied to VDS source file paths.
    #H5Pset_virtual_printf_gap/#H5Pget_virtual_printf_gapSets/gets the maximum number of missing source files and/or datasets with the printf-style names when getting the extent for an unlimited virtual dataset.
    #H5Pset_virtual_view/#H5Pget_virtual_viewSets/gets the view of the virtual dataset (VDS) to include or exclude missing mapped elements.
    +//! [dapl_table] + * +//! [dxpl_table] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Data transfer property list functions (H5P)
    C FunctionPurpose
    #H5Pset_btree_ratios/#H5Pget_btree_ratiosSets/gets B-tree split ratios for a dataset transfer property list.
    #H5Pset_bufferMaximum size for the type conversion buffer and the background buffer. May also supply +pointers to application-allocated buffers.
    #H5Pset_data_transform/#H5Pget_data_transformSets/gets a data transform expression.
    #H5Pset_dataset_io_hyperslab_selectionSets a hyperslab file selection for a dataset I/O operation.
    #H5Pset_edc_check/#H5Pget_edc_checkSets/gets whether to enable error-detection when reading a dataset.
    #H5Pset_hyper_vector_sizeSets the number of "I/O vectors" (offset and length pairs) which are to be +accumulated in memory before being issued to the lower levels +of the library for reading or writing the actual data.
    #H5Pset_filter_callbackSets user-defined filter callback function.
    #H5Pset_hyper_vector_size/#H5Pget_hyper_vector_sizeSets/gets number of I/O vectors to be read/written in hyperslab I/O.
    #H5Pset_type_conv_cb/#H5Pget_type_conv_cbSets/gets user-defined datatype conversion callback function.
    #H5Pset_vlen_mem_manager/#H5Pget_vlen_mem_managerSets/gets the memory manager for variable-length datatype allocation in #H5Dread and +#H5Dvlen_reclaim.
    #H5Pset_dxpl_mpio/#H5Pget_dxpl_mpioSets/gets data transfer mode.
    #H5Pset_dxpl_mpio_chunk_optSets a flag specifying linked-chunk I/O or multi-chunk I/O.
    #H5Pset_dxpl_mpio_chunk_opt_numSets a numeric threshold for linked-chunk I/O.
    #H5Pset_dxpl_mpio_chunk_opt_ratioSets a ratio threshold for collective I/O.
    #H5Pset_dxpl_mpio_collective_optSets a flag governing the use of independent versus collective I/O.
    #H5Pget_mpio_actual_chunk_opt_modeGets the type of chunk optimization that HDF5 actually performed on the last parallel I/O call.
    #H5Pget_mpio_actual_io_modeGets the type of I/O that HDF5 actually performed on the last parallel I/O call.
    #H5Pget_mpio_no_collective_causeGets local and global causes that broke collective I/O on the last parallel I/O call.
    H5Pset_preserve/H5Pget_preserveNo longer available, deprecated as it no longer has any effect.
    +//! [dxpl_table] + * +//! [gcpl_table] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Group creation property list functions (H5P)
    FunctionPurpose
    #H5Pall_filters_availVerifies that all required filters are available.
    #H5Pget_filterReturns information about a filter in a pipeline. The +C function is a macro: \see \ref api-compat-macros.
    #H5Pget_filter_by_idReturns information about the specified filter. The +C function is a macro: \see \ref api-compat-macros.
    #H5Pget_nfiltersReturns the number of filters in the pipeline.
    #H5Pmodify_filterModifies a filter in the filter pipeline.
    #H5Premove_filterDeletes one or more filters in the filter pipeline.
    #H5Pset_deflateSets the deflate (GNU gzip) compression method and compression level.
    #H5Pset_filterAdds a filter to the filter pipeline.
    #H5Pset_fletcher32Sets up use of the Fletcher32 checksum filter.
    #H5Pset_local_heap_size_hint/#H5Pget_local_heap_size_hintSets/gets the anticipated maximum size of a local heap.
    #H5Pset_link_phase_changeSets the parameters for conversion between compact and dense groups.
    #H5Pget_link_phase_changeQueries the settings for conversion between compact and dense groups.
    #H5Pset_est_link_infoSets estimated number of links and length of link names in a group.
    #H5Pget_est_link_infoQueries data required to estimate required local heap or object header size.
    #H5Pset_nlinksSets maximum number of soft or user-defined link traversals.
    #H5Pget_nlinksRetrieves the maximum number of link traversals.
    #H5Pset_link_creation_orderSets creation order tracking and indexing for links in a group.
    #H5Pget_link_creation_orderQueries whether link creation order is tracked and/or indexed in a group.
    #H5Pset_char_encodingSets the character encoding used to encode a string. Use to set ASCII or UTF-8 character +encoding for object names.
    #H5Pget_char_encodingRetrieves the character encoding used to create a string.
    +//! [gcpl_table] + * +//! [gapl_table] + + + + + + + + + + +
    Group access property list functions (H5P)
    FunctionPurpose
    #H5Pset_all_coll_metadata_ops/#H5Pget_all_coll_metadata_opsSets/gets metadata I/O mode for read operations.
    +//! [gapl_table] + * +//! [lapl_table] + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Link access property list functions (H5P)
    FunctionPurpose
    #H5Pset_elink_cb/#H5Pget_elink_cbSets/gets the external link traversal callback function.
    #H5Pset_elink_acc_flags/#H5Pget_elink_acc_flagsSets/gets the external link traversal file access flag.
    #H5Pset_elink_fapl/#H5Pget_elink_faplSets/gets a file access property list for use in accessing a file pointed to by an external link.
    #H5Pset_elink_prefix/#H5Pget_elink_prefixSets/gets prefix to be applied to external link paths.
    #H5Pset_nlinks/#H5Pget_nlinksSets/gets maximum number of soft or user-defined link traversals.
    +//! [lapl_table] + * +//! [ocpl_table] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Object creation property list functions (H5P)
    FunctionPurpose
    #H5Pset_attr_creation_order/#H5Pget_attr_creation_orderSets/gets tracking and indexing of attribute creation order.
    #H5Pset_attr_phase_change/#H5Pget_attr_phase_changeSets/gets attribute storage phase change thresholds.
    #H5Pset_filter/#H5Pget_filterAdds/gets a filter to/from the filter pipeline.
    #H5Pget_filter_by_idReturns information about the specified filter.
    #H5Pget_nfiltersReturns the number of filters in the pipeline.
    #H5Pset_obj_track_times/#H5Pget_obj_track_timesSets/gets the recording of times associated with an object.
    #H5Pmodify_filterModifies a filter in the filter pipeline.
    #H5Premove_filterDelete one or more filters in the filter pipeline.
    #H5Pset_fletcher32Sets up use of the Fletcher32 checksum filter.
    +//! [ocpl_table] + * +//! [ocpypl_table] + + + + + + + + + + + + + + + + + + + + + + +
    Object copy property list functions (H5P)
    FunctionPurpose
    #H5Padd_merge_committed_dtype_pathAdds a path to the list of paths that will be searched in the destination file for a matching committed datatype.
    #H5Pfree_merge_committed_dtype_pathsClears the list of paths stored in the object copy property list.
    #H5Pset_copy_object/#H5Pget_copy_objectSets/gets the properties to be used when an object is copied.
    #H5Pset_mcdt_search_cb/#H5Pget_mcdt_search_cbSets/gets the callback function that H5Ocopy() will invoke before searching for a matching committed datatype.
    +//! [ocpypl_table] + * +//! [strcpl_table] + + + + + + + + + + +
    String creation property list functions (H5P)
    FunctionPurpose
    #H5Pset_char_encoding/#H5Pget_char_encodingSets/gets the character encoding used to encode link and attribute names.
    +//! [strcpl_table] + * +//! [lcpl_table] + + + + + + + + + + +
    Link creation property list functions (H5P)
    FunctionPurpose
    #H5Pset_create_intermediate_group/#H5Pget_create_intermediate_groupSpecifies/retrieves whether to create missing intermediate groups.
    +//! [lcpl_table] + * +//! [acpl_table] + + + + + + + + + + +
    Attribute creation property list functions (H5P)
    FunctionPurpose
    #H5Pset_char_encoding/#H5Pget_char_encodingSets/gets the character encoding used to encode link and attribute names.
    +//! [acpl_table] + * + */ + \ No newline at end of file diff --git a/doxygen/examples/tables/volAPIs.dox b/doxygen/examples/tables/volAPIs.dox new file mode 100644 index 00000000000..6b9df9b8f08 --- /dev/null +++ b/doxygen/examples/tables/volAPIs.dox @@ -0,0 +1,637 @@ +/** VOL API List + * +//! [vol_native_table] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Alphabetical list of HDF5 API calls specific to the native VOL connector
    APIDescription
    #H5Aget_num_attrsdeprecated
    #H5Aiterate1deprecated
    #H5Dchunk_iter
    H5DdebugInternal API routines
    H5Dformat_convertInternal API routines
    H5Dget_chunk_index_typeInternal API routines
    #H5Dget_chunk_info
    #H5Dget_chunk_info_by_coord
    #H5Dget_chunk_storage_size
    #H5Dget_num_chunks
    #H5Dget_offset
    #H5Dread_chunk
    #H5Dwrite_chunk
    H5FD*
    #H5Fclear_elink_file_cache
    H5Fformat_convertInternal API routines
    #H5Fget_dset_no_attrs_hint
    #H5Fget_eoa
    #H5Fget_file_image
    #H5Fget_filesize
    #H5Fget_free_sections
    #H5Fget_freespace
    #H5Fget_info1deprecated
    #H5Fget_info2
    #H5Fget_mdc_config
    #H5Fget_mdc_hit_rate
    #H5Fget_mdc_image_info
    #H5Fget_mdc_logging_status
    #H5Fget_mdc_size
    #H5Fget_metadata_read_retry_info
    #H5Fget_mpi_atomicity
    #H5Fget_page_buffering_stats
    #H5Fget_vfd_handle
    #H5Fincrement_filesize
    #H5Fis_hdf5deprecated
    #H5Freset_mdc_hit_rate_stats
    #H5Freset_page_buffering_stats
    #H5Fset_dset_no_attrs_hint
    #H5Fset_latest_formatdeprecated
    #H5Fset_libver_bounds
    #H5Fset_mdc_config
    #H5Fset_mpi_atomicity
    #H5Fstart_mdc_logging
    #H5Fstart_swmr_write
    #H5Fstop_mdc_logging
    #H5Gget_commentdeprecated
    #H5Giteratedeprecated
    #H5Gget_info
    #H5Gget_info_by_name
    #H5Gget_info_by_idx
    #H5Gget_objinfodeprecated
    #H5Gget_objname_by_idxdeprecated
    #H5Gget_objtype_by_idxdeprecated
    #H5Gset_commentdeprecated
    #H5Lget_info1deprecated
    #H5Lget_info_by_idx1deprecated
    #H5Literate1deprecated
    #H5Literate_by_name1deprecated
    #H5Lvisit1deprecated
    #H5Lvisit_by_name1deprecated
    #H5Oare_mdc_flushes_disabled
    #H5Odisable_mdc_flushes
    #H5Oenable_mdc_flushes
    #H5Oget_comment
    #H5Oget_comment_by_name
    #H5Oget_info_by_idx1deprecated
    #H5Oget_info_by_idx2deprecated
    #H5Oget_info_by_name1deprecated
    #H5Oget_info_by_name2deprecated
    #H5Oget_info1deprecated
    #H5Oget_info2deprecated
    #H5Oget_native_info
    #H5Oget_native_info_by_idx
    #H5Oget_native_info_by_name
    #H5Oopen_by_addrdeprecated
    #H5Oset_comment
    #H5Oset_comment_by_name
    #H5Ovisit1deprecated
    #H5Ovisit_by_name1deprecated
    #H5Ovisit2deprecated
    #H5Ovisit_by_name2deprecated
    +//! [vol_native_table] + * + * +//! [vol_independent_table] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Alphabetical list of VOL-independent HDF5 API calls
    APIDescription
    H5*
    #H5Dfill
    #H5Dgather
    #H5Diterate
    #H5Dscatter
    #H5Dvlen_reclaimdeprecated
    #H5Dvlen_get_buf_size
    H5E*
    H5I*
    #H5Lis_registered
    #H5Lregister
    #H5Lunpack_elink_val
    #H5Lunregister
    H5PL*
    H5P*
    H5S*
    H5T*non-committed
    H5VL*
    H5Z*
    +//! [vol_independent_table] + * + * +//! [vol_optional_table] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    List of Native VOL Optional Operation Values By Subclass
    SubclassAPI ReferenceDefinition
    H5VL_SUBCLS_ATTR#H5Aiterate (deprecated routine)#H5VL_NATIVE_ATTR_ITERATE_OLD
    H5VL_SUBCLS_DATASETH5Dformat_convert (internal)#H5VL_NATIVE_DATASET_FORMAT_CONVERT
    H5Dget_chunk_index_type (internal)#H5VL_NATIVE_DATASET_GET_CHUNK_INDEX_TYPE
    #H5Dget_chunk_storage_size#H5VL_NATIVE_DATASET_GET_CHUNK_STORAGE_SIZE
    #H5Dget_num_chunks#H5VL_NATIVE_DATASET_GET_NUM_CHUNKS
    #H5Dget_chunk_info#H5VL_NATIVE_DATASET_GET_CHUNK_INFO_BY_IDX
    #H5Dget_chunk_info_by_coord#H5VL_NATIVE_DATASET_GET_CHUNK_INFO_BY_COORD
    #H5Dread_chunk#H5VL_NATIVE_DATASET_CHUNK_READ
    #H5Dwrite_chunk#H5VL_NATIVE_DATASET_CHUNK_WRITE
    #H5Dvlen_get_buf_size#H5VL_NATIVE_DATASET_GET_VLEN_BUF_SIZE
    #H5Dget_offset#H5VL_NATIVE_DATASET_GET_OFFSET
    #H5Dchunk_iter#H5VL_NATIVE_DATASET_CHUNK_ITER
    H5VL_SUBCLS_FILE#H5Fclear_elink_file_cache#H5VL_NATIVE_FILE_CLEAR_ELINK_CACHE
    #H5Fget_file_image#H5VL_NATIVE_FILE_GET_FILE_IMAGE
    #H5Fget_free_sections#H5VL_NATIVE_FILE_GET_FREE_SECTIONS
    #H5Fget_freespace#H5VL_NATIVE_FILE_GET_FREE_SPACE
    #H5Fget_info1 / #H5Fget_info2#H5VL_NATIVE_FILE_GET_INFO
    #H5Fget_mdc_config#H5VL_NATIVE_FILE_GET_MDC_CONF
    #H5Fget_mdc_hit_rate#H5VL_NATIVE_FILE_GET_MDC_HR
    #H5Fget_mdc_size#H5VL_NATIVE_FILE_GET_MDC_SIZE
    #H5Fget_filesize#H5VL_NATIVE_FILE_GET_SIZE
    #H5Fget_vfd_handle#H5VL_NATIVE_FILE_GET_VFD_HANDLE
    #H5Freset_mdc_hit_rate_stats#H5VL_NATIVE_FILE_RESET_MDC_HIT_RATE
    #H5Fset_mdc_config#H5VL_NATIVE_FILE_SET_MDC_CONFIG
    #H5Fget_metadata_read_retry_info#H5VL_NATIVE_FILE_GET_METADATA_READ_RETRY_INFO
    #H5Fstart_swmr_write#H5VL_NATIVE_FILE_START_SWMR_WRITE
    #H5Fstart_mdc_logging#H5VL_NATIVE_FILE_START_MDC_LOGGING
    #H5Fstop_mdc_logging#H5VL_NATIVE_FILE_STOP_MDC_LOGGING
    #H5Fget_mdc_logging_status#H5VL_NATIVE_FILE_GET_MDC_LOGGING_STATUS
    H5Fformat_convert (internal)#H5VL_NATIVE_FILE_FORMAT_CONVERT
    #H5Freset_page_buffering_stats#H5VL_NATIVE_FILE_RESET_PAGE_BUFFERING_STATS
    #H5Fget_page_buffering_stats#H5VL_NATIVE_FILE_GET_PAGE_BUFFERING_STATS
    #H5Fget_mdc_image_info#H5VL_NATIVE_FILE_GET_MDC_IMAGE_INFO
    #H5Fget_eoa#H5VL_NATIVE_FILE_GET_EOA
    #H5Fincrement_filesize#H5VL_NATIVE_FILE_INCR_FILESIZE
    #H5Fset_latest_format/#H5Fset_libver_bounds#H5VL_NATIVE_FILE_SET_LIBVER_BOUNDS
    #H5Fget_dset_no_attrs_hint#H5VL_NATIVE_FILE_GET_MIN_DSET_OHDR_FLAG
    #H5Fset_dset_no_attrs_hint#H5VL_NATIVE_FILE_SET_MIN_DSET_OHDR_FLAG
    #H5Fget_mpi_atomicity#H5VL_NATIVE_FILE_GET_MPI_ATOMICITY
    #H5Fset_mpi_atomicity#H5VL_NATIVE_FILE_SET_MPI_ATOMICITY
    Adjust file after open, with wrapping context#H5VL_NATIVE_FILE_POST_OPEN
    H5VL_SUBCLS_GROUP#H5Giterate (deprecated routine)#H5VL_NATIVE_GROUP_ITERATE_OLD
    #H5Gget_objinfo (deprecated routine)#H5VL_NATIVE_GROUP_GET_OBJINFO
    H5VL_SUBCLS_OBJECT#H5Gget_comment, #H5Oget_comment, #H5Oget_comment_by_name#H5VL_NATIVE_OBJECT_GET_COMMENT
    #H5Gset_comment, #H5Oset_comment, #H5Oset_comment_by_name#H5VL_NATIVE_OBJECT_SET_COMMENT
    #H5Odisable_mdc_flushes#H5VL_NATIVE_OBJECT_DISABLE_MDC_FLUSHES
    #H5Oenable_mdc_flushes#H5VL_NATIVE_OBJECT_ENABLE_MDC_FLUSHES
    #H5Oare_mdc_flushes_disabled#H5VL_NATIVE_OBJECT_ARE_MDC_FLUSHES_DISABLED
    #H5Oget_native_info, #H5Oget_native_info_by_idx, #H5Oget_native_info_by_name#H5VL_NATIVE_OBJECT_GET_NATIVE_INFO +
    +//! [vol_optional_table] + * + */ diff --git a/doxygen/hdf5_header.html b/doxygen/hdf5_header.html index 23f41f9b501..36a32653ab9 100644 --- a/doxygen/hdf5_header.html +++ b/doxygen/hdf5_header.html @@ -21,7 +21,7 @@ -
    Please, help us to better know about our user community by answering the following short survey: https://www.hdfgroup.org/website-survey/
    +
    Please, help us to better serve our user community by answering the following short survey: https://www.hdfgroup.org/website-survey/
    diff --git a/doxygen/hdf5doxy_layout.xml b/doxygen/hdf5doxy_layout.xml index f7c47bf667a..d156f2c1785 100644 --- a/doxygen/hdf5doxy_layout.xml +++ b/doxygen/hdf5doxy_layout.xml @@ -4,14 +4,13 @@ - - - - - + + + + diff --git a/src/H5Amodule.h b/src/H5Amodule.h index e3bfe6f209e..0c31f71fad2 100644 --- a/src/H5Amodule.h +++ b/src/H5Amodule.h @@ -364,7 +364,7 @@ * will be ignored by HDF5. * * The use of ASCII or UTF-8 characters is determined by the character encoding property. See - * #H5Pset_char_encoding in the \ref RM. + * #H5Pset_char_encoding in the \ref RM. * *

    No Special I/O or Storage

    * diff --git a/src/H5Dmodule.h b/src/H5Dmodule.h index 00751a91b3f..8d2f23a142b 100644 --- a/src/H5Dmodule.h +++ b/src/H5Dmodule.h @@ -182,250 +182,11 @@ * * * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - *
    Dataset creation property list functions (H5P)
    FunctionPurpose
    #H5Pset_layoutSets the type of storage used to store the raw data for a dataset.
    #H5Pget_layoutReturns the layout of the raw data for a dataset.
    #H5Pset_chunkSets the size of the chunks used to store a chunked layout dataset.
    #H5Pget_chunkRetrieves the size of chunks for the raw data of a chunked layout dataset.
    #H5Pset_deflateSets compression method and compression level.
    #H5Pset_fill_valueSets the fill value for a dataset.
    #H5Pget_fill_valueRetrieves a dataset fill value.
    #H5Pfill_value_definedDetermines whether the fill value is defined.
    #H5Pset_fill_timeSets the time when fill values are written to a dataset.
    #H5Pget_fill_timeRetrieves the time when fill values are written to a dataset.
    #H5Pset_alloc_timeSets the timing for storage space allocation.
    #H5Pget_alloc_timeRetrieves the timing for storage space allocation.
    #H5Pset_filterAdds a filter to the filter pipeline.
    #H5Pall_filters_availVerifies that all required filters are available.
    #H5Pget_nfiltersReturns the number of filters in the pipeline.
    #H5Pget_filterReturns information about a filter in a pipeline. - * The C function is a macro: \see \ref api-compat-macros.
    #H5Pget_filter_by_idReturns information about the specified filter. - * The C function is a macro: \see \ref api-compat-macros.
    #H5Pmodify_filterModifies a filter in the filter pipeline.
    #H5Premove_filterDeletes one or more filters in the filter pipeline.
    #H5Pset_fletcher32Sets up use of the Fletcher32 checksum filter.
    #H5Pset_nbitSets up use of the n-bit filter.
    #H5Pset_scaleoffsetSets up use of the scale-offset filter.
    #H5Pset_shuffleSets up use of the shuffle filter.
    #H5Pset_szipSets up use of the Szip compression filter.
    #H5Pset_externalAdds an external file to the list of external files.
    #H5Pget_external_countReturns the number of external files for a dataset.
    #H5Pget_externalReturns information about an external file.
    #H5Pset_char_encodingSets the character encoding used to encode a string. Use to set ASCII or UTF-8 character - * encoding for object names.
    #H5Pget_char_encodingRetrieves the character encoding used to create a string.
    + * \anchor dcpl_table_tag Dataset creation property list functions (H5P) + * \snippet{doc} tables/propertyLists.dox dcpl_table * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - *
    Dataset access property list functions (H5P)
    FunctionPurpose
    #H5Pset_bufferSets type conversion and background buffers.
    #H5Pget_buffer — Reads buffer settings.
    #H5Pset_chunk_cache — Sets the raw data chunk cache parameters.
    #H5Pget_chunk_cache — Retrieves the raw data chunk cache parameters.
    #H5Pset_edc_check — Sets whether to enable error-detection when reading a dataset.
    #H5Pget_edc_check — Determines whether error-detection is enabled for dataset reads.
    #H5Pset_filter_callback — Sets user-defined filter callback function.
    #H5Pset_data_transform — Sets a data transform expression.
    #H5Pget_data_transform — Retrieves a data transform expression.
    #H5Pset_type_conv_cb — Sets user-defined datatype conversion callback function.
    #H5Pget_type_conv_cb — Gets user-defined datatype conversion callback function.
    #H5Pset_hyper_vector_size — Sets number of I/O vectors to be read/written in hyperslab I/O.
    #H5Pget_hyper_vector_size — Retrieves number of I/O vectors to be read/written in hyperslab I/O.
    #H5Pset_btree_ratios — Sets B-tree split ratios for a dataset transfer property list.
    #H5Pget_btree_ratios — Gets B-tree split ratios for a dataset transfer property list.
    #H5Pset_vlen_mem_manager — Sets the memory manager for variable-length datatype allocation in #H5Dread and #H5Dvlen_reclaim.
    #H5Pget_vlen_mem_manager — Gets the memory manager for variable-length datatype allocation in #H5Dread and #H5Dvlen_reclaim.
    #H5Pset_dxpl_mpio — Sets data transfer mode.
    #H5Pget_dxpl_mpio — Returns the data transfer mode.
    #H5Pset_dxpl_mpio_chunk_opt — Sets a flag specifying linked-chunk I/O or multi-chunk I/O.
    #H5Pset_dxpl_mpio_chunk_opt_num — Sets a numeric threshold for linked-chunk I/O.
    #H5Pset_dxpl_mpio_chunk_opt_ratio — Sets a ratio threshold for collective I/O.
    #H5Pset_dxpl_mpio_collective_opt — Sets a flag governing the use of independent versus collective I/O.
    #H5Pset_multi_type — Sets the type of data property for the MULTI driver.
    #H5Pget_multi_type — Retrieves the type of data property for the MULTI driver.
    #H5Pset_small_data_block_size — Sets the size of a contiguous block reserved for small data.
    #H5Pget_small_data_block_size — Retrieves the current small data block size setting.
    + * \anchor dapl_table_tag Dataset access property list functions (H5P) + * \snippet{doc} tables/propertyLists.dox dapl_table * * \subsection subsec_dataset_program Programming Model for Datasets * This section explains the programming model for datasets. @@ -1106,41 +867,7 @@ * the pipeline processing: the pipeline and filter operations are identical no matter what data access * mechanism is used. * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - *
    I/O file drivers
    File Driver — Description
    #H5FD_CORE — Store in memory (optional backing store to disk file).
    #H5FD_FAMILY — Store in a set of files.
    #H5FD_LOG — Store in logging file.
    #H5FD_MPIO — Store using MPI/IO.
    #H5FD_MULTI — Store in multiple files. There are several options to control layout.
    #H5FD_SEC2 — Serial I/O to file using Unix “section 2” functions.
    #H5FD_STDIO — Serial I/O to file using Unix “stdio” functions.
    + * \snippet{doc} tables/propertyLists.dox lcpl_table * * Each file driver writes/reads contiguous blocks of bytes from a logically contiguous address * space. The file driver is responsible for managing the details of the different physical storage @@ -1157,29 +884,7 @@ * Data transfer properties set optional parameters that control parts of the data pipeline. The * function listing below shows transfer properties that control the behavior of the library. * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - *
    Data transfer property list functions
    C Function — Purpose
    #H5Pset_buffer — Maximum size for the type conversion buffer and the background buffer. May also supply pointers to application-allocated buffers.
    #H5Pset_hyper_vector_size — Sets the number of "I/O vectors" (offset and length pairs) which are to be accumulated in memory before being issued to the lower levels of the library for reading or writing the actual data.
    #H5Pset_btree_ratios — Sets the B-tree split ratios for a dataset transfer property list. The split ratios determine what percent of children go in the first node when a node splits.
    + * \snippet{doc} tables/fileDriverLists.dox file_driver_table * * Some filters and file drivers require or use additional parameters from the application program. * These can be passed in the data transfer property list. The table below shows file driver property @@ -1897,10 +1602,10 @@ allocated if necessary. * byte 0 * * - * ???????? - * ????SPPP - * PPPPPPPP - * PPPP???? + * ???????? + * ????SPPP + * PPPPPPPP + * PPPP???? * * * diff --git a/src/H5Dpublic.h b/src/H5Dpublic.h index db628a3e9f2..7cf4952f3fe 100644 --- a/src/H5Dpublic.h +++ b/src/H5Dpublic.h @@ -464,6 +464,9 @@ H5_DLL hid_t H5Dget_type(hid_t dset_id); * a copy of the dataset creation property list associated with * the dataset specified by \p dset_id. * + * The creation property list identifier should be released with + * H5Pclose() to prevent resource leaks. + * */ H5_DLL hid_t H5Dget_create_plist(hid_t dset_id); @@ -633,6 +636,7 @@ H5_DLL herr_t H5Dget_num_chunks(hid_t dset_id, hid_t fspace_id, hsize_t *nchunks */ H5_DLL herr_t H5Dget_chunk_info_by_coord(hid_t dset_id, const hsize_t *offset, unsigned *filter_mask, haddr_t *addr, hsize_t *size); + /** * -------------------------------------------------------------------------- * \ingroup H5D @@ -656,7 +660,7 @@ H5_DLL herr_t H5Dget_chunk_info_by_coord(hid_t dset_id, const hsize_t *offset, u * Iterate over all chunked datasets and chunks in a file. * \snippet H5D_examples.c H5Ovisit_cb * - * \since 1.12.3, 1.13.0 + * \since 1.12.3 * */ H5_DLL herr_t H5Dchunk_iter(hid_t dset_id, hid_t dxpl_id, H5D_chunk_iter_op_t cb, void *op_data); diff --git a/src/H5ESpublic.h b/src/H5ESpublic.h index 6b8b2a96065..6180487324f 100644 --- a/src/H5ESpublic.h +++ b/src/H5ESpublic.h @@ -2,7 +2,7 @@ * Copyright by The HDF Group. * * All rights reserved. * * * - * This file is part of HDF5. The full HDF5 copyright notice, including * + * This file is part of HDF5. 
The full HDF5 copyright notice, including * * terms governing use, modification, and redistribution, is contained in * * the COPYING file, which can be found at the root of the source code * * distribution tree, or in https://www.hdfgroup.org/licenses. * @@ -28,12 +28,14 @@ /* Public Typedefs */ /*******************/ -/* Asynchronous operation status */ +/** + * Asynchronous operation status + */ typedef enum H5ES_status_t { - H5ES_STATUS_IN_PROGRESS, /* Operation(s) have not yet completed */ - H5ES_STATUS_SUCCEED, /* Operation(s) have completed, successfully */ - H5ES_STATUS_FAIL, /* An operation has completed, but failed */ - H5ES_STATUS_CANCELED /* Operation(s) has been canceled */ + H5ES_STATUS_IN_PROGRESS, /**< Operation(s) have not yet completed */ + H5ES_STATUS_SUCCEED, /**< Operation(s) have completed, successfully */ + H5ES_STATUS_CANCELED, /**< Operation(s) has been canceled */ + H5ES_STATUS_FAIL /**< An operation has completed, but failed */ } H5ES_status_t; /********************/ diff --git a/src/H5FDmpio.h b/src/H5FDmpio.h index ba508ebf86c..a70f34be92f 100644 --- a/src/H5FDmpio.h +++ b/src/H5FDmpio.h @@ -104,10 +104,10 @@ H5_DLL herr_t H5Pset_fapl_mpio(hid_t fapl_id, MPI_Comm comm, MPI_Info info); * \param[out] info MPI-2 info object * \returns \herr_t * - * \details H5Pget_fapl_mpio() returns duplicates of the stored MPI communicator + * \details If the file access property list is set to the #H5FD_MPIO driver, + * H5Pget_fapl_mpio() returns duplicates of the stored MPI communicator * and Info object through the \p comm and \p info pointers, if those - * values are non-null. The file access property list must be set to the - * #H5FD_MPIO driver. + * values are non-null. 
* * Since the MPI communicator and Info object are duplicates of the * stored information, future modifications to the access property list diff --git a/src/H5FDpublic.h b/src/H5FDpublic.h index c66a46cee2e..cd476b966ef 100644 --- a/src/H5FDpublic.h +++ b/src/H5FDpublic.h @@ -18,8 +18,13 @@ #ifndef H5FDpublic_H #define H5FDpublic_H -#include "H5public.h" -#include "H5Fpublic.h" /*for H5F_close_degree_t */ +/* Public headers needed by this file */ +#include "H5public.h" /* Generic Functions */ +#include "H5Fpublic.h" /* Files */ + +/*****************/ +/* Public Macros */ +/*****************/ #define H5_HAVE_VFL 1 /*define a convenient app feature test*/ #define H5FD_VFD_DEFAULT 0 /* Default VFL driver value */ @@ -462,7 +467,26 @@ H5_DLL herr_t H5FDtruncate(H5FD_t *file, hid_t dxpl_id, hbool_t closing); H5_DLL herr_t H5FDlock(H5FD_t *file, hbool_t rw); H5_DLL herr_t H5FDunlock(H5FD_t *file); -/* Allows querying a VFD ID for features before the file is opened */ +/** + * \ingroup H5FD + * + * \brief Allows querying a VFD ID for features before the file is opened + * + * \param[in] driver_id Virtual File Driver (VFD) ID + * \param[out] flags VFD flags supported + * + * \return \herr_t + * + * \details Queries a virtual file driver (VFD) for feature flags. Takes a + * VFD hid_t so it can be used before the file is opened. For example, + * this could be used to check if a VFD supports SWMR. + * + * \note The flags obtained here are just those of the base driver and + * do not take any configuration options (e.g., set via a fapl + * call) into consideration. 
+ * + * \since 1.10.2 + */ H5_DLL herr_t H5FDdriver_query(hid_t driver_id, unsigned long *flags /*out*/); #ifdef __cplusplus diff --git a/src/H5Fmodule.h b/src/H5Fmodule.h index 523d6bf7d54..6939f4ac23d 100644 --- a/src/H5Fmodule.h +++ b/src/H5Fmodule.h @@ -411,204 +411,15 @@ * * * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - *
    File creation property list functions
    Function — Purpose
    #H5Pset_userblock/#H5Pget_userblock — Sets/retrieves size of userblock.
    #H5Pset_sizes/#H5Pget_sizes — Sets/retrieves byte size of offsets and lengths used to address objects in HDF5 file.
    #H5Pset_sym_k/#H5Pget_sym_k — Sets/retrieves size of parameters used to control symbol table nodes.
    #H5Pset_istore_k/#H5Pget_istore_k — Sets/retrieves size of parameter used to control B-trees for indexing chunked datasets.
    #H5Pset_file_image — Sets an initial file image in a memory buffer.
    #H5Pget_file_image — Retrieves a copy of the file image designated as the initial content and structure of a file.
    #H5Pset_shared_mesg_nindexes/#H5Pget_shared_mesg_nindexes — Sets or retrieves number of shared object header message indexes in file creation property list.
    #H5Pset_shared_mesg_index — Configures the specified shared object header message index.
    #H5Pget_shared_mesg_index — Retrieves the configuration settings for a shared message index.
    #H5Pset_shared_mesg_phase_change/#H5Pget_shared_mesg_phase_change — Sets or retrieves shared object header message storage phase change thresholds.
    #H5Pget_version
    + * \anchor fcpl_table_tag File creation property list functions (H5P) + * \snippet{doc} tables/propertyLists.dox fcpl_table * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - *
    File access property list functions
    Function — Purpose
    #H5Pset_alignment/#H5Pget_alignment — Sets/retrieves alignment properties.
    #H5Pset_cache/#H5Pget_cache — Sets/retrieves metadata cache and raw data chunk cache parameters.
    #H5Pset_elink_file_cache_size/#H5Pget_elink_file_cache_size — Sets/retrieves the size of the external link open file cache from the specified file access property list.
    #H5Pset_gc_references/#H5Pget_gc_references — Sets/retrieves garbage collecting references flag.
    #H5Pset_family_offset — Sets offset property for low-level access to a file in a family of files.
    #H5Pget_family_offset — Retrieves a data offset from the file access property list.
    #H5Pset_meta_block_size/#H5Pget_meta_block_size — Sets the minimum metadata blocksize or retrieves the current metadata block size setting.
    #H5Pset_mdc_config — Sets the initial metadata cache configuration in the indicated File Access Property List to the supplied value.
    #H5Pget_mdc_config — Gets the current initial metadata cache configuration from the indicated File Access Property List.
    #H5Pset_sieve_buf_size/#H5Pget_sieve_buf_size — Sets/retrieves maximum size of data sieve buffer.
    #H5Pset_libver_bounds — Sets bounds on library versions, and indirectly format versions, to be used when creating objects.
    #H5Pget_libver_bounds — Retrieves library version bounds settings that indirectly control the format versions used when creating objects.
    #H5Pset_small_data_block_size — Sets the size of a contiguous block reserved for small data.
    #H5Pget_small_data_block_size — Retrieves the current small data block size setting.
    + * \anchor fapl_table_tag File access property list functions (H5P) + * \snippet{doc} tables/propertyLists.dox fapl_table + * + * \anchor fd_pl_table_tag File driver property list functions (H5P) + * \snippet{doc} tables/propertyLists.dox fd_pl_table * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - *
    File driver functions
    Function — Purpose
    #H5Pset_driver — Sets a file driver.
    #H5Pget_driver — Returns the identifier for the driver used to create a file.
    #H5Pget_driver_info — Returns a pointer to file driver information.
    #H5Pset_fapl_core/#H5Pget_fapl_core — Sets the driver for buffered memory files (in RAM) or retrieves information regarding the driver.
    #H5Pset_fapl_direct/#H5Pget_fapl_direct — Sets up use of the direct I/O driver or retrieves the direct I/O driver settings.
    #H5Pset_fapl_family/#H5Pget_fapl_family — Sets driver for file families, designed for systems that do not support files larger than 2 gigabytes, or retrieves information regarding driver.
    #H5Pset_fapl_log — Sets logging driver.
    #H5Pset_fapl_mpio/#H5Pget_fapl_mpio — Sets driver for files on parallel file systems (MPI I/O) or retrieves information regarding the driver.
    H5Pset_fapl_mpiposix/H5Pget_fapl_mpiposix — No longer available.
    #H5Pset_fapl_multi/#H5Pget_fapl_multi — Sets driver for multiple files, separating categories of metadata and raw data, or retrieves information regarding driver.
    #H5Pset_fapl_sec2 — Sets driver for unbuffered permanent files or retrieves information regarding driver.
    #H5Pset_fapl_split — Sets driver for split files, a limited case of multiple files with one metadata file and one raw data file.
    #H5Pset_fapl_stdio — Sets driver for buffered permanent files.
    #H5Pset_fapl_windows — Sets the Windows I/O driver.
    #H5Pset_multi_type — Specifies type of data to be accessed via the MULTI driver enabling more direct access.
    #H5Pget_multi_type — Retrieves type of data property for MULTI driver.
    * * \subsection subsec_file_create Creating or Opening an HDF5 File * This section describes in more detail how to create and how to open files. @@ -865,100 +676,7 @@ * #H5FD_SEC2. Alternative layouts and drivers are designed to suit the needs of a variety of * systems, environments, and applications. The drivers are listed in the table below. * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - *
    Supported file drivers
    Driver Name — Driver Identifier — Description — Related API
    POSIX — #H5FD_SEC2 — This driver uses POSIX file-system functions like read and write to perform I/O to a single, permanent file on local disk with no system buffering. This driver is POSIX-compliant and is the default file driver for all systems. — #H5Pset_fapl_sec2
    Direct — #H5FD_DIRECT — This is the #H5FD_SEC2 driver except data is written to or read from the file synchronously without being cached by the system. — #H5Pset_fapl_direct
    Log — #H5FD_LOG — This is the #H5FD_SEC2 driver with logging capabilities. — #H5Pset_fapl_log
    Windows — #H5FD_WINDOWS — This driver was modified in HDF5-1.8.8 to be a wrapper of the POSIX driver, #H5FD_SEC2. This change should not affect user applications. — #H5Pset_fapl_windows
    STDIO — #H5FD_STDIO — This driver uses functions from the standard C stdio.h to perform I/O to a single, permanent file on local disk with additional system buffering. — #H5Pset_fapl_stdio
    Memory — #H5FD_CORE — With this driver, an application can work with a file in memory for faster reads and writes. File contents are kept in memory until the file is closed. At closing, the memory version of the file can be written back to disk or abandoned. — #H5Pset_fapl_core
    Family — #H5FD_FAMILY — With this driver, the HDF5 file’s address space is partitioned into pieces and sent to separate storage files using an underlying driver of the user’s choice. This driver is for systems that do not support files larger than 2 gigabytes. — #H5Pset_fapl_family
    Multi — #H5FD_MULTI — With this driver, data can be stored in multiple files according to the type of the data. I/O might work better if data is stored in separate files based on the type of data. The Split driver is a special case of this driver. — #H5Pset_fapl_multi
    Split — H5FD_SPLIT — This file driver splits a file into two parts. One part stores metadata, and the other part stores raw data. This splitting a file into two parts is a limited case of the Multi driver. — #H5Pset_fapl_split
    Parallel — #H5FD_MPIO — This is the standard HDF5 file driver for parallel file systems. This driver uses the MPI standard for both communication and file I/O. — #H5Pset_fapl_mpio
    Parallel POSIX — H5FD_MPIPOSIX — This driver is no longer available.
    Stream — H5FD_STREAM — This driver is no longer available.
    + * \snippet{doc} tables/fileDriverLists.dox supported_file_driver_table * * For more information, see the HDF5 Reference Manual entries for the function calls shown in * the column on the right in the table above. diff --git a/src/H5Gmodule.h b/src/H5Gmodule.h index a112a40c04a..3946110936f 100644 --- a/src/H5Gmodule.h +++ b/src/H5Gmodule.h @@ -481,100 +481,7 @@ * * * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - *
    Group creation property list functions
    Function — Purpose
    #H5Pall_filters_avail — Verifies that all required filters are available.
    #H5Pget_filter — Returns information about a filter in a pipeline. The C function is a macro: \see \ref api-compat-macros.
    #H5Pget_filter_by_id — Returns information about the specified filter. The C function is a macro: \see \ref api-compat-macros.
    #H5Pget_nfilters — Returns the number of filters in the pipeline.
    #H5Pmodify_filter — Modifies a filter in the filter pipeline.
    #H5Premove_filter — Deletes one or more filters in the filter pipeline.
    #H5Pset_deflate — Sets the deflate (GNU gzip) compression method and compression level.
    #H5Pset_filter — Adds a filter to the filter pipeline.
    #H5Pset_fletcher32 — Sets up use of the Fletcher32 checksum filter.
    #H5Pset_link_phase_change — Sets the parameters for conversion between compact and dense groups.
    #H5Pget_link_phase_change — Queries the settings for conversion between compact and dense groups.
    #H5Pset_est_link_info — Sets estimated number of links and length of link names in a group.
    #H5Pget_est_link_info — Queries data required to estimate required local heap or object header size.
    #H5Pset_nlinks — Sets maximum number of soft or user-defined link traversals.
    #H5Pget_nlinks — Retrieves the maximum number of link traversals.
    #H5Pset_link_creation_order — Sets creation order tracking and indexing for links in a group.
    #H5Pget_link_creation_order — Queries whether link creation order is tracked and/or indexed in a group.
    #H5Pset_create_intermediate_group — Specifies in the property list whether to create missing intermediate groups.
    #H5Pget_create_intermediate_group — Determines whether the property is set to enable creating missing intermediate groups.
    #H5Pset_char_encoding — Sets the character encoding used to encode a string. Use to set ASCII or UTF-8 character encoding for object names.
    #H5Pget_char_encoding — Retrieves the character encoding used to create a string.
    + * \snippet{doc} tables/propertyLists.dox gcpl_table * * * @@ -1017,7 +924,7 @@ * containing thousands to millions of members. Links are stored in * a fractal heap and indexed with an improved B-tree. * \li The new implementation also enables the use of link names consisting of - * non-ASCII character sets (see H5Pset_char_encoding()) and is + * non-ASCII character sets (see #H5Pset_char_encoding) and is * required for all link types other than hard or soft links, e.g., * external and user-defined links (see the \ref H5L APIs). * diff --git a/src/H5Gpublic.h b/src/H5Gpublic.h index 74f0da7437a..a6633954c55 100644 --- a/src/H5Gpublic.h +++ b/src/H5Gpublic.h @@ -120,7 +120,7 @@ extern "C" { * * \since 1.8.0 * - * \see H5Gopen2(), H5Gclose() + * \see H5Gopen2() * */ H5_DLL hid_t H5Gcreate2(hid_t loc_id, const char *name, hid_t lcpl_id, hid_t gcpl_id, hid_t gapl_id); @@ -167,7 +167,7 @@ H5_DLL hid_t H5Gcreate2(hid_t loc_id, const char *name, hid_t lcpl_id, hid_t gcp * H5Gclose() when the group is no longer needed so that resource * leaks will not develop. * - * \see H5Olink(), H5Dcreate(), \ref api-compat-macros + * \see H5Olink(), H5Gcreate() * * \since 1.8.0 * @@ -199,7 +199,7 @@ H5_DLL hid_t H5Gcreate_anon(hid_t loc_id, hid_t gcpl_id, hid_t gapl_id); * * \since 1.8.0 * - * \see H5Gcreate2(), H5Gclose() + * \see H5Gcreate2() * */ H5_DLL hid_t H5Gopen2(hid_t loc_id, const char *name, hid_t gapl_id); @@ -218,12 +218,10 @@ H5_DLL hid_t H5Gopen2(hid_t loc_id, const char *name, hid_t gapl_id); * property list associated with the group specified by \p group_id. * * The creation property list identifier should be released with - * H5Gclose() to prevent resource leaks. + * H5Pclose() to prevent resource leaks. 
* * \since 1.8.0 * - * \see H5Gcreate2(), H5Gclose() - * */ H5_DLL hid_t H5Gget_create_plist(hid_t group_id); @@ -250,8 +248,6 @@ H5_DLL hid_t H5Gget_create_plist(hid_t group_id); * * \since 1.8.0 * - * \see H5Gcreate2(), H5Gclose() - * */ H5_DLL herr_t H5Gget_info(hid_t loc_id, H5G_info_t *ginfo); @@ -284,8 +280,6 @@ H5_DLL herr_t H5Gget_info(hid_t loc_id, H5G_info_t *ginfo); * * \since 1.8.0 * - * \see H5Gcreate2(), H5Gclose() - * */ H5_DLL herr_t H5Gget_info_by_name(hid_t loc_id, const char *name, H5G_info_t *ginfo, hid_t lapl_id); @@ -331,8 +325,6 @@ H5_DLL herr_t H5Gget_info_by_name(hid_t loc_id, const char *name, H5G_info_t *gi * * \since 1.8.0 * - * \see H5Gcreate2(), H5Gclose() - * */ H5_DLL herr_t H5Gget_info_by_idx(hid_t loc_id, const char *group_name, H5_index_t idx_type, H5_iter_order_t order, hsize_t n, H5G_info_t *ginfo, hid_t lapl_id); @@ -360,8 +352,6 @@ H5_DLL herr_t H5Gget_info_by_idx(hid_t loc_id, const char *group_name, H5_index_ * * \since 1.8.0 * - * \see H5Gcreate2(), H5Gclose() - * */ H5_DLL herr_t H5Gflush(hid_t group_id); @@ -385,8 +375,6 @@ H5_DLL herr_t H5Gflush(hid_t group_id); * * \since 1.8.0 * - * \see H5Gcreate2(), H5Gclose() - * */ H5_DLL herr_t H5Grefresh(hid_t group_id); diff --git a/src/H5Lpublic.h b/src/H5Lpublic.h index 9e3084d2f1b..e487b0fe024 100644 --- a/src/H5Lpublic.h +++ b/src/H5Lpublic.h @@ -733,7 +733,7 @@ H5_DLL htri_t H5Lexists(hid_t loc_id, const char *name, hid_t lapl_id); * \p cset specifies the character set in which the link name is * encoded. Valid values include the following: * \csets - * This value is set with H5Pset_char_encoding(). + * This value is set with #H5Pset_char_encoding. * * \c token is the location that a hard link points to, and * \c val_size is the size of a soft link or user defined link value. 
@@ -888,10 +888,12 @@ H5_DLL ssize_t H5Lget_name_by_idx(hid_t loc_id, const char *group_name, H5_index * not been indexed by the index type, they will first be sorted by * that index then the iteration will begin; if the links have been * so indexed, the sorting step will be unnecessary, so the iteration - * may begin more quickly. + * may begin more quickly. Valid values include the following: + * \indexes * * \p order specifies the order in which objects are to be inspected - * along the index \p idx_type. + * along the index \p idx_type. Valid values include the following: + * \orders * * \p idx_p tracks the iteration and allows an iteration to be * resumed if it was stopped before all members were processed. It is @@ -1680,7 +1682,7 @@ typedef herr_t (*H5L_iterate1_t)(hid_t group, const char *name, const H5L_info1_ * \c cset specifies the character set in which the link name is * encoded. Valid values include the following: * \csets - * This value is set with H5Pset_char_encoding(). + * This value is set with #H5Pset_char_encoding. * * \c address and \c val_size are returned for hard and symbolic * links, respectively. Symbolic links include soft and external links @@ -1796,10 +1798,12 @@ H5_DLL herr_t H5Lget_info_by_idx1(hid_t loc_id, const char *group_name, H5_index * not been indexed by the index type, they will first be sorted by * that index then the iteration will begin; if the links have been * so indexed, the sorting step will be unnecessary, so the iteration - * may begin more quickly. + * may begin more quickly. Valid values include the following: + * \indexes * * \p order specifies the order in which objects are to be inspected - * along the index \p idx_type. + * along the index \p idx_type. Valid values include the following: + * \orders * * \p idx_p tracks the iteration and allows an iteration to be * resumed if it was stopped before all members were processed. 
It is diff --git a/src/H5MMpublic.h b/src/H5MMpublic.h index 70ac6445ea5..d4aba98d330 100644 --- a/src/H5MMpublic.h +++ b/src/H5MMpublic.h @@ -14,8 +14,6 @@ /*------------------------------------------------------------------------- * * Created: H5MMpublic.h - * Jul 10 1997 - * Robb Matzke * * Purpose: Public declarations for the H5MM (memory management) * package. diff --git a/src/H5Mpublic.h b/src/H5Mpublic.h index 25d3058fbd1..24d207c3fef 100644 --- a/src/H5Mpublic.h +++ b/src/H5Mpublic.h @@ -208,6 +208,9 @@ H5_DLL hid_t H5Mget_val_type(hid_t map_id); * \details H5Mget_create_plist() returns an identifier for a copy of the * creation property list for a map object specified by \p map_id. * + * The creation property list identifier should be released with + * H5Pclose() to prevent resource leaks. + * * \since 1.12.0 * */ diff --git a/src/H5Opublic.h b/src/H5Opublic.h index cc131e169c7..a6455920e3f 100644 --- a/src/H5Opublic.h +++ b/src/H5Opublic.h @@ -304,7 +304,7 @@ H5_DLL hid_t H5Oopen_by_token(hid_t loc_id, H5O_token_t token); * * \return \hid_tv{object} * - * \details H5Open_by_idx() opens the nth object in the group specified by \p loc_id + * \details H5Oopen_by_idx() opens the nth object in the group specified by \p loc_id * and \p group_name. * * \p loc_id specifies a location identifier. diff --git a/src/H5PLextern.h b/src/H5PLextern.h index 7f3df5e2f92..d136051beda 100644 --- a/src/H5PLextern.h +++ b/src/H5PLextern.h @@ -2,7 +2,7 @@ * Copyright by The HDF Group. * * All rights reserved. * * * - * This file is part of HDF5. The full HDF5 copyright notice, including * + * This file is part of HDF5. The full HDF5 copyright notice, including * * terms governing use, modification, and redistribution, is contained in * * the COPYING file, which can be found at the root of the source code * * distribution tree, or in https://www.hdfgroup.org/licenses. 
* diff --git a/src/H5PLmodule.h b/src/H5PLmodule.h index 66a24fdcbb6..fff1e95e682 100644 --- a/src/H5PLmodule.h +++ b/src/H5PLmodule.h @@ -2,7 +2,7 @@ * Copyright by The HDF Group. * * All rights reserved. * * * - * This file is part of HDF5. The full HDF5 copyright notice, including * + * This file is part of HDF5. The full HDF5 copyright notice, including * * terms governing use, modification, and redistribution, is contained in * * the COPYING file, which can be found at the root of the source code * * distribution tree, or in https://www.hdfgroup.org/licenses. * diff --git a/src/H5PLpublic.h b/src/H5PLpublic.h index 55ff59485f1..a886375ee9e 100644 --- a/src/H5PLpublic.h +++ b/src/H5PLpublic.h @@ -2,7 +2,7 @@ * Copyright by The HDF Group. * * All rights reserved. * * * - * This file is part of HDF5. The full HDF5 copyright notice, including * + * This file is part of HDF5. The full HDF5 copyright notice, including * * terms governing use, modification, and redistribution, is contained in * * the COPYING file, which can be found at the root of the source code * * distribution tree, or in https://www.hdfgroup.org/licenses. * diff --git a/src/H5Pmodule.h b/src/H5Pmodule.h index c7ab3bd65f6..04eb1246bb9 100644 --- a/src/H5Pmodule.h +++ b/src/H5Pmodule.h @@ -789,9 +789,9 @@ *
  • \ref subsec_file_property_lists
  • *
  • \ref subsubsec_file_examples_props
  • *
  • \ref subsubsec_file_examples_access
  • - *
  • "File creation property list functions (H5P)"
  • - *
  • "File access property list functions (H5P)"
  • - *
  • "File driver functions (H5P)"
  • + *
  • \ref dcpl_table_tag "Dataset creation property list functions (H5P)"
  • + *
  • \ref fapl_table_tag "File access property list functions (H5P)"
  • + *
  • \ref fd_pl_table_tag "File driver property list functions (H5P)"
  • * \li In the \ref sec_attribute chapter, see "Attribute creation property list functions (H5P)". * \li In the \ref sec_group chapter, see "Group creation property list functions (H5P)". * \li Property lists are discussed throughout \ref sec_dataset. @@ -799,16 +799,16 @@ * All property list functions are described in the \ref H5P section of the * \ref RM. The function index at the top of the page provides a categorized listing * grouped by property list class. Those classes are listed below: - * \li File creation properties - * \li File access properties - * \li Group creation properties - * \li Dataset creation properties - * \li Dataset access properties - * \li Dataset transfer properties - * \li Link creation properties - * \li Link access properties - * \li Object creation properties - * \li Object copy properties + * \li \ref FCPL + * \li \ref FAPL + * \li \ref GCPL + * \li \ref DCPL + * \li \ref DAPL + * \li \ref DXPL + * \li \ref LCPL + * \li \ref LAPL + * \li \ref OCPL + * \li \ref OCPYPL * * Additional categories not related to the class structure are as follows: * \li General property list operations @@ -894,135 +894,186 @@ * or writing data. Property lists can be modified by adding or changing * properties. Property lists are deleted by closing the associated handles. * - *
    Other external link functions
    - * - * - * - * - * - * - * - * - * - *
    CreateRead
    - * \snippet{lineno} H5P_examples.c create - * - * \snippet{lineno} H5P_examples.c read - *
    UpdateDelete
    - * \snippet{lineno} H5P_examples.c update - * - * \snippet{lineno} H5P_examples.c delete - *
    + * \ref PLCR + * \snippet{doc} tables/propertyLists.dox plcr_table + * + * \ref PLCR + * \snippet{doc} tables/propertyLists.dox plcra_table + * + * \ref PLCR / \ref OCPL / \ref GCPL + * \snippet{doc} tables/propertyLists.dox fcpl_table + * + * \ref PLCR + * \snippet{doc} tables/propertyLists.dox fapl_table + * \snippet{doc} tables/propertyLists.dox fd_pl_table + * + * \ref PLCR + * \snippet{doc} tables/propertyLists.dox lapl_table + * + * \ref PLCR / \ref OCPL + * \snippet{doc} tables/propertyLists.dox dcpl_table + * + * \ref PLCR / \ref LAPL + * \snippet{doc} tables/propertyLists.dox dapl_table + * + * \ref PLCR / \ref OCPL + * \snippet{doc} tables/propertyLists.dox gcpl_table + * + * \ref PLCR / \ref LAPL + * \snippet{doc} tables/propertyLists.dox gapl_table + * + * \ref PLCR + * \snippet{doc} tables/propertyLists.dox ocpl_table + * + * \ref PLCR + * \snippet{doc} tables/propertyLists.dox ocpypl_table + * + * \ref PLCR + * \snippet{doc} tables/propertyLists.dox strcpl_table + * + * \ref PLCR / \ref STRCPL + * \snippet{doc} tables/propertyLists.dox lcpl_table + * + * \ref PLCR / \ref STRCPL + * \snippet{doc} tables/propertyLists.dox acpl_table + * * * \defgroup STRCPL String Creation Properties + * \ingroup H5P * Currently, there are only two creation properties that you can use to control * the creation of HDF5 attributes and links. The first creation property, the * choice of a character encoding, applies to both attributes and links. * The second creation property applies to links only, and advises the library * to automatically create missing intermediate groups when creating new objects. - * \ingroup H5P + * + * \snippet{doc} tables/propertyLists.dox strcpl_table * * \defgroup LCPL Link Creation Properties - * The first creation property, the choice of a character encoding, applies to - * both attributes and links. 
- * The second creation property applies to links only, and advises the library - * to automatically create missing intermediate groups when creating new objects. * \ingroup STRCPL + * This creation property applies to links only, and advises the library + * to automatically create missing intermediate groups when creating new objects. + * + * \snippet{doc} tables/propertyLists.dox lcpl_table * * @see STRCPL * * \defgroup ACPL Attribute Creation Properties - * The creation property, the choice of a character encoding, applies to attributes. * \ingroup STRCPL + * The creation property, the choice of a character encoding, applies to attributes. + * + * \snippet{doc} tables/propertyLists.dox acpl_table * * @see STRCPL * * \defgroup LAPL Link Access Properties * \ingroup H5P * + * \snippet{doc} tables/propertyLists.dox lapl_table + * * \defgroup DAPL Dataset Access Properties + * \ingroup LAPL * Use dataset access properties to modify the default behavior of the HDF5 * library when accessing datasets. The properties include adjusting the size * of the chunk cache, providing prefixes for external content and virtual * dataset file paths, and controlling flush behavior, etc. These properties * are \Emph{not} persisted with datasets, and can be adjusted at runtime before * a dataset is created or opened. - * \ingroup LAPL + * + * \snippet{doc} tables/propertyLists.dox dapl_table * * \defgroup DCPL Dataset Creation Properties + * \ingroup OCPL * Use dataset creation properties to control aspects of dataset creation such * as fill time, storage layout, compression methods, etc. * Unlike dataset access and transfer properties, creation properties \Emph{are} * stored with the dataset, and cannot be changed once a dataset has been * created. 
- * \ingroup OCPL + * + * \snippet{doc} tables/propertyLists.dox dcpl_table * * \defgroup DXPL Dataset Transfer Properties + * \ingroup H5P * Use dataset transfer properties to customize certain aspects of reading * and writing datasets such as transformations, MPI-IO I/O mode, error * detection, etc. These properties are \Emph{not} persisted with datasets, * and can be adjusted at runtime before a dataset is read or written. - * \ingroup H5P + * + * \snippet{doc} tables/propertyLists.dox dxpl_table * * \defgroup FAPL File Access Properties + * \ingroup H5P * Use file access properties to modify the default behavior of the HDF5 * library when accessing files. The properties include selecting a virtual * file driver (VFD), configuring the metadata cache (MDC), control * file locking, etc. These properties are \Emph{not} persisted with files, and * can be adjusted at runtime before a file is created or opened. - * \ingroup H5P + * + * \snippet{doc} tables/propertyLists.dox fapl_table + * \snippet{doc} tables/propertyLists.dox fd_pl_table * * \defgroup FCPL File Creation Properties + * \ingroup GCPL * Use file creation properties to control aspects of file creation such * as setting a file space management strategy or creating a user block. * Unlike file access properties, creation properties \Emph{are} * stored with the file, and cannot be changed once a file has been * created. - * \ingroup GCPL * - * \defgroup GAPL General Access Properties - * The functions in this section can be applied to different kinds of property - * lists. + * \snippet{doc} tables/propertyLists.dox fcpl_table + * + * \defgroup GAPL Group Access Properties * \ingroup LAPL + * The functions in this section can be applied to group property lists. 
+ * + * \snippet{doc} tables/propertyLists.dox gapl_table * * \defgroup GCPL Group Creation Properties + * \ingroup OCPL * Use group creation properties to control aspects of group creation such * as storage layout, compression, and link creation order tracking. * Unlike file access properties, creation properties \Emph{are} * stored with the group, and cannot be changed once a group has been * created. - * \ingroup OCPL + * + * \snippet{doc} tables/propertyLists.dox gcpl_table * * \defgroup PLCR Property List Class Root - * Use the functions in this module to manage HDF5 property lists. * \ingroup H5P + * Use the functions in this module to manage HDF5 property lists. + * + * \snippet{doc} tables/propertyLists.dox plcr_table * * \defgroup PLCRA Property List Class Root (Advanced) + * \ingroup H5P * You can create and customize user-defined property list classes using the * functions described below. Arbitrary user-defined properties can also * be inserted into existing property lists as so-called temporary properties. - * \ingroup H5P * + * \snippet{doc} tables/propertyLists.dox plcra_table * * \defgroup OCPL Object Creation Properties * \ingroup H5P * + * \snippet{doc} tables/propertyLists.dox ocpl_table + * * \defgroup OCPYPL Object Copy Properties * \ingroup H5P * + * \snippet{doc} tables/propertyLists.dox ocpypl_table + * * \defgroup FMPL File Mount Properties - * Empty property class. * \ingroup H5P + * Empty property class. * * * \defgroup TCPL Datatype Creation Properties - * TCPL isn't supported yet. * \ingroup OCPL + * TCPL isn't supported yet. * * * \defgroup TAPL Datatype Access Properties - * TAPL isn't supported yet. * \ingroup LAPL + * TAPL isn't supported yet. 
* * * diff --git a/src/H5Ppublic.h b/src/H5Ppublic.h index 8990922907d..366bf812a9c 100644 --- a/src/H5Ppublic.h +++ b/src/H5Ppublic.h @@ -5675,6 +5675,9 @@ H5_DLL herr_t H5Pget_dset_no_attrs_hint(hid_t dcpl_id, hbool_t *minimize); * are null pointers then the corresponding information is not * returned. * + * \note On Windows, off_t is typically a 32-bit signed long value, which + * limits the valid offset that can be returned to 2 GiB. + * * \version 1.6.4 \p idx parameter type changed to unsigned. * \since 1.0.0 * @@ -8054,9 +8057,8 @@ H5_DLL herr_t H5Pget_mpio_no_collective_cause(hid_t plist_id, uint32_t *local_no uint32_t *global_no_collective_cause); #endif /* H5_HAVE_PARALLEL */ -/* Link creation property list (LCPL) routines */ /** - * \ingroup STRCPL + * \ingroup LCPL * * \brief Determines whether property is set to enable creating missing * intermediate groups @@ -8087,7 +8089,7 @@ H5_DLL herr_t H5Pget_mpio_no_collective_cause(hid_t plist_id, uint32_t *local_no */ H5_DLL herr_t H5Pget_create_intermediate_group(hid_t plist_id, unsigned *crt_intmd /*out*/); /** - * \ingroup STRCPL + * \ingroup LCPL * * \brief Specifies in property list whether to create missing * intermediate groups @@ -8469,9 +8471,8 @@ H5_DLL herr_t H5Pget_map_iterate_hints(hid_t mapl_id, size_t *key_prefetch_size size_t *key_alloc_size /*out*/); #endif /* H5_HAVE_MAP_API */ -/* String creation property list (STRCPL) routines */ /** - * \ingroup STRCPL + * \ingroup ACPL * * \brief Retrieves the character encoding used to create a link or * attribute name @@ -8500,7 +8501,7 @@ H5_DLL herr_t H5Pget_map_iterate_hints(hid_t mapl_id, size_t *key_prefetch_size */ H5_DLL herr_t H5Pget_char_encoding(hid_t plist_id, H5T_cset_t *encoding /*out*/); /** - * \ingroup STRCPL + * \ingroup ACPL * * \brief Sets the character encoding used to encode link and attribute * names diff --git a/src/H5Tmodule.h b/src/H5Tmodule.h index 83f74672f20..590a60c1684 100644 --- a/src/H5Tmodule.h +++ b/src/H5Tmodule.h @@ 
-3872,26 +3872,6 @@ filled according to the value of this property. The padding can be: * to HDF5 files and linked to groups as HDF5 datatype objects or so-called * \Emph{committed datatypes}. * - * - * - * - * - * - * - * - * - * - * - *
    CreateRead
    - * \snippet{lineno} H5T_examples.c create - * - * \snippet{lineno} H5T_examples.c read - *
    UpdateDelete
    - * \snippet{lineno} H5T_examples.c update - * - * \snippet{lineno} H5T_examples.c delete - *
    - * * \defgroup ARRAY Array Datatypes * \ingroup H5T * \defgroup ATOM Atomic Datatypes @@ -3912,33 +3892,36 @@ filled according to the value of this property. The padding can be: * * \defgroup PDT Predefined Datatypes * \ingroup H5T - * \details What is a predefined HDF5 datatype? - * \todo Fill in the blanks! * * \defgroup PDTCPU By CPU * \ingroup PDT * \details CPU-specific datatypes * \defgroup PDTALPHA DEC Alpha * \ingroup PDTCPU + * \snippet{doc} tables/predefinedDatatypes.dox predefined_dec_datatypes_table * \defgroup PDTX86 AMD & INTEL * \ingroup PDTCPU + * \snippet{doc} tables/predefinedDatatypes.dox predefined_intel_datatypes_table * \defgroup PDTMIPS SGI MIPS * \ingroup PDTCPU + * \snippet{doc} tables/predefinedDatatypes.dox predefined_mips_datatypes_table * * \defgroup PDTIEEE IEEE * \ingroup PDT * \details The IEEE floating point types in big- and little-endian byte orders. + * \snippet{doc} tables/predefinedDatatypes.dox predefined_ieee_datatypes_table * * \defgroup PDTSTD Standard Datatypes * \ingroup PDT * \details These are "standard" types. For instance, signed (2's complement) * and unsigned integers of various sizes in big- and little-endian * byte orders. + * \snippet{doc} tables/predefinedDatatypes.dox predefined_std_datatypes_table * * \defgroup PDTUNIX UNIX-specific Datatypes * \ingroup PDT * \details Types which are particular to Unix. - * \todo Fill in the blanks! + * \snippet{doc} tables/predefinedDatatypes.dox predefined_unix_datatypes_table * * \defgroup PDTNAT Native Datatypes * \ingroup PDT @@ -3952,13 +3935,16 @@ filled according to the value of this property. The padding can be: * \li The datatype \c LLONG corresponds C's \Code{long long} and * \c LDOUBLE is \Code{long double}. These types might be the same * as \c LONG and \c DOUBLE, respectively. 
+ * \snippet{doc} tables/predefinedDatatypes.dox predefined_native_datatypes_table + * * \defgroup PDTC9x C9x Integer Datatypes * \ingroup PDTNAT * \details C9x integer types - * \todo Fill in the blanks! + * \snippet{doc} tables/predefinedDatatypes.dox predefined_c9x_datatypes_table * * \defgroup PDTS Strings * \ingroup PDT + * \snippet{doc} tables/predefinedDatatypes.dox predefined_string_datatypes_table * */ diff --git a/src/H5Tpublic.h b/src/H5Tpublic.h index 13e92f6e799..29008dbe66a 100644 --- a/src/H5Tpublic.h +++ b/src/H5Tpublic.h @@ -1083,7 +1083,7 @@ H5_DLLVAR hid_t H5T_NATIVE_UINT_FAST64_g; * When creating a fixed-length string datatype, \p size will * be the length of the string in bytes. The length of the * string in characters will depend on i the encoding used; see - * H5Pset_char_encoding(). + * #H5Pset_char_encoding. * * ENUMs created with this function have a signed native integer * base datatype. Use H5Tenum_create() if a different integer base diff --git a/src/H5VLmodule.h b/src/H5VLmodule.h index 1ad0c8da7a6..9fc14abd415 100644 --- a/src/H5VLmodule.h +++ b/src/H5VLmodule.h @@ -27,18 +27,18 @@ #define H5_MY_PKG_ERR H5E_VOL #define H5_MY_PKG_INIT YES -/** \page H5VL_UG The HDF5 VOL plugin +/** \page H5VL_UG The HDF5 Virtual Object Layer (VOL) * - * \section sec_vol The HDF5 VOL plugin + * \section sec_vol The HDF5 Virtual Object Layer (VOL) * - * \section subsec_vol_intro Introduction + * \subsection subsec_vol_intro Introduction * The virtual object layer is an abstraction layer in the HDF5 library that intercepts all API calls - * that could potentially access objects in an HDF5 container and forwards those calls to a VOL connector, - * which implements the storage. The user or application gets the benefit of using the familiar and - * widely-used HDF5 data model and API, but can map the physical storage of the HDF5 file and objects - * to storage that better meets the application's data needs. 
+ * that could potentially access objects in an HDF5 container and forwards those calls to a VOL + * connector, which implements the storage. The user or application gets the benefit of using the + * familiar and widely-used HDF5 data model and API, but can map the physical storage of the HDF5 file + * and objects to storage that better meets the application’s data needs. * - * \section subsec_vol_abstract_layer The VOL Abstraction Layer + * \subsection subsec_vol_abstract_layer The VOL Abstraction Layer * The VOL lies just under the public API. When a storage-oriented public APIcall is made, the library * performs a few sanity checks on the input parameters and then immediately invokes a VOL callback, * which resolves to an implementation in the VOL connector that was selected when opening or creating @@ -74,11 +74,11 @@ * For more information about which calls go through the VOL and the mechanism by which this is implemented, * see the connector author and library internals documentation. * - * \section subsec_vol_connect VOL Connectors + * \subsection subsec_vol_connect VOL Connectors * A VOL connector can be implemented in several ways: * \li as a shared or static library linked to an application * \li as a dynamically loaded plugin, implemented as a shared library - * \li and even as an internal connector, built into the HDF5 libraryitself + * \li and even as an internal connector, built into the HDF5 library itself * * This section mostly focuses on external connectors, both libraries and plugins, as those are expected * to be much more common than internal implementations. @@ -122,7 +122,9 @@ * \todo Describe the VOL plugin life cycle. * * \defgroup ASYNC Asynchronous Functions - * \brief Asynchronous Functions + * \brief List of the asynchronous functions. + * \note The argument \p es_id associated with the asynchronous APIs is the \Emph{event set id}. See H5ES for + *context. 
* * \defgroup H5VLDEF Definitions * \ingroup H5VL From c420176d4791bec62336ea4ba25bcec3475a64bb Mon Sep 17 00:00:00 2001 From: Larry Knox Date: Fri, 10 Mar 2023 09:52:06 -0600 Subject: [PATCH 025/108] Fix new codespell issues (#2521) (#2530) * Fix new codespell issues (#2521) * Fix new codespell issues * Have codespell ignore ./config/sanitizer/sanitizers.cmake * Fix typo in genparser. --------- Co-authored-by: Mark Kittisopikul --- .github/workflows/codespell.yml | 4 ++-- bin/genparser | 2 +- config/toolchain/aarch64.cmake | 2 +- configure.ac | 2 +- doxygen/dox/LearnBasics2.dox | 2 +- doxygen/examples/FileFormat.html | 2 +- doxygen/examples/IOFlow.html | 2 +- fortran/src/H5Pf.c | 2 +- fortran/test/tH5O_F03.F90 | 2 +- src/H5FScache.c | 2 +- src/H5PLpath.c | 2 +- src/H5S.c | 2 +- testpar/t_bigio.c | 2 +- 13 files changed, 14 insertions(+), 14 deletions(-) diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml index 1bd7ebc7ec8..3ceda828683 100644 --- a/.github/workflows/codespell.yml +++ b/.github/workflows/codespell.yml @@ -11,5 +11,5 @@ jobs: - uses: actions/checkout@v3 - uses: codespell-project/actions-codespell@master with: - skip: ./bin/trace,./hl/tools/h5watch/h5watch.c,./tools/test/h5jam/tellub.c,./config/sanitizer/LICENSE,./tools/test/h5repack/testfiles/*.dat - ignore_words_list: isnt,inout,nd,parms,parm,ba,offsetP,ser,ois,had,fiter,fo,clude,refere,minnum,offsetp,creat,ans:,eiter,lastr,ans,isn't,ifset,sur,trun,dne,tthe,hda,filname,te,htmp,minnum,ro,oce,ot,msdos + skip: ./bin/trace,./hl/tools/h5watch/h5watch.c,./tools/test/h5jam/tellub.c,./config/sanitizer/LICENSE,./config/sanitizer/sanitizers.cmake,./tools/test/h5repack/testfiles/*.dat + ignore_words_list: isnt,inout,nd,parms,parm,ba,offsetP,ser,ois,had,fiter,fo,clude,refere,minnum,offsetp,creat,ans:,eiter,lastr,ans,isn't,ifset,sur,trun,dne,tthe,hda,filname,te,htmp,ake,gord,numer,ro,oce,ot,msdos diff --git a/bin/genparser b/bin/genparser index 9ea315263c2..bb44a91e650 100755 --- 
a/bin/genparser +++ b/bin/genparser @@ -21,7 +21,7 @@ # # There is NO dependency in either the autotools or CMake to regenerate # the parser code. If you modify H5LT analyze.l or H5LTparse.y, you -# will need to run this scrpit manually on a system with a suitable +# will need to run this script manually on a system with a suitable # lexer and parser generator. # # IMPORTANT OS X NOTE diff --git a/config/toolchain/aarch64.cmake b/config/toolchain/aarch64.cmake index aa84a742654..03f4e5e9f58 100644 --- a/config/toolchain/aarch64.cmake +++ b/config/toolchain/aarch64.cmake @@ -2,7 +2,7 @@ set(TOOLCHAIN_PREFIX aarch64-linux-gnu) set(ANDROID_NDK /opt/android-ndk-linux) set (CMAKE_SYSTEM_NAME Android) set (CMAKE_ANDROID_ARCH_ABI x86_64) -#set (CMAKE_ANDROID_STANDALONE_TOOLCHAIN ${ANDROID_NDK}/build/cmake/andriod.toolchain.cmake) +#set (CMAKE_ANDROID_STANDALONE_TOOLCHAIN ${ANDROID_NDK}/build/cmake/android.toolchain.cmake) set (CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}-gcc) set (CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}-g++) #set (CMAKE_RC_COMPILER ${TOOLCHAIN_PREFIX}-windres) diff --git a/configure.ac b/configure.ac index 68d573d0890..6379b41f411 100644 --- a/configure.ac +++ b/configure.ac @@ -2566,7 +2566,7 @@ AC_ARG_ENABLE([optimization], details. Alternatively, optimization options can be specified directly by specifying them as a - string value. These custom optimzation flags will + string value. These custom optimization flags will completely replace all other optimization flags. [default depends on build mode: debug=debug, production=high, clean=none] diff --git a/doxygen/dox/LearnBasics2.dox b/doxygen/dox/LearnBasics2.dox index ffcb9718409..6f94c7f7eb5 100644 --- a/doxygen/dox/LearnBasics2.dox +++ b/doxygen/dox/LearnBasics2.dox @@ -906,7 +906,7 @@ can be used to obtain information about the selection. The dataset with the region references was read by #H5Dread with the #H5T_STD_REF_DSETREG datatype specified. 
The read reference can be used to obtain the dataset identifier by calling #H5Rdereference or by obtaining -obtain spacial information (dataspace and selection) with the call to #H5Rget_region. +obtain spatial information (dataspace and selection) with the call to #H5Rget_region. The reference to the dataset region has information for both the dataset itself and its selection. In both functions: \li The first parameter is an identifier of the dataset with the region references. diff --git a/doxygen/examples/FileFormat.html b/doxygen/examples/FileFormat.html index fc35357f8ae..133bbc8c260 100644 --- a/doxygen/examples/FileFormat.html +++ b/doxygen/examples/FileFormat.html @@ -30,7 +30,7 @@
  • Document's Audience:

      -
    • Current H5 library designers and knowledgable external developers.
    • +
    • Current H5 library designers and knowledgeable external developers.
  • Background Reading:

    diff --git a/doxygen/examples/IOFlow.html b/doxygen/examples/IOFlow.html index 6b2c27e0827..e890edbb766 100644 --- a/doxygen/examples/IOFlow.html +++ b/doxygen/examples/IOFlow.html @@ -24,7 +24,7 @@
  • Document's Audience:

      -
    • Current H5 library designers and knowledgable external developers.
    • +
    • Current H5 library designers and knowledgeable external developers.
  • Background Reading:

    diff --git a/fortran/src/H5Pf.c b/fortran/src/H5Pf.c index 876457f54b5..77d5cd0f438 100644 --- a/fortran/src/H5Pf.c +++ b/fortran/src/H5Pf.c @@ -1756,7 +1756,7 @@ h5pset_filter_c(hid_t_f *prp_id, int_f *filter, int_f *flags, size_t_f *cd_nelmt * INPUTS * prp_id - property list identifier * OUTPUTS - * nfilters - number of filters defined in the filter pipline + * nfilters - number of filters defined in the filter pipeline * RETURNS * 0 on success, -1 on failure * AUTHOR diff --git a/fortran/test/tH5O_F03.F90 b/fortran/test/tH5O_F03.F90 index 2f9bcad3541..6cae6b3a0f2 100644 --- a/fortran/test/tH5O_F03.F90 +++ b/fortran/test/tH5O_F03.F90 @@ -442,7 +442,7 @@ SUBROUTINE test_obj_visit(total_error) ! Construct "interesting" file to visit CALL build_visit_file(fid) - ! Inialize udata for testing purposes + ! Initialize udata for testing purposes udata%info(1)%path(1:1) ="." udata%info(1)%type_obj = H5O_TYPE_GROUP_F udata%info(2)%path(1:12) = & diff --git a/src/H5FScache.c b/src/H5FScache.c index ecbc012e6bb..c3816878587 100644 --- a/src/H5FScache.c +++ b/src/H5FScache.c @@ -436,7 +436,7 @@ H5FS__cache_hdr_pre_serialize(H5F_t *f, void *_thing, haddr_t addr, size_t H5_AT * * H5F_addr_defined(fspace->addr) * - * will both be TRUE. If this contition does not hold, then + * will both be TRUE. If this condition does not hold, then * either the free space info is not persistent * (!H5F_addr_defined(fspace->addr)???) 
or the section info * contains no free space data that must be written to file diff --git a/src/H5PLpath.c b/src/H5PLpath.c index 1f78064fe6f..a6734cb134c 100644 --- a/src/H5PLpath.c +++ b/src/H5PLpath.c @@ -817,7 +817,7 @@ H5PL__find_plugin_in_path_table(const H5PL_search_params_t *search_params, hbool /*------------------------------------------------------------------------- * Function: H5PL__find_plugin_in_path * - * Purpose: Given a path, this function opens the directory and envokes + * Purpose: Given a path, this function opens the directory and invokes * another function to go through all files to find the right * plugin library. Two function definitions are for Unix and * Windows. diff --git a/src/H5S.c b/src/H5S.c index 6575b6fe2d6..47b3dfdb062 100644 --- a/src/H5S.c +++ b/src/H5S.c @@ -1650,7 +1650,7 @@ H5S_decode(const unsigned char **p) { H5F_t *f = NULL; /* Fake file structure*/ H5S_t *ds; /* Decoded dataspace */ - H5S_extent_t *extent; /* Entent of decoded dataspace */ + H5S_extent_t *extent; /* Extent of decoded dataspace */ const unsigned char *pp = (*p); /* Local pointer for decoding */ size_t extent_size; /* size of the extent message*/ uint8_t sizeof_size; /* 'Size of sizes' for file */ diff --git a/testpar/t_bigio.c b/testpar/t_bigio.c index 2bd40061d24..7e9e71509b3 100644 --- a/testpar/t_bigio.c +++ b/testpar/t_bigio.c @@ -1866,7 +1866,7 @@ main(int argc, char **argv) /* Having set the bigio handling to a size that is manageable, * we'll set our 'bigcount' variable to be 2X that limit so * that we try to ensure that our bigio handling is actually - * envoked and tested. + * invoked and tested. */ if (newsize != oldsize) bigcount = newsize * 2; From 22751322e346c1f903755cf8c6369bc417f1d42f Mon Sep 17 00:00:00 2001 From: Larry Knox Date: Mon, 13 Mar 2023 07:35:49 -0500 Subject: [PATCH 026/108] Update Copyright headers. 
(#2534) --- .autom4te.cfg | 1 - .h5chkright.ini | 1 - Makefile.am | 1 - Makefile.dist | 1 - acsite.m4 | 1 - bin/Makefile.am | 1 - bin/bbrelease | 1 - bin/buildhdf5 | 1 - bin/checkapi | 1 - bin/checkposix | 1 - bin/chkconfigure | 2 +- bin/chkcopyright | 79 +++++++------------ bin/debug-ohdr | 1 - bin/dependencies | 1 - bin/deploy | 1 - bin/distdep | 1 - bin/errors | 1 - bin/gcov_script | 1 - bin/h5cc.in | 1 - bin/h5redeploy.in | 1 - bin/h5vers | 1 - bin/iostats | 1 - bin/make_err | 2 - bin/make_overflow | 2 - bin/make_vers | 2 - bin/mkdirs | 1 - bin/newer | 1 - bin/pkgscrpts/h5rmflags | 1 - bin/pkgscrpts/makeHDF5BinaryTarfiles.pl | 1 - bin/pkgscrpts/makeInternalREADME.pl | 10 +-- bin/pkgscrpts/makeOuterREADME.pl | 10 +-- bin/release | 1 - bin/runbkgprog | 1 - bin/switch_maint_mode | 1 - bin/trace | 1 - bin/warnhist | 10 +-- bin/yodconfigure | 1 - c++/Makefile.am | 1 - c++/examples/Makefile.am | 1 - c++/examples/chunks.cpp | 1 - c++/examples/compound.cpp | 1 - c++/examples/create.cpp | 1 - c++/examples/extend_ds.cpp | 1 - c++/examples/h5group.cpp | 1 - c++/examples/h5tutr_cmprss.cpp | 1 - c++/examples/h5tutr_crtatt.cpp | 1 - c++/examples/h5tutr_crtdat.cpp | 1 - c++/examples/h5tutr_crtgrp.cpp | 1 - c++/examples/h5tutr_crtgrpar.cpp | 1 - c++/examples/h5tutr_crtgrpd.cpp | 1 - c++/examples/h5tutr_extend.cpp | 1 - c++/examples/h5tutr_rdwt.cpp | 1 - c++/examples/h5tutr_subset.cpp | 1 - c++/examples/readdata.cpp | 1 - c++/examples/testh5c++.sh.in | 1 - c++/examples/writedata.cpp | 1 - c++/src/H5AbstractDs.cpp | 1 - c++/src/H5AbstractDs.h | 1 - c++/src/H5Alltypes.h | 1 - c++/src/H5ArrayType.cpp | 1 - c++/src/H5ArrayType.h | 1 - c++/src/H5AtomType.cpp | 1 - c++/src/H5AtomType.h | 1 - c++/src/H5Attribute.cpp | 1 - c++/src/H5Attribute.h | 1 - c++/src/H5Classes.h | 1 - c++/src/H5CommonFG.cpp | 1 - c++/src/H5CommonFG.h | 1 - c++/src/H5CompType.cpp | 1 - c++/src/H5CompType.h | 1 - c++/src/H5Cpp.h | 1 - c++/src/H5CppDoc.h | 1 - c++/src/H5DaccProp.cpp | 1 - c++/src/H5DaccProp.h | 1 
- c++/src/H5DataSet.cpp | 1 - c++/src/H5DataSet.h | 1 - c++/src/H5DataSpace.cpp | 1 - c++/src/H5DataSpace.h | 1 - c++/src/H5DataType.cpp | 1 - c++/src/H5DataType.h | 1 - c++/src/H5DcreatProp.cpp | 1 - c++/src/H5DcreatProp.h | 1 - c++/src/H5DxferProp.cpp | 1 - c++/src/H5DxferProp.h | 1 - c++/src/H5EnumType.cpp | 1 - c++/src/H5EnumType.h | 1 - c++/src/H5Exception.cpp | 1 - c++/src/H5Exception.h | 1 - c++/src/H5FaccProp.cpp | 1 - c++/src/H5FaccProp.h | 1 - c++/src/H5FcreatProp.cpp | 1 - c++/src/H5FcreatProp.h | 1 - c++/src/H5File.cpp | 1 - c++/src/H5File.h | 1 - c++/src/H5FloatType.cpp | 1 - c++/src/H5FloatType.h | 1 - c++/src/H5Group.cpp | 1 - c++/src/H5Group.h | 1 - c++/src/H5IdComponent.cpp | 1 - c++/src/H5IdComponent.h | 1 - c++/src/H5Include.h | 1 - c++/src/H5IntType.cpp | 1 - c++/src/H5IntType.h | 1 - c++/src/H5LaccProp.cpp | 1 - c++/src/H5LaccProp.h | 1 - c++/src/H5LcreatProp.cpp | 1 - c++/src/H5LcreatProp.h | 1 - c++/src/H5Library.cpp | 1 - c++/src/H5Library.h | 1 - c++/src/H5Location.cpp | 1 - c++/src/H5Location.h | 1 - c++/src/H5Object.cpp | 1 - c++/src/H5Object.h | 1 - c++/src/H5OcreatProp.cpp | 1 - c++/src/H5OcreatProp.h | 1 - c++/src/H5PredType.cpp | 1 - c++/src/H5PredType.h | 1 - c++/src/H5PropList.cpp | 1 - c++/src/H5PropList.h | 1 - c++/src/H5StrType.cpp | 1 - c++/src/H5StrType.h | 1 - c++/src/H5VarLenType.cpp | 1 - c++/src/H5VarLenType.h | 1 - c++/src/Makefile.am | 1 - c++/src/footer.html | 2 - c++/src/h5c++.in | 1 - c++/src/header.html | 1 - c++/test/H5srcdir_str.h.in | 1 - c++/test/Makefile.am | 1 - c++/test/dsets.cpp | 1 - c++/test/h5cpputil.cpp | 1 - c++/test/h5cpputil.h | 1 - c++/test/tarray.cpp | 1 - c++/test/tattr.cpp | 1 - c++/test/tcompound.cpp | 1 - c++/test/tdspl.cpp | 1 - c++/test/testhdf5.cpp | 1 - c++/test/tfile.cpp | 1 - c++/test/tfilter.cpp | 1 - c++/test/th5s.cpp | 1 - c++/test/titerate.cpp | 1 - c++/test/tobject.cpp | 1 - c++/test/trefer.cpp | 1 - c++/test/ttypes.cpp | 1 - c++/test/tvlstr.cpp | 1 - config/BlankForm | 1 - 
config/Makefile.am.blank | 1 - config/apple | 1 - config/clang-cxxflags | 1 - config/clang-flags | 1 - config/cmake/libh5cc.in | 1 - config/commence.am | 1 - config/conclude.am | 1 - config/conclude_fc.am | 1 - config/cygwin | 1 - config/examples.am | 1 - config/freebsd | 1 - config/gnu-cxxflags | 1 - config/gnu-fflags | 1 - config/gnu-flags | 1 - config/ibm-aix | 1 - config/ibm-flags | 1 - config/intel-fflags | 1 - config/intel-flags | 1 - config/linux-gnu | 1 - config/linux-gnuaout | 1 - config/linux-gnulibc1 | 1 - config/linux-gnulibc2 | 1 - config/lt_vers.am | 1 - config/netbsd | 1 - config/pgi-fflags | 1 - config/pgi-flags | 1 - config/site-specific/BlankForm | 1 - config/solaris | 1 - configure.ac | 1 - examples/Makefile.am | 1 - examples/h5_attribute.c | 1 - examples/h5_chunk_read.c | 1 - examples/h5_cmprss.c | 1 - examples/h5_compound.c | 1 - examples/h5_crtatt.c | 1 - examples/h5_crtdat.c | 1 - examples/h5_crtgrp.c | 1 - examples/h5_crtgrpar.c | 1 - examples/h5_crtgrpd.c | 1 - examples/h5_debug_trace.c | 11 +-- examples/h5_drivers.c | 1 - examples/h5_dtransform.c | 1 - examples/h5_elink_unix2win.c | 1 - examples/h5_extend.c | 1 - examples/h5_extend_write.c | 1 - examples/h5_extlink.c | 1 - examples/h5_group.c | 1 - examples/h5_interm_group.c | 1 - examples/h5_mount.c | 1 - examples/h5_rdwt.c | 1 - examples/h5_read.c | 1 - examples/h5_ref2reg_deprec.c | 1 - examples/h5_reference_deprec.c | 1 - examples/h5_select.c | 1 - examples/h5_shared_mesg.c | 1 - examples/h5_subset.c | 1 - examples/h5_vds.c | 1 - examples/h5_write.c | 1 - examples/ph5example.c | 1 - examples/testh5cc.sh.in | 1 - fortran/Makefile.am | 1 - fortran/examples/Makefile.am | 1 - fortran/examples/compound.f90 | 1 - .../examples/compound_complex_fortran2003.f90 | 1 - fortran/examples/compound_fortran2003.f90 | 1 - fortran/examples/h5_cmprss.f90 | 1 - fortran/examples/h5_crtatt.f90 | 1 - fortran/examples/h5_crtdat.f90 | 1 - fortran/examples/h5_crtgrp.f90 | 1 - fortran/examples/h5_crtgrpar.f90 | 
1 - fortran/examples/h5_crtgrpd.f90 | 1 - fortran/examples/h5_extend.f90 | 1 - fortran/examples/h5_rdwt.f90 | 1 - fortran/examples/h5_subset.f90 | 1 - fortran/examples/hyperslab.f90 | 1 - fortran/examples/mountexample.f90 | 1 - fortran/examples/nested_derived_type.f90 | 1 - fortran/examples/ph5example.f90 | 1 - fortran/examples/refobjexample.f90 | 1 - fortran/examples/refregexample.f90 | 1 - fortran/examples/rwdset_fortran2003.f90 | 1 - fortran/examples/selectele.f90 | 1 - fortran/examples/testh5fc.sh.in | 1 - fortran/src/H5Af.c | 1 - fortran/src/H5Aff.F90 | 1 - fortran/src/H5Df.c | 1 - fortran/src/H5Dff.F90 | 1 - fortran/src/H5Ef.c | 1 - fortran/src/H5Eff.F90 | 1 - fortran/src/H5Ff.c | 1 - fortran/src/H5Fff.F90 | 1 - fortran/src/H5Gf.c | 1 - fortran/src/H5Gff.F90 | 1 - fortran/src/H5If.c | 1 - fortran/src/H5Iff.F90 | 1 - fortran/src/H5Lf.c | 1 - fortran/src/H5Lff.F90 | 1 - fortran/src/H5Of.c | 1 - fortran/src/H5Off.F90 | 1 - fortran/src/H5Pf.c | 1 - fortran/src/H5Pff.F90 | 1 - fortran/src/H5Rf.c | 1 - fortran/src/H5Rff.F90 | 1 - fortran/src/H5Sf.c | 1 - fortran/src/H5Sff.F90 | 1 - fortran/src/H5Tf.c | 1 - fortran/src/H5Tff.F90 | 1 - fortran/src/H5VLff.F90 | 1 - fortran/src/H5Zf.c | 1 - fortran/src/H5Zff.F90 | 1 - fortran/src/H5_buildiface.F90 | 1 - fortran/src/H5_f.c | 1 - fortran/src/H5_ff.F90 | 1 - fortran/src/H5f90.h | 1 - fortran/src/H5f90global.F90 | 1 - fortran/src/H5f90i.h | 1 - fortran/src/H5f90kit.c | 1 - fortran/src/H5f90proto.h | 1 - fortran/src/H5fortkit.F90 | 1 - fortran/src/H5match_types.c | 3 - fortran/src/HDF5.F90 | 1 - fortran/src/Makefile.am | 1 - fortran/src/h5fc.in | 1 - fortran/test/H5_test_buildiface.F90 | 1 - fortran/test/Makefile.am | 1 - fortran/test/fflush1.F90 | 1 - fortran/test/fflush2.F90 | 1 - fortran/test/fortranlib_test.F90 | 1 - fortran/test/fortranlib_test_1_8.F90 | 1 - fortran/test/fortranlib_test_F03.F90 | 1 - fortran/test/t.c | 1 - fortran/test/t.h | 1 - fortran/test/tH5A.F90 | 1 - fortran/test/tH5A_1_8.F90 | 1 - 
fortran/test/tH5D.F90 | 1 - fortran/test/tH5E.F90 | 1 - fortran/test/tH5E_F03.F90 | 1 - fortran/test/tH5F.F90 | 1 - fortran/test/tH5F_F03.F90 | 1 - fortran/test/tH5G.F90 | 1 - fortran/test/tH5G_1_8.F90 | 1 - fortran/test/tH5I.F90 | 1 - fortran/test/tH5L_F03.F90 | 1 - fortran/test/tH5MISC_1_8.F90 | 1 - fortran/test/tH5O.F90 | 1 - fortran/test/tH5O_F03.F90 | 1 - fortran/test/tH5P.F90 | 1 - fortran/test/tH5P_F03.F90 | 1 - fortran/test/tH5R.F90 | 1 - fortran/test/tH5S.F90 | 1 - fortran/test/tH5Sselect.F90 | 1 - fortran/test/tH5T.F90 | 1 - fortran/test/tH5T_F03.F90 | 1 - fortran/test/tH5VL.F90 | 1 - fortran/test/tH5Z.F90 | 1 - fortran/test/tHDF5.F90 | 1 - fortran/test/tHDF5_1_8.F90 | 1 - fortran/test/tHDF5_F03.F90 | 1 - fortran/test/tf.F90 | 1 - fortran/test/vol_connector.F90 | 1 - fortran/testpar/Makefile.am | 1 - fortran/testpar/hyper.f90 | 1 - fortran/testpar/mdset.f90 | 1 - fortran/testpar/ptest.f90 | 1 - hl/Makefile.am | 1 - hl/c++/Makefile.am | 1 - hl/c++/examples/Makefile.am | 1 - hl/c++/examples/ptExampleFL.cpp | 1 - hl/c++/src/H5PacketTable.cpp | 1 - hl/c++/src/H5PacketTable.h | 1 - hl/c++/src/Makefile.am | 1 - hl/c++/test/Makefile.am | 1 - hl/c++/test/ptableTest.cpp | 1 - hl/c++/test/ptableTest.h | 1 - hl/examples/Makefile.am | 1 - hl/examples/ex_ds1.c | 1 - hl/examples/ex_image1.c | 1 - hl/examples/ex_image2.c | 1 - hl/examples/ex_lite1.c | 1 - hl/examples/ex_lite2.c | 1 - hl/examples/ex_lite3.c | 1 - hl/examples/ex_table_01.c | 1 - hl/examples/ex_table_02.c | 1 - hl/examples/ex_table_03.c | 1 - hl/examples/ex_table_04.c | 1 - hl/examples/ex_table_05.c | 1 - hl/examples/ex_table_06.c | 1 - hl/examples/ex_table_07.c | 1 - hl/examples/ex_table_08.c | 1 - hl/examples/ex_table_09.c | 1 - hl/examples/ex_table_10.c | 1 - hl/examples/ex_table_11.c | 1 - hl/examples/ex_table_12.c | 1 - hl/examples/pal_rgb.h | 1 - hl/examples/ptExampleFL.c | 1 - hl/fortran/Makefile.am | 1 - hl/fortran/examples/Makefile.am | 1 - hl/fortran/examples/ex_ds1.f90 | 1 - 
hl/fortran/examples/exlite.f90 | 1 - hl/fortran/src/H5DSfc.c | 1 - hl/fortran/src/H5DSff.F90 | 1 - hl/fortran/src/H5HL_buildiface.F90 | 1 - hl/fortran/src/H5IMcc.c | 1 - hl/fortran/src/H5IMcc.h | 1 - hl/fortran/src/H5IMfc.c | 1 - hl/fortran/src/H5IMff.F90 | 1 - hl/fortran/src/H5LTf90proto.h | 1 - hl/fortran/src/H5LTfc.c | 1 - hl/fortran/src/H5LTff.F90 | 1 - hl/fortran/src/H5TBfc.c | 1 - hl/fortran/src/H5TBff.F90 | 1 - hl/fortran/src/Makefile.am | 1 - hl/fortran/test/Makefile.am | 1 - hl/fortran/test/tstds.F90 | 1 - hl/fortran/test/tstimage.F90 | 1 - hl/fortran/test/tstlite.F90 | 1 - hl/fortran/test/tsttable.F90 | 1 - hl/src/H5DO.c | 1 - hl/src/H5DOpublic.h | 1 - hl/src/H5DS.c | 1 - hl/src/H5DSprivate.h | 1 - hl/src/H5DSpublic.h | 1 - hl/src/H5HLprivate2.h | 1 - hl/src/H5IM.c | 1 - hl/src/H5IMprivate.h | 1 - hl/src/H5IMpublic.h | 1 - hl/src/H5LD.c | 1 - hl/src/H5LDprivate.h | 1 - hl/src/H5LDpublic.h | 1 - hl/src/H5LT.c | 1 - hl/src/H5LTanalyze.c | 1 - hl/src/H5LTanalyze.l | 1 - hl/src/H5LTparse.y | 1 - hl/src/H5LTprivate.h | 1 - hl/src/H5LTpublic.h | 1 - hl/src/H5PT.c | 1 - hl/src/H5PTprivate.h | 1 - hl/src/H5PTpublic.h | 1 - hl/src/H5TB.c | 1 - hl/src/H5TBprivate.h | 1 - hl/src/H5TBpublic.h | 1 - hl/src/Makefile.am | 1 - hl/src/hdf5_hl.h | 1 - hl/test/H5srcdir_str.h.in | 1 - hl/test/Makefile.am | 1 - hl/test/gen_test_ds.c | 1 - hl/test/h5hltest.h | 1 - hl/test/pal_rgb.h | 1 - hl/test/test_ds.c | 1 - hl/test/test_dset_append.c | 1 - hl/test/test_file_image.c | 1 - hl/test/test_h5do_compat.c | 1 - hl/test/test_image.c | 1 - hl/test/test_ld.c | 1 - hl/test/test_lite.c | 1 - hl/test/test_packet.c | 1 - hl/test/test_table.c | 1 - hl/tools/Makefile.am | 1 - hl/tools/gif2h5/Makefile.am | 1 - hl/tools/gif2h5/decompress.c | 1 - hl/tools/gif2h5/gif.h | 1 - hl/tools/gif2h5/gif2hdf.c | 1 - hl/tools/gif2h5/gif2mem.c | 1 - hl/tools/gif2h5/gifread.c | 1 - hl/tools/gif2h5/h52gifgentst.c | 1 - hl/tools/gif2h5/h52giftest.sh.in | 1 - hl/tools/gif2h5/hdf2gif.c | 1 - 
hl/tools/gif2h5/hdfgifwr.c | 1 - hl/tools/gif2h5/writehdf.c | 1 - hl/tools/h5watch/Makefile.am | 1 - hl/tools/h5watch/extend_dset.c | 1 - hl/tools/h5watch/h5watch.c | 1 - hl/tools/h5watch/h5watchgentest.c | 1 - hl/tools/h5watch/testh5watch.sh.in | 1 - java/src/hdf/hdf5lib/H5.java | 1 - java/src/hdf/hdf5lib/HDF5Constants.java | 1 - java/src/hdf/hdf5lib/HDFArray.java | 1 - java/src/hdf/hdf5lib/HDFNativeData.java | 1 - .../hdf5lib/exceptions/HDF5AtomException.java | 1 - .../exceptions/HDF5AttributeException.java | 1 - .../exceptions/HDF5BtreeException.java | 1 - .../exceptions/HDF5DataFiltersException.java | 1 - .../exceptions/HDF5DataStorageException.java | 1 - .../HDF5DatasetInterfaceException.java | 1 - .../HDF5DataspaceInterfaceException.java | 1 - .../HDF5DatatypeInterfaceException.java | 1 - .../hdf/hdf5lib/exceptions/HDF5Exception.java | 1 - .../HDF5ExternalFileListException.java | 1 - .../HDF5FileInterfaceException.java | 1 - .../HDF5FunctionArgumentException.java | 1 - .../HDF5FunctionEntryExitException.java | 1 - .../hdf5lib/exceptions/HDF5HeapException.java | 1 - .../HDF5InternalErrorException.java | 1 - .../hdf5lib/exceptions/HDF5JavaException.java | 1 - .../exceptions/HDF5LibraryException.java | 1 - .../exceptions/HDF5LowLevelIOException.java | 1 - .../HDF5MetaDataCacheException.java | 1 - .../exceptions/HDF5ObjectHeaderException.java | 1 - .../HDF5PropertyListInterfaceException.java | 1 - .../exceptions/HDF5ReferenceException.java | 1 - .../HDF5ResourceUnavailableException.java | 1 - .../exceptions/HDF5SymbolTableException.java | 1 - java/src/jni/exceptionImp.c | 1 - java/src/jni/h5Constants.c | 1 - java/src/jni/h5Imp.c | 1 - java/src/jni/h5aImp.c | 1 - java/src/jni/h5dImp.c | 1 - java/src/jni/h5fImp.c | 1 - java/src/jni/h5gImp.c | 1 - java/src/jni/h5iImp.c | 1 - java/src/jni/h5pImp.c | 1 - java/src/jni/h5rImp.c | 1 - java/src/jni/h5sImp.c | 1 - java/src/jni/h5tImp.c | 1 - java/src/jni/h5util.c | 1 - java/src/jni/h5util.h | 1 - java/src/jni/h5zImp.c | 1 
- java/src/jni/nativeData.c | 1 - m4/aclocal_fc.m4 | 2 +- src/H5.c | 1 - src/H5A.c | 1 - src/H5AC.c | 1 - src/H5ACdbg.c | 1 - src/H5ACmpio.c | 1 - src/H5ACpkg.h | 1 - src/H5ACprivate.h | 1 - src/H5ACproxy_entry.c | 1 - src/H5ACpublic.h | 1 - src/H5Abtree2.c | 1 - src/H5Adense.c | 1 - src/H5Adeprec.c | 1 - src/H5Aint.c | 1 - src/H5Apkg.h | 1 - src/H5Aprivate.h | 1 - src/H5Apublic.h | 1 - src/H5Atest.c | 1 - src/H5B.c | 1 - src/H5B2.c | 1 - src/H5B2cache.c | 1 - src/H5B2dbg.c | 1 - src/H5B2hdr.c | 1 - src/H5B2int.c | 1 - src/H5B2internal.c | 1 - src/H5B2leaf.c | 1 - src/H5B2pkg.h | 1 - src/H5B2private.h | 1 - src/H5B2stat.c | 1 - src/H5B2test.c | 1 - src/H5Bcache.c | 1 - src/H5Bdbg.c | 1 - src/H5Bpkg.h | 1 - src/H5Bprivate.h | 1 - src/H5C.c | 1 - src/H5CS.c | 1 - src/H5CSprivate.h | 1 - src/H5Cdbg.c | 1 - src/H5Cepoch.c | 1 - src/H5Cimage.c | 1 - src/H5Clog.c | 1 - src/H5Clog.h | 1 - src/H5Clog_json.c | 1 - src/H5Clog_trace.c | 1 - src/H5Cmpio.c | 1 - src/H5Cpkg.h | 1 - src/H5Cprefetched.c | 1 - src/H5Cprivate.h | 1 - src/H5Cpublic.h | 1 - src/H5Cquery.c | 1 - src/H5Ctag.c | 1 - src/H5Ctest.c | 1 - src/H5D.c | 1 - src/H5Dbtree.c | 1 - src/H5Dbtree2.c | 1 - src/H5Dchunk.c | 1 - src/H5Dcompact.c | 1 - src/H5Dcontig.c | 1 - src/H5Ddbg.c | 1 - src/H5Ddeprec.c | 1 - src/H5Dearray.c | 1 - src/H5Defl.c | 1 - src/H5Dfarray.c | 1 - src/H5Dfill.c | 1 - src/H5Dint.c | 1 - src/H5Dio.c | 1 - src/H5Dlayout.c | 1 - src/H5Dmpio.c | 1 - src/H5Dnone.c | 1 - src/H5Doh.c | 1 - src/H5Dpkg.h | 1 - src/H5Dprivate.h | 1 - src/H5Dpublic.h | 1 - src/H5Dscatgath.c | 1 - src/H5Dselect.c | 1 - src/H5Dsingle.c | 1 - src/H5Dtest.c | 1 - src/H5E.c | 1 - src/H5EA.c | 1 - src/H5EAcache.c | 1 - src/H5EAdbg.c | 1 - src/H5EAdblkpage.c | 1 - src/H5EAdblock.c | 1 - src/H5EAhdr.c | 1 - src/H5EAiblock.c | 1 - src/H5EAint.c | 1 - src/H5EApkg.h | 1 - src/H5EAprivate.h | 1 - src/H5EAsblock.c | 1 - src/H5EAstat.c | 1 - src/H5EAtest.c | 1 - src/H5Edeprec.c | 1 - src/H5Eint.c | 1 - src/H5Epkg.h | 1 - 
src/H5Eprivate.h | 1 - src/H5Epublic.h | 1 - src/H5F.c | 1 - src/H5FA.c | 1 - src/H5FAcache.c | 1 - src/H5FAdbg.c | 1 - src/H5FAdblkpage.c | 1 - src/H5FAdblock.c | 1 - src/H5FAhdr.c | 1 - src/H5FAint.c | 1 - src/H5FApkg.h | 1 - src/H5FAprivate.h | 1 - src/H5FAstat.c | 1 - src/H5FAtest.c | 1 - src/H5FD.c | 1 - src/H5FDcore.c | 1 - src/H5FDcore.h | 1 - src/H5FDdirect.c | 1 - src/H5FDdirect.h | 1 - src/H5FDfamily.c | 1 - src/H5FDfamily.h | 1 - src/H5FDint.c | 1 - src/H5FDlog.c | 1 - src/H5FDlog.h | 1 - src/H5FDmpi.c | 1 - src/H5FDmpi.h | 1 - src/H5FDmpio.c | 1 - src/H5FDmpio.h | 1 - src/H5FDmulti.c | 1 - src/H5FDmulti.h | 1 - src/H5FDpkg.h | 1 - src/H5FDprivate.h | 1 - src/H5FDpublic.h | 1 - src/H5FDsec2.c | 1 - src/H5FDsec2.h | 1 - src/H5FDspace.c | 1 - src/H5FDstdio.c | 1 - src/H5FDstdio.h | 1 - src/H5FDtest.c | 1 - src/H5FDwindows.c | 1 - src/H5FDwindows.h | 1 - src/H5FL.c | 1 - src/H5FLprivate.h | 1 - src/H5FO.c | 1 - src/H5FOprivate.h | 1 - src/H5FS.c | 1 - src/H5FScache.c | 1 - src/H5FSdbg.c | 1 - src/H5FSint.c | 1 - src/H5FSpkg.h | 1 - src/H5FSprivate.h | 1 - src/H5FSsection.c | 1 - src/H5FSstat.c | 1 - src/H5FStest.c | 1 - src/H5Faccum.c | 1 - src/H5Fcwfs.c | 1 - src/H5Fdbg.c | 1 - src/H5Fdeprec.c | 1 - src/H5Fefc.c | 1 - src/H5Ffake.c | 1 - src/H5Fint.c | 1 - src/H5Fio.c | 1 - src/H5Fmount.c | 1 - src/H5Fmpi.c | 1 - src/H5Fpkg.h | 1 - src/H5Fprivate.h | 1 - src/H5Fpublic.h | 1 - src/H5Fquery.c | 1 - src/H5Fsfile.c | 1 - src/H5Fspace.c | 1 - src/H5Fsuper.c | 1 - src/H5Fsuper_cache.c | 1 - src/H5Ftest.c | 1 - src/H5G.c | 1 - src/H5Gbtree2.c | 1 - src/H5Gcache.c | 1 - src/H5Gcompact.c | 1 - src/H5Gdense.c | 1 - src/H5Gdeprec.c | 1 - src/H5Gent.c | 1 - src/H5Gint.c | 1 - src/H5Glink.c | 1 - src/H5Gloc.c | 1 - src/H5Gname.c | 1 - src/H5Gnode.c | 1 - src/H5Gobj.c | 1 - src/H5Goh.c | 1 - src/H5Gpkg.h | 1 - src/H5Gprivate.h | 1 - src/H5Gpublic.h | 1 - src/H5Groot.c | 1 - src/H5Gstab.c | 1 - src/H5Gtest.c | 1 - src/H5Gtraverse.c | 1 - src/H5HF.c | 1 - src/H5HFbtree2.c 
| 1 - src/H5HFcache.c | 1 - src/H5HFdbg.c | 1 - src/H5HFdblock.c | 1 - src/H5HFdtable.c | 1 - src/H5HFhdr.c | 1 - src/H5HFhuge.c | 1 - src/H5HFiblock.c | 1 - src/H5HFiter.c | 1 - src/H5HFman.c | 1 - src/H5HFpkg.h | 1 - src/H5HFprivate.h | 1 - src/H5HFsection.c | 1 - src/H5HFspace.c | 1 - src/H5HFstat.c | 1 - src/H5HFtest.c | 1 - src/H5HFtiny.c | 1 - src/H5HG.c | 1 - src/H5HGcache.c | 1 - src/H5HGdbg.c | 1 - src/H5HGpkg.h | 1 - src/H5HGprivate.h | 1 - src/H5HGquery.c | 1 - src/H5HL.c | 1 - src/H5HLcache.c | 1 - src/H5HLdbg.c | 1 - src/H5HLdblk.c | 1 - src/H5HLint.c | 1 - src/H5HLpkg.h | 1 - src/H5HLprfx.c | 1 - src/H5HLprivate.h | 1 - src/H5I.c | 1 - src/H5Idbg.c | 1 - src/H5Iint.c | 1 - src/H5Ipkg.h | 1 - src/H5Iprivate.h | 1 - src/H5Ipublic.h | 1 - src/H5Itest.c | 1 - src/H5L.c | 1 - src/H5Ldeprec.c | 1 - src/H5Lexternal.c | 1 - src/H5Lpkg.h | 1 - src/H5Lprivate.h | 1 - src/H5Lpublic.h | 1 - src/H5M.c | 1 - src/H5MF.c | 1 - src/H5MFaggr.c | 1 - src/H5MFdbg.c | 1 - src/H5MFpkg.h | 1 - src/H5MFprivate.h | 1 - src/H5MFsection.c | 1 - src/H5MM.c | 1 - src/H5MMprivate.h | 1 - src/H5MMpublic.h | 1 - src/H5Mpkg.h | 1 - src/H5Mprivate.h | 1 - src/H5Mpublic.h | 1 - src/H5O.c | 1 - src/H5Oainfo.c | 1 - src/H5Oalloc.c | 1 - src/H5Oattr.c | 1 - src/H5Oattribute.c | 1 - src/H5Obogus.c | 1 - src/H5Obtreek.c | 1 - src/H5Ocache.c | 1 - src/H5Ocache_image.c | 1 - src/H5Ochunk.c | 1 - src/H5Ocont.c | 1 - src/H5Ocopy.c | 1 - src/H5Odbg.c | 1 - src/H5Odeprec.c | 1 - src/H5Odrvinfo.c | 1 - src/H5Odtype.c | 1 - src/H5Oefl.c | 1 - src/H5Ofill.c | 1 - src/H5Oflush.c | 1 - src/H5Ofsinfo.c | 1 - src/H5Oginfo.c | 1 - src/H5Oint.c | 1 - src/H5Olayout.c | 1 - src/H5Olinfo.c | 1 - src/H5Olink.c | 1 - src/H5Omessage.c | 1 - src/H5Omtime.c | 1 - src/H5Oname.c | 1 - src/H5Onull.c | 1 - src/H5Opkg.h | 1 - src/H5Opline.c | 1 - src/H5Oprivate.h | 1 - src/H5Opublic.h | 1 - src/H5Orefcount.c | 1 - src/H5Osdspace.c | 1 - src/H5Oshared.c | 1 - src/H5Oshared.h | 1 - src/H5Oshmesg.c | 1 - src/H5Ostab.c | 
1 - src/H5Otest.c | 1 - src/H5Ounknown.c | 1 - src/H5P.c | 1 - src/H5PB.c | 1 - src/H5PBpkg.h | 1 - src/H5PBprivate.h | 1 - src/H5PLpkg.h | 1 - src/H5Pacpl.c | 1 - src/H5Pdapl.c | 1 - src/H5Pdcpl.c | 1 - src/H5Pdeprec.c | 1 - src/H5Pdxpl.c | 1 - src/H5Pencdec.c | 1 - src/H5Pfapl.c | 1 - src/H5Pfcpl.c | 1 - src/H5Pfmpl.c | 1 - src/H5Pgcpl.c | 1 - src/H5Pint.c | 1 - src/H5Plapl.c | 1 - src/H5Plcpl.c | 1 - src/H5Pmapl.c | 1 - src/H5Pmcpl.c | 1 - src/H5Pocpl.c | 1 - src/H5Pocpypl.c | 1 - src/H5Ppkg.h | 1 - src/H5Pprivate.h | 1 - src/H5Ppublic.h | 1 - src/H5Pstrcpl.c | 1 - src/H5Ptest.c | 1 - src/H5R.c | 1 - src/H5RS.c | 1 - src/H5RSprivate.h | 1 - src/H5Rdeprec.c | 1 - src/H5Rint.c | 1 - src/H5Rpkg.h | 1 - src/H5Rprivate.h | 1 - src/H5Rpublic.h | 1 - src/H5S.c | 1 - src/H5SL.c | 1 - src/H5SLprivate.h | 1 - src/H5SM.c | 1 - src/H5SMbtree2.c | 1 - src/H5SMcache.c | 1 - src/H5SMmessage.c | 1 - src/H5SMpkg.h | 1 - src/H5SMprivate.h | 1 - src/H5SMtest.c | 1 - src/H5Sall.c | 1 - src/H5Sdbg.c | 1 - src/H5Sdeprec.c | 1 - src/H5Shyper.c | 1 - src/H5Smpio.c | 1 - src/H5Snone.c | 1 - src/H5Spkg.h | 1 - src/H5Spoint.c | 1 - src/H5Sprivate.h | 1 - src/H5Spublic.h | 1 - src/H5Sselect.c | 1 - src/H5Stest.c | 1 - src/H5T.c | 1 - src/H5TS.c | 1 - src/H5TSprivate.h | 1 - src/H5Tarray.c | 1 - src/H5Tbit.c | 1 - src/H5Tcommit.c | 1 - src/H5Tcompound.c | 1 - src/H5Tconv.c | 1 - src/H5Tcset.c | 1 - src/H5Tdbg.c | 1 - src/H5Tdeprec.c | 1 - src/H5Tenum.c | 1 - src/H5Tfields.c | 1 - src/H5Tfixed.c | 1 - src/H5Tfloat.c | 1 - src/H5Tnative.c | 1 - src/H5Toffset.c | 1 - src/H5Toh.c | 1 - src/H5Topaque.c | 1 - src/H5Torder.c | 1 - src/H5Tpad.c | 1 - src/H5Tpkg.h | 1 - src/H5Tprecis.c | 1 - src/H5Tprivate.h | 1 - src/H5Tpublic.h | 1 - src/H5Tstrpad.c | 1 - src/H5Tvisit.c | 1 - src/H5Tvlen.c | 1 - src/H5UC.c | 1 - src/H5UCprivate.h | 1 - src/H5VM.c | 1 - src/H5VMprivate.h | 1 - src/H5WB.c | 1 - src/H5WBprivate.h | 1 - src/H5Z.c | 1 - src/H5Zdeflate.c | 1 - src/H5Zfletcher32.c | 1 - src/H5Znbit.c | 1 
- src/H5Zpkg.h | 1 - src/H5Zprivate.h | 1 - src/H5Zpublic.h | 1 - src/H5Zscaleoffset.c | 1 - src/H5Zshuffle.c | 1 - src/H5Zszip.c | 1 - src/H5Ztrans.c | 1 - src/H5api_adpt.h | 1 - src/H5checksum.c | 1 - src/H5dbg.c | 1 - src/H5detect.c | 5 -- src/H5err.txt | 1 - src/H5make_libsettings.c | 2 - src/H5mpi.c | 1 - src/H5overflow.txt | 1 - src/H5private.h | 1 - src/H5public.h | 1 - src/H5system.c | 1 - src/H5timer.c | 1 - src/H5trace.c | 1 - src/H5vers.txt | 1 - src/H5win32defs.h | 1 - src/Makefile.am | 1 - src/hdf5.h | 1 - test/H5srcdir.h | 1 - test/H5srcdir_str.h.in | 1 - test/Makefile.am | 1 - test/accum.c | 1 - test/accum_swmr_reader.c | 1 - test/app_ref.c | 1 - test/big.c | 1 - test/bittests.c | 1 - test/btree2.c | 1 - test/cache.c | 1 - test/cache_api.c | 1 - test/cache_common.c | 1 - test/cache_common.h | 1 - test/cache_image.c | 1 - test/cache_logging.c | 1 - test/chunk_info.c | 1 - test/cmpd_dset.c | 1 - test/cross_read.c | 1 - test/dangle.c | 1 - test/del_many_dense_attrs.c | 1 - test/direct_chunk.c | 1 - test/dsets.c | 1 - test/dt_arith.c | 1 - test/dtransform.c | 1 - test/dtypes.c | 1 - test/earray.c | 1 - test/efc.c | 1 - test/enc_dec_plist.c | 1 - test/enc_dec_plist_cross_platform.c | 1 - test/enum.c | 1 - test/err_compat.c | 1 - test/error_test.c | 1 - test/evict_on_close.c | 1 - test/extend.c | 1 - test/external.c | 1 - test/external_common.c | 1 - test/external_common.h | 1 - test/external_env.c | 1 - test/external_fname.h | 1 - test/farray.c | 1 - test/fheap.c | 1 - test/file_image.c | 1 - test/filenotclosed.c | 1 - test/fillval.c | 1 - test/filter_fail.c | 1 - test/flush1.c | 1 - test/flush2.c | 1 - test/flushrefresh.c | 1 - test/freespace.c | 1 - test/gen_bad_compound.c | 1 - test/gen_bad_offset.c | 1 - test/gen_bad_ohdr.c | 1 - test/gen_bogus.c | 1 - test/gen_bounds.c | 1 - test/gen_cross.c | 1 - test/gen_deflate.c | 1 - test/gen_file_image.c | 1 - test/gen_filespace.c | 1 - test/gen_filters.c | 1 - test/gen_mergemsg.c | 1 - test/gen_new_array.c | 1 
- test/gen_new_fill.c | 1 - test/gen_new_group.c | 1 - test/gen_new_mtime.c | 1 - test/gen_new_super.c | 1 - test/gen_noencoder.c | 1 - test/gen_nullspace.c | 1 - test/gen_old_array.c | 1 - test/gen_old_group.c | 1 - test/gen_old_layout.c | 1 - test/gen_old_mtime.c | 1 - test/gen_plist.c | 1 - test/gen_sizes_lheap.c | 1 - test/gen_specmetaread.c | 1 - test/gen_udlinks.c | 1 - test/genall5.c | 1 - test/genall5.h | 1 - test/getname.c | 1 - test/gheap.c | 1 - test/h5test.c | 1 - test/h5test.h | 1 - test/hyperslab.c | 1 - test/istore.c | 1 - test/lheap.c | 1 - test/links.c | 1 - test/links_env.c | 1 - test/mf.c | 1 - test/mount.c | 1 - test/mtime.c | 1 - test/ntypes.c | 1 - test/objcopy.c | 1 - test/objcopy_ref.c | 1 - test/ohdr.c | 1 - test/page_buffer.c | 1 - test/reserved.c | 1 - test/set_extent.c | 1 - test/space_overflow.c | 1 - test/stab.c | 1 - test/swmr.c | 1 - test/swmr_addrem_writer.c | 1 - test/swmr_common.c | 1 - test/swmr_common.h | 1 - test/swmr_generator.c | 1 - test/swmr_reader.c | 1 - test/swmr_remove_reader.c | 1 - test/swmr_remove_writer.c | 1 - test/swmr_sparse_reader.c | 1 - test/swmr_sparse_writer.c | 1 - test/swmr_start_write.c | 1 - test/swmr_writer.c | 1 - test/tarray.c | 1 - test/tattr.c | 1 - test/tcheck_version.c | 1 - test/tchecksum.c | 1 - test/tconfig.c | 1 - test/tcoords.c | 1 - test/testabort_fail.sh.in | 1 - test/testcheck_version.sh.in | 1 - test/testerror.sh.in | 1 - test/testexternal_env.sh.in | 1 - test/testflushrefresh.sh.in | 1 - test/testframe.c | 1 - test/testhdf5.c | 1 - test/testhdf5.h | 1 - test/testlibinfo.sh.in | 1 - test/testlinks_env.sh.in | 1 - test/testmeta.c | 1 - test/testswmr.sh.in | 1 - test/testvds_env.sh.in | 1 - test/testvdsswmr.sh.in | 1 - test/tfile.c | 1 - test/tgenprop.c | 1 - test/th5o.c | 1 - test/th5s.c | 1 - test/tid.c | 1 - test/timer.c | 11 +-- test/titerate.c | 1 - test/tmeta.c | 1 - test/tmisc.c | 1 - test/trefer.c | 1 - test/trefer_deprec.c | 1 - test/trefstr.c | 1 - test/tselect.c | 1 - 
test/tskiplist.c | 1 - test/tsohm.c | 1 - test/ttime.c | 1 - test/ttsafe.c | 1 - test/ttsafe.h | 1 - test/ttsafe_acreate.c | 1 - test/ttsafe_attr_vlen.c | 1 - test/ttsafe_cancel.c | 1 - test/ttsafe_dcreate.c | 1 - test/ttsafe_error.c | 1 - test/ttsafe_rec_rw_lock.c | 1 - test/tunicode.c | 1 - test/tvlstr.c | 1 - test/tvltypes.c | 1 - test/unlink.c | 1 - test/vds_swmr.h | 1 - test/vds_swmr_gen.c | 1 - test/vds_swmr_reader.c | 1 - test/vds_swmr_writer.c | 1 - test/vfd.c | 1 - testpar/Makefile.am | 1 - testpar/t_2Gio.c | 1 - testpar/t_cache.c | 1 - testpar/t_cache_image.c | 1 - testpar/t_chunk_alloc.c | 1 - testpar/t_coll_chunk.c | 1 - testpar/t_dset.c | 1 - testpar/t_file.c | 1 - testpar/t_file_image.c | 1 - testpar/t_filter_read.c | 1 - testpar/t_filters_parallel.c | 11 +-- testpar/t_filters_parallel.h | 11 +-- testpar/t_init_term.c | 1 - testpar/t_mdset.c | 1 - testpar/t_mpi.c | 1 - testpar/t_pflush1.c | 1 - testpar/t_pflush2.c | 1 - testpar/t_ph5basic.c | 1 - testpar/t_pread.c | 1 - testpar/t_prestart.c | 1 - testpar/t_prop.c | 1 - testpar/t_pshutdown.c | 1 - testpar/t_span_tree.c | 1 - testpar/testpar.h | 1 - testpar/testpflush.sh.in | 1 - testpar/testphdf5.c | 1 - testpar/testphdf5.h | 1 - tools/Makefile.am | 1 - tools/lib/Makefile.am | 1 - tools/lib/h5diff.c | 1 - tools/lib/h5diff.h | 1 - tools/lib/h5diff_array.c | 1 - tools/lib/h5diff_attr.c | 1 - tools/lib/h5diff_dset.c | 1 - tools/lib/h5diff_util.c | 1 - tools/lib/h5tools.c | 1 - tools/lib/h5tools.h | 1 - tools/lib/h5tools_dump.c | 1 - tools/lib/h5tools_dump.h | 1 - tools/lib/h5tools_error.h | 1 - tools/lib/h5tools_filters.c | 1 - tools/lib/h5tools_ref.c | 1 - tools/lib/h5tools_ref.h | 1 - tools/lib/h5tools_str.c | 1 - tools/lib/h5tools_str.h | 1 - tools/lib/h5tools_type.c | 1 - tools/lib/h5tools_utils.c | 1 - tools/lib/h5tools_utils.h | 1 - tools/lib/h5trav.c | 1 - tools/lib/h5trav.h | 1 - tools/lib/io_timer.h | 1 - tools/lib/ph5diff.h | 1 - tools/src/Makefile.am | 1 - tools/src/h5copy/Makefile.am | 1 - 
tools/src/h5copy/h5copy.c | 1 - tools/src/h5diff/Makefile.am | 1 - tools/src/h5diff/h5diff_common.c | 1 - tools/src/h5diff/h5diff_common.h | 1 - tools/src/h5diff/h5diff_main.c | 1 - tools/src/h5diff/ph5diff_main.c | 1 - tools/src/h5dump/Makefile.am | 1 - tools/src/h5dump/h5dump.c | 1 - tools/src/h5dump/h5dump.h | 1 - tools/src/h5dump/h5dump_ddl.c | 1 - tools/src/h5dump/h5dump_ddl.h | 1 - tools/src/h5dump/h5dump_defines.h | 1 - tools/src/h5dump/h5dump_extern.h | 1 - tools/src/h5dump/h5dump_xml.c | 1 - tools/src/h5dump/h5dump_xml.h | 1 - tools/src/h5format_convert/Makefile.am | 1 - tools/src/h5format_convert/h5format_convert.c | 1 - tools/src/h5import/Makefile.am | 1 - tools/src/h5import/h5import.c | 1 - tools/src/h5import/h5import.h | 1 - tools/src/h5jam/Makefile.am | 1 - tools/src/h5jam/h5jam.c | 1 - tools/src/h5jam/h5unjam.c | 1 - tools/src/h5ls/Makefile.am | 1 - tools/src/h5ls/h5ls.c | 1 - tools/src/h5perf/Makefile.am | 1 - tools/src/h5perf/perf.c | 1 - tools/src/h5repack/Makefile.am | 1 - tools/src/h5repack/h5repack.c | 1 - tools/src/h5repack/h5repack.h | 1 - tools/src/h5repack/h5repack_copy.c | 1 - tools/src/h5repack/h5repack_filters.c | 1 - tools/src/h5repack/h5repack_main.c | 1 - tools/src/h5repack/h5repack_opttable.c | 1 - tools/src/h5repack/h5repack_parse.c | 1 - tools/src/h5repack/h5repack_refs.c | 1 - tools/src/h5repack/h5repack_verify.c | 1 - tools/src/h5stat/Makefile.am | 1 - tools/src/h5stat/h5stat.c | 1 - tools/src/misc/Makefile.am | 1 - tools/src/misc/h5clear.c | 1 - tools/src/misc/h5debug.c | 1 - tools/src/misc/h5mkgrp.c | 1 - tools/src/misc/h5repart.c | 1 - tools/test/Makefile.am | 1 - tools/test/h5copy/Makefile.am | 1 - tools/test/h5copy/h5copygentest.c | 1 - tools/test/h5copy/testh5copy.sh.in | 1 - tools/test/h5diff/Makefile.am | 1 - tools/test/h5diff/h5diffgentest.c | 1 - tools/test/h5diff/testh5diff.sh.in | 1 - tools/test/h5diff/testph5diff.sh.in | 1 - tools/test/h5dump/Makefile.am | 1 - tools/test/h5dump/binread.c | 1 - 
tools/test/h5dump/h5dumpgentest.c | 1 - tools/test/h5dump/testh5dump.sh.in | 1 - tools/test/h5dump/testh5dumppbits.sh.in | 1 - tools/test/h5dump/testh5dumpvds.sh.in | 1 - tools/test/h5dump/testh5dumpxml.sh.in | 1 - tools/test/h5format_convert/Makefile.am | 1 - tools/test/h5format_convert/h5fc_chk_idx.c | 1 - tools/test/h5format_convert/h5fc_gentest.c | 1 - tools/test/h5format_convert/testh5fc.sh.in | 1 - tools/test/h5import/Makefile.am | 1 - tools/test/h5import/h5importtest.c | 1 - tools/test/h5import/h5importtestutil.sh.in | 1 - tools/test/h5jam/Makefile.am | 1 - tools/test/h5jam/getub.c | 1 - tools/test/h5jam/h5jamgentest.c | 1 - tools/test/h5jam/tellub.c | 1 - tools/test/h5jam/testh5jam.sh.in | 1 - tools/test/h5ls/Makefile.am | 1 - tools/test/h5ls/testh5ls.sh.in | 1 - tools/test/h5ls/testh5lsvds.sh.in | 1 - tools/test/h5repack/Makefile.am | 1 - tools/test/h5repack/h5repack.sh.in | 1 - tools/test/h5repack/h5repackgentest.c | 1 - tools/test/h5repack/h5repacktst.c | 1 - .../test/h5repack/testh5repack_detect_szip.c | 1 - tools/test/h5stat/Makefile.am | 1 - tools/test/h5stat/h5stat_gentest.c | 1 - tools/test/h5stat/testh5stat.sh.in | 1 - tools/test/misc/Makefile.am | 1 - tools/test/misc/clear_open_chk.c | 1 - tools/test/misc/h5clear_gentest.c | 1 - tools/test/misc/h5repart_gentest.c | 1 - tools/test/misc/repart_test.c | 1 - tools/test/misc/talign.c | 1 - tools/test/misc/testh5clear.sh.in | 1 - tools/test/misc/testh5mkgrp.sh.in | 1 - tools/test/misc/testh5repart.sh.in | 1 - tools/test/misc/vds/Makefile.am | 1 - tools/test/misc/vds/UC_1.h | 1 - tools/test/misc/vds/UC_1_one_dim_gen.c | 1 - tools/test/misc/vds/UC_2.h | 1 - tools/test/misc/vds/UC_2_two_dims_gen.c | 1 - tools/test/misc/vds/UC_3.h | 1 - tools/test/misc/vds/UC_3_gaps_gen.c | 1 - tools/test/misc/vds/UC_4.h | 1 - tools/test/misc/vds/UC_4_printf_gen.c | 1 - tools/test/misc/vds/UC_5.h | 1 - tools/test/misc/vds/UC_5_stride_gen.c | 1 - tools/test/misc/vds/UC_common.h | 1 - tools/test/perform/Makefile.am | 1 - 
tools/test/perform/build_h5perf_alone.sh | 1 - .../test/perform/build_h5perf_serial_alone.sh | 1 - tools/test/perform/chunk.c | 1 - tools/test/perform/chunk_cache.c | 1 - tools/test/perform/direct_write_perf.c | 1 - tools/test/perform/gen_report.pl | 1 - tools/test/perform/iopipe.c | 1 - tools/test/perform/overhead.c | 1 - tools/test/perform/perf_meta.c | 1 - tools/test/perform/zip_perf.c | 1 - utils/test/Makefile.am | 1 - utils/test/swmr_check_compat_vfd.c | 1 - 1203 files changed, 58 insertions(+), 1303 deletions(-) diff --git a/.autom4te.cfg b/.autom4te.cfg index 95af4d27b3b..3872ddb4400 100644 --- a/.autom4te.cfg +++ b/.autom4te.cfg @@ -1,5 +1,4 @@ # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/.h5chkright.ini b/.h5chkright.ini index c9ca1c180f1..02d3cadd48f 100644 --- a/.h5chkright.ini +++ b/.h5chkright.ini @@ -1,5 +1,4 @@ # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/Makefile.am b/Makefile.am index 2a544f48662..72d49597b4e 100644 --- a/Makefile.am +++ b/Makefile.am @@ -1,6 +1,5 @@ # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/Makefile.dist b/Makefile.dist index 9647f1c9158..5ae99040d34 100644 --- a/Makefile.dist +++ b/Makefile.dist @@ -1,7 +1,6 @@ # Top-level distributed Makefile -*- makefile -*- # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. 
The full HDF5 copyright notice, including diff --git a/acsite.m4 b/acsite.m4 index 5cddc1efe33..1782033dc84 100644 --- a/acsite.m4 +++ b/acsite.m4 @@ -2,7 +2,6 @@ dnl ------------------------------------------------------------------------- dnl ------------------------------------------------------------------------- dnl dnl Copyright by The HDF Group. -dnl Copyright by the Board of Trustees of the University of Illinois. dnl All rights reserved. dnl dnl This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/bin/Makefile.am b/bin/Makefile.am index 25df36fd501..96a4969ab01 100644 --- a/bin/Makefile.am +++ b/bin/Makefile.am @@ -1,6 +1,5 @@ # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/bin/bbrelease b/bin/bbrelease index 9699cf08e43..c59e2e5fcf4 100755 --- a/bin/bbrelease +++ b/bin/bbrelease @@ -1,7 +1,6 @@ #!/bin/sh # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/bin/buildhdf5 b/bin/buildhdf5 index 786c35dccb8..4c848841b00 100755 --- a/bin/buildhdf5 +++ b/bin/buildhdf5 @@ -1,7 +1,6 @@ #!/bin/sh # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/bin/checkapi b/bin/checkapi index b1d17b68de4..619f93394e4 100755 --- a/bin/checkapi +++ b/bin/checkapi @@ -1,7 +1,6 @@ #!/usr/bin/env perl # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. 
The full HDF5 copyright notice, including diff --git a/bin/checkposix b/bin/checkposix index bca259dee77..8aed831e0bc 100755 --- a/bin/checkposix +++ b/bin/checkposix @@ -4,7 +4,6 @@ use warnings; # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/bin/chkconfigure b/bin/chkconfigure index b2b53c6b0b0..db4010cc3fa 100755 --- a/bin/chkconfigure +++ b/bin/chkconfigure @@ -1,6 +1,6 @@ #!/bin/sh ## -## Copyright by the Board of Trustees of the University of Illinois. +## Copyright by The HDF Group. ## All rights reserved. ## ## This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/bin/chkcopyright b/bin/chkcopyright index eec371da345..83b36489cd3 100755 --- a/bin/chkcopyright +++ b/bin/chkcopyright @@ -1,7 +1,6 @@ #! /bin/sh # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including @@ -39,7 +38,6 @@ NFIXEDFILES=0 # Number of files fixed. NFIXFAILEDFILES=0 # Number of files fix failed. NUMBEGINLINES=60 # Copyright notice should be located within the # this number of lines at the beginning of the file. -UICOPYRIGHTSTR="Copyright by the Board of Trustees of the University of Illinois" THGCOPYRIGHTSTR="Copyright by The HDF Group." PASSEDLOG=/tmp/h5chkright_passed.$$ @@ -111,113 +109,92 @@ BUILDCOPYRIGHT() # C and C++ source Copyright notice cat > ${C_COPYRIGHT} << \EOF * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * * terms governing use, modification, and redistribution, is contained in * - * the files COPYING and Copyright.html. 
COPYING can be found at the root * - * of the source code distribution tree; Copyright.html can be found at the * - * root level of an installed copy of the electronic HDF5 document set and * - * is linked from the top-level documents page. It can also be found at * - * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * - * access to either file, you may request a copy from help@hdfgroup.org. * + * the COPYING file, which can be found at the root of the source code + * distribution tree, or in https://www.hdfgroup.org/licenses. + * If you do not have access to either file, you may request a copy from + * help@hdfgroup.org. EOF # Fortran9X source Copyright notice cat > ${FTN_COPYRIGHT} << \EOF ! Copyright by The HDF Group. * -! Copyright by the Board of Trustees of the University of Illinois. * ! All rights reserved. * ! * ! This file is part of HDF5. The full HDF5 copyright notice, including * ! terms governing use, modification, and redistribution, is contained in * -! the files COPYING and Copyright.html. COPYING can be found at the root * -! of the source code distribution tree; Copyright.html can be found at the * -! root level of an installed copy of the electronic HDF5 document set and * -! is linked from the top-level documents page. It can also be found at * -! http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * -! access to either file, you may request a copy from help@hdfgroup.org. * +! the COPYING file, which can be found at the root of the source code +! distribution tree, or in https://www.hdfgroup.org/licenses. +! If you do not have access to either file, you may request a copy from +! help@hdfgroup.org. EOF # HTML file Copyright notice cat > ${HTM_COPYRIGHT} << \EOF * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. 
The full HDF5 copyright notice, including * * terms governing use, modification, and redistribution, is contained in * - * the files COPYING and Copyright.html. COPYING can be found at the root * - * of the source code distribution tree; Copyright.html can be found at the * - * root level of an installed copy of the electronic HDF5 document set and * - * is linked from the top-level documents page. It can also be found at * - * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * - * access to either file, you may request a copy from help@hdfgroup.org. * + * the COPYING file, which can be found at the root of the source code + * distribution tree, or in https://www.hdfgroup.org/licenses. + * If you do not have access to either file, you may request a copy from + * help@hdfgroup.org. EOF # Shell style Copyright notice cat > ${SH_COPYRIGHT} << \EOF # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF5 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. EOF # Shell style Copyright notice (2nd type) cat > ${SH_COPYRIGHT2} << \EOF ## Copyright by The HDF Group. -## Copyright by the Board of Trustees of the University of Illinois. 
## All rights reserved. ## ## This file is part of HDF5. The full HDF5 copyright notice, including ## terms governing use, modification, and redistribution, is contained in -## the files COPYING and Copyright.html. COPYING can be found at the root -## of the source code distribution tree; Copyright.html can be found at the -## root level of an installed copy of the electronic HDF5 document set and -## is linked from the top-level documents page. It can also be found at -## http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have -## access to either file, you may request a copy from help@hdfgroup.org. +## the COPYING file, which can be found at the root of the source code +## distribution tree, or in https://www.hdfgroup.org/licenses. +## If you do not have access to either file, you may request a copy from +## help@hdfgroup.org. EOF # Windows Batch file Copyright notice cat > ${WINBAT_COPYRIGHT} << \EOF @REM Copyright by The HDF Group. -@REM Copyright by the Board of Trustees of the University of Illinois. @REM All rights reserved. @REM @REM This file is part of HDF5. The full HDF5 copyright notice, including @REM terms governing use, modification, and redistribution, is contained in -@REM the files COPYING and Copyright.html. COPYING can be found at the root -@REM of the source code distribution tree; Copyright.html can be found at the -@REM root level of an installed copy of the electronic HDF5 document set and -@REM is linked from the top-level documents page. It can also be found at -@REM http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have -@REM access to either file, you may request a copy from help@hdfgroup.org. +@REM the COPYING file, which can be found at the root of the source code +@REM distribution tree, or in https://www.hdfgroup.org/licenses. +@REM If you do not have access to either file, you may request a copy from +@REM help@hdfgroup.org. 
EOF # configure.ac file Copyright notice cat > ${CONFIGURE_AC_COPYRIGHT} << \EOF dnl Copyright by The HDF Group. -dnl Copyright by the Board of Trustees of the University of Illinois. dnl All rights reserved. dnl dnl This file is part of HDF5. The full HDF5 copyright notice, including dnl terms governing use, modification, and redistribution, is contained in -dnl the files COPYING and Copyright.html. COPYING can be found at the root -dnl of the source code distribution tree; Copyright.html can be found at the -dnl root level of an installed copy of the electronic HDF5 document set and -dnl is linked from the top-level documents page. It can also be found at -dnl http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have -dnl access to either file, you may request a copy from help@hdfgroup.org. +dnl the COPYING file, which can be found at the root of the source code +dnl distribution tree, or in https://www.hdfgroup.org/licenses. +dnl If you do not have access to either file, you may request a copy from +dnl help@hdfgroup.org. EOF } diff --git a/bin/debug-ohdr b/bin/debug-ohdr index 7becb42c49a..1106af3d6a2 100755 --- a/bin/debug-ohdr +++ b/bin/debug-ohdr @@ -1,7 +1,6 @@ #!/usr/bin/env perl # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/bin/dependencies b/bin/dependencies index 11712552722..b2f23958c34 100755 --- a/bin/dependencies +++ b/bin/dependencies @@ -1,7 +1,6 @@ #!/usr/bin/env perl # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/bin/deploy b/bin/deploy index cb9a467d2d8..818fa0722e6 100755 --- a/bin/deploy +++ b/bin/deploy @@ -1,7 +1,6 @@ #!/bin/sh # # Copyright by The HDF Group. 
-# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/bin/distdep b/bin/distdep index 693f3a207ea..fcda2170f2d 100755 --- a/bin/distdep +++ b/bin/distdep @@ -4,7 +4,6 @@ eval 'exec perl -p -x -S $0 ${1+"$@"}' if 0; # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/bin/errors b/bin/errors index 5d23cf107b3..9473636907b 100755 --- a/bin/errors +++ b/bin/errors @@ -7,7 +7,6 @@ use Text::Tabs; # CHANGED. THIS SCRIPT NO LONGER WORKS! --rpm # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/bin/gcov_script b/bin/gcov_script index 679d675d0fe..06b2ad1b442 100755 --- a/bin/gcov_script +++ b/bin/gcov_script @@ -1,7 +1,6 @@ #! /bin/sh # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/bin/h5cc.in b/bin/h5cc.in index 966bb4012dc..4eef3c95eee 100644 --- a/bin/h5cc.in +++ b/bin/h5cc.in @@ -1,7 +1,6 @@ #! /bin/sh ## # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/bin/h5redeploy.in b/bin/h5redeploy.in index 86183e8df4e..d73cca71eb7 100644 --- a/bin/h5redeploy.in +++ b/bin/h5redeploy.in @@ -1,7 +1,6 @@ #! /bin/sh # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. 
The full HDF5 copyright notice, including diff --git a/bin/h5vers b/bin/h5vers index 04d5d03acc8..a78c24d71a5 100755 --- a/bin/h5vers +++ b/bin/h5vers @@ -7,7 +7,6 @@ require 5.003; use strict; # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/bin/iostats b/bin/iostats index c42a1f9ff3c..d8a8933470a 100755 --- a/bin/iostats +++ b/bin/iostats @@ -1,7 +1,6 @@ #!/usr/bin/env perl # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/bin/make_err b/bin/make_err index f2b044a163c..31cb9644133 100755 --- a/bin/make_err +++ b/bin/make_err @@ -5,7 +5,6 @@ use warnings; # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including @@ -32,7 +31,6 @@ sub print_copyright ($) { print $fh "/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n"; print $fh " * Copyright by The HDF Group. *\n"; - print $fh " * Copyright by the Board of Trustees of the University of Illinois. *\n"; print $fh " * All rights reserved. *\n"; print $fh " * *\n"; print $fh " * This file is part of HDF5. The full HDF5 copyright notice, including *\n"; diff --git a/bin/make_overflow b/bin/make_overflow index 37d6dedfe73..33cbdfee3ec 100755 --- a/bin/make_overflow +++ b/bin/make_overflow @@ -10,7 +10,6 @@ my @ctypes = ( () ); # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. 
The full HDF5 copyright notice, including @@ -61,7 +60,6 @@ sub print_copyright ($) { print $fh "/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n"; print $fh " * Copyright by The HDF Group. *\n"; - print $fh " * Copyright by the Board of Trustees of the University of Illinois. *\n"; print $fh " * All rights reserved. *\n"; print $fh " * *\n"; print $fh " * This file is part of HDF5. The full HDF5 copyright notice, including *\n"; diff --git a/bin/make_vers b/bin/make_vers index 064563f5b9a..936d49559ca 100755 --- a/bin/make_vers +++ b/bin/make_vers @@ -19,7 +19,6 @@ $indent = 2; # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including @@ -46,7 +45,6 @@ sub print_copyright ($) { print $fh "/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n"; print $fh " * Copyright by The HDF Group. *\n"; - print $fh " * Copyright by the Board of Trustees of the University of Illinois. *\n"; print $fh " * All rights reserved. *\n"; print $fh " * *\n"; print $fh " * This file is part of HDF5. The full HDF5 copyright notice, including *\n"; diff --git a/bin/mkdirs b/bin/mkdirs index 3a36aa25b33..4e66eb5f0e2 100755 --- a/bin/mkdirs +++ b/bin/mkdirs @@ -1,7 +1,6 @@ #! /bin/sh # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/bin/newer b/bin/newer index 5f0fdca5c8c..c36df0353af 100755 --- a/bin/newer +++ b/bin/newer @@ -1,7 +1,6 @@ #!/bin/sh # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. 
The full HDF5 copyright notice, including diff --git a/bin/pkgscrpts/h5rmflags b/bin/pkgscrpts/h5rmflags index d0f87c0d344..39c4281c2a4 100755 --- a/bin/pkgscrpts/h5rmflags +++ b/bin/pkgscrpts/h5rmflags @@ -1,7 +1,6 @@ #! /bin/sh # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/bin/pkgscrpts/makeHDF5BinaryTarfiles.pl b/bin/pkgscrpts/makeHDF5BinaryTarfiles.pl index d1042dc8ea9..820403f4d8d 100755 --- a/bin/pkgscrpts/makeHDF5BinaryTarfiles.pl +++ b/bin/pkgscrpts/makeHDF5BinaryTarfiles.pl @@ -2,7 +2,6 @@ # makeTarFiles.pl # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/bin/pkgscrpts/makeInternalREADME.pl b/bin/pkgscrpts/makeInternalREADME.pl index 355036f34ed..cbc6b4fc7af 100755 --- a/bin/pkgscrpts/makeInternalREADME.pl +++ b/bin/pkgscrpts/makeInternalREADME.pl @@ -6,12 +6,10 @@ # # This file is part of HDF4. The full HDF4 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF4 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF4/doc/Copyright.html. If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. 
# use warnings; diff --git a/bin/pkgscrpts/makeOuterREADME.pl b/bin/pkgscrpts/makeOuterREADME.pl index 602ad28b6eb..12d4bc2ea92 100755 --- a/bin/pkgscrpts/makeOuterREADME.pl +++ b/bin/pkgscrpts/makeOuterREADME.pl @@ -6,12 +6,10 @@ # # This file is part of HDF4. The full HDF4 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF4 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF4/doc/Copyright.html. If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. # use warnings; diff --git a/bin/release b/bin/release index ff8a11e3637..359d7c0aff4 100755 --- a/bin/release +++ b/bin/release @@ -1,7 +1,6 @@ #!/bin/sh # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/bin/runbkgprog b/bin/runbkgprog index b0d4b7347c2..945f5f3aa34 100755 --- a/bin/runbkgprog +++ b/bin/runbkgprog @@ -5,7 +5,6 @@ $indent=4; # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/bin/switch_maint_mode b/bin/switch_maint_mode index af63d6a952a..9813eebb09d 100755 --- a/bin/switch_maint_mode +++ b/bin/switch_maint_mode @@ -1,7 +1,6 @@ #!/bin/sh # # Copyright by The HDF Group. 
-# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/bin/trace b/bin/trace index 60bce177745..d372a150bb4 100755 --- a/bin/trace +++ b/bin/trace @@ -1,7 +1,6 @@ #!/usr/bin/env perl ## # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/bin/warnhist b/bin/warnhist index 669cbccc47c..680a0e88305 100755 --- a/bin/warnhist +++ b/bin/warnhist @@ -8,12 +8,10 @@ use warnings; # # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in -# the files COPYING and Copyright.html. COPYING can be found at the root -# of the source code distribution tree; Copyright.html can be found at the -# root level of an installed copy of the electronic HDF5 document set and -# is linked from the top-level documents page. It can also be found at -# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have -# access to either file, you may request a copy from help@hdfgroup.org. +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. # # Quincey Koziol # 9 Aug 2013 diff --git a/bin/yodconfigure b/bin/yodconfigure index a91507dc866..76f45a8dfe8 100755 --- a/bin/yodconfigure +++ b/bin/yodconfigure @@ -1,7 +1,6 @@ #!/bin/sh # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. 
The full HDF5 copyright notice, including diff --git a/c++/Makefile.am b/c++/Makefile.am index 5d8258b2d3a..25713697297 100644 --- a/c++/Makefile.am +++ b/c++/Makefile.am @@ -1,6 +1,5 @@ # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/c++/examples/Makefile.am b/c++/examples/Makefile.am index a316e1be4e0..b97b5d14cd1 100644 --- a/c++/examples/Makefile.am +++ b/c++/examples/Makefile.am @@ -1,6 +1,5 @@ # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/c++/examples/chunks.cpp b/c++/examples/chunks.cpp index 3717fcba62c..cc5d7c9fa65 100644 --- a/c++/examples/chunks.cpp +++ b/c++/examples/chunks.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/examples/compound.cpp b/c++/examples/compound.cpp index 89758454d87..f9397a9151f 100644 --- a/c++/examples/compound.cpp +++ b/c++/examples/compound.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/examples/create.cpp b/c++/examples/create.cpp index e55230f205f..bd179f34318 100644 --- a/c++/examples/create.cpp +++ b/c++/examples/create.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. 
* - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/examples/extend_ds.cpp b/c++/examples/extend_ds.cpp index 24f5f3eac32..8449c757da7 100644 --- a/c++/examples/extend_ds.cpp +++ b/c++/examples/extend_ds.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/examples/h5group.cpp b/c++/examples/h5group.cpp index 6ab7c621a01..0779aa910c2 100644 --- a/c++/examples/h5group.cpp +++ b/c++/examples/h5group.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/examples/h5tutr_cmprss.cpp b/c++/examples/h5tutr_cmprss.cpp index 7c55709ac97..6d9c109f4ed 100644 --- a/c++/examples/h5tutr_cmprss.cpp +++ b/c++/examples/h5tutr_cmprss.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/examples/h5tutr_crtatt.cpp b/c++/examples/h5tutr_crtatt.cpp index 76a4730b5de..0b18de1f1e9 100644 --- a/c++/examples/h5tutr_crtatt.cpp +++ b/c++/examples/h5tutr_crtatt.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. 
* * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/examples/h5tutr_crtdat.cpp b/c++/examples/h5tutr_crtdat.cpp index 93a245c97b8..d23675f5f2c 100644 --- a/c++/examples/h5tutr_crtdat.cpp +++ b/c++/examples/h5tutr_crtdat.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/examples/h5tutr_crtgrp.cpp b/c++/examples/h5tutr_crtgrp.cpp index 45d192943f9..5b13dc3b005 100644 --- a/c++/examples/h5tutr_crtgrp.cpp +++ b/c++/examples/h5tutr_crtgrp.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/examples/h5tutr_crtgrpar.cpp b/c++/examples/h5tutr_crtgrpar.cpp index 7c7bab5cb5b..a1a74277794 100644 --- a/c++/examples/h5tutr_crtgrpar.cpp +++ b/c++/examples/h5tutr_crtgrpar.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/examples/h5tutr_crtgrpd.cpp b/c++/examples/h5tutr_crtgrpd.cpp index 14b45c0b5b9..19bf99d2d29 100644 --- a/c++/examples/h5tutr_crtgrpd.cpp +++ b/c++/examples/h5tutr_crtgrpd.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. 
* * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/examples/h5tutr_extend.cpp b/c++/examples/h5tutr_extend.cpp index bd7e9a501f5..b725f008375 100644 --- a/c++/examples/h5tutr_extend.cpp +++ b/c++/examples/h5tutr_extend.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/examples/h5tutr_rdwt.cpp b/c++/examples/h5tutr_rdwt.cpp index 3e1ee8233d5..3f441099631 100644 --- a/c++/examples/h5tutr_rdwt.cpp +++ b/c++/examples/h5tutr_rdwt.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/examples/h5tutr_subset.cpp b/c++/examples/h5tutr_subset.cpp index 5c62c89cb5e..1f278f04f87 100644 --- a/c++/examples/h5tutr_subset.cpp +++ b/c++/examples/h5tutr_subset.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/examples/readdata.cpp b/c++/examples/readdata.cpp index e247a8eaab8..1deed36a8c0 100644 --- a/c++/examples/readdata.cpp +++ b/c++/examples/readdata.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. 
The full HDF5 copyright notice, including * diff --git a/c++/examples/testh5c++.sh.in b/c++/examples/testh5c++.sh.in index 44dc9e7d77f..ac95ba60860 100644 --- a/c++/examples/testh5c++.sh.in +++ b/c++/examples/testh5c++.sh.in @@ -1,7 +1,6 @@ #! /bin/sh # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/c++/examples/writedata.cpp b/c++/examples/writedata.cpp index f2b715ac20e..3f9d70b61d6 100644 --- a/c++/examples/writedata.cpp +++ b/c++/examples/writedata.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5AbstractDs.cpp b/c++/src/H5AbstractDs.cpp index 70d8531653a..3eeb1971644 100644 --- a/c++/src/H5AbstractDs.cpp +++ b/c++/src/H5AbstractDs.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5AbstractDs.h b/c++/src/H5AbstractDs.h index b84867b2cb9..49bcfebea09 100644 --- a/c++/src/H5AbstractDs.h +++ b/c++/src/H5AbstractDs.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. 
The full HDF5 copyright notice, including * diff --git a/c++/src/H5Alltypes.h b/c++/src/H5Alltypes.h index 7a14774f8ff..b18add8d1fb 100644 --- a/c++/src/H5Alltypes.h +++ b/c++/src/H5Alltypes.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5ArrayType.cpp b/c++/src/H5ArrayType.cpp index 6999f1bf70e..afa1c560057 100644 --- a/c++/src/H5ArrayType.cpp +++ b/c++/src/H5ArrayType.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5ArrayType.h b/c++/src/H5ArrayType.h index b93ae4aa33b..0b80042a37c 100644 --- a/c++/src/H5ArrayType.h +++ b/c++/src/H5ArrayType.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5AtomType.cpp b/c++/src/H5AtomType.cpp index 0b480464c96..8f50317cf33 100644 --- a/c++/src/H5AtomType.cpp +++ b/c++/src/H5AtomType.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. 
The full HDF5 copyright notice, including * diff --git a/c++/src/H5AtomType.h b/c++/src/H5AtomType.h index e00a3087eac..4418a38518b 100644 --- a/c++/src/H5AtomType.h +++ b/c++/src/H5AtomType.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5Attribute.cpp b/c++/src/H5Attribute.cpp index 9f51a420410..55d158d1692 100644 --- a/c++/src/H5Attribute.cpp +++ b/c++/src/H5Attribute.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5Attribute.h b/c++/src/H5Attribute.h index 71bfa141eb5..80e6db97a5d 100644 --- a/c++/src/H5Attribute.h +++ b/c++/src/H5Attribute.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5Classes.h b/c++/src/H5Classes.h index 7820ce2564e..b4a0670d468 100644 --- a/c++/src/H5Classes.h +++ b/c++/src/H5Classes.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. 
The full HDF5 copyright notice, including * diff --git a/c++/src/H5CommonFG.cpp b/c++/src/H5CommonFG.cpp index 8401b9c83bf..adbd61a0304 100644 --- a/c++/src/H5CommonFG.cpp +++ b/c++/src/H5CommonFG.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5CommonFG.h b/c++/src/H5CommonFG.h index d87f9ca1348..e6756177a72 100644 --- a/c++/src/H5CommonFG.h +++ b/c++/src/H5CommonFG.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5CompType.cpp b/c++/src/H5CompType.cpp index 3b38c6d81cd..1f7755e16ce 100644 --- a/c++/src/H5CompType.cpp +++ b/c++/src/H5CompType.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5CompType.h b/c++/src/H5CompType.h index 4424d894e8e..edc0d154861 100644 --- a/c++/src/H5CompType.h +++ b/c++/src/H5CompType.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. 
The full HDF5 copyright notice, including * diff --git a/c++/src/H5Cpp.h b/c++/src/H5Cpp.h index 202d5843f5a..34fdef2032d 100644 --- a/c++/src/H5Cpp.h +++ b/c++/src/H5Cpp.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5CppDoc.h b/c++/src/H5CppDoc.h index 4337a6fe911..37bd8a5cd07 100644 --- a/c++/src/H5CppDoc.h +++ b/c++/src/H5CppDoc.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5DaccProp.cpp b/c++/src/H5DaccProp.cpp index 1905f277c0f..70430794a80 100644 --- a/c++/src/H5DaccProp.cpp +++ b/c++/src/H5DaccProp.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5DaccProp.h b/c++/src/H5DaccProp.h index 398918df407..679fd704f0d 100644 --- a/c++/src/H5DaccProp.h +++ b/c++/src/H5DaccProp.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. 
The full HDF5 copyright notice, including * diff --git a/c++/src/H5DataSet.cpp b/c++/src/H5DataSet.cpp index 53e8166ef41..e3d196d8d57 100644 --- a/c++/src/H5DataSet.cpp +++ b/c++/src/H5DataSet.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5DataSet.h b/c++/src/H5DataSet.h index cc0d925d707..64ccabeb92a 100644 --- a/c++/src/H5DataSet.h +++ b/c++/src/H5DataSet.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5DataSpace.cpp b/c++/src/H5DataSpace.cpp index 5dfeb81246d..bd48383f336 100644 --- a/c++/src/H5DataSpace.cpp +++ b/c++/src/H5DataSpace.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5DataSpace.h b/c++/src/H5DataSpace.h index 5583baa51b9..ff0f7b7976f 100644 --- a/c++/src/H5DataSpace.h +++ b/c++/src/H5DataSpace.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. 
The full HDF5 copyright notice, including * diff --git a/c++/src/H5DataType.cpp b/c++/src/H5DataType.cpp index 7bb4bd612a7..45cf9d74f4a 100644 --- a/c++/src/H5DataType.cpp +++ b/c++/src/H5DataType.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5DataType.h b/c++/src/H5DataType.h index 7dd371b2b2f..339064b2619 100644 --- a/c++/src/H5DataType.h +++ b/c++/src/H5DataType.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5DcreatProp.cpp b/c++/src/H5DcreatProp.cpp index da19d8d4cc8..b6e68aab266 100644 --- a/c++/src/H5DcreatProp.cpp +++ b/c++/src/H5DcreatProp.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5DcreatProp.h b/c++/src/H5DcreatProp.h index b822c254caa..dfe963506e1 100644 --- a/c++/src/H5DcreatProp.h +++ b/c++/src/H5DcreatProp.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. 
The full HDF5 copyright notice, including * diff --git a/c++/src/H5DxferProp.cpp b/c++/src/H5DxferProp.cpp index 1728e8e3dda..0b5a5778b00 100644 --- a/c++/src/H5DxferProp.cpp +++ b/c++/src/H5DxferProp.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5DxferProp.h b/c++/src/H5DxferProp.h index 3925a57c113..dfdad5acf81 100644 --- a/c++/src/H5DxferProp.h +++ b/c++/src/H5DxferProp.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5EnumType.cpp b/c++/src/H5EnumType.cpp index 36eebf6e4fb..4ec4369855b 100644 --- a/c++/src/H5EnumType.cpp +++ b/c++/src/H5EnumType.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5EnumType.h b/c++/src/H5EnumType.h index 2501e4bae4b..e8d0ff92e36 100644 --- a/c++/src/H5EnumType.h +++ b/c++/src/H5EnumType.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. 
The full HDF5 copyright notice, including * diff --git a/c++/src/H5Exception.cpp b/c++/src/H5Exception.cpp index 67694390f11..81aca941cd2 100644 --- a/c++/src/H5Exception.cpp +++ b/c++/src/H5Exception.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5Exception.h b/c++/src/H5Exception.h index ee50aed5cf3..d55fba509f7 100644 --- a/c++/src/H5Exception.h +++ b/c++/src/H5Exception.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5FaccProp.cpp b/c++/src/H5FaccProp.cpp index f2c9295dde4..c1c0ef2c8bd 100644 --- a/c++/src/H5FaccProp.cpp +++ b/c++/src/H5FaccProp.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5FaccProp.h b/c++/src/H5FaccProp.h index 53c7799a1ca..b3fb371a6aa 100644 --- a/c++/src/H5FaccProp.h +++ b/c++/src/H5FaccProp.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. 
The full HDF5 copyright notice, including * diff --git a/c++/src/H5FcreatProp.cpp b/c++/src/H5FcreatProp.cpp index b51ba10fe0e..3b8fd949383 100644 --- a/c++/src/H5FcreatProp.cpp +++ b/c++/src/H5FcreatProp.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5FcreatProp.h b/c++/src/H5FcreatProp.h index 65daf8f2e72..42595e48c51 100644 --- a/c++/src/H5FcreatProp.h +++ b/c++/src/H5FcreatProp.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5File.cpp b/c++/src/H5File.cpp index 9735a3f3a11..f1cd137e6c9 100644 --- a/c++/src/H5File.cpp +++ b/c++/src/H5File.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5File.h b/c++/src/H5File.h index c6921f4704a..442eb536be7 100644 --- a/c++/src/H5File.h +++ b/c++/src/H5File.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. 
The full HDF5 copyright notice, including * diff --git a/c++/src/H5FloatType.cpp b/c++/src/H5FloatType.cpp index 33f83770844..9d43dbf17fc 100644 --- a/c++/src/H5FloatType.cpp +++ b/c++/src/H5FloatType.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5FloatType.h b/c++/src/H5FloatType.h index e8df757c3b5..25bbccbf8f5 100644 --- a/c++/src/H5FloatType.h +++ b/c++/src/H5FloatType.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5Group.cpp b/c++/src/H5Group.cpp index b017f4f6ee9..0fe8ff3cbcf 100644 --- a/c++/src/H5Group.cpp +++ b/c++/src/H5Group.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5Group.h b/c++/src/H5Group.h index b5f7e51110d..efbb07111e3 100644 --- a/c++/src/H5Group.h +++ b/c++/src/H5Group.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. 
The full HDF5 copyright notice, including * diff --git a/c++/src/H5IdComponent.cpp b/c++/src/H5IdComponent.cpp index 63af728b911..2a801249ca0 100644 --- a/c++/src/H5IdComponent.cpp +++ b/c++/src/H5IdComponent.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5IdComponent.h b/c++/src/H5IdComponent.h index 383d42d333f..d64bdb5ab7d 100644 --- a/c++/src/H5IdComponent.h +++ b/c++/src/H5IdComponent.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5Include.h b/c++/src/H5Include.h index f479eebd971..6062e9a6edd 100644 --- a/c++/src/H5Include.h +++ b/c++/src/H5Include.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5IntType.cpp b/c++/src/H5IntType.cpp index 27c449165bb..194ae21b0a4 100644 --- a/c++/src/H5IntType.cpp +++ b/c++/src/H5IntType.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. 
The full HDF5 copyright notice, including * diff --git a/c++/src/H5IntType.h b/c++/src/H5IntType.h index 6a469e4c53f..7f4f89b1880 100644 --- a/c++/src/H5IntType.h +++ b/c++/src/H5IntType.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5LaccProp.cpp b/c++/src/H5LaccProp.cpp index acdaa13d576..7c63bd0ff40 100644 --- a/c++/src/H5LaccProp.cpp +++ b/c++/src/H5LaccProp.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5LaccProp.h b/c++/src/H5LaccProp.h index 65a96735f1d..04fb681c23e 100644 --- a/c++/src/H5LaccProp.h +++ b/c++/src/H5LaccProp.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5LcreatProp.cpp b/c++/src/H5LcreatProp.cpp index 3851d56dc1d..2f3437531b8 100644 --- a/c++/src/H5LcreatProp.cpp +++ b/c++/src/H5LcreatProp.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. 
The full HDF5 copyright notice, including * diff --git a/c++/src/H5LcreatProp.h b/c++/src/H5LcreatProp.h index c0c66a43fcc..ac6ab7f8cbe 100644 --- a/c++/src/H5LcreatProp.h +++ b/c++/src/H5LcreatProp.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5Library.cpp b/c++/src/H5Library.cpp index 19c7ee78a73..5a94a9e19ed 100644 --- a/c++/src/H5Library.cpp +++ b/c++/src/H5Library.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5Library.h b/c++/src/H5Library.h index 3df8d56d9b3..37706391558 100644 --- a/c++/src/H5Library.h +++ b/c++/src/H5Library.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5Location.cpp b/c++/src/H5Location.cpp index 84f347827ca..b0210f82909 100644 --- a/c++/src/H5Location.cpp +++ b/c++/src/H5Location.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. 
The full HDF5 copyright notice, including * diff --git a/c++/src/H5Location.h b/c++/src/H5Location.h index 11c62ea45fd..01ab8a77a98 100644 --- a/c++/src/H5Location.h +++ b/c++/src/H5Location.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5Object.cpp b/c++/src/H5Object.cpp index 6873d6733bf..a308e6c46d1 100644 --- a/c++/src/H5Object.cpp +++ b/c++/src/H5Object.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5Object.h b/c++/src/H5Object.h index 3f74a88fb5d..c8e84677e7a 100644 --- a/c++/src/H5Object.h +++ b/c++/src/H5Object.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5OcreatProp.cpp b/c++/src/H5OcreatProp.cpp index dffdeb1022b..1391fc732f6 100644 --- a/c++/src/H5OcreatProp.cpp +++ b/c++/src/H5OcreatProp.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. 
The full HDF5 copyright notice, including * diff --git a/c++/src/H5OcreatProp.h b/c++/src/H5OcreatProp.h index 0aa825b943a..ab97ab06d1f 100644 --- a/c++/src/H5OcreatProp.h +++ b/c++/src/H5OcreatProp.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5PredType.cpp b/c++/src/H5PredType.cpp index 9458fb7117b..5f5934e523f 100644 --- a/c++/src/H5PredType.cpp +++ b/c++/src/H5PredType.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5PredType.h b/c++/src/H5PredType.h index 6ae225d86d5..161efc74da4 100644 --- a/c++/src/H5PredType.h +++ b/c++/src/H5PredType.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5PropList.cpp b/c++/src/H5PropList.cpp index 0ff1afb017b..4b2eaebd661 100644 --- a/c++/src/H5PropList.cpp +++ b/c++/src/H5PropList.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. 
The full HDF5 copyright notice, including * diff --git a/c++/src/H5PropList.h b/c++/src/H5PropList.h index 2df326b5e33..27fe8012d29 100644 --- a/c++/src/H5PropList.h +++ b/c++/src/H5PropList.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5StrType.cpp b/c++/src/H5StrType.cpp index 57bb2c96fc4..42011eef408 100644 --- a/c++/src/H5StrType.cpp +++ b/c++/src/H5StrType.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5StrType.h b/c++/src/H5StrType.h index 3e4013a6e42..f8ef3c96aa6 100644 --- a/c++/src/H5StrType.h +++ b/c++/src/H5StrType.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/H5VarLenType.cpp b/c++/src/H5VarLenType.cpp index 0270febfe59..6043e14a8d9 100644 --- a/c++/src/H5VarLenType.cpp +++ b/c++/src/H5VarLenType.cpp @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. 
The full HDF5 copyright notice, including * diff --git a/c++/src/H5VarLenType.h b/c++/src/H5VarLenType.h index 583b9cd57a5..26588c26d09 100644 --- a/c++/src/H5VarLenType.h +++ b/c++/src/H5VarLenType.h @@ -1,7 +1,6 @@ // C++ informative line for the emacs editor: -*- C++ -*- /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/c++/src/Makefile.am b/c++/src/Makefile.am index 74ef367285a..41815cd2b26 100644 --- a/c++/src/Makefile.am +++ b/c++/src/Makefile.am @@ -1,6 +1,5 @@ # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/c++/src/footer.html b/c++/src/footer.html index 5ad9b7cc422..7592925c9a1 100644 --- a/c++/src/footer.html +++ b/c++/src/footer.html @@ -13,8 +13,6 @@ Copyright by The HDF Group -
    - and the Board of Trustees of the University of Illinois diff --git a/c++/src/h5c++.in b/c++/src/h5c++.in index 573d20d55ce..078fa734fcf 100644 --- a/c++/src/h5c++.in +++ b/c++/src/h5c++.in @@ -1,7 +1,6 @@ #! /bin/sh # # Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. # All rights reserved. # # This file is part of HDF5. The full HDF5 copyright notice, including diff --git a/c++/src/header.html b/c++/src/header.html index 3c050c30688..8bacd7cffa3 100644 --- a/c++/src/header.html +++ b/c++/src/header.html @@ -7,7 +7,6 @@ int -chunk_cb(const hsize_t *offset, uint32_t filter_mask, haddr_t addr, uint32_t nbytes, void *op_data) +chunk_cb(const hsize_t *offset, unsigned filter_mask, haddr_t addr, hsize_t size, void *op_data) { // only print the allocated chunk size only - printf("%d\n", nbytes); + printf("%ld\n", size); return EXIT_SUCCESS; } //! @@ -67,7 +67,7 @@ H5Ovisit_cb(hid_t obj, const char *name, const H5O_info2_t *info, void *op_data) retval = -1; goto fail_fig; } - +fail_fig: fail_shape: H5Sclose(dspace); fail_dspace: From be54deeacce621ab974a98c8277496c54393c35c Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Fri, 17 Mar 2023 21:13:16 -0500 Subject: [PATCH 031/108] Change libaec URL to actively maintained repo GH#2552 (#2561) * Change libaec URL to actively maintained repo GH#2552 * Remove duplicated https --- config/cmake/README.md.cmake.in | 4 ++-- config/cmake/cacheinit.cmake | 2 +- release_docs/INSTALL_CMake.txt | 6 +++--- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/config/cmake/README.md.cmake.in b/config/cmake/README.md.cmake.in index b60e729e072..d7e4bc443a9 100644 --- a/config/cmake/README.md.cmake.in +++ b/config/cmake/README.md.cmake.in @@ -25,8 +25,8 @@ The official ZLIB and SZIP/Libaec pages are at: ZLIB: https://git.savannah.gnu.org/cgit/gzip.git/ https://git.savannah.gnu.org/cgit/gzip.git/tree/COPYING - SZIP/Libaec: 
https://gitlab.dkrz.de/k202009/libaec - https://gitlab.dkrz.de/k202009/libaec/-/blob/master/Copyright.txt + SZIP/Libaec: https://github.com/MathisRosenhauer/libaec + https://github.com/MathisRosenhauer/libaec/blob/master/LICENSE.txt Installation diff --git a/config/cmake/cacheinit.cmake b/config/cmake/cacheinit.cmake index 41293ef723f..7c5cc1e2bb5 100644 --- a/config/cmake/cacheinit.cmake +++ b/config/cmake/cacheinit.cmake @@ -59,7 +59,7 @@ set (LIBAEC_PACKAGE_NAME "libaec" CACHE STRING "Name of AEC SZIP package" FORCE) set (SZIP_TGZ_NAME "SZip.tar.gz" CACHE STRING "Use SZip from compressed file" FORCE) set (SZAEC_TGZ_NAME "LIBAEC.tar.gz" CACHE STRING "Use SZip AEC from compressed file" FORCE) set (USE_LIBAEC ON CACHE BOOL "Use libaec szip replacement" FORCE) -set (LIBAEC_TGZ_ORIGPATH "https://gitlab.dkrz.de/k202009/libaec/-/archive/v1.0.6" CACHE STRING "Use LIBAEC from original location" FORCE) +set (LIBAEC_TGZ_ORIGPATH "https://github.com/MathisRosenhauer/libaec/releases/download/v1.0.6/libaec-1.0.6.tar.gz" CACHE STRING "Use LIBAEC from original location" FORCE) set (LIBAEC_TGZ_ORIGNAME "libaec-v1.0.6.tar.gz" CACHE STRING "Use LIBAEC from original compressed file" FORCE) set (LIBAEC_USE_LOCALCONTENT OFF CACHE BOOL "Use local file for LIBAEC FetchContent" FORCE) diff --git a/release_docs/INSTALL_CMake.txt b/release_docs/INSTALL_CMake.txt index 31b7e46945b..9113af7213d 100644 --- a/release_docs/INSTALL_CMake.txt +++ b/release_docs/INSTALL_CMake.txt @@ -531,7 +531,7 @@ These five steps are described in detail below. 
set (SZIP_TGZ_NAME "SZip.tar.gz" CACHE STRING "Use SZip from compressed file" FORCE) set (SZAEC_TGZ_NAME "LIBAEC.tar.gz" CACHE STRING "Use SZip AEC from compressed file" FORCE) set (USE_LIBAEC ON CACHE BOOL "Use libaec szip replacement" FORCE) - set (LIBAEC_TGZ_ORIGPATH "https://gitlab.dkrz.de/k202009/libaec/-/archive/v1.0.6" CACHE STRING "Use LIBAEC from original location" FORCE) + set (LIBAEC_TGZ_ORIGPATH "https://github.com/MathisRosenhauer/libaec/releases/download/v1.0.6/libaec-1.0.6.tar.gz" CACHE STRING "Use LIBAEC from original location" FORCE) set (LIBAEC_TGZ_ORIGNAME "libaec-v1.0.6.tar.gz" CACHE STRING "Use LIBAEC from original compressed file" FORCE) set (LIBAEC_USE_LOCALCONTENT OFF CACHE BOOL "Use local file for LIBAEC FetchContent" FORCE) ####################### @@ -568,7 +568,7 @@ These five steps are described in detail below. ####### # fpzip ####### - set (FPZIP_GIT_URL "https://https://github.com/LLNL/fpzip" CACHE STRING "Use FPZIP from github repository" FORCE) + set (FPZIP_GIT_URL "https://github.com/LLNL/fpzip" CACHE STRING "Use FPZIP from github repository" FORCE) set (FPZIP_GIT_BRANCH "master" CACHE STRING "" FORCE) set (FPZIP_TGZ_NAME "fpzip.tar.gz" CACHE STRING "Use FPZIP from compressed file" FORCE) set (FPZIP_PACKAGE_NAME "fpzip" CACHE STRING "Name of FPZIP package" FORCE) @@ -869,7 +869,7 @@ else () H5_DEFAULT_PLUGINDIR "/usr/local/hdf5/lib/plugin" endif () if (BUILD_SZIP_WITH_FETCHCONTENT) - LIBAEC_TGZ_ORIGPATH "Use LIBAEC from original location" "https://gitlab.dkrz.de/k202009/libaec/-/archive/v1.0.6" + LIBAEC_TGZ_ORIGPATH "Use LIBAEC from original location" "https://github.com/MathisRosenhauer/libaec/releases/download/v1.0.6/libaec-1.0.6.tar.gz" LIBAEC_TGZ_ORIGNAME "Use LIBAEC from original compressed file" "libaec-v1.0.6.tar.gz" LIBAEC_USE_LOCALCONTENT "Use local file for LIBAEC FetchContent" OFF if (BUILD_ZLIB_WITH_FETCHCONTENT) From 3ce739cb63344b808f75a200e5e3cbf076504632 Mon Sep 17 00:00:00 2001 From: Dana Robinson 
<43805+derobins@users.noreply.github.com> Date: Mon, 20 Mar 2023 08:57:39 -0700 Subject: [PATCH 032/108] Bring release_docs scheme from develop (#2612) --- ..._0-1_8_0_rc3.txt => HISTORY-1_0-1_8_0.txt} | 0 release_docs/HISTORY-1_10.txt | 6562 ------- release_docs/HISTORY-1_8.txt | 14439 ---------------- release_docs/NEWSLETTER.txt | 25 + release_docs/README.md | 102 + 5 files changed, 127 insertions(+), 21001 deletions(-) rename release_docs/{HISTORY-1_0-1_8_0_rc3.txt => HISTORY-1_0-1_8_0.txt} (100%) delete mode 100644 release_docs/HISTORY-1_10.txt delete mode 100644 release_docs/HISTORY-1_8.txt create mode 100644 release_docs/NEWSLETTER.txt create mode 100644 release_docs/README.md diff --git a/release_docs/HISTORY-1_0-1_8_0_rc3.txt b/release_docs/HISTORY-1_0-1_8_0.txt similarity index 100% rename from release_docs/HISTORY-1_0-1_8_0_rc3.txt rename to release_docs/HISTORY-1_0-1_8_0.txt diff --git a/release_docs/HISTORY-1_10.txt b/release_docs/HISTORY-1_10.txt deleted file mode 100644 index 1fc3c60b1b1..00000000000 --- a/release_docs/HISTORY-1_10.txt +++ /dev/null @@ -1,6562 +0,0 @@ -HDF5 History -============ - -This file contains development history of the HDF5 1.10 branch - -09. Release Information for hdf5-1.10.7 -08. Release Information for hdf5-1.10.6 -07. Release Information for hdf5-1.10.5 -06. Release Information for hdf5-1.10.4 -05. Release Information for hdf5-1.10.3 -04. Release Information for hdf5-1.10.2 -03. Release Information for hdf5-1.10.1 -02. Release Information for hdf5-1.10.0-patch1 -01. Release Information for hdf5-1.10.0 - -[Search on the string '%%%%' for section breaks of each release.] - -%%%%1.10.7%%%% - -HDF5 version 1.10.7 released on 2020-09-11 -================================================================================ - - -INTRODUCTION - -This document describes the differences between this release and the previous -HDF5 release. It contains information on the platforms tested and known -problems in this release. 
For more details check the HISTORY*.txt files in the -HDF5 source. - -Note that documentation in the links below will be updated at the time of each -final release. - -Links to HDF5 documentation can be found on The HDF5 web page: - - https://portal.hdfgroup.org/display/HDF5/HDF5 - -The official HDF5 releases can be obtained from: - - https://www.hdfgroup.org/downloads/hdf5/ - - HDF5 binaries provided are fully tested with ZLIB and the free - Open Source SZIP successor Libaec (with BSD license). - The official ZLIB and SZIP/Libaec pages are at: - - ZLIB: http://www.zlib.net/ - http://www.zlib.net/zlib_license.html - SZIP/Libaec: https://gitlab.dkrz.de/k202009/libaec - https://gitlab.dkrz.de/k202009/libaec/-/blob/master/Copyright.txt - -Changes from Release to Release and New Features in the HDF5-1.10.x release series -can be found at: - - https://portal.hdfgroup.org/display/HDF5/HDF5+Application+Developer%27s+Guide - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS - -- New Features -- Support for new platforms and languages -- Bug Fixes since HDF5-1.10.6 -- Supported Platforms -- Tested Configuration Features Summary -- More Tested Platforms -- Known Problems -- CMake vs. Autotools installations - - -New Features -============ - - Configuration: - ------------- - - Disable memory sanity checks in the Autotools in release branches - - The library can be configured to use internal memory sanity checking, - which replaces C API calls like malloc(3) and free(3) with our own calls - which add things like heap canaries. These canaries can cause problems - when external filter plugins reallocate canary-marked buffers. - - For this reason, the default will be to not use the memory allocation - sanity check feature in release branches (e.g., hdf5_1_10_7). - Debug builds in development branches (e.g., develop, hdf5_1_10) will - still use them by default. - - This change only affects Autotools debug builds. 
Non-debug autotools - builds and all CMake builds do not enable this feature by default. - - (DER - 2020/08/19) - - - Add file locking configure and CMake options - - HDF5 1.10.0 introduced a file locking scheme, primarily to help - enforce SWMR setup. Formerly, the only user-level control of the scheme - was via the HDF5_USE_FILE_LOCKING environment variable. - - This change introduces configure-time options that control whether - or not file locking will be used and whether or not the library - ignores errors when locking has been disabled on the file system - (useful on some HPC Lustre installations). - - In both the Autotools and CMake, the settings have the effect of changing - the default property list settings (see the H5Pset/get_file_locking() - entry, below). - - The yes/no/best-effort file locking configure setting has also been - added to the libhdf5.settings file. - - Autotools: - - An --enable-file-locking=(yes|no|best-effort) option has been added. - - yes: Use file locking. - no: Do not use file locking. - best-effort: Use file locking and ignore "disabled" errors. - - CMake: - - Two self-explanatory options have been added: - - HDF5_USE_FILE_LOCKING - HDF5_IGNORE_DISABLED_FILE_LOCKS - - Setting both of these to ON is the equivalent to the Autotools' - best-effort setting. - - NOTE: - The precedence order of the various file locking control mechanisms is: - - 1) HDF5_USE_FILE_LOCKING environment variable (highest) - - 2) H5Pset_file_locking() - - 3) configure/CMake options (which set the property list defaults) - - 4) library defaults (currently best-effort) - - (DER - 2020/07/30, HDFFV-11092) - - - CMake option to link the generated Fortran MOD files into the include - directory. - - The Fortran generation of MOD files by a Fortran compile can produce - different binary files between SHARED and STATIC compiles with different - compilers and/or different platforms. 
Note that it has been found that - different versions of Fortran compilers will produce incompatible MOD - files. Currently, CMake will locate these MOD files in subfolders of - the include directory and add that path to the Fortran library target - in the CMake config file, which can be used by the CMake find library - process. For other build systems using the binary from a CMake install, - a new CMake configuration can be used to copy the pre-chosen version - of the Fortran MOD files into the install include directory. - - The default will depend on the configuration of - BUILD_STATIC_LIBS and BUILD_SHARED_LIBS: - YES YES Default to SHARED - YES NO Default to STATIC - NO YES Default to SHARED - NO NO Default to SHARED - The defaults can be overridden by setting the config option - HDF5_INSTALL_MOD_FORTRAN to one of NO, SHARED, or STATIC - - (ADB - 2020/07/09, HDFFV-11116) - - - CMake option to use AEC (open source SZip) library instead of SZip - - The open source AEC library is a replacement library for SZip. In - order to use it for hdf5, the libaec CMake source was changed to add - "-fPIC" and exclude test files. A new option USE_LIBAEC is required - to compensate for the different files produced by AEC build. - - Autotools does not build the compression libraries within hdf5 builds, - but will use an installed libaec when configured as before with the - option --with-libsz=. - - (ADB - 2020/04/22, OESS-65) - - - CMake ConfigureChecks.cmake file now uses CHECK_STRUCT_HAS_MEMBER - - Some handcrafted tests in HDFTests.c have been removed and the CMake - CHECK_STRUCT_HAS_MEMBER module has been used. - - (ADB - 2020/03/24, TRILAB-24) - - - Both build systems use same set of warnings flags - - GNU C, C++ and gfortran warnings flags were moved to files in a config - sub-folder named gnu-warnings. Flags that only are available for a specific - version of the compiler are in files named with that version. 
- Clang C warnings flags were moved to files in a config sub-folder - named clang-warnings. - Intel C, Fortran warnings flags were moved to files in a config sub-folder - named intel-warnings. - - There are flags in named "error-xxx" files with warnings that may - be promoted to errors. Some source files may still need fixes. - - There are also pairs of files named "developer-xxx" and "no-developer-xxx" - that are chosen by the CMake option:HDF5_ENABLE_DEV_WARNINGS or the - configure option:--enable-developer-warnings. - - In addition, CMake no longer applies these warnings for examples. - - (ADB - 2020/03/24, TRILAB-192) - - - Update CMake minimum version to 3.12 - - Updated CMake minimum version to 3.12 and added version checks - for Windows features. - - (ADB - 2020/02/05, TRILABS-142) - - - Fixed CMake include properties for Fortran libraries - - Corrected the library properties for Fortran to use the - correct path for the Fortran module files. - - (ADB - 2020/02/04, HDFFV-11012) - - - Added common warnings files for gnu and intel - - Added warnings files to use one common set of flags - during configure for both autotools and CMake build - systems. The initial implementation only affects a - general set of flags for gnu and intel compilers. - - (ADB - 2020/01/17) - - - Added new options to CMake for control of testing - - Added CMake options (default ON); - HDF5_TEST_SERIAL AND/OR HDF5_TEST_PARALLEL - combined with: - HDF5_TEST_TOOLS - HDF5_TEST_EXAMPLES - HDF5_TEST_SWMR - HDF5_TEST_FORTRAN - HDF5_TEST_CPP - HDF5_TEST_JAVA - - (ADB - 2020/01/15, HDFFV-11001) - - - Added Clang sanitizers to CMake for analyzer support if compiler is clang. 
- - Added CMake code and files to execute the Clang sanitizers if - HDF5_ENABLE_SANITIZERS is enabled and the USE_SANITIZER option - is set to one of the following: - Address - Memory - MemoryWithOrigins - Undefined - Thread - Leak - 'Address;Undefined' - - (ADB - 2019/12/12, TRILAB-135) - - - Library: - -------- - - Add metadata cache optimization to reduce skip list usage - - On file flush or close, the metadata cache attempts to write out - all dirty entries in increasing address order. To do this, it needs - an address sorted list of metadata entries. Further, since flushing - one metadata cache entry can dirty another, this list must support - efficient insertion and deletion. - - The metadata cache uses a skip list of all dirty entries for this - purpose. Before this release, this skip list was maintained at all - times. However, since profiling indicates that this imposes a - significant cost, we now construct and maintain the skip list only - when needed. Specifically, we enable the skip list and load it with - a list of all dirty entries in the metadata cache just before a flush, - and disable it after the flush. - - (JRM - 2020/08/17, HDFFV-11034) - - - Add BEST_EFFORT value to HDF5_USE_FILE_LOCKING environment variable - - This change adds a BEST_EFFORT to the TRUE/FALSE, 1/0 settings that - were previously accepted. This option turns on file locking but - ignores locking errors when the library detects that file locking - has been disabled on a file system (useful on some HPC Lustre - installations). - - The capitalization of BEST_EFFORT is mandatory. - - See the configure option discussion for HDFFV-11092 (above) for more - information on the file locking feature and how it's controlled. - - (DER - 2020/07/30, HDFFV-11092) - - - - Add H5Pset/get_file_locking() API calls - - This change adds new API calls which can be used to set or get the - file locking parameters. 
The single API call sets both the "use file - locking" flag and the "ignore disabled file locking" flag. - - When opening a file multiple times without closing, the file MUST be - opened with the same file locking settings. Opening a file with different - file locking settings will fail (this is similar to the behavior of - H5Pset_fclose_degree()). - - See the configure option discussion for HDFFV-11092 (above) for more - information on the file locking feature and how it's controlled. - - (DER - 2020/07/30, HDFFV-11092) - - - Add Mirror VFD - - Use TCP/IP sockets to perform write-only (W/O) file I/O on a remote - machine. Must be used in conjunction with the Splitter VFD. - - (JOS - 2020/03/13, TBD) - - - Add Splitter VFD - - Maintain separate R/W and W/O channels for "concurrent" file writes - to two files using a single HDF5 file handle. - - (JOS - 2020/03/13, TBD) - - - Fixed an assertion failure in the parallel library when collectively - filling chunks. As it is required that chunks be written in - monotonically non-decreasing order of offset in the file, this assertion - was being triggered when the list of chunk file space allocations being - passed to the collective chunk filling routine was not sorted according - to this particular requirement. - - The addition of a sort of the out of order chunks trades a bit of - performance for the elimination of this assertion and of any complaints - from MPI implementations about the file offsets used being out of order. - - (JTH - 2019/10/07) - - Fortran Library: - ---------------- - - Add wrappers for H5Pset/get_file_locking() API calls - - h5pget_file_locking_f() - h5pset_file_locking_f() - - See the configure option discussion for HDFFV-11092 (above) for more - information on the file locking feature and how it's controlled. 
- - (DER - 2020/07/30, HDFFV-11092) - - - Added new Fortran parameters: - - H5F_LIBVER_ERROR_F - H5F_LIBVER_NBOUNDS_F - H5F_LIBVER_V18_F - H5F_LIBVER_V110_F - - - Added new Fortran API: h5pget_libver_bounds_f - - (MSB - 2020/02/11, HDFFV-11018) - - C++ Library: - ------------ - - Add wrappers for H5Pset/get_file_locking() API calls - - FileAccPropList::setFileLocking() - FileAccPropList::getFileLocking() - - See the configure option discussion for HDFFV-11092 (above) for more - information on the file locking feature and how it's controlled. - - (DER - 2020/07/30, HDFFV-11092) - - Java Library: - ---------------- - - Add wrappers for H5Pset/get_file_locking() API calls - - H5Pset_file_locking() - H5Pget_use_file_locking() - H5Pget_ignore_disabled_file_locking() - - Unlike the C++ and Fortran wrappers, there are separate getters for the - two file locking settings, each of which returns a boolean value. - - See the configure option discussion for HDFFV-11092 (above) for more - information on the file locking feature and how it's controlled. - - (DER - 2020/07/30, HDFFV-11092) - - Tools: - ------ - - h5repack added options to control how external links are handled. - - Currently h5repack preserves external links and cannot copy and merge - data from the external files. Two options, merge and prune, were added to - control how to merge data from an external link into the resulting file. - --merge Follow external soft link recursively and merge data. - --prune Do not follow external soft links and remove link. - --merge --prune Follow external link, merge data and remove dangling link. - - (ADB - 2020/08/05, HDFFV-9984) - - High-Level APIs: - --------------- - - None - - C Packet Table API - ------------------ - - None - - Internal header file - -------------------- - - None - - Documentation - ------------- - - None - - -Support for new platforms, languages and compilers. 
-======================================= - - None - - -Bug Fixes since HDF5-1.10.6 release -================================== - - Library - ------- - - Fix bug and simplify collective metadata write operation when some ranks - have no entries to contribute. This fixes parallel regression test - failures with IBM SpectrumScale MPI on the Summit system at ORNL. - - (QAK - 2020/09/02) - - - Avoid setting up complex MPI types with 0-length vectors, which some - MPI implementations don't handle well. (In particular, IBM - SpectrumScale MPI on the Summit system at ORNL) - - (QAK - 2020/08/21) - - - Fixed use-of-uninitialized-value error - - Appropriate initialization of local structs was added to remove the - use-of-uninitialized-value errors reported by MemorySanitizer. - - (BMR - 2020/8/13, HDFFV-11101) - - - Creation of dataset with optional filter - - When the combination of type, space, etc doesn't work for filter - and the filter is optional, it was supposed to be skipped but it was - not skipped and the creation failed. - - A fix is applied to allow the creation of a dataset in such - situation, as specified in the user documentation. - - (BMR - 2020/8/13, HDFFV-10933) - - - Explicitly declared dlopen to use RTLD_LOCAL - - dlopen documentation states that if neither RTLD_GLOBAL nor - RTLD_LOCAL are specified, then the default behavior is unspecified. - The default on linux is usually RTLD_LOCAL while macos will default - to RTLD_GLOBAL. - - (ADB - 2020/08/12, HDFFV-11127) - - - Fixed issues CVE-2018-13870 and CVE-2018-13869 - - When a buffer overflow occurred because a name length was corrupted - and became very large, h5dump crashed on memory access violation. - - A check for reading past the end of the buffer was added to multiple - locations to prevent the crashes and h5dump now simply fails with an - error message when this error condition occurs. 
- - (BMR - 2020/7/31, HDFFV-11120 and HDFFV-11121) - - - H5Sset_extent_none() sets the dataspace class to H5S_NO_CLASS which - causes asserts/errors when passed to other dataspace API calls. - - H5S_NO_CLASS is an internal class value that should not have been - exposed via a public API call. - - In debug builds of the library, this can cause asserts to trip. In - non-debug builds, it will produce normal library errors. - - The new library behavior is for H5Sset_extent_none() to convert - the dataspace into one of type H5S_NULL, which is better handled - by the library and easier for developers to reason about. - - (DER - 2020/07/27, HDFFV-11027) - - - Fixed the segmentation fault when reading attributes with multiple threads - - It was reported that the reading of attributes with variable length string - datatype will crash with segmentation fault particularly when the number - of threads is high (>16 threads). The problem was due to the file pointer - that was set in the variable length string datatype for the attribute. - That file pointer was already closed when the attribute was accessed. - - The problem was fixed by setting the file pointer to the current opened - file pointer when the attribute was accessed. Similar patch up was done - before when reading dataset with variable length string datatype. - - (VC - 2020/07/13, HDFFV-11080) - - - Fixed issue CVE-2018-17438 - - A division by zero was discovered in H5D__select_io() of H5Dselect.c. - https://security-tracker.debian.org/tracker/CVE-2018-17438 - - A check was added to protect against division by zero. When such - situation occurs again, the normal HDF5 error handling will be invoked, - instead of segmentation fault. - - (BMR, DER - 2020/07/09, HDFFV-10587) - - - Fixed CVE-2018-17435 - - The tool h52gif produced a segfault when the size of an attribute message - was corrupted and caused a buffer overflow. 
- - The problem was fixed by verifying the attribute message's size against the - buffer size before accessing the buffer. h52gif was also fixed to display - the failure instead of silently exiting after the segfault was eliminated. - - (BMR - 2020/6/19, HDFFV-10591) - - - Don't allocate an empty (0-dimensioned) chunked dataset's chunk - index, until the dataset's dimensions are increased. - - (QAK - 2020/05/07) - - Configuration - ------------- - - Stopped addition of szip header and include directory path for - incompatible libsz - - szlib.h is the same for both 32-bit and 64-bit szip, and the header file - and its path were added to the HDF5 binary even though the configure - check of a function in libsz later failed and szip compression was not - enabled. The header file and include path are now added only when the - libsz function passes the configure check. - - (LRK - 2020/08/17, HDFFV-10830) - - - Added -fsanitize=address autotools configure option for Clang compiler - - Clang sanitizer options were also added for Clang compilers with CMake. - - (LRK, 2020/08/05, HDFFV-10836) - - - Updated testh5cc.sh.in for functions versioned in HDF5 1.10. - - testh5cc.sh previously tested that the correct version of a function - versioned in HDF5 1.6 or 1.8 was compiled when one of - H5_NO_DEPRECATED_SYMBOLS or H5_USE_16_API_DEFAULT were defined. This - test was extended for additional testing with H5_USE_18_API_DEFAULT. - - (LRK, 2020/06/22, HDFFV-11000) - - - Fixed CMake include properties for Fortran libraries - - Corrected the library properties for Fortran to use the - correct path for the Fortran module files. - - (ADB - 2020/02/04, HDFFV-11012) - - Performance - ------------- - - None - - Java Library: - ---------------- - - None - - Fortran - -------- - - Corrected INTERFACE INTENT(IN) to INTENT(OUT) for buf_size in h5fget_file_image_f. - - (MSB - 2020/2/18, HDFFV-11029) - - - Fixed configure issue when building HDF5 with NAG Fortran 7.0. 
- - HDF5 now accounts for the addition of half-precision floating-point - in NAG 7.0 with a KIND=16. - - (MSB - 2020/02/28, HDFFV-11033) - - Tools - ----- - - The tools library was updated by standardizing the error stack process. - - General sequence is: - h5tools_setprogname(PROGRAMNAME); - h5tools_setstatus(EXIT_SUCCESS); - h5tools_init(); - ... process the command-line (check for error-stack enable) ... - h5tools_error_report(); - ... (do work) ... - h5diff_exit(ret); - - (ADB - 2020/07/20, HDFFV-11066) - - - h5diff fixed a command line parsing error. - - h5diff would ignore the argument to -d (delta) if it is smaller than DBL_EPSILON. - The macro H5_DBL_ABS_EQUAL was removed and a direct value comparison was used. - - (ADB - 2020/07/20, HDFFV-10897) - - - h5diff added a command line option to ignore attributes. - - h5diff would ignore all objects with a supplied path if the exclude-path argument is used. - Adding the exclude-attribute argument will only eclude attributes, with the supplied path, - from comparison. - - (ADB - 2020/07/20, HDFFV-5935) - - - h5diff added another level to the verbose argument to print filenames. - - Added verbose level 3 that is level 2 plus the filenames. The levels are: - 0 : Identical to '-v' or '--verbose' - 1 : All level 0 information plus one-line attribute status summary - 2 : All level 1 information plus extended attribute status report - 3 : All level 2 information plus file names - - (ADB - 2020/07/20, HDFFV-10005) - - - h5repack was fixed to repack the reference attributes properly. - The code line that checks if the update of reference inside a compound - datatype is misplaced outside the code block loop that carries out the - check. In consequence, the next attribute that is not the reference - type was repacked again as the reference type and caused the failure of - repacking. The fix is to move the corresponding code line to the correct - code block. 
- - (KY -2020/02/10, HDFFV-11014) - - High-Level APIs: - ------ - - The H5DSis_scale function was updated to return "not a dimension scale" (0) - instead of failing (-1), when CLASS or DIMENSION_SCALE attributes are - not written according to Dimension Scales Specification. - - (EIP - 2020/08/12, HDFFV-10436) - - Fortran High-Level APIs: - ------ - - None - - Documentation - ------------- - - None - - F90 APIs - -------- - - None - - C++ APIs - -------- - - None - - Testing - ------- - - Stopped java/test/junit.sh.in installing libs for testing under ${prefix} - - Lib files needed are now copied to a subdirectory in the java/test - directory, and on Macs the loader path for libhdf5.xxxs.so is changed - in the temporary copy of libhdf5_java.dylib. - - (LRK, 2020/7/2, HDFFV-11063) - - -Supported Platforms -=================== - - Linux 3.10.0-1127.10.1.el7 gcc (GCC) 4.8.5 20150623 (Red Hat 4.8.5-39) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.8.5 20150623 (Red Hat 4.8.5-39) - (echidna) GNU Fortran (GCC) 4.8.5 20150623 (Red Hat 4.8.5-39) - - Linux 2.6.32-754.31.1.el6 IBM XL C/C++ V13.1 - #1 SMP ppc64 GNU/Linux IBM XL Fortran V15.1 - (ostrich) - - Linux 3.10.0-327.18.2.el7 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (jelly/kituo/moohan) Version 4.8.5 20150623 (Red Hat 4.8.5-4) - Version 4.9.3, Version 5.3.0, Version 6.3.0, - Version 7.2.0, Version 8.3.0, Version 9.1.0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 17.0.0.098 Build 20160721 - MPICH 3.3 compiled with GCC 7.2.0 - OpenMPI 4.0.0 compiled with GCC 7.2.0 - - SunOS 5.11 11.4.5.12.5.0 Sun C 5.15 SunOS_sparc 2017/05/30 - 32- and 64-bit Studio 12.6 Fortran 95 8.8 SunOS_sparc 2017/05/30 - (hedgehog) Sun C++ 5.15 SunOS_sparc 2017/05/30 - - Windows 7 x64 Visual Studio 2015 w/ Intel C, Fortran 2018 (cmake) - - Windows 10 x64 Visual Studio 2015 w/ Intel Fortran 18 (cmake) - Visual Studio 2017 w/ Intel Fortran 19 (cmake) - Visual Studio 2019 w/ Intel Fortran 19 
(cmake) - Visual Studio 2019 w/ MSMPI 10.1 (cmake) - - macOS Mojave 10.14.6 Apple LLVM version 10.0.1 (clang-1001.0.46.4) - 64-bit gfortran GNU Fortran (GCC) 6.3.0 - (swallow) Intel icc/icpc/ifort version 19.0.4.233 20190416 - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90/ F90 C++ zlib SZIP - parallel F2003 parallel -Solaris2.11 32-bit n y/y n y y y -Solaris2.11 64-bit n y/n n y y y -Windows 7 x64 y y/y y y y y -Windows 10 y y/y n y y y -Windows 10 x64 y y/y n y y y -MacOS Sierra 10.12.6 64-bit n y/y n y y y -MacOS High Sierra 10.13.6 64-bit n y/y n y y y -MacOS Mojave 10.14.6 64-bit n y/y n y y y -CentOS 7.2 Linux 3.10.0 x86_64 PGI n y/y n y y y -CentOS 7.2 Linux 3.10.0 x86_64 GNU y y/y y y y y -CentOS 7.2 Linux 3.10.0 x86_64 Intel n y/y n y y y -Linux 2.6.32-754.31.1.el6.ppc64 XL n y/y n y y y - - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -Solaris2.11 32-bit y y y y -Solaris2.11 64-bit y y y y -Windows 7 x64 y y y y -Windows 10 y y y y -Windows 10 x64 y y y y -MacOS Sierra 10.12.6 64-bit y n y y -MacOS High Sierra 10.13.6 64-bit y n y y -MacOS Mojave 10.14.6 64-bit y n y y -CentOS 7.2 Linux 3.10.0 x86_64 PGI y y y n -CentOS 7.2 Linux 3.10.0 x86_64 GNU y y y y -CentOS 7.2 Linux 3.10.0 x86_64 Intel y y y n -Linux 2.6.32-754.31.1.el6.ppc64 XL y y y n - -Compiler versions for each platform are listed in the preceding -"Supported Platforms" table. - - -More Tested Platforms -===================== -The following platforms are not supported but have been tested for this release. 
- - Linux 2.6.32-573.22.1.el6 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (platypus) Version 4.4.7 20120313 - Version 4.9.3, 5.3.0, 6.2.0 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 19.10-0 - MPICH 3.1.4 compiled with GCC 4.9.3 - - Linux 2.6.32-754.31.1.el6 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-18) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.4.7 20120313 (Red Hat 4.4.7-18) - (ostrich) GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-18) - - Linux 3.10.0-327.18.2.el7 GNU C (gcc) and C++ (g++) compilers - #1 SMP x86_64 GNU/Linux Version 4.8.5 20150623 (Red Hat 4.8.5-4) - (jelly) with NAG Fortran Compiler Release 6.1(Tozai) - GCC Version 7.1.0 - OpenMPI 2.1.6-GCC-7.2.0-2.29, - 3.1.3-GCC-7.2.0-2.29 - Intel(R) C (icc) and C++ (icpc) compilers - Version 17.0.0.098 Build 20160721 - with NAG Fortran Compiler Release 6.1(Tozai) - - Linux 3.10.0-327.10.1.el7 MPICH 3.1.4 compiled with GCC 4.9.3 - #1 SMP x86_64 GNU/Linux - (moohan) - - Linux-3.10.0-1127.0.0.1chaos openmpi-4.0.0 - #1 SMP x86_64 GNU/Linux clang/3.9.0, 8.0.1 - (quartz) gcc/7.3.0, 8.1.0 - intel/16.0.4 - - Linux-4.14.0-115.10.1.1 spectrum-mpi/rolling-release - #1 SMP ppc64le GNU/Linux clang/coral-2018.08.08 - (lassen) gcc/7.3.1 - xl/2019.02.07 - - Linux-4.12.14-150.52-default cray-mpich/7.7.10 - #1 SMP x86_64 GNU/Linux gcc/7.3.0, 8.2.0 - (cori) intel/19.0.3 - - Linux-4.4.180-94.107-default cray-mpich/7.7.6 - # 1SMP x86_64 GNU/Linux gcc/7.2.0, 8.2.0 - (mutrino) intel/17.0.4, 18.0.2, 19.0.4 - - Fedora 32 5.7.15-200.fc32.x86_64 Clang version 10.0.0 (Fedora 10.0.0-2.fc32) - #1 SMP x86_64 GNU/Linux GNU gcc (GCC) 10.2.1 20200723 (Red Hat 10.2.1-1) - GNU Fortran (GCC) 10.2.1 20200723 (Red Hat 10.2.1) - (cmake and autotools) - - Mac OS X El Capitan 10.11.6 Apple clang version 7.3.0 from Xcode 7.3 - 64-bit gfortran GNU Fortran (GCC) 5.2.0 - (osx1011test) Intel icc/icpc/ifort version 16.0.2 - - macOS Sierra 10.12.6 Apple LLVM version 9.0.0 (clang-900.39.2) - 64-bit gfortran 
GNU Fortran (GCC) 7.4.0 - (kite) Intel icc/icpc/ifort version 17.0.2 - - macOS High Sierra 10.13.6 Apple LLVM version 10.0.0 (clang-1000.10.44.4) - 64-bit gfortran GNU Fortran (GCC) 6.3.0 - (bear) Intel icc/icpc/ifort version 19.0.4.233 20190416 - - SunOS 5.11 11.3 Sun C 5.15 SunOS_sparc - 32- and 64-bit Sun Fortran 95 8.8 SunOS_sparc - (emu) Sun C++ 5.15 SunOS_sparc - - -Known Problems -============== - CMake files do not behave correctly with paths containing spaces. - Do not use spaces in paths because the required escaping for handling spaces - results in very complex and fragile build files. - ADB - 2019/05/07 - - At present, metadata cache images may not be generated by parallel - applications. Parallel applications can read files with metadata cache - images, but since this is a collective operation, a deadlock is possible - if one or more processes do not participate. - - Two tests fail attempting collective writes with OpenMPI 3.0.0/GCC-7.2.0-2.29: - testphdf5 (ecdsetw, selnone, cchunk1, cchunk3, cchunk4, and actualio) - t_shapesame (sscontig2) - - CPP ptable test fails on both VS2017 and VS2019 with Intel compiler, JIRA - issue: HDFFV-10628. This test will pass with VS2015 with Intel compiler. - - Known problems in previous releases can be found in the HISTORY*.txt files - in the HDF5 source. Please report any new problems found to - help@hdfgroup.org. - - -CMake vs. Autotools installations -================================= -While both build systems produce similar results, there are differences. -Each system produces the same set of folders on linux (only CMake works -on standard Windows); bin, include, lib and share. Autotools places the -COPYING and RELEASE.txt file in the root folder, CMake places them in -the share folder. - -The bin folder contains the tools and the build scripts. Additionally, CMake -creates dynamic versions of the tools with the suffix "-shared". 
Autotools -installs one set of tools depending on the "--enable-shared" configuration -option. - build scripts - ------------- - Autotools: h5c++, h5cc, h5fc - CMake: h5c++, h5cc, h5hlc++, h5hlcc - -The include folder holds the header files and the fortran mod files. CMake -places the fortran mod files into separate shared and static subfolders, -while Autotools places one set of mod files into the include folder. Because -CMake produces a tools library, the header files for tools will appear in -the include folder. - -The lib folder contains the library files, and CMake adds the pkgconfig -subfolder with the hdf5*.pc files used by the bin/build scripts created by -the CMake build. CMake separates the C interface code from the fortran code by -creating C-stub libraries for each Fortran library. In addition, only CMake -installs the tools library. The names of the szip libraries are different -between the build systems. - -The share folder will have the most differences because CMake builds include -a number of CMake specific files for support of CMake's find_package and support -for the HDF5 Examples CMake project. - - - -%%%%1.10.6%%%% - -HDF5 version 1.10.6 released on 2019-12-23 -================================================================================ - - -INTRODUCTION - -This document describes the differences between this release and the previous -HDF5 release. It contains information on the platforms tested and known -problems in this release. For more details check the HISTORY*.txt files in the -HDF5 source. - -Note that documentation in the links below will be updated at the time of each -final release. 
- -Links to HDF5 documentation can be found on The HDF5 web page: - - https://portal.hdfgroup.org/display/HDF5/HDF5 - -The official HDF5 releases can be obtained from: - - https://www.hdfgroup.org/downloads/hdf5/ - -Changes from Release to Release and New Features in the HDF5-1.10.x release series -can be found at: - - https://portal.hdfgroup.org/display/HDF5/HDF5+Application+Developer%27s+Guide - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS - -- New Features -- Support for new platforms and languages -- Bug Fixes since HDF5-1.10.5 -- Supported Platforms -- Tested Configuration Features Summary -- More Tested Platforms -- Known Problems -- CMake vs. Autotools installations - - -New Features -============ - - Configuration: - ------------- - - Update CMake for VS2019 support - - CMake added support for VS2019 in version 3.15. Changes to the CMake - generator setting required changes to scripts. Also updated version - references in CMake files as necessary. - - (ADB - 2019/11/18, HDFFV-10962) - - - Update CMake options to match new autotools options - - Add configure options (autotools - CMake): - enable-asserts HDF5_ENABLE_ASSERTS - enable-symbols HDF5_ENABLE_SYMBOLS - enable-profiling HDF5_ENABLE_PROFILING - enable-optimization HDF5_ENABLE_OPTIMIZATION - In addition NDEBUG is no longer forced defined and relies on the CMake - process. - - (ADB - 2019/10/07, HDFFV-100901, HDFFV-10637, TRILAB-97) - - - Update CMake tests to use FIXTURES - - CMake test fixtures allow setup/cleanup tests and other dependency - requirements as properties for tests. This is more flexible for - modern CMake code. - - (ADB - 2019/07/23, HDFFV-10529) - - - Windows PDB files are always installed - - There are build configuration or flag settings for Windows that may not - generate PDB files. If those files are not generated then the install - utility will fail because those PDB files are not found. 
An optional - variable, DISABLE_PDB_FILES, was added to not install PDB files. - - (ADB - 2019/07/17, HDFFV-10424) - - - Add mingw CMake support with a toolchain file - - There have been a number of mingw issues that have been linked under - HDFFV-10845. It has been decided to implement the CMake cross-compiling - technique of toolchain files. We will use a linux platform with the mingw - compiler stack for testing. Only the C language is fully supported, and - the error tests are skipped. The C++ language works for static but shared - builds have a shared library issue with the mingw Standard Exception Handling - library, which is not available on Windows. Fortran has a common cross-compile - problem with the fortran configure tests. - - (ADB - 2019/07/12, HDFFV-10845, HDFFV-10595) - - - Windows PDB files are installed incorrectly - - For static builds, the PDB files for windows should be installed next - to the static libraries in the lib folder. Also the debug versions of - libraries and PDB files are now correctly built using the default - CMAKE_DEBUG_POSTFIX setting. - - (ADB - 2019/07/09, HDFFV-10581) - - - Add option to build only shared libs - - A request was made to prevent building static libraries and only build - shared. A new option was added to CMake, ONLY_SHARED_LIBS, which will - skip building static libraries. Certain utility functions will build with - static libs but are not published. Tests are adjusted to use the correct - libraries depending on SHARED/STATIC settings. - - (ADB - 2019/06/12, HDFFV-10805) - - - Add options to enable or disable building tools and tests - - Configure options --enable-tests and --enable-tools were added for - autotools configure. These options are enabled by default, and can be - disabled with either --disable-tests (or tools) or --enable-tests=no - (or --enable-tools=no). Build time is reduced ~20% when tools are - disabled, 35% when tests are disabled, 45% when both are disabled. 
- Re-enabling them after the initial build requires running configure - again with the option(s) enabled. - - (LRK - 2019/06/12, HDFFV-9976) - - - Change tools tests to search the error stack - - There are some use cases which can cause the error stack of tools to be - different than the expected output. These tests now use grepTest.cmake; - this was changed to allow the error file to be searched for an expected string. - - (ADB - 2019/04/15, HDFFV-10741) - - - Library: - -------- - - Added S3 and HDFS Virtual File Drivers (VFDs) to HDF5 - - These new VFDs have been introduced in HDF5-1.10.6. Instructions to - enable them when configuring HDF5 on Linux and Mac may be found at - https://portal.hdfgroup.org/display/HDF5/Virtual+File+Drivers+-+S3+and+HDFS. - - Installing on Windows requires CMake 3.13 and the following additional setup. - Install openssl library (with dev files); - from "Shining Light Productions". msi package preferred. - - PATH should have been updated with the installation dir. - set ENV variable OPENSSL_ROOT_DIR to the installation dir. - set ENV variable OPENSSL_CONF to the cfg file, likely %OPENSSL_ROOT_DIR%\bin\openssl.cfg - Install libcurl library (with dev files); - download the latest released version using git: https://github.com/curl/curl.git - - Open a Visual Studio Command prompt - change to the libcurl root folder - run the "buildconf.bat" batch file - change to the winbuild directory - nmake /f Makefile.vc mode=dll MACHINE=x64 - copy libcurl-vc-x64-release-dll-ipv6-sspi-winssl dir to C:\curl (installation dir) - set ENV variable CURL_ROOT to C:\curl (installation dir) - update PATH ENV variable to %CURL_ROOT%\bin (installation bin dir). 
- the aws credentials file should be in %USERPROFILE%\.aws folder - set the ENV variable "HDF5_ROS3_TEST_BUCKET_URL=https://s3.us-east-2.amazonaws.com/hdf5ros3" - - (ADB - 2019/09/12, HDFFV-10854) - - C++ Library: - ------------ - - Added new wrappers for H5Pset/get_create_intermediate_group() - LinkCreatPropList::setCreateIntermediateGroup() - LinkCreatPropList::getCreateIntermediateGroup() - - (BMR - 2019/04/22, HDFFV-10622) - - - Java Library: - ---------------- - - Fixed a failure in JUnit-TestH5P on 32-bit architectures - - (JTH - 2019/04/30) - - -Support for new platforms, languages and compilers. -======================================= - - CMake added support for VS2019 in version 3.15. Updated scripts. - - - macOS 10.13.6 Darwin 17.7.0 with Apple clang LLVM version 10.0.0 - - - macOS 10.14.6 Darwin 18.7.0 with Apple clang LLVM version 10.0.1 - - -Bug Fixes since HDF5-1.10.5 release -================================== - - Library - ------- - - Improved performance when creating a large number of small datasets by - retrieving default property values from the API context instead of doing - skip list searches. More work is required to achieve parity with HDF5 1.8. - - (CJH - 2019/12/10, HDFFV-10658) - - - Fixed user-created data access properties not existing in the property list - returned by H5Dget_access_plist. Thanks to Steven Varga for submitting a - reproducer and a patch. - - (CJH - 2019/12/9, HDFFV-10934) - - - Inappropriate linking with deprecated MPI C++ libraries - - HDF5 does not define *_SKIP_MPICXX in the public headers, so applications - can inadvertently wind up linking to the deprecated MPI C++ wrappers. - - MPICH_SKIP_MPICXX and OMPI_SKIP_MPICXX have both been defined in H5public.h - so this should no longer be an issue. HDF5 makes no use of the deprecated - MPI C++ wrappers. 
- - (DER - 2019/09/17, HDFFV-10893) - - - fcntl(2)-based file locking incorrectly passed the lock argument struct - instead of a pointer to the struct, causing errors on systems where - flock(2) is not available. - - File locking is used when files are opened to enforce SWMR semantics. A - lock operation takes place on all file opens unless the - HDF5_USE_FILE_LOCKING environment variable is set to the string "FALSE". - flock(2) is preferentially used, with fcntl(2) locks as a backup if - flock(2) is unavailable on a system (if neither is available, the lock - operation fails). On these systems, the file lock will often fail, which - causes HDF5 to not open the file and report an error. - - This bug only affects POSIX systems. Win32 builds on Windows use a no-op - locking call which always succeeds. Systems which exhibit this bug will - have H5_HAVE_FCNTL defined but not H5_HAVE_FLOCK in the configure output. - - This bug affects HDF5 1.10.0 through 1.10.5. - - fcntl(2)-based file locking now correctly passes the struct pointer. - - (DER - 2019/08/27, HDFFV-10892) - - - Fixed a bug caused by a bad tag value when condensing object header - messages - - There was an assertion failure when moving messages from running a - user test program with library release HDF5 1.10.4. It was because - the tag value (object header's address) was not set up when entering - the library routine H5O__chunk_update_idx(), which eventually - verifies the metadata tag value when protecting the object header. - - The problem was fixed by replacing FUNC_ENTER_PACKAGE in H5O__chunk_update_idx() - with FUNC_ENTER_PACKAGE_TAG(oh->cache_info.addr) to set up the metadata tag. - - (VC - 2019/08/23, HDFFV-10873) - - - Fixed the test failure from test_metadata_read_retry_info() in - test/swmr.c - - The test failure is due to an incorrect number of bins returned for - retry info (info.nbins). The # of bins expected for 101 read attempts - is 3 instead of 2. 
The routine H5F_set_retries() in src/H5Fint.c - calculates the # of bins by first obtaining the log10 value for - (read attempts - 1). For PGI/19, the log10 value for 100 read attempts - is 1.9999999999999998 instead of 2.00000. When casting the log10 value - to unsigned later on, the decimal part is chopped off causing the test - failure. - - This was fixed by obtaining the rounded integer value (HDceil) for the - log10 value of read attempts first before casting the result to unsigned. - - (VC - 2019/8/14, HDFFV-10813) - - - Fixed an issue when creating a file with non-default file space info - together with library high bound setting to H5F_LIBVER_V18. - - When setting non-default file space info in fcpl via - H5Pset_file_space_strategy() and then creating a file with both high and - low library bounds set to H5F_LIBVER_V18 in fapl, the library succeeds in - creating the file. File creation should fail because the feature of - setting non-default file space info does not exist in library release 1.8 - or earlier. - - This was fixed by setting and checking the proper version in the file - space info message based on the library low and high bounds when creating - and opening the HDF5 file. - - (VC - 2019/6/25, HDFFV-10808) - - - Fixed an issue where copying a version 1.8 dataset between files using - H5Ocopy fails due to an incompatible fill version - - When using the HDF5 1.10.x H5Ocopy() API call to copy a version 1.8 - dataset to a file created with both high and low library bounds set to - H5F_LIBVER_V18, the H5Ocopy() call will fail with the error stack indicating - that the fill value version is out of bounds. - - This was fixed by changing the fill value message version to H5O_FILL_VERSION_3 - (from H5O_FILL_VERSION_2) for H5F_LIBVER_V18. 
- - (VC - 2019/6/14, HDFFV-10800) - - - Fixed a bug that would cause an error or cause fill values to be - incorrectly read from a chunked dataset using the "single chunk" index if - the data was held in cache and there was no data on disk. - - (NAF - 2019/03/06) - - - Fixed a bug that could cause an error or cause fill values to be - incorrectly read from a dataset that was written to using H5Dwrite_chunk - if the dataset was not closed after writing. - - (NAF - 2019/03/06, HDFFV-10716) - - - Fixed memory leak in scale offset filter - - In a special case where the MinBits is the same as the number of bits in - the datatype's precision, the filter's data buffer was not freed, causing - the memory usage to grow. In general the buffer was freed correctly. The - Minbits are the minimal number of bits to store the data values. Please - see the reference manual for H5Pset_scaleoffset for the details. - - (RL - 2019/3/4, HDFFV-10705) - - - Configuration - ------------- - - Correct option for default API version - - CMake options for default API version are not mutually exclusive. - Change the multiple BOOL options to a single STRING option with the - strings; v16, v18, v110. - - (ADB - 2019/08/12, HDFFV-10879) - - Tools - ----- - - h5repack was fixed to repack datasets with external storage - to other types of storage. - - New test added to repack files and verify the correct data using h5diff. 
- - (JS - 2019/09/25, HDFFV-10408) - (ADB - 2019/10/02, HDFFV-10918) - - -Supported Platforms -=================== - - Linux 2.6.32-696.20.1.el6.ppc64 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-23) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.4.7 20120313 (Red Hat 4.4.7-23) - (ostrich) GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-23) - IBM XL C/C++ V13.1 - IBM XL Fortran V15.1 - - Linux 3.10.0-327.10.1.el7 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (jelly/kituo/moohan) Version 4.8.5 20150623 (Red Hat 4.8.5-4) - Version 4.9.3, Version 5.2.0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 17.0.0.098 Build 20160721 - MPICH 3.1.4 compiled with GCC 4.9.3 - - SunOS 5.11 32- and 64-bit Sun C 5.12 SunOS_sparc - (emu) Sun Fortran 95 8.6 SunOS_sparc - Sun C++ 5.12 SunOS_sparc - - Windows 7 Visual Studio 2015 w/ Intel Fortran 18 (cmake) - - Windows 7 x64 Visual Studio 2015 w/ Intel C, Fortran 2018 (cmake) - Visual Studio 2015 w/ MSMPI 8 (cmake) - - Windows 10 Visual Studio 2015 w/ Intel Fortran 18 (cmake) - - Windows 10 x64 Visual Studio 2015 w/ Intel Fortran 18 (cmake) - Visual Studio 2017 w/ Intel Fortran 19 (cmake) - Visual Studio 2019 w/ Intel Fortran 19 (cmake) - - macOS 10.13.6, Darwin, Apple clang LLVM version 10.0.0 - 17.7.0, x86_64 gfortran GNU Fortran (GCC) 6.3.0 - (bear) Intel icc/icpc/ifort version 19.0.4 - - macOS 10.14.6, Darwin Apple clang LLVM version 10.0.1 - 18.7.0, x86_64 gfortran GNU Fortran (GCC) 6.3.0 - (bobcat) Intel icc/icpc/ifort version 19.0.4 - - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90/ F90 C++ zlib SZIP - parallel F2003 parallel -Solaris2.11 32-bit n y/y n y y y -Solaris2.11 64-bit n y/n n y y 
y -Windows 7 y y/y n y y y -Windows 7 x64 y y/y y y y y -Windows 7 Cygwin n y/n n y y y -Windows 7 x64 Cygwin n y/n n y y y -Windows 10 y y/y n y y y -Windows 10 x64 y y/y n y y y -Mac OS X Yosemite 10.10.5 64-bit n y/y n y y y -Mac OS X El Capitan 10.11.6 64-bit n y/y n y y y -MacOS High Sierra 10.13.6 64-bit n y/y n y y y -CentOS 7.2 Linux 3.10.0 x86_64 PGI n y/y n y y y -CentOS 7.2 Linux 3.10.0 x86_64 GNU y y/y y y y y -CentOS 7.2 Linux 3.10.0 x86_64 Intel n y/y n y y y -Linux 2.6.32-573.18.1.el6.ppc64 n y/y n y y y - - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -Solaris2.11 32-bit y y y y -Solaris2.11 64-bit y y y y -Windows 7 y y y y -Windows 7 x64 y y y y -Windows 7 Cygwin n n n y -Windows 7 x64 Cygwin n n n y -Windows 10 y y y y -Windows 10 x64 y y y y -Mac OS X Yosemite 10.10.5 64-bit y y y y -Mac OS X El Capitan 10.11.6 64-bit y y y y -MacOS High Sierra 10.13.6 64-bit y y y y -CentOS 7.2 Linux 3.10.0 x86_64 PGI y y y n -CentOS 7.2 Linux 3.10.0 x86_64 GNU y y y y -CentOS 7.2 Linux 3.10.0 x86_64 Intel y y y n -Linux 2.6.32-573.18.1.el6.ppc64 y y y n - -Compiler versions for each platform are listed in the preceding -"Supported Platforms" table. - - -More Tested Platforms -===================== -The following configurations are not supported but have been tested for this release. 
- - Linux 2.6.32-754.11.1.el6 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (mayll/platypus) Version 4.4.7 20120313 - Version 4.9.3, 5.3.0, 6.2.0 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 17.10-0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 17.0.4.196 Build 20170411 - MPICH 3.1.4 compiled with GCC 4.9.3 - - Linux 3.10.0-327.18.2.el7 GNU C (gcc) and C++ (g++) compilers - #1 SMP x86_64 GNU/Linux Version 4.8.5 20150623 (Red Hat 4.8.5-4) - (jelly) with NAG Fortran Compiler Release 6.2(Chiyoda) - GCC Version 7.1.0 - MPICH 3.2-GCC-4.9.3 - MPICH 3.2.1-GCC-7.2.0-2.29 - OpenMPI 2.1.5-GCC-7.2.0-2.29 - Intel(R) C (icc) and C++ (icpc) compilers - Version 17.0.0.098 Build 20160721 - with NAG Fortran Compiler Release 6.2(Chiyoda) - - Linux 3.10.0-327.10.1.el7 MPICH 3.2 compiled with GCC 5.3.0 - #1 SMP x86_64 GNU/Linux - (moohan) - - Linux 2.6.32-573.18.1.el6.ppc64 MPICH mpich 3.1.4 compiled with - #1 SMP ppc64 GNU/Linux IBM XL C/C++ for Linux, V13.1 - (ostrich) and IBM XL Fortran for Linux, V15.1 - - Fedora30 5.3.11-200.fc30.x86_64 - #1 SMP x86_64 GNU/Linux GNU gcc (GCC) 9.2.1 20190827 (Red Hat 9.2.1 20190827) - GNU Fortran (GCC) 9.2.1 20190827 (Red Hat 9.2.1 20190827) - (cmake and autotools) - - Mac OS X 10.11.6, Darwin, Apple clang version 7.3.0 from Xcode 7.3 - 15.6.0, x86-64 gfortran GNU Fortran (GCC) 5.2.0 - (osx1011test) Intel icc/icpc/ifort version 16.0.2 - - macOS 10.12.6, Darwin, Apple clang LLVM version 8.1.0 from Xcode 8.3 - 16.6.0, x86_64 gfortran GNU Fortran (GCC) 7.1.0 - (kite) Intel icc/icpc/ifort version 17.0.2 - - Windows 7 x64 Visual Studio 2008 - - -Known Problems -============== - CMake files do not behave correctly with paths containing spaces. - Do not use spaces in paths because the required escaping for handling spaces - results in very complex and fragile build files. - ADB - 2019/05/07 - - At present, metadata cache images may not be generated by parallel - applications. 
Parallel applications can read files with metadata cache - images, but since this is a collective operation, a deadlock is possible - if one or more processes do not participate. - - Three tests fail with OpenMPI 3.0.0/GCC-7.2.0-2.29: - testphdf5 (ecdsetw, selnone, cchunk1, cchunk3, cchunk4, and actualio) - t_shapesame (sscontig2) - t_pflush1/fails on exit - The first two tests fail attempting collective writes. - - Parallel builds using OpenMPI 3.03 or later and romio fail several tests - with collective writes or compression that will not fail when ompio is used - instead of romio. This can be done by adding "--mca io ompio" to the mpirun - command. For example, in autotools builds RUNPARALLEL can be set to - "mpirun --mca io ompio -n 6" provided ompio is installed. - - CPP ptable test fails on VS2017 with Intel compiler, JIRA issue: HDFFV-10628. - This test will pass with VS2015 with Intel compiler. - - Older MPI libraries such as OpenMPI 2.0.1 and MPICH 2.1.5 were tested - while attempting to resolve the Jira issue: HDFFV-10540. - The known problems of reading or writing > 2GBs when using MPI-2 was - partially resolved with the MPICH library. The proposed support recognizes - IO operations > 2GB and if the datatype is not a derived type, the library - breaks the IO into chunks which can be input or output with the existing - MPI 2 limitations, i.e. size reporting and function API size/count - arguments are restricted to be 32 bit integers. For derived types larger - than 2GB, MPICH 2.1.5 fails while attempting to read or write data. - OpenMPI in contrast, implements MPI-3 APIs even in the older releases - and thus does not suffer from the 32 bit size limitation described here. - OpenMPI releases prior to v3.1.3 appear to have other datatype issues however, - e.g. within a single parallel test (testphdf5) the subtests (cdsetr, eidsetr) - report data verification errors before eventually aborting. 
- The most recent versions of OpenMPI (v3.1.3 or newer) have evidently - resolved these issues and parallel HDF5 testing does not currently report - errors though occasional hangs have been observed. - - Known problems in previous releases can be found in the HISTORY*.txt files - in the HDF5 source. Please report any new problems found to - help@hdfgroup.org. - - -CMake vs. Autotools installations -================================= -While both build systems produce similar results, there are differences. -Each system produces the same set of folders on linux (only CMake works -on standard Windows); bin, include, lib and share. Autotools places the -COPYING and RELEASE.txt file in the root folder, CMake places them in -the share folder. - -The bin folder contains the tools and the build scripts. Additionally, CMake -creates dynamic versions of the tools with the suffix "-shared". Autotools -installs one set of tools depending on the "--enable-shared" configuration -option. - build scripts - ------------- - Autotools: h5c++, h5cc, h5fc - CMake: h5c++, h5cc, h5hlc++, h5hlcc - -The include folder holds the header files and the fortran mod files. CMake -places the fortran mod files into separate shared and static subfolders, -while Autotools places one set of mod files into the include folder. Because -CMake produces a tools library, the header files for tools will appear in -the include folder. - -The lib folder contains the library files, and CMake adds the pkgconfig -subfolder with the hdf5*.pc files used by the bin/build scripts created by -the CMake build. CMake separates the C interface code from the fortran code by -creating C-stub libraries for each Fortran library. In addition, only CMake -installs the tools library. The names of the szip libraries are different -between the build systems. 
- -The share folder will have the most differences because CMake builds include -a number of CMake specific files for support of CMake's find_package and support -for the HDF5 Examples CMake project. - -%%%%1.10.5%%%% - -HDF5 version 1.10.5 released on 2019-02-25 -================================================================================ - - -INTRODUCTION - -This document describes the differences between this release and the previous -HDF5 release. It contains information on the platforms tested and known -problems in this release. For more details check the HISTORY*.txt files in the -HDF5 source. - -Note that documentation in the links below will be updated at the time of each -final release. - -Links to HDF5 documentation can be found on The HDF5 web page: - - https://portal.hdfgroup.org/display/HDF5/HDF5 - -The official HDF5 releases can be obtained from: - - https://www.hdfgroup.org/downloads/hdf5/ - -Changes from Release to Release and New Features in the HDF5-1.10.x release series -can be found at: - - https://portal.hdfgroup.org/display/HDF5/HDF5+Application+Developer%27s+Guide - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS - -- New Features -- Support for new platforms and languages -- Bug Fixes since HDF5-1.10.4 -- Supported Platforms -- Tested Configuration Features Summary -- More Tested Platforms -- Known Problems -- CMake vs. Autotools installations - - -New Features -============ - - Configuration: - ------------- - - Cross compile on mutrino and other Cray XC40 systems. - - Added support for CMake options to use CrayLinuxEnvironment, craype-mic-knl - module for building with craype-haswell module for configuration, and - batch scripts in bin/batch for use with sbatch or bsub to run tests in - batch jobs on compute nodes. An instruction file README_HPC describing - the use of these options was added in release_docs. 
- - (LRK - 2019/02/18, TRILABS-34) - - - Rework CMake command files to fix MPI testing. - - Added setup fixture to remove any test generated files and added DEPENDS - to test properties to execute tests in order expected. - - (ADB - 2019/02/14, TRILABS-111) - - - Disable SZIP or ZLIB options if TGZ files are not available. - - Changed the TGZ option for SZip and ZLib to disable the options - if the source tar.gz files are not found. - - (ADB - 2019/02/05, HDFFV-10697) - - - Added a new option to enable/disable using pread/pwrite instead of - read/write in the sec2, log, and core VFDs. - - This option is enabled by default when pread/pwrite are detected. - - Autotools: --enable-preadwrite - CMake: HDF5_ENABLE_PREADWRITE - - (DER - 2019/02/03, HDFFV-10696) - - - Rework CMake versioning for OSX platforms. - - Changed the current_version and compatibility_version flags from optional - with HDF5_BUILD_WITH_INSTALL_NAME to always setting the flags for OSX. - - (ADB - 2019/01/22, HDFFV-10685) - - - Rework CMake command files to eliminate developer CMP005 warning - - Use variables without quotes in if () statements. - - (ADB - 2019/01/18, TILABS-105) - - - Rework CMake configure files to eliminate developer CMP0075 warning - - Renamed varname to HDF5_REQUIRED_LIBRARIES as the contents were not - required for configuration. Also moved check includes calls to top of - files. - - (ADB - 2019/01/03, HDFFV-10546) - - - Keep stderr and stdout separate in tests - - Changed test handling of output capture. Tests now keep the stderr - output separate from the stdout output. It is up to the test to decide - which output to check against a reference. Also added the option - to grep for a string in either output. - - (ADB - 2018/12/12, HDFFV-10632) - - - Incorrectly installed private header files were removed from - CMake installs. - - The CMake build files incorrectly flagged the following header files - as public and installed them. 
They are private and will no longer be - installed. - - HDF5 library private package files (H5Xpkg.h) - H5Edefin.h - H5Einit.h - H5Eterm.h - H5LTparse.h - h5diff.h - h5tools_dump.h - h5tools.h - h5tools_ref.h - h5tools_str.h - h5tools_utils.h - h5trav.h - - (DER - 2018/10/26, HDFFV-10614, 10609) - - - Autotools installs now install H5FDwindows.h - - This is simply to align the installed header files between the - autotools and CMake. H5FDwindows.h has no functionality on - non-Windows systems. - - (DER - 2018/10/26, HDFFV-10614) - - - Library: - -------- - - The sec2, log, and core VFDs can now use pread/pwrite instead of - read/write. - - pread and pwrite do not change the file offset, a feature that was - requested by a user working with a multi-threaded application. - - The option to configure this feature is described above. - - (DER - 2019/02/03, HDFFV-10696) - - - Add ability to minimize dataset object headers. - - Creation of many, very small datasets resulted in extensive file bloat - due to extra space in the dataset object headers -- this space is - allocated by default to allow for the insertion of a small number of - attributes within the object header and not require a continuation - block, an unnecessary provision in the target use case. - - Inform the library to expect no attributes on created datasets, and to - allocate the least space possible for the object headers. - NOTE: A continuation block is created if attributes are added to a - 'minimized' dataset, which can reduce performance. - NOTE: Some extra space is allocated for attributes essential to the - correct behavior of the object header (store creation times, e.g.). This - does not violate the design principle, as the space is calculated and - allocated as needed at the time of dataset object header creation -- - unused space is not generated. 
- New API calls: - H5Fget_dset_no_attrs_hint - H5Fset_dset_no_attrs_hint - H5Pget_dset_no_attrs_hint - H5Pset_dset_no_attrs_hint - - (JOS - 2019/01/04, TRILAB-45) - - - Added new chunk query functions - - The following public functions were added to discover information about - the chunks in an HDF5 file. - herr_t H5Dget_num_chunks(dset_id, fspace_id, *nchunks) - herr_t H5Dget_chunk_info_by_coord(dset_id, *coord, *filter_mask, *addr, *size) - herr_t H5Dget_chunk_info(dset_id, fspace_id, index, *coord, *filter_mask, *addr, *size) - - (BMR - 2018/11/07, HDFFV-10615) - - - Several empty public header files where removed from the distribution - - The following files were empty placeholders. They are for internal - packages that are unlikely to ever have public functionality and have - thus been removed. - - H5Bpublic.h - H5B2public.h - H5FSpublic.h - H5HFpublic.h - H5HGpublic.h - H5HLpublic.h - - They were only installed in CMake builds. - - (DER - 2018/10/26, HDFFV-10614) - - - Parallel Library: - ----------------- - - Changed the default behavior in parallel when reading the same dataset in its entirety - (i.e. H5S_ALL dataset selection) which is being read by all the processes collectively. - The dataset must be contiguous, less than 2GB, and of an atomic datatype. - The new behavior is the HDF5 library will use an MPI_Bcast to pass the data read from - the disk by the root process to the remain processes in the MPI communicator associated - with the HDF5 file. - - (MSB - 2019/01/02, HDFFV-10652) - - - All MPI-1 API calls have been replaced with MPI-2 equivalents. - - This was done to better support OpenMPI, as default builds no longer - include MPI-1 support (as of OpenMPI 4.0). - - (DER - 2018/12/30, HDFFV-10566) - - Fortran Library: - ---------------- - - Added wrappers for dataset object header minimization calls. 
- (see the note for TRILAB-45, above) - - New API calls: - - h5fget_dset_no_attrs_hint_f - h5fset_dset_no_attrs_hint_f - h5pget_dset_no_attrs_hint_f - h5pset_dset_no_attrs_hint_f - - (DER - 2019/01/09, TRILAB-45) - - - Added new Fortran derived type, c_h5o_info_t, which is interoperable with - C's h5o_info_t. This is needed for callback functions which - pass C's h5o_info_t data type definition. - - (MSB, 2019/01/08, HDFFV-10443) - - - Added new Fortran API, H5gmtime, which converts (C) 'time_t' structure - to Fortran DATE AND TIME storage format. - - (MSB, 2019/01/08, HDFFV-10443) - - - Added new Fortran 'fields' optional parameter to: h5ovisit_f, h5oget_info_by_name_f, - h5oget_info, h5oget_info_by_idx and h5ovisit_by_name_f. - - (MSB, 2019/01/08, HDFFV-10443) - - C++ Library: - ------------ - - Added new function to the C++ interface - - Added wrapper for H5Ovisit2: - H5Object::visit() - - (BMR - 2019/02/14, HDFFV-10532) - - - Java Library: - ---------------- - - Rewrote the JNI error handling to be much cleaner - - (JTH - 2019/02/12) - - - Add new functions to java interface - - Added wrappers for: - H5Fset_libver_bounds - H5Fget_dset_no_attrs_hint/H5Fset_dset_no_attrs_hint - H5Pget_dset_no_attrs_hint/H5Pset_dset_no_attrs_hint - - (ADB - 2019/01/07, HDFFV-10664) - - - Fix java unit tests when Time is a natural number - - Time substitution in java/test/junit.sh.in doesn't - handle the case when Time is a natural number. Fixed - the regular expression. - - (ADB - 2019/01/07, HDFFV-10674) - - - Duplicate the data read/write functions of Datasets for Attributes. - - Region references could not be displayed for attributes as they could - for datasets. Datasets had overloaded read and write functions for different - datatypes that were not available for attributes. After adding similar - functions, attribute region references work normally. 
- - (ADB - 2018/12/12, HDFVIEW-4) - - - Tools: - ------ - - The h5repart -family-to-sec2 argument was changed to -family-to-single - - In order to better support other single-file VFDs which could work with - h5repart, the -family-to-sec2 argument was renamed to -family-to-single. - This is just a name change and the functionality of the argument has not - changed. - - The -family-to-sec2 argument has been kept for backwards-compatibility. - This argument should be considered deprecated. - - (DER - 2018/11/14, HDFFV-10633) - - -Bug Fixes since HDF5-1.10.4 release -================================== - - Library - ------- - - Fix hangs with collective metadata reads during chunked dataset I/O - - In the parallel library, it was discovered that when a particular - sequence of operations following a pattern of: - - "write to chunked dataset" -> "flush file" -> "read from dataset" - - occurred with collective metadata reads enabled, hangs could be - observed due to certain MPI ranks not participating in the collective - metadata reads. - - To fix the issue, collective metadata reads are now disabled during - chunked dataset raw data I/O. - - (JTH - 2019/02/11, HDFFV-10563, HDFFV-10688) - - - Performance issue when closing an object - - The slow down is due to the search of the "tag_list" to find - out the "corked" status of an object and "uncork" it if so. - - Improve performance by skipping the search of the "tag_list" - if there are no "corked" objects when closing an object. - - (VC - 2019/02/06) - - - Uninitialized bytes from a type conversion buffer could be written - to disk in H5Dwrite calls where type conversion takes place - and the type conversion buffer was created by the HDF5 library. - - When H5Dwrite is called and datatype conversion must be performed, - the library will create a temporary buffer for type conversion if - one is not provided by the user via H5Pset_buffer. This internal - buffer is allocated via malloc and contains uninitialized data. 
In - some datatype conversions (float to long double, possibly others), - some of this uninitialized data could be written to disk. - - This was flagged by valgrind in the dtransform test and does not - appear to be a common occurrence (it is flagged in one test out - of the entire HDF5 test suite). - - Switching to calloc fixed the problem. - - (DER - 2019/02/03, HDFFV-10694) - - - There was missing protection against division by zero reported to - The HDF Group as issue #CVE-2018-17434. - - Protection against division by zero was added to address the issue - #CVE-2018-17434. - - (BMR - 2019/01/29, HDFFV-10586) - - - The issue CVE-2018-17437 was reported to The HDF Group - - Although CVE-2018-17437 reported a memory leak, the actual issue - was invalid read. It was found that the attribute name length - in an attribute message was corrupted, which caused the buffer - pointer to be advanced too far and later caused an invalid read. - - A check was added to detect when the attribute name or its length - was corrupted and report the potential of data corruption. - - (BMR - 2019/01/29, HDFFV-10588) - - - H5Ewalk did not stop when it was supposed to - - H5Ewalk was supposed to stop when the callback function stopped - even though the errors in the stack were not all visited, but it - did not. This problem is now fixed. - - (BMR - 2019/01/29, HDFFV-10684) - - - Revert H5Oget_info* and H5Ovisit* functions - - In 1.10.3 new H5Oget_info*2 and H5Ovisit*2 functions were - added for performance. Inadvertently, the original functions; - H5Oget_info, - H5Oget_info_by_name, - H5Oget_info_by_idx, - H5Ovisit, - H5Ovisit_by_name - were versioned to H5Oget_info*1 and H5Ovisit*1. This - broke the API compatibility for a maintenance release. The - original functions have been restored. - - (ADB - 2019/01/24, HDFFV-10686) - - - Fixed a potential invalid memory access and failure that could occur when - decoding an unknown object header message (from a future version of the - library). 
- - (NAF - 2019/01/07) - - - Deleting attributes in dense storage - - The library aborts with "infinite loop closing library" after - attributes in dense storage are created and then deleted. - - When deleting the attribute nodes from the name index v2 B-tree, - if an attribute is found in the intermediate B-tree nodes, - which may be merged/redistributed in the process, we need to - free the dynamically allocated spaces for the intermediate - decoded attribute. - - (VC - 2018/12/26, HDFFV-10659) - - - There was missing protection against division by zero reported to - The HDF Group as issue #CVE-2018-17233. - - Protection against division by zero was added to address the issue - #CVE-2018-17233. In addition, several similar occurrences in the same - file were fixed as well. - - (BMR - 2018/12/23, HDFFV-10577) - - - Fixed an issue where the parallel filters tests would fail - if zlib was not available on the system. Until support can - be added in the tests for filters beyond gzip/zlib, the tests - will be skipped if zlib is not available. - - (JTH - 2018/12/05) - - - A bug was discovered in the parallel library where an application - would eventually consume all of the available MPI communicators - when continually writing to a compressed dataset in parallel. This - was due to internal copies of an HDF5 File Access Property List, - which each contained a copy of the MPI communicator, not being - closed at the end of each write operation. This problem was - exacerbated by larger numbers of processors. - - (JTH - 2018/12/05, HDFFV-10629) - - - Fortran - -------- - - Fixed issue with Fortran not returning h5o_info_t field values - meta_size%attr%index_size and meta_size%attr%heap_size. - - (MSB, 2019/01/08, HDFFV-10443) - - - Added symbolic links libhdf5_hl_fortran.so to libhdf5hl_fortran.so and - libhdf5_hl_fortran.a to libhdf5hl_fortran.a in hdf5/lib directory for - autotools installs. 
These were added to match the name of the files - installed by cmake and the general pattern of hl lib files. We will - change the names of the installed lib files to the matching name in - the next major release. - - (LRK - 2019/01/04, HDFFV-10596) - - - Made Fortran specific subroutines PRIVATE in generic procedures. - - Affected generic procedures were functions in H5A, H5D, H5P, H5R and H5T. - - (MSB, 2018/12/04, HDFFV-10511) - - - Testing - ------- - - Fixed a test failure in testpar/t_dset.c caused by - the test trying to use the parallel filters feature - on MPI-2 implementations. - - (JTH, 2019/2/7) - - -Supported Platforms -=================== - - Linux 2.6.32-696.16.1.el6.ppc64 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-18) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.4.7 20120313 (Red Hat 4.4.7-18) - (ostrich) GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-18) - IBM XL C/C++ V13.1 - IBM XL Fortran V15.1 - - Linux 3.10.0-327.10.1.el7 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (kituo/moohan) Version 4.8.5 20150623 (Red Hat 4.8.5-4) - Version 4.9.3, Version 5.2.0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 17.0.0.098 Build 20160721 - MPICH 3.1.4 compiled with GCC 4.9.3 - - SunOS 5.11 32- and 64-bit Sun C 5.12 SunOS_sparc - (emu) Sun Fortran 95 8.6 SunOS_sparc - Sun C++ 5.12 SunOS_sparc - - Windows 7 Visual Studio 2015 w/ Intel Fortran 16 (cmake) - - Windows 7 x64 Visual Studio 2013 - Visual Studio 2015 w/ Intel Fortran 16 (cmake) - Visual Studio 2015 w/ Intel C, Fortran 2018 (cmake) - Visual Studio 2015 w/ MSMPI 8 (cmake) - - Windows 10 Visual Studio 2015 w/ Intel Fortran 18 (cmake) - - Windows 10 x64 Visual Studio 2015 w/ Intel Fortran 18 (cmake) - Visual Studio 2017 w/ Intel Fortran 18 (cmake) - - Mac OS X Yosemite 10.10.5 Apple clang/clang++ version 6.1 from Xcode 7.0 - 64-bit gfortran GNU Fortran (GCC) 4.9.2 - (osx1010dev/osx1010test) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X El Capitan 
10.11.6 Apple clang/clang++ version 7.3.0 from Xcode 7.3 - 64-bit gfortran GNU Fortran (GCC) 5.2.0 - (osx1011dev/osx1011test) Intel icc/icpc/ifort version 16.0.2 - - MacOS High Sierra 10.13.6 Apple LLVM version 10.0.0 (clang/clang++-1000.10.44.4) - 64-bit gfortran GNU Fortran (GCC) 8.3.0 - - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90/ F90 C++ zlib SZIP - parallel F2003 parallel -Solaris2.11 32-bit n y/y n y y y -Solaris2.11 64-bit n y/n n y y y -Windows 7 y y/y n y y y -Windows 7 x64 y y/y y y y y -Windows 7 Cygwin n y/n n y y y -Windows 7 x64 Cygwin n y/n n y y y -Windows 10 y y/y n y y y -Windows 10 x64 y y/y n y y y -Mac OS X Yosemite 10.10.5 64-bit n y/y n y y y -Mac OS X El Capitan 10.11.6 64-bit n y/y n y y y -MacOS High Sierra 10.13.6 64-bit n y/y n y y y -CentOS 7.2 Linux 3.10.0 x86_64 PGI n y/y n y y y -CentOS 7.2 Linux 3.10.0 x86_64 GNU y y/y y y y y -CentOS 7.2 Linux 3.10.0 x86_64 Intel n y/y n y y y -Linux 2.6.32-573.18.1.el6.ppc64 n y/y n y y y - - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -Solaris2.11 32-bit y y y y -Solaris2.11 64-bit y y y y -Windows 7 y y y y -Windows 7 x64 y y y y -Windows 7 Cygwin n n n y -Windows 7 x64 Cygwin n n n y -Windows 10 y y y y -Windows 10 x64 y y y y -Mac OS X Yosemite 10.10.5 64-bit y y y y -Mac OS X El Capitan 10.11.6 64-bit y y y y -MacOS High Sierra 10.13.6 64-bit y y y y -CentOS 7.2 Linux 3.10.0 x86_64 PGI y y y n -CentOS 7.2 Linux 3.10.0 x86_64 GNU y y y y -CentOS 7.2 Linux 3.10.0 x86_64 Intel y y y n -Linux 2.6.32-573.18.1.el6.ppc64 y y y n - -Compiler versions for each platform are listed in the preceding -"Supported Platforms" table. 
- - -More Tested Platforms -===================== -The following configurations are not supported but have been tested for this release. - - Linux 2.6.32-573.22.1.el6 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (mayll/platypus) Version 4.4.7 20120313 - Version 4.9.3, 5.3.0, 6.2.0 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 17.10-0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 17.0.4.196 Build 20170411 - MPICH 3.1.4 compiled with GCC 4.9.3 - - Linux 3.10.0-327.18.2.el7 GNU C (gcc) and C++ (g++) compilers - #1 SMP x86_64 GNU/Linux Version 4.8.5 20150623 (Red Hat 4.8.5-4) - (jelly) with NAG Fortran Compiler Release 6.1(Tozai) - GCC Version 7.1.0 - MPICH 3.2-GCC-4.9.3 - MPICH 3.2.1-GCC-7.2.0-2.29 - OpenMPI 2.1.5-GCC-7.2.0-2.29 - Intel(R) C (icc) and C++ (icpc) compilers - Version 17.0.0.098 Build 20160721 - with NAG Fortran Compiler Release 6.1(Tozai) - - Linux 3.10.0-327.10.1.el7 MPICH 3.2 compiled with GCC 5.3.0 - #1 SMP x86_64 GNU/Linux - (moohan) - - Fedora 29 4.20.10-200.fc29.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc, g++ (GCC) 8.2.1 20181215 - (Red Hat 8.2.1-6) - GNU Fortran (GCC) 8.2.1 20181215 - (Red Hat 8.2.1-6) - (cmake and autotools) - - Windows 7 x64 Visual Studio 2008 - - -Known Problems -============== - - At present, metadata cache images may not be generated by parallel - applications. Parallel applications can read files with metadata cache - images, but since this is a collective operation, a deadlock is possible - if one or more processes do not participate. - - Three tests fail with OpenMPI 3.0.0/GCC-7.2.0-2.29: - testphdf5 (ecdsetw, selnone, cchunk1, cchunk3, cchunk4, and actualio) - t_shapesame (sscontig2) - t_pflush1/fails on exit - The first two tests fail attempting collective writes. - - CPP ptable test fails on VS2017 with Intel compiler, JIRA issue: HDFFV-10628. - This test will pass with VS2015 with Intel compiler. 
- - Older MPI libraries such as OpenMPI 2.0.1 and MPICH 2.1.5 were tested - while attempting to resolve the Jira issue: HDFFV-10540. - The known problems of reading or writing > 2GBs when using MPI-2 were - partially resolved with the MPICH library. The proposed support recognizes - IO operations > 2GB and if the datatype is not a derived type, the library - breaks the IO into chunks which can be input or output with the existing - MPI 2 limitations, i.e. size reporting and function API size/count - arguments are restricted to be 32 bit integers. For derived types larger - than 2GB, MPICH 2.1.5 fails while attempting to read or write data. - OpenMPI in contrast, implements MPI-3 APIs even in the older releases - and thus does not suffer from the 32 bit size limitation described here. - OpenMPI releases prior to v3.1.3 appear to have other datatype issues however, - e.g. within a single parallel test (testphdf5) the subtests (cdsetr, eidsetr) - report data verification errors before eventually aborting. - The most recent versions of OpenMPI (v3.1.3 or newer) have evidently - resolved these issues and parallel HDF5 testing does not currently report - errors though occasional hangs have been observed. - - Known problems in previous releases can be found in the HISTORY*.txt files - in the HDF5 source. Please report any new problems found to - help@hdfgroup.org. - - -CMake vs. Autotools installations -================================= -While both build systems produce similar results, there are differences. -Each system produces the same set of folders on linux (only CMake works -on standard Windows); bin, include, lib and share. Autotools places the -COPYING and RELEASE.txt file in the root folder, CMake places them in -the share folder. - -The bin folder contains the tools and the build scripts. Additionally, CMake -creates dynamic versions of the tools with the suffix "-shared".
Autotools -installs one set of tools depending on the "--enable-shared" configuration -option. - build scripts - ------------- - Autotools: h5c++, h5cc, h5fc - CMake: h5c++, h5cc, h5hlc++, h5hlcc - -The include folder holds the header files and the fortran mod files. CMake -places the fortran mod files into separate shared and static subfolders, -while Autotools places one set of mod files into the include folder. Because -CMake produces a tools library, the header files for tools will appear in -the include folder. - -The lib folder contains the library files, and CMake adds the pkgconfig -subfolder with the hdf5*.pc files used by the bin/build scripts created by -the CMake build. CMake separates the C interface code from the fortran code by -creating C-stub libraries for each Fortran library. In addition, only CMake -installs the tools library. The names of the szip libraries are different -between the build systems. - -The share folder will have the most differences because CMake builds include -a number of CMake specific files for support of CMake's find_package and support -for the HDF5 Examples CMake project. - -%%%%1.10.4%%%% - -HDF5 version 1.10.4 released on 2018-10-05 -================================================================================ - - -INTRODUCTION - -This document describes the differences between this release and the previous -HDF5 release. It contains information on the platforms tested and known -problems in this release. For more details check the HISTORY*.txt files in the -HDF5 source. - -Note that documentation in the links below will be updated at the time of each -final release. 
- -Links to HDF5 documentation can be found on The HDF5 web page: - - https://portal.hdfgroup.org/display/HDF5/HDF5 - -The official HDF5 releases can be obtained from: - - https://www.hdfgroup.org/downloads/hdf5/ - -Changes from Release to Release and New Features in the HDF5-1.10.x release series -can be found at: - - https://portal.hdfgroup.org/display/HDF5/HDF5+Application+Developer%27s+Guide - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS - -- Bug Fixes since HDF5-1.10.3 -- Supported Platforms -- Tested Configuration Features Summary -- More Tested Platforms -- Known Problems -- CMake vs. Autotools installations - - -New Features -============ - - Configuration: - ------------- - - Add toolchain and cross-compile support - - Added info on using a toolchain file to INSTALL_CMAKE.txt. A - toolchain file is also used in cross-compiling, which requires - CMAKE_CROSSCOMPILING_EMULATOR to be set. To help with cross-compiling - the fortran configure process, the HDF5UseFortran.cmake file macros - were improved. Fixed a Fortran configure file issue that incorrectly - used #cmakedefine instead of #define. - - (ADB - 2018/10/04, HDFFV-10594) - - - Add warning flags for Intel compilers - - Identified Intel compiler specific warnings flags that should be used - instead of GNU flags. - - (ADB - 2018/10/04, TRILABS-21) - - - Add default rpath to targets - - Default rpaths should be set in shared executables and - libraries to allow the use of loading dependent libraries - without requiring LD_LIBRARY_PATH to be set. The default - path should be relative using @rpath on osx and $ORIGIN - on linux. Windows is not affected. - - (ADB - 2018/09/26, HDFFV-10594) - - Library: - -------- - - Allow pre-generated H5Tinit.c and H5make_libsettings.c to be used. - - Rather than always running H5detect and generating H5Tinit.c and - H5make_libsettings.c, supply a location for those files. 
- - (ADB - 2018/09/18, HDFFV-10332) - - -Bug Fixes since HDF5-1.10.3 release -================================== - - Library - ------- - - Allow H5detect and H5make_libsettings to take a file as an argument. - - Rather than only writing to stdout, add a command argument to name - the file that H5detect and H5make_libsettings will use for output. - Without an argument, stdout is still used, so backwards compatibility - is maintained. - - (ADB - 2018/09/05, HDFFV-9059) - - - A bug was discovered in the parallel library where an application - would hang if a collective read/write of a chunked dataset occurred - when collective metadata reads were enabled and some of the ranks - had no selection in the dataset's dataspace. The ranks which had no - selection in the dataset's dataspace called H5D__chunk_addrmap() to - retrieve the lowest chunk address in the dataset. This is because we - require reads/writes to be performed in strictly non-decreasing order - of chunk address in the file. - - When the chunk index used was a version 1 or 2 B-tree, these - non-participating ranks would issue a collective MPI_Bcast() call - that the participating ranks would not issue, causing the hang. Since - the non-participating ranks are not actually reading/writing anything, - the H5D__chunk_addrmap() call can be safely removed and the address used - for the read/write can be set to an arbitrary number (0 was chosen). - - (JTH - 2018/08/25, HDFFV-10501) - - Java Library: - ---------------- - - JNI native library dependencies - - The build for the hdf5_java native library used the wrong - hdf5 target library for CMake builds. Correcting the hdf5_java - library to build with the shared hdf5 library required testing - paths to change also. - - (ADB - 2018/08/31, HDFFV-10568) - - - Java iterator callbacks - - Change global callback object to a small stack structure in order - to fix a runtime crash. This crash was discovered when iterating - through a file with nested group members. 
The global variable - visit_callback is overwritten when recursion starts. When recursion - completes, visit_callback will be pointing to the wrong callback method. - - (ADB - 2018/08/15, HDFFV-10536) - - - Java HDFLibraryException class - - Change parent class from Exception to RuntimeException. - - (ADB - 2018/07/30, HDFFV-10534) - - - JNI Read and Write - - Refactored variable-length functions, H5DreadVL and H5AreadVL, - to correct dataset and attribute reads. New write functions, - H5DwriteVL and H5AwriteVL, are under construction. - - (ADB - 2018/06/02, HDFFV-10519) - - -Supported Platforms -=================== - - Linux 2.6.32-696.16.1.el6.ppc64 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-18) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.4.7 20120313 (Red Hat 4.4.7-18) - (ostrich) GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-18) - IBM XL C/C++ V13.1 - IBM XL Fortran V15.1 - - Linux 3.10.0-327.10.1.el7 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (kituo/moohan) Version 4.8.5 20150623 (Red Hat 4.8.5-4) - Version 4.9.3, Version 5.2.0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 17.0.0.098 Build 20160721 - MPICH 3.1.4 compiled with GCC 4.9.3 - - SunOS 5.11 32- and 64-bit Sun C 5.12 SunOS_sparc - (emu) Sun Fortran 95 8.6 SunOS_sparc - Sun C++ 5.12 SunOS_sparc - - Windows 7 Visual Studio 2015 w/ Intel Fortran 16 (cmake) - - Windows 7 x64 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - Visual Studio 2015 w/ Intel Fortran 16 (cmake) - Visual Studio 2015 w/ Intel C, Fortran 2017 (cmake) - Visual Studio 2015 w/ MSMPI 8 (cmake) - - Windows 10 Visual Studio 2015 w/ Intel Fortran 18 (cmake) - - Windows 10 x64 Visual Studio 2015 w/ Intel Fortran 18 (cmake) - Visual Studio 2017 w/ Intel Fortran 18 (cmake) - - Mac OS X Yosemite 10.10.5 Apple clang/clang++ version 6.1 from Xcode 7.0 - 64-bit gfortran GNU Fortran (GCC) 4.9.2 - (osx1010dev/osx1010test) Intel icc/icpc/ifort 
version 15.0.3 - - Mac OS X El Capitan 10.11.6 Apple clang/clang++ version 7.3.0 from Xcode 7.3 - 64-bit gfortran GNU Fortran (GCC) 5.2.0 - (osx1011dev/osx1011test) Intel icc/icpc/ifort version 16.0.2 - - Mac OS Sierra 10.12.6 Apple LLVM version 8.1.0 (clang/clang++-802.0.42) - 64-bit gfortran GNU Fortran (GCC) 7.1.0 - (kite) Intel icc/icpc/ifort version 17.0.2 - - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90/ F90 C++ zlib SZIP - parallel F2003 parallel -Solaris2.11 32-bit n y/y n y y y -Solaris2.11 64-bit n y/n n y y y -Windows 7 y y/y n y y y -Windows 7 x64 y y/y y y y y -Windows 7 Cygwin n y/n n y y y -Windows 7 x64 Cygwin n y/n n y y y -Windows 10 y y/y n y y y -Windows 10 x64 y y/y n y y y -Mac OS X Mavericks 10.9.5 64-bit n y/y n y y y -Mac OS X Yosemite 10.10.5 64-bit n y/y n y y y -Mac OS X El Capitan 10.11.6 64-bit n y/y n y y y -Mac OS Sierra 10.12.6 64-bit n y/y n y y y -CentOS 7.2 Linux 3.10.0 x86_64 PGI n y/y n y y y -CentOS 7.2 Linux 3.10.0 x86_64 GNU y y/y y y y y -CentOS 7.2 Linux 3.10.0 x86_64 Intel n y/y n y y y -Linux 2.6.32-573.18.1.el6.ppc64 n y/y n y y y - - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -Solaris2.11 32-bit y y y y -Solaris2.11 64-bit y y y y -Windows 7 y y y y -Windows 7 x64 y y y y -Windows 7 Cygwin n n n y -Windows 7 x64 Cygwin n n n y -Windows 10 y y y y -Windows 10 x64 y y y y -Mac OS X Mavericks 10.9.5 64-bit y n y y -Mac OS X Yosemite 10.10.5 64-bit y n y y -Mac OS X El Capitan 10.11.6 64-bit y n y y -Mac OS Sierra 10.12.6 64-bit y n y y -CentOS 7.2 Linux 3.10.0 x86_64 PGI y y y n -CentOS 7.2 Linux 3.10.0 x86_64 GNU y y y y -CentOS 7.2 Linux 3.10.0 x86_64 Intel y y y n -Linux 
2.6.32-573.18.1.el6.ppc64 y y y n - -Compiler versions for each platform are listed in the preceding -"Supported Platforms" table. - - -More Tested Platforms -===================== -The following platforms are not supported but have been tested for this release. - - Linux 2.6.32-573.22.1.el6 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (mayll/platypus) Version 4.4.7 20120313 - Version 4.9.3, 5.3.0, 6.2.0 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 17.10-0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 17.0.4.196 Build 20170411 - MPICH 3.1.4 compiled with GCC 4.9.3 - - Linux 3.10.0-327.18.2.el7 GNU C (gcc) and C++ (g++) compilers - #1 SMP x86_64 GNU/Linux Version 4.8.5 20150623 (Red Hat 4.8.5-4) - (jelly) with NAG Fortran Compiler Release 6.1(Tozai) - GCC Version 7.1.0 - OpenMPI 3.0.0-GCC-7.2.0-2.29, - 3.1.0-GCC-7.2.0-2.29 - Intel(R) C (icc) and C++ (icpc) compilers - Version 17.0.0.098 Build 20160721 - with NAG Fortran Compiler Release 6.1(Tozai) - - Linux 3.10.0-327.10.1.el7 MPICH 3.2 compiled with GCC 5.3.0 - #1 SMP x86_64 GNU/Linux - (moohan) - - Linux 2.6.32-573.18.1.el6.ppc64 MPICH mpich 3.1.4 compiled with - #1 SMP ppc64 GNU/Linux IBM XL C/C++ for Linux, V13.1 - (ostrich) and IBM XL Fortran for Linux, V15.1 - - Debian 8.4 3.16.0-4-amd64 #1 SMP Debian 3.16.36-1 x86_64 GNU/Linux - gcc, g++ (Debian 4.9.2-10) 4.9.2 - GNU Fortran (Debian 4.9.2-10) 4.9.2 - (cmake and autotools) - - Fedora 24 4.7.2-201.fc24.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc, g++ (GCC) 6.1.1 20160621 - (Red Hat 6.1.1-3) - GNU Fortran (GCC) 6.1.1 20160621 - (Red Hat 6.1.1-3) - (cmake and autotools) - - Ubuntu 16.04.1 4.4.0-38-generic #57-Ubuntu SMP x86_64 GNU/Linux - gcc, g++ (Ubuntu 5.4.0-6ubuntu1~16.04.2) - 5.4.0 20160609 - GNU Fortran (Ubuntu 5.4.0-6ubuntu1~16.04.2) - 5.4.0 20160609 - (cmake and autotools) - - -Known Problems -============== - - At present, metadata cache images may not be generated by 
parallel - applications. Parallel applications can read files with metadata cache - images, but since this is a collective operation, a deadlock is possible - if one or more processes do not participate. - - Three tests fail with OpenMPI 3.0.0/GCC-7.2.0-2.29: - testphdf5 (ecdsetw, selnone, cchunk1, cchunk3, cchunk4, and actualio) - t_shapesame (sscontig2) - t_pflush1/fails on exit - The first two tests fail attempting collective writes. - - Known problems in previous releases can be found in the HISTORY*.txt files - in the HDF5 source. Please report any new problems found to - help@hdfgroup.org. - - -CMake vs. Autotools installations -================================= -While both build systems produce similar results, there are differences. -Each system produces the same set of folders on linux (only CMake works -on standard Windows); bin, include, lib and share. Autotools places the -COPYING and RELEASE.txt file in the root folder, CMake places them in -the share folder. - -The bin folder contains the tools and the build scripts. Additionally, CMake -creates dynamic versions of the tools with the suffix "-shared". Autotools -installs one set of tools depending on the "--enable-shared" configuration -option. - build scripts - ------------- - Autotools: h5c++, h5cc, h5fc - CMake: h5c++, h5cc, h5hlc++, h5hlcc - -The include folder holds the header files and the fortran mod files. CMake -places the fortran mod files into separate shared and static subfolders, -while Autotools places one set of mod files into the include folder. Because -CMake produces a tools library, the header files for tools will appear in -the include folder. - -The lib folder contains the library files, and CMake adds the pkgconfig -subfolder with the hdf5*.pc files used by the bin/build scripts created by -the CMake build. CMake separates the C interface code from the fortran code by -creating C-stub libraries for each Fortran library. In addition, only CMake -installs the tools library. 
The names of the szip libraries are different -between the build systems. - -The share folder will have the most differences because CMake builds include -a number of CMake specific files for support of CMake's find_package and support -for the HDF5 Examples CMake project. - -%%%%1.10.3%%%% - -HDF5 version 1.10.3 released on 2018-08-21 -================================================================================ - - -INTRODUCTION - -This document describes the differences between this release and the previous -HDF5 release. It contains information on the platforms tested and known -problems in this release. For more details check the HISTORY*.txt files in the -HDF5 source. - -Note that documentation in the links below will be updated at the time of each -final release. - -Links to HDF5 documentation can be found on The HDF5 web page: - - https://portal.hdfgroup.org/display/HDF5/HDF5 - -The official HDF5 releases can be obtained from: - - https://www.hdfgroup.org/downloads/hdf5/ - -Changes from Release to Release and New Features in the HDF5-1.10.x release series -can be found at: - - https://portal.hdfgroup.org/display/HDF5/HDF5+Application+Developer%27s+Guide - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS - -- New Features -- Bug Fixes since HDF5-1.10.2 -- Supported Platforms -- Tested Configuration Features Summary -- More Tested Platforms -- Known Problems -- CMake vs. Autotools installations - - -New Features -============ - - Library - ------- - - Moved the H5DOread/write_chunk() API calls to H5Dread/write_chunk() - - The functionality of the direct chunk I/O calls in the high-level - library has been moved to the H5D package in the main library. This - will allow using those functions without building the high-level - library. The parameters and functionality of the H5D calls are - identical to the H5DO calls. 
- - The original H5DO high-level API calls have been retained, though - they are now just wrappers for the H5D calls. They are marked as - deprecated and are only available when the library is built with - deprecated functions. New code should use the H5D calls for this - reason. - - As a part of this work, the following symbols from H5Dpublic.h are no - longer used: - - H5D_XFER_DIRECT_CHUNK_WRITE_FLAG_NAME - H5D_XFER_DIRECT_CHUNK_WRITE_FILTERS_NAME - H5D_XFER_DIRECT_CHUNK_WRITE_OFFSET_NAME - H5D_XFER_DIRECT_CHUNK_WRITE_DATASIZE_NAME - H5D_XFER_DIRECT_CHUNK_READ_FLAG_NAME - H5D_XFER_DIRECT_CHUNK_READ_OFFSET_NAME - H5D_XFER_DIRECT_CHUNK_READ_FILTERS_NAME - - And properties with these names are no longer stored in the dataset - transfer property lists. The symbols are still defined in H5Dpublic.h, - but only when the library is built with deprecated symbols. - - (DER - 2018/05/04) - - Configuration: - ------------- - - Add missing USE_110_API_DEFAULT option. - - Option USE_110_API_DEFAULT sets the default version of - versioned APIs. The bin/makevers perl script did not set - the maxidx variable correctly when the 1.10 branch was - created. This caused the versioning process to always use - the latest version of any API. - - (ADB - 2018/08/17, HDFFV-10552) - - - Added configuration checks for the following MPI functions: - - MPI_Mprobe - Used for the Parallel Compression feature - MPI_Imrecv - Used for the Parallel Compression feature - - MPI_Get_elements_x - Used for the "big Parallel I/O" feature - MPI_Type_size_x - Used for the "big Parallel I/O" feature - - (JTH - 2018/08/02, HDFFV-10512) - - - Added section to the libhdf5.settings file to indicate - the status of the Parallel Compression and "big Parallel I/O" - features. - - (JTH - 2018/08/02, HDFFV-10512) - - - Add option to execute swmr shell scripts from CMake. - - Option TEST_SHELL_SCRIPTS redirects processing into a - separate ShellTests.cmake file for UNIX types. 
The tests - execute the shell scripts if a SH program is found. - - (ADB - 2018/07/16) - - - C++ Library: - ------------ - - New wrappers - - Added the following items: - - + Class DSetAccPropList for the dataset access property list. - - + Wrapper for H5Dget_access_plist to class DataSet - // Gets the access property list of this dataset. - DSetAccPropList getAccessPlist() const; - - + Wrappers for H5Pset_chunk_cache and H5Pget_chunk_cache to class DSetAccPropList - // Sets the raw data chunk cache parameters. - void setChunkCache(size_t rdcc_nslots, size_t rdcc_nbytes, double rdcc_w0) - - // Retrieves the raw data chunk cache parameters. - void getChunkCache(size_t &rdcc_nslots, size_t &rdcc_nbytes, double &rdcc_w0) - - + New operator!= to class DataType (HDFFV-10472) - // Determines whether two datatypes are not the same. - bool operator!=(const DataType& compared_type) - - + Wrappers for H5Oget_info2, H5Oget_info_by_name2, and H5Oget_info_by_idx2 - (HDFFV-10458) - - // Retrieves information about an HDF5 object. - void getObjinfo(H5O_info_t& objinfo, unsigned fields = H5O_INFO_BASIC) const; - - // Retrieves information about an HDF5 object, given its name. - void getObjinfo(const char* name, H5O_info_t& objinfo, - unsigned fields = H5O_INFO_BASIC, - const LinkAccPropList& lapl = LinkAccPropList::DEFAULT) const; - void getObjinfo(const H5std_string& name, H5O_info_t& objinfo, - unsigned fields = H5O_INFO_BASIC, - const LinkAccPropList& lapl = LinkAccPropList::DEFAULT) const; - - // Retrieves information about an HDF5 object, given its index. 
- void getObjinfo(const char* grp_name, H5_index_t idx_type, - H5_iter_order_t order, hsize_t idx, H5O_info_t& objinfo, - unsigned fields = H5O_INFO_BASIC, - const LinkAccPropList& lapl = LinkAccPropList::DEFAULT) const; - void getObjinfo(const H5std_string& grp_name, H5_index_t idx_type, - H5_iter_order_t order, hsize_t idx, H5O_info_t& objinfo, - unsigned fields = H5O_INFO_BASIC, - const LinkAccPropList& lapl = LinkAccPropList::DEFAULT) const; - - (BMR - 2018/07/22, HDFFV-10150, HDFFV-10458, HDFFV-1047) - - - Java Library: - ---------------- - - Java HDFLibraryException class - - Change parent class from Exception to RuntimeException. - - (ADB - 2018/07/30, HDFFV-10534) - - - JNI Read and Write - - Refactored variable-length functions, H5DreadVL and H5AreadVL, - to correct dataset and attribute reads. New write functions, - H5DwriteVL and H5AwriteVL, are under construction. - - (ADB - 2018/06/02, HDFFV-10519) - - -Bug Fixes since HDF5-1.10.2 release -================================== - - Library - ------- - - Performance issue with H5Oget_info - - H5Oget_info family of routines retrieves information for an object such - as object type, access time, number of attributes, and storage space etc. - Retrieving all such information regardless is an overkill and causes - performance issue when doing so for many objects. - - Add an additional parameter "fields" to the H5Oget_info family of routines - indicating the type of information to be retrieved. The same is done to - the H5Ovisit family of routines which recursively visits an object - returning object information in a callback function. Both sets of routines - are versioned and the corresponding compatibility macros are added.
- - The version 2 names of the two sets of routines are: - (1) H5Oget_info2, H5Oget_info_by_idx2, H5Oget_info_by_name2 - (2) H5Ovisit2, H5Ovisit_by_name2 - - (VC - 2018/08/15, HDFFV-10180) - - - Test failure due to metadata size in test/vds.c - - The size of metadata from test_api_get_ex_dcpl() in test/vds.c is not as expected - because the latest format should be used when encoding the layout for VDS. - - Set the latest format in a temporary fapl and pass the setting to the routines that - encode the dataset selection for VDS. - - (VC - 2018/08/14 HDFFV-10469) - - - Java HDF5LibraryException class - - The error minor and major values would be lost after the - constructor executed. - - Created two local class variables to hold the values obtained during - execution of the constructor. Refactored the class functions to retrieve - the class values rather than calling the native functions. - The native functions were renamed and called only during execution - of the constructor. - Added error checking to calling class constructors in JNI classes. - - (ADB - 2018/08/06, HDFFV-10544) - - - Added checks of the defined MPI_VERSION to guard against usage of - MPI-3 functions in the Parallel Compression and "big Parallel I/O" - features when HDF5 is built with MPI-2. Previously, the configure - step would pass but the build itself would fail when it could not - locate the MPI-3 functions used. - - As a result of these new checks, HDF5 can again be built with MPI-2, - but the Parallel Compression feature will be disabled as it relies - on the MPI-3 functions used.
- - (JTH - 2018/08/02, HDFFV-10512) - - - User's patches: CVEs - - The following patches have been applied: - - CVE-2018-11202 - NULL pointer dereference was discovered in - H5S_hyper_make_spans in H5Shyper.c (HDFFV-10476) - https://security-tracker.debian.org/tracker/CVE-2018-11202 - https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-11202 - - CVE-2018-11203 - A division by zero was discovered in - H5D__btree_decode_key in H5Dbtree.c (HDFFV-10477) - https://security-tracker.debian.org/tracker/CVE-2018-11203 - https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-11203 - - CVE-2018-11204 - A NULL pointer dereference was discovered in - H5O__chunk_deserialize in H5Ocache.c (HDFFV-10478) - https://security-tracker.debian.org/tracker/CVE-2018-11204 - https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-11204 - - CVE-2018-11206 - An out of bound read was discovered in - H5O_fill_new_decode and H5O_fill_old_decode in H5Ofill.c - (HDFFV-10480) - https://security-tracker.debian.org/tracker/CVE-2018-11206 - https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-11206 - - CVE-2018-11207 - A division by zero was discovered in - H5D__chunk_init in H5Dchunk.c (HDFFV-10481) - https://security-tracker.debian.org/tracker/CVE-2018-11207 - https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-11207 - - (BMR - 2018/7/22, PR#s: 1134 and 1139, - HDFFV-10476, HDFFV-10477, HDFFV-10478, HDFFV-10480, HDFFV-10481) - - - H5Adelete - - H5Adelete failed when deleting the last "large" attribute that - is stored densely via fractal heap/v2 b-tree. - - After removing the attribute, update the ainfo message. If the - number of attributes goes to zero, remove the message. - - (VC - 2018/07/20, HDFFV-9277) - - - A bug was discovered in the parallel library which caused partial - parallel reads of filtered datasets to return incorrect data. The - library used the incorrect dataspace for each chunk read, causing - the selection used in each chunk to be wrong.
- - The bug was not caught during testing because all of the current - tests which do parallel reads of filtered data read all of the data - using an H5S_ALL selection. Several tests were added which exercise - partial parallel reads. - - (JTH - 2018/07/16, HDFFV-10467) - - - A bug was discovered in the parallel library which caused parallel - writes of filtered datasets to trigger an assertion failure in the - file free space manager. - - This occurred when the filter used caused chunks to repeatedly shrink - and grow over the course of several dataset writes. The previous chunk - information, such as the size of the chunk and the offset in the file, - was being cached and not updated after each write, causing the next write - to the chunk to retrieve the incorrect cached information and run into - issues when reallocating space in the file for the chunk. - - (JTH - 2018/07/16, HDFFV-10509) - - - A bug was discovered in the parallel library which caused the - H5D__mpio_array_gatherv() function to allocate too much memory. - - When the function is called with the 'allgather' parameter set - to a non-true value, the function will receive data from all MPI - ranks and gather it to the single rank specified by the 'root' - parameter. However, the bug in the function caused memory for - the received data to be allocated on all MPI ranks, not just the - singular rank specified as the receiver. In some circumstances, - this would cause an application to fail due to the large amounts - of memory being allocated. - - (JTH - 2018/07/16, HDFFV-10467) - - - Error checks in h5stat and when decoding messages - - h5stat exited with seg fault/core dumped when - errors are encountered in the internal library. - - Add error checks and --enable-error-stack option to h5stat. - Add range checks when decoding messages: old fill value, old - layout and refcount. 
- - (VC - 2018/07/11, HDFFV-10333) - - - If an HDF5 file contains a malformed compound datatype with a - suitably large offset, the type conversion code can run off - the end of the type conversion buffer, causing a segmentation - fault. - - This issue was reported to The HDF Group as issue #CVE-2017-17507. - - NOTE: The HDF5 C library cannot produce such a file. This condition - should only occur in a corrupt (or deliberately altered) file - or a file created by third-party software. - - THE HDF GROUP WILL NOT FIX THIS BUG AT THIS TIME - - Fixing this problem would involve updating the publicly visible - H5T_conv_t function pointer typedef and versioning the API calls - which use it. We normally only modify the public API during - major releases, so this bug will not be fixed at this time. - - (DER - 2018/02/26, HDFFV-10356) - - - Configuration - ------------- - - Applied patches to address Cygwin build issues - - There were three issues for Cygwin builds: - - Shared libs were not built. - - The -std=c99 flag caused a SIG_SETMASK undeclared error. - - Undefined errors when building test shared libraries. - - Patches to address these issues were received and incorporated in this version. - - (LRK - 2018/07/18, HDFFV-10475) - - - The --enable-debug/production configure flags are listed as 'deprecated' - when they should really be listed as 'removed'. - - In the autotools overhaul several years ago, we removed these flags and - implemented a new --enable-build-mode= flag. This was done because we - changed the semantics of the modes and didn't want users to silently - be exposed to them. The newer system is also more flexible and allows us to - add other modes (like 'clean'). - - The --enable-debug/production flags are now listed as removed. - - (DER - 2018/05/31, HDFFV-10505) - - - Moved the location of gcc attribute. - - The gcc attribute(no_sanitize), named as the macro HDF_NO_UBSAN, - was located after the function name.
Builds with GCC 7 did not - indicate any problem, but GCC 8 issued errors. Moved the - attribute before the function name, as required. - - (ADB - 2018/05/22, HDFFV-10473) - - - Reworked java test suite into individual JUnit tests. - - Testing the whole suite of java unit tests in a single JUnit run - made it difficult to determine actual failures when tests would fail. - Running each file set of tests individually, allows individual failures - to be diagnosed easier. A side benefit is that tests for optional components - of the library can be disabled if not configured. - - (ADB - 2018/05/16, HDFFV-9739) - - - Converted CMake global commands ADD_DEFINITIONS and INCLUDE_DIRECTORIES - to use target_* type commands. This change modernizes the CMake usage - in the HDF5 library. - - In addition, there is the intention to convert to generator expressions, - where possible. The exception is Fortran FLAGS on Windows Visual Studio. - The HDF macros TARGET_C_PROPERTIES and TARGET_FORTRAN_PROPERTIES have - been removed with this change in usage. - - The additional language (C++ and Fortran) checks have also been localized - to only be checked when that language is enabled. - - (ADB - 2018/05/08) - - - Performance - ------------- - - Revamped internal use of DXPLs, improving performance - - (QAK - 2018/05/20) - - - Fortran - -------- - - Fixed issue with h5fget_obj_count_f and using a file id of H5F_OBJ_ALL_F not - returning the correct count. 
- - (MSB - 2018/5/15, HDFFV-10405) - - - C++ APIs - -------- - - Adding default arguments to existing functions - - Added the following items: - + Two more property list arguments are added to H5Location::createDataSet: - const DSetAccPropList& dapl = DSetAccPropList::DEFAULT - const LinkCreatPropList& lcpl = LinkCreatPropList::DEFAULT - - + One more property list argument is added to H5Location::openDataSet: - const DSetAccPropList& dapl = DSetAccPropList::DEFAULT - - (BMR - 2018/07/21, PR# 1146) - - - Improvement C++ documentation - - Replaced the table in main page of the C++ documentation from mht to htm format - for portability. - - (BMR - 2018/07/17, PR# 1141) - - -Supported Platforms -=================== - - Linux 2.6.32-696.16.1.el6.ppc64 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-18) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.4.7 20120313 (Red Hat 4.4.7-18) - (ostrich) GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-18) - IBM XL C/C++ V13.1 - IBM XL Fortran V15.1 - - Linux 3.10.0-327.10.1.el7 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (kituo/moohan) Version 4.8.5 20150623 (Red Hat 4.8.5-4) - Version 4.9.3, Version 5.2.0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 17.0.0.098 Build 20160721 - MPICH 3.1.4 compiled with GCC 4.9.3 - - SunOS 5.11 32- and 64-bit Sun C 5.12 SunOS_sparc - (emu) Sun Fortran 95 8.6 SunOS_sparc - Sun C++ 5.12 SunOS_sparc - - Windows 7 Visual Studio 2015 w/ Intel Fortran 16 (cmake) - - Windows 7 x64 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - Visual Studio 2015 w/ Intel Fortran 16 (cmake) - Visual Studio 2015 w/ Intel C, Fortran 2017 (cmake) - Visual Studio 2015 w/ MSMPI 8 (cmake) - - Windows 10 Visual Studio 2015 w/ Intel Fortran 18 (cmake) - - Windows 10 x64 Visual Studio 2015 w/ Intel Fortran 18 (cmake) - Visual Studio 2017 w/ Intel Fortran 18 (cmake) - - Mac OS X Yosemite 10.10.5 Apple clang/clang++ version 6.1 from Xcode 
7.0 - 64-bit gfortran GNU Fortran (GCC) 4.9.2 - (osx1010dev/osx1010test) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X El Capitan 10.11.6 Apple clang/clang++ version 7.3.0 from Xcode 7.3 - 64-bit gfortran GNU Fortran (GCC) 5.2.0 - (osx1011dev/osx1011test) Intel icc/icpc/ifort version 16.0.2 - - Mac OS Sierra 10.12.6 Apple LLVM version 8.1.0 (clang/clang++-802.0.42) - 64-bit gfortran GNU Fortran (GCC) 7.1.0 - (swallow/kite) Intel icc/icpc/ifort version 17.0.2 - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90/ F90 C++ zlib SZIP - parallel F2003 parallel -Solaris2.11 32-bit n y/y n y y y -Solaris2.11 64-bit n y/n n y y y -Windows 7 y y/y n y y y -Windows 7 x64 y y/y y y y y -Windows 7 Cygwin n y/n n y y y -Windows 7 x64 Cygwin n y/n n y y y -Windows 10 y y/y n y y y -Windows 10 x64 y y/y n y y y -Mac OS X Mavericks 10.9.5 64-bit n y/y n y y y -Mac OS X Yosemite 10.10.5 64-bit n y/y n y y y -Mac OS X El Capitan 10.11.6 64-bit n y/y n y y y -Mac OS Sierra 10.12.6 64-bit n y/y n y y y -CentOS 7.2 Linux 2.6.32 x86_64 PGI n y/y n y y y -CentOS 7.2 Linux 2.6.32 x86_64 GNU y y/y y y y y -CentOS 7.2 Linux 2.6.32 x86_64 Intel n y/y n y y y -Linux 2.6.32-573.18.1.el6.ppc64 n y/y n y y y - - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -Solaris2.11 32-bit y y y y -Solaris2.11 64-bit y y y y -Windows 7 y y y y -Windows 7 x64 y y y y -Windows 7 Cygwin n n n y -Windows 7 x64 Cygwin n n n y -Windows 10 y y y y -Windows 10 x64 y y y y -Mac OS X Mavericks 10.9.5 64-bit y n y y -Mac OS X Yosemite 10.10.5 64-bit y n y y -Mac OS X El Capitan 10.11.6 64-bit y n y y -Mac OS Sierra 10.12.6 64-bit y n y y -CentOS 7.2 Linux 2.6.32 x86_64 PGI y y y n -CentOS 7.2 Linux 
2.6.32 x86_64 GNU y y y y -CentOS 7.2 Linux 2.6.32 x86_64 Intel y y y n -Linux 2.6.32-573.18.1.el6.ppc64 y y y n - -Compiler versions for each platform are listed in the preceding -"Supported Platforms" table. - - -More Tested Platforms -===================== -The following platforms are not supported but have been tested for this release. - - Linux 2.6.32-573.22.1.el6 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (mayll/platypus) Version 4.4.7 20120313 - Version 4.9.3, 5.3.0, 6.2.0 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 17.10-0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 17.0.4.196 Build 20170411 - MPICH 3.1.4 compiled with GCC 4.9.3 - - Linux 3.10.0-327.18.2.el7 GNU C (gcc) and C++ (g++) compilers - #1 SMP x86_64 GNU/Linux Version 4.8.5 20150623 (Red Hat 4.8.5-4) - (jelly) with NAG Fortran Compiler Release 6.1(Tozai) - GCC Version 7.1.0 - OpenMPI 3.0.0-GCC-7.2.0-2.29, - 3.1.0-GCC-7.2.0-2.29 - Intel(R) C (icc) and C++ (icpc) compilers - Version 17.0.0.098 Build 20160721 - with NAG Fortran Compiler Release 6.1(Tozai) - - Linux 3.10.0-327.10.1.el7 MPICH 3.2 compiled with GCC 5.3.0 - #1 SMP x86_64 GNU/Linux - (moohan) - - Linux 2.6.32-573.18.1.el6.ppc64 MPICH mpich 3.1.4 compiled with - #1 SMP ppc64 GNU/Linux IBM XL C/C++ for Linux, V13.1 - (ostrich) and IBM XL Fortran for Linux, V15.1 - - Debian 8.4 3.16.0-4-amd64 #1 SMP Debian 3.16.36-1 x86_64 GNU/Linux - gcc, g++ (Debian 4.9.2-10) 4.9.2 - GNU Fortran (Debian 4.9.2-10) 4.9.2 - (cmake and autotools) - - Fedora 24 4.7.2-201.fc24.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc, g++ (GCC) 6.1.1 20160621 - (Red Hat 6.1.1-3) - GNU Fortran (GCC) 6.1.1 20160621 - (Red Hat 6.1.1-3) - (cmake and autotools) - - Ubuntu 16.04.1 4.4.0-38-generic #57-Ubuntu SMP x86_64 GNU/Linux - gcc, g++ (Ubuntu 5.4.0-6ubuntu1~16.04.2) - 5.4.0 20160609 - GNU Fortran (Ubuntu 5.4.0-6ubuntu1~16.04.2) - 5.4.0 20160609 - (cmake and autotools) - - -Known Problems 
-============== - - At present, metadata cache images may not be generated by parallel - applications. Parallel applications can read files with metadata cache - images, but since this is a collective operation, a deadlock is possible - if one or more processes do not participate. - - Three tests fail with OpenMPI 3.0.0/GCC-7.2.0-2.29: - testphdf5 (ecdsetw, selnone, cchunk1, cchunk3, cchunk4, and actualio) - t_shapesame (sscontig2) - t_pflush1/fails on exit - The first two tests fail attempting collective writes. - - Known problems in previous releases can be found in the HISTORY*.txt files - in the HDF5 source. Please report any new problems found to - help@hdfgroup.org. - - -CMake vs. Autotools installations -================================= -While both build systems produce similar results, there are differences. -Each system produces the same set of folders on linux (only CMake works -on standard Windows); bin, include, lib and share. Autotools places the -COPYING and RELEASE.txt file in the root folder, CMake places them in -the share folder. - -The bin folder contains the tools and the build scripts. Additionally, CMake -creates dynamic versions of the tools with the suffix "-shared". Autotools -installs one set of tools depending on the "--enable-shared" configuration -option. - build scripts - ------------- - Autotools: h5c++, h5cc, h5fc - CMake: h5c++, h5cc, h5hlc++, h5hlcc - -The include folder holds the header files and the fortran mod files. CMake -places the fortran mod files into separate shared and static subfolders, -while Autotools places one set of mod files into the include folder. Because -CMake produces a tools library, the header files for tools will appear in -the include folder. - -The lib folder contains the library files, and CMake adds the pkgconfig -subfolder with the hdf5*.pc files used by the bin/build scripts created by -the CMake build. 
CMake separates the C interface code from the fortran code by -creating C-stub libraries for each Fortran library. In addition, only CMake -installs the tools library. The names of the szip libraries are different -between the build systems. - -The share folder will have the most differences because CMake builds include -a number of CMake specific files for support of CMake's find_package and support -for the HDF5 Examples CMake project. - - -%%%%1.10.2%%%% - -HDF5 version 1.10.2 released on 2018-03-29 -================================================================================ - - -INTRODUCTION - -This document describes the differences between this release and the previous -HDF5 release. It contains information on the platforms tested and known -problems in this release. For more details check the HISTORY*.txt files in the -HDF5 source. - -Note that documentation in the links below will be updated at the time of each -final release. - -Links to HDF5 documentation can be found on The HDF5 web page: - - https://portal.hdfgroup.org/display/HDF5/HDF5 - -The official HDF5 releases can be obtained from: - - https://www.hdfgroup.org/downloads/hdf5/ - -Changes from Release to Release and New Features in the HDF5-1.10.x release series -can be found at: - - https://portal.hdfgroup.org/display/HDF5/HDF5+Application+Developer%27s+Guide - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS - -- New Features -- Support for new platforms and languages -- Bug Fixes since HDF5-1.10.1 -- Supported Platforms -- Tested Configuration Features Summary -- More Tested Platforms -- Known Problems - - -New Features -============ - - Configuration and Build Systems: - -------------------------------- - - CMake builds - -------------- - - - Changed minimum CMake required version to 3.10. 
- - This change removed the need to support a copy of the FindMPI.cmake module, - which has been removed, along with its subfolder in the config/cmake_ext_mod - location. - - (ADB - 2018/03/09) - - - Added pkg-config file generation - - Added pkg-config file generation for the C, C++, HL, and HL C++ libraries. - In addition, builds on Linux will create h5cc, h5c++, h5hlcc, and h5hlc++ scripts in the bin - directory that use the pkg-config files. The scripts can be used to build HDF5 C and C++ - applications (i.e, similar to the compiler scripts produced by the Autotools builds). - - (ADB - 2018/03/08, HDFFV-4359) - - - Refactored use of CMAKE_BUILD_TYPE for new variable, which understands - the type of generator in use. - - Added new configuration macros to use new HDF_BUILD_TYPE variable. This - variable is set correctly for the type of generator being used for the build. - - (ADB - 2018/01/08, HDFFV-10385, HDFFV-10296) - - - Autotools builds - ------------------ - - - Removed version-specific gcc/gfortran flags for version 4.0 (inclusive) - and earlier. - - The config/gnu-flags file, which is sourced as a part of the configure - process, adds version-specific flags for use when building HDF5. Most of - these flags control warnings and do not affect the final product. - - Flags for older versions of the compiler were consolidated into the - common flags section. Moving these flags simplifies maintenance of - the file. - - The upshot of this is that building with ancient versions of gcc - (<= 4.0) will possibly no longer work without hand-hacking the file - to remove the flags not understood by that version of the compiler. - Nothing should change when building with gcc >= 4.1. - - (DER - 2017/05/31, HDFFV-9937) - - - -fno-omit-frame-pointer was added when building with debugging symbols - enabled. - - Debugging symbols can be enabled independently of the overall build - mode in both the autotools and CMake. This allows (limited) debugging - of optimized code. 
Since many debuggers rely on the frame pointer, - we've disabled this optimization when debugging symbols are requested - (e.g.: via building with --enable-symbols). - - (DER - 2017/05/31, HDFFV-10226) - - - Library: - -------- - - Added an enumerated value to H5F_libver_t for H5Pset_libver_bounds(). - - Currently, the library defines two values for H5F_libver_t and supports - only two pairs of (low, high) combinations as derived from these values. - Thus the bounds setting via H5Pset_libver_bounds() is rather restricted. - - Added an enumerated value (H5F_LIBVER_V18) to H5F_libver_t and - H5Pset_libver_bounds() now supports five pairs of (low, high) combinations - as derived from these values. This addition provides the user more - flexibility in setting bounds for object creation. - - (VC - 2018/03/14) - - - Added prefix option to VDS files. - - Currently, VDS source files must be in the active directory to be - found by the virtual file. Adding the option of a prefix to be set - on the virtual file, using a data access property list (DAPL), - allows the source files to locate at an absolute or relative path - to the virtual file. - Private utility functions in H5D and H5L packages merged into single - function in H5F package. - - New public APIs: - herr_t H5Pset_virtual_prefix(hid_t dapl_id, const char* prefix); - ssize_t H5Pget_virtual_prefix(hid_t dapl_id, char* prefix /*out*/, size_t size); - The prefix can also be set with an environment variable, HDF5_VDS_PREFIX. - - (ADB - 2017/12/12, HDFFV-9724, HDFFV-10361) - - - H5FDdriver_query() API call added to the C library. - - This new library call allows the user to query a virtual file driver - (VFD) for the feature flags it supports (listed in H5FDpublic.h). - This can be useful to determine if a VFD supports SWMR, for example. 
- - Note that some VFDs have feature flags that may only be present - after a file has been created or opened (e.g.: the core VFD will - have the H5FD_FEAT_POSIX_COMPAT_HANDLE flag set if the backing - store is switched on). Since the new API call queries a generic VFD - unassociated with a file, these flags will never be returned. - - (DER - 2017/05/31, HDFFV-10215) - - - H5FD_FEAT_DEFAULT_VFD_COMPATIBLE VFD feature flag added to the C library. - - This new feature flag indicates that the VFD is compatible with the - default VFD. VFDs that set this flag create single files that follow - the canonical HDF5 file format. - - (DER - 2017/05/31, HDFFV-10214) - - - The H5I_REFERENCE value in the H5I_type_t enum (defined in H5Ipublic.h) - has been marked as deprecated. - - This ID type value is not used in the C library. i.e.: There are no - hid_t values that are of ID type H5I_REFERENCE. - - This enum value will be removed in a future major version of the library. - The code will remain unchanged in the HDF5 1.10.x releases and branches. - - (DER - 2017/04/05, HDFFV-10252) - - - Parallel Library: - ----------------- - - Enabled compression for parallel applications. - - With this release parallel applications can create and write compressed - datasets (or the datasets with the filters such as Fletcher32 applied). - - (EIP - 2018/03/29) - - - Addressed slow file close on some Lustre file systems. - - Slow file close has been reported on some Lustre file systems. - While the ultimate cause is not understood fully, the proximate - cause appears to be long delays in MPI_File_set_size() calls at - file close and flush. - - To minimize this problem pending a definitive diagnosis and fix, - PHDF5 has been modified to avoid MPI_File_set_size() calls when - possible. This is done by comparing the library's EOA (End of - Allocation) with the file systems EOF, and skipping the - MPI_File_set_size() call if the two match. 
- - (JRM - 2018/03/29) - - - Optimized parallel open/location of the HDF5 super-block. - - Previous releases of PHDF5 required all parallel ranks to - search for the HDF5 superblock signature when opening the - file. As this is accomplished more or less as a synchronous - operation, a large number of processes can experience a - slowdown in the file open due to filesystem contention. - - As a first step in improving the startup/file-open performance, - we allow MPI rank 0 of the associated MPI communicator to locate - the base offset of the super-block and then broadcast that result - to the remaining ranks in the parallel group. Note that this - approach is utilized ONLY during file opens which employ the MPIO - file driver in HDF5 by previously having called H5Pset_fapl_mpio(). - - HDF5 parallel file operations which do not employ multiple ranks - e.g. specifying MPI_COMM_SELF (whose MPI_Comm_size == 1) - as opposed to MPI_COMM_WORLD, will not be affected by this - optimization. Conversely, parallel file operations on subgroups - of MPI_COMM_WORLD are allowed to be run in parallel with each - subgroup operating as an independent collection of processes. - - (RAW - 2017/10/10, HDFFV-10294) - - - Added large (>2GB) MPI-IO transfers. - - Previous releases of PHDF5 would fail when attempting to - read or write greater than 2GB of data in a single IO operation. - This issue stems principally from an MPI API whose definitions - utilize 32 bit integers to describe the number of data elements - and datatype that MPI should use to effect a data transfer. - Historically, HDF5 has invoked MPI-IO with the number of - elements in a contiguous buffer represented as the length - of that buffer in bytes. - - Resolving the issue and thus enabling larger MPI-IO transfers - is accomplished first, by detecting when a user IO request would - exceed the 2GB limit as described above. 
Once a transfer request - is identified as requiring special handling, PHDF5 now creates a - derived datatype consisting of a vector of fixed sized blocks - which is in turn wrapped within a single MPI_Type_struct to - contain the vector and any remaining data. The newly created - datatype is then used in place of MPI_BYTE and can be used to - fulfill the original user request without encountering API - errors. - - (RAW - 2017/09/10, HDFFV-8839) - - - C++ Library: - ------------ - - The following C++ API wrappers have been added to the C++ Library: - + H5Lcreate_soft: - // Creates a soft link from link_name to target_name. - void link(const char *target_name, const char *link_name,...) - void link(const H5std_string& target_name,...) - - + H5Lcreate_hard: - // Creates a hard link from new_name to curr_name. - void link(const char *curr_name, const Group& new_loc,...) - void link(const H5std_string& curr_name, const Group& new_loc,...) - - // Creates a hard link from new_name to curr_name in same location. - void link(const char *curr_name, const hid_t same_loc,...) - void link(const H5std_string& curr_name, const hid_t same_loc,...) - - Note: previous version of H5Location::link will be deprecated. - - + H5Lcopy: - // Copy an object from a group of file to another. - void copyLink(const char *src_name, const Group& dst,...) - void copyLink(const H5std_string& src_name, const Group& dst,...) - - // Copy an object from a group of file to the same location. - void copyLink(const char *src_name, const char *dst_name,...) - void copyLink(const H5std_string& src_name,...) - - + H5Lmove: - // Rename an object in a group or file to a new location. - void moveLink(const char* src_name, const Group& dst,...) - void moveLink(const H5std_string& src_name, const Group& dst,...) - - // Rename an object in a group or file to the same location. - void moveLink(const char* src_name, const char* dst_name,...) - void moveLink(const H5std_string& src_name,...) 
- - Note: previous version H5Location::move will be deprecated. - - + H5Ldelete: - // Removes the specified link from this location. - void unlink(const char *link_name, - const LinkAccPropList& lapl = LinkAccPropList::DEFAULT) - void unlink(const H5std_string& link_name, - const LinkAccPropList& lapl = LinkAccPropList::DEFAULT) - - Note: additional parameter is added to previous H5Location::unlink. - - + H5Tencode and H5Tdecode: - // Creates a binary object description of this datatype. - void DataType::encode() - C API H5Tencode() - - // Returns the decoded type from the binary object description. - DataType::decode() - C API H5Tdecode() - ArrayType::decode() - C API H5Tdecode() - CompType::decode() - C API H5Tdecode() - DataType::decode() - C API H5Tdecode() - EnumType::decode() - C API H5Tdecode() - FloatType::decode() - C API H5Tdecode() - IntType::decode() - C API H5Tdecode() - StrType::decode() - C API H5Tdecode() - VarLenType::decode() - C API H5Tdecode() - - + H5Lget_info: - // Returns the information of the named link. - H5L_info_t getLinkInfo(const H5std_string& link_name,...) - - (BMR - 2018/03/11, HDFFV-10149) - - - Added class LinkCreatPropList for link create property list. - - (BMR - 2018/03/11, HDFFV-10149) - - - Added overloaded functions H5Location::createGroup to take a link - creation property list. - Group createGroup(const char* name, const LinkCreatPropList& lcpl) - Group createGroup(const H5std_string& name, const LinkCreatPropList& lcpl) - - (BMR - 2018/03/11, HDFFV-10149) - - - A document is added to the HDF5 C++ API Reference Manual to show the - mapping from a C API to C++ wrappers. It can be found from the main - page of the C++ API Reference Manual. - - (BMR - 2017/10/17, HDFFV-10151) - - - Java Library: - ---------------- - - Wrapper added for enabling the error stack. - - H5error_off would disable the error stack reporting. 
In order - to re-enable the reporting, the error stack info needs to be - saved so that H5error_on can revert state. - - (ADB - 2018/03/13, HDFFV-10412) - - - Wrappers were added for the following C APIs: - H5Pset_evict_on_close - H5Pget_evict_on_close - H5Pset_chunk_opts - H5Pget_chunk_opts - H5Pset_efile_prefix - H5Pget_efile_prefix - H5Pset_virtual_prefix - H5Pget_virtual_prefix - - (ADB - 2017/12/20) - - - The H5I_REFERENCE value in the H5I_type_t enum (defined in H5Ipublic.h) - has been marked as deprecated. - - JNI code which refers to this value will be removed in a future - major version of the library. The code will remain unchanged in the - 1.10.x releases and branches. - - See the C library section, above, for further information. - - (HDFFV-10252, DER, 2017/04/05) - - - Tools: - ------ - - h5diff has a new option to display error stack. - - Updated h5diff with the --enable-error-stack argument, which - enables the display of the hdf5 error stack. This completes the - improvement to the main tools: h5copy, h5diff, h5dump, h5ls and - h5repack. - - (ADB - 2017/08/30, HDFFV-9774) - - -Support for new platforms, languages and compilers. -======================================= - - None - -Bug Fixes since HDF5-1.10.1 release -================================== - - Library - ------- - - The data read after a direct chunk write to a chunked dataset with - one chunk was incorrect. - - The problem was due to the passing of a null dataset pointer to - the insert callback for the chunk index in the routine - H5D__chunk_direct_write() in H5Dchunk.c - The dataset was a single-chunked dataset which will use the - single chunk index when latest format was enabled on file creation. - The single chunk index was the only index that used this pointer - in the insert callback. - - Passed the dataset pointer to the insert callback for the chunk - index in H5D__chunk_direct_write(). 
- - (VC - 2018/03/20, HDFFV-10425) - - - Added public routine H5DOread_chunk to the high-level C library. - - The patch for H5DOwrite_chunk() to write an entire chunk to the file - directly was contributed by GE Healthcare and integrated by The HDF Group - developers. - - (VC - 2017/05/19, HDFFV-9934) - - - Freeing of object header after failed checksum verification. - - It was discovered that the object header (in H5Ocache.c) was not released properly - when the checksum verification failed and a re-load of the object - header was needed. - - Freed the object header that failed the chksum verification only - after the new object header is reloaded, deserialized and set up. - - (VC - 2018/03/14, HDFFV-10209) - - - Updated H5Pset_evict_on_close in H5Pfapl.c - - Changed the minor error number from H5E_CANTSET to H5E_UNSUPPORTED for - parallel library. - - (ADB - 2018/03/06, HDFFV-10414) - - - Fixed the problems with the utility function that could not handle lowercase - Windows drive letters. - - Added call to upper function for drive letter. - - (ADB - 2017/12/18, HDFFV-10307) - - - Fixed H5Sencode() bug when the number of elements selected was > 2^32. - - H5Sencode() incorrectly encodes dataspace selection with number of - elements exceeding 2^32. When decoding such selection via H5Sdecode(), - the number of elements in the decoded dataspace is not the same as - what is encoded. This problem exists for H5S_SEL_HYPER and - H5S_SEL_POINTS encoding. - - The cause of the problem is due to the fact that the library uses 32 bits to - encode counts and block offsets for the selection. - The solution is to use the original 32 bit encodings if possible, - but use a different way to encode selection if more that 32 bits is needed. - See details in the RFC: H5Sencode/H5Sdecode Format Change i - https://bitbucket.hdfgroup.org/projects/HDFFV/repos/hdf5doc/browse/RFCs/HDF5_Library/H5SencodeFormatChange. 
- - (VC - 2017/11/28, HDFFV-9947) - - - Fixed filter plugin handling in H5PL.c and H5Z.c to not require i availability of - dependent libraries (e.g., szip or zlib). - - It was discovered that the dynamic loading process used by - filter plugins had issues with library dependencies. - - CMake build process changed to use LINK INTERFACE keywords, which - allowed HDF5 C library to make dependent libraries private. The - filter plugin libraries no longer require dependent libraries - (such as szip or zlib) to be available. - - (ADB - 2017/11/16, HDFFV-10328) - - - Fixed rare object header corruption bug. - - In certain cases, such as when converting large attributes to dense - storage, an error could occur which would either fail an assertion or - cause file corruption. Fixed and added test. - - (NAF - 2017/11/14, HDFFV-10274) - - - Updated H5Zfilter_avail in H5Z.c. - - The public function checked for plugins, while the private - function did not. - - Modified H5Zfilter_avail and private function, H5Z_filter_avail. - Moved check for plugin from public to private function. Updated - H5P__set_filter due to change in H5Z_filter_avail. Updated tests. - - (ADB - 2017/10/10, HDFFV-10297, HDFFV-10319) - - - h5dump produced SEGFAULT when dumping corrypted file. - - The behavior was due to the error in the internal function H5HL_offset_into(). - - (1) Fixed H5HL_offset_into() to return error when offset exceeds heap data - block size. - (2) Fixed other places in the library that call this routine to detect - error routine. - - (VC - 2017/08/30, HDFFV-10216) - - - Fixes for paged aggregation feature. 
- - Skip test in test/fheap.c when: - (1) multi/split drivers and - (2) persisting free-space or using paged aggregation strategy - - (VC, 2017/07/10) - - Changes made based on RFC review comments: - (1) Added maximum value for file space page size - (2) Dropped check for page end metadata threshold - (3) Removed "can_shrink" and "shrink" callbacks for small section class - - (VC - 2017/06/09) - - - Fixed for infinite loop in H5VM_power2up(). - - The function H5VM_power2up() returns the next power of 2 - for n. When n exceeds 2^63, it overflows and becomes 0 causing - the infinite looping. - - The fix ensures that the function checks for n >= 2^63 - and returns 0. - - (VC - 2017/07/10, HDFFV-10217) - - - Fixed for H5Ocopy doesn't work with open identifiers. - - Changes made so that raw data for dataset objects are copied from - cached info when possible instead of flushing objects to file and - read them back in again. - - (VC - 2017/07/05, HDFFV-7853) - - - An uninitialized struct could cause a memory access error when using - variable-length or reference types in a compressed, chunked dataset. - - A struct containing a callback function pointer and a pointer to some - associated data was used before initialization. This could cause a - memory access error and system crash. This could only occur under - unusual conditions when using variable-lenth and reference types in - a compressed, chunked dataset. - - On recent versions of Visual Studio, when built in debug mode, the - debug heap will complain and cause a crash if the code in question - is executed (this will cause the objcopy test to fail). - - (DER - 2017/11/21, HDFFV-10330) - - - Fixed collective metadata writes on file close. - - It was discovered that metadata was being written twice as part of - the parallel file close behavior, once independently and once - collectively. - - A fix for this error was included as part of the parallel compression - feature but remained undocumented here. 
- - (RAW - 2017/12/01, HDFFV-10272) - - - If an HDF5 file contains a filter pipeline message with a 'number of - filters' field that exceeds the maximum number of allowed filters, - the error handling code will attempt to dereference a NULL pointer. - - This issue was reported to The HDF Group as issue #CVE-2017-17505. - https://security-tracker.debian.org/tracker/CVE-2017-17505 - https://cve.mitre.org/cgi-bin/cvename.cgi?name=3DCVE-2017-17505 - - NOTE: The HDF5 C library cannot produce such a file. This condition - should only occur in a corrupt (or deliberately altered) file - or a file created by third-party software. - - This problem arose because the error handling code assumed that - the 'number of filters' field implied that a dynamic array of that - size had already been created and that the cleanup code should - iterate over that array and clean up each element's resources. If - an error occurred before the array has been allocated, this will - not be true. - - This has been changed so that the number of filters is set to - zero on errors. Additionally, the filter array traversal in the - error handling code now requires that the filter array not be NULL. - - (DER - 2018/02/06, HDFFV-10354) - - - If an HDF5 file contains a filter pipeline message which contains - a 'number of filters' field that exceeds the actual number of - filters in the message, the HDF5 C library will read off the end of - the read buffer. - - This issue was reported to The HDF Group as issue #CVE-2017-17506. - https://security-tracker.debian.org/tracker/CVE-2017-17506 - https://cve.mitre.org/cgi-bin/cvename.cgi?name=3DCVE-2017-17506 - - NOTE: The HDF5 C library cannot produce such a file. This condition - should only occur in a corrupt (or deliberately altered) file - or a file created by third-party software. - - The problem was fixed by passing the buffer size with the buffer - and ensuring that the pointer cannot be incremented off the end - of the buffer. 
A mismatch between the number of filters declared - and the actual number of filters will now invoke normal HDF5 - error handling. - - (DER - 2018/02/26, HDFFV-10355) - - - If an HDF5 file contains a malformed compound datatype with a - suitably large offset, the type conversion code can run off - the end of the type conversion buffer, causing a segmentation - fault. - - This issue was reported to The HDF Group as issue #CVE-2017-17507. - https://security-tracker.debian.org/tracker/CVE-2017-17506 - https://cve.mitre.org/cgi-bin/cvename.cgi?name=3DCVE-2017-17506 - - NOTE: The HDF5 C library cannot produce such a file. This condition - should only occur in a corrupt (or deliberately altered) file - or a file created by third-party software. - - THE HDF GROUP WILL NOT FIX THIS BUG AT THIS TIME - - Fixing this problem would involve updating the publicly visible - H5T_conv_t function pointer typedef and versioning the API calls - which use it. We normally only modify the public API during - major releases, so this bug will not be fixed at this time. - - (DER - 2018/02/26, HDFFV-10356) - - - If an HDF5 file contains a malformed compound type which contains - a member of size zero, a division by zero error will occur while - processing the type. - - This issue was reported to The HDF Group as issue #CVE-2017-17508. - https://security-tracker.debian.org/tracker/CVE-2017-17508 - https://cve.mitre.org/cgi-bin/cvename.cgi?name=3DCVE-2017-17508 - - NOTE: The HDF5 C library cannot produce such a file. This condition - should only occur in a corrupt (or deliberately altered) file - or a file created by third-party software. - - Checking for zero before dividing fixes the problem. Instead of the - division by zero, the normal HDF5 error handling is invoked. 
- - (DER - 2018/02/26, HDFFV-10357) - - - If an HDF5 file contains a malformed symbol table node that declares - it contains more symbols than it actually contains, the library - can run off the end of the metadata cache buffer while processing - the symbol table node. - - This issue was reported to The HDF Group as issue #CVE-2017-17509. - https://security-tracker.debian.org/tracker/CVE-2017-17509 - https://cve.mitre.org/cgi-bin/cvename.cgi?name=3DCVE-2017-17509 - - NOTE: The HDF5 C library cannot produce such a file. This condition - should only occur in a corrupt (or deliberately altered) file - or a file created by third-party software. - - Performing bounds checks on the buffer while processing fixes the - problem. Instead of the segmentation fault, the normal HDF5 error - handling is invoked. - - (DER - 2018/03/12, HDFFV-10358) - - - Fixed permissions passed to open(2) on file create. - - On Windows, the POSIX permissions passed to open(2) when creating files - were only incidentally correct. They are now set to the correct value of - (_S_IREAD | _S_IWRITE). - - On other platforms, the permissions were set to a mix of 666, 644, and - 000. They are now set uniformly to 666. - - (DER - 2017/04/28, HDFFV-9877) - - - The H5FD_FEAT_POSIX_COMPAT_HANDLE flag is no longer used to determine - if a virtual file driver (VFD) is compatible with SWMR. - - Use of this VFD feature flag was not in line with the documentation in - the public H5FDpublic.h file. In particular, it was being used as a - proxy for determining if SWMR I/O is allowed. This is unnecessary as we - already have a feature flag for this (H5FD_SUPPORTS_SWMR_IO). - - (DER - 2017/05/31, HDFFV-10214) - - - Configuration - ------------- - - CMake changes - - - Updated CMake commands configuration. - - A number of improvements were made to the CMake commands. Most - changes simplify usage or eliminate unused constructs. Also, - some changes support better cross-platform support. 
- - (ADB - 2018/02/01, HDFFV-10398) - - - Corrected usage of CMAKE_BUILD_TYPE variable. - - The use of the CMAKE_BUILD_TYPE is incorrect for multi-config - generators (Visual Studio and XCode) and is optional for single - config generators. Created a new macro to check - GLOBAL PROPERTY -> GENERATOR_IS_MULTI_CONFIG - Created two new HDF variable, HDF_BUILD_TYPE and HDF_CFG_BUILD_TYPE. - Defaults for these variables is "Release". - - (ADB - 2018/01/10, HDFFV-10385) - - - Added replacement of fortran flags if using static CRT. - - Added TARGET_STATIC_CRT_FLAGS call to HDFUseFortran.cmake file in - config/cmake_ext_mod folder. - - (ADB - 2018/01/08, HDFFV-10334) - - - - The hdf5 library used shared szip and zlib, which needlessly required - applications to link with the same szip and zlib libraries. - - Changed the target_link_libraries commands to use the static libs. - Removed improper link duplication of szip and zlib. - Adjusted the link dependencies and the link interface values of - the target_link_libraries commands. - - (ADB - 2017/11/14, HDFFV-10329) - - - CMake MPI - - CMake implementation for MPI was problematic and would create incorrect - MPI library references in the hdf5 libraries. - - Reworked the CMake MPI code to properly create CMake targets. Also merged - the latest CMake FindMPI.cmake changes to the local copy. This is necessary - until HDF changes the CMake minimum to 3.9 or greater. - - (ADB - 2017/11/02, HDFFV-10321) - - - Corrected FORTRAN_HAVE_C_LONG_DOUBLE processing in the autotools. - - A bug in the autotools Fortran processing code always set the - FORTRAN_HAVE_C_LONG_DOUBLE variable to be true regardless of - whether or not a C long double type was present. - - This would cause compilation failures on platforms where a C - long double type was not available and the Fortran wrappers - were being built. 
- - (DER - 2017/07/05, HDFFV-10247) - - - The deprecated --enable-production and --enable-debug configure options - failed to emit errors when passed an empty string - (e.g.: --enable-debug=""). - - Due to the way we checked for these options being set, it was possible - to avoid the error message and continue configuration if an empty string - was passed to the option. - - Any use of --enable-production or --enable-debug will now halt the - configuration step and emit a helpful error message - (use --enable-build-mode=debug|production instead). - - (DER - 2017/07/05, HDFFV-10248) - - - CMake - - Too many commands for POST_BUILD step caused command line to be - too big on windows. - - Changed foreach of copy command to use a custom command with the - use of the HDFTEST_COPY_FILE macro. - - (ADB - 2017/07/12, HDFFV-10254) - - - CMake test execution environment - - The parallel HDF5 test: 't_pread' assumed the use of autotools - and the directory structure associated with that testing approach. - Modified the test code to check whether the 'h5jam' utility can be - found in the same directory as the test executable (which is - preferred directory structure utilized by cmake) and if found - will invoke the tool directly rather than utilizing a relative path. - - (RAW - 2017/11/03, HDFFV-10318) - - - Fortran compilation fails for xlf and CMake builds. - - Fixed CMake shared library build for H5match_types and modules - - (MSB - 2017/12/19, HDFFV-10363) - - - Shared libraries fail test on OSX with Fortran enabled with CMake. - - Fixed by removing the F77 use of EQUIVALENCE and COMMON, replaced - using MODULES. Updated CMake. - - (MSB - 2017/12/07, HDFFV-10223) - - - The bin/trace script now emits an error code on problems and autogen.sh - will fail if bin/trace fails. - - The bin/trace script adds tracing functionality to public HDF5 API calls. - It is only of interest to developers who modify the HDF5 source code. 
- Previously, bin/trace just wrote an error message to stdout when it - encountered problems, so autogen.sh processing did not halt and a broken - version of the library could be built. The script will now return an - error code when it encounters problems, and autogen.sh will fail. - - This only affects users who run autogen.sh to rebuild the Autotools files, - which is not necessary to build HDF5 from source in official releases of the - library. CMake users are unaffected as bin/trace is not run via CMake - at this time. - - (DER - 2017/04/25, HDFFV-10178) - - - FC_BASENAME was changed from gfortran40 to gfortran in a few places. - - In the autotools, FC_BASENAME was set to gfortran40 in a few locations - (config/gnu-fflags and config/freebsd). This was probably a historical - artifact and did not seem to affect many users. - - The value is now correctly set to gfortran. - - (DER - 2017/05/26, HDFFV-10249) - - - The ar flags were changed to -cr (was: -cru) - - The autotools set the flags for ar to -cru by default. The -u flag, - which allows selective replacement of only the members which have - changed, raises warnings on some platforms, so the flags are now set to - -cr via AR_FLAGS in configure.ac. This causes the static library to - always be completely recreated from the object files on each build. - - (DER - 2017/11/15, HDFFV-10428) - - - Fortran - -------- - - Fixed compilation errors when using Intel 18 Fortran compilers - (MSB - 2017/11/3, HDFFV-10322) - - Tools - ----- - - h5clear - - An enhancement to the tool in setting a file's stored EOA. - - It was discovered that a crashed file's stored EOA in the superblock - was smaller than the actual file's EOF. When the file was reopened - and closed, the library truncated the file to the stored EOA. - - Added an option to the tool in setting the file's stored EOA in the - superblock to the maximum of (EOA, EOF) + increment. - An option was also added to print the file's EOA and EOF. 
- - (VC - 2018/03/14, HDFFV-10360) - - - h5repack - - h5repack changes the chunk parameters when a change of layout is not - specified and a filter is applied. - - HDFFV-10297, HDFFV-10319 reworked code for h5repack and h5diff code - in the tools library. The check for an existing layout was incorrectly - placed into an if block and not executed. The check was moved into - the normal path of the function. - - (ADB - 2018/02/21, HDFFV-10412) - - - h5dump - - The tools library will hide the error stack during file open. - - While this is preferable almost always, there are reasons to enable - display of the error stack when a tool will not open a file. Adding an - optional argument to the --enable-error-stack will provide this use case. - As an optional argument it will not affect the operation of the - --enable-error-stack. h5dump is the only tool to implement this change. - - (ADB - 2018/02/15, HDFFV-10384) - - - h5dump - - h5dump would output an indented blank line in the filters section. - - h5dump overused the h5tools_simple_prefix function, which is a - function intended to account for the data index (x,y,z) option. - Removed the function call for header information. - - (ADB - 2018/01/25, HDFFV-10396) - - - h5repack - - h5repack incorrectly searched internal object table for name. - - h5repack would search the table of objects for a name, if the - name did not match it tried to determine if the name without a - leading slash would match. The logic was flawed! The table - stored names(paths) without a leading slash and did a strstr - of the table path to the name. - The assumption was that if there was a difference of one then - it was a match, however "pressure" would match "/pressure" as - well as "/pressure1", "/pressure2", etc. Changed logic to remove - any leading slash and then do a full compare of the name. - - (ADB - 2018/01/18, HDFFV-10393) - - - h5repack - - h5repack failed to handle command line parameters for customer filters. 
- - User defined filter parameter conversions would fail when integers were - represented on the command line with character string - larger then 9 characters. Increased local variable array for storing - the current command line parameter to prevent buffer overflows. - - (ADB - 2018/01/17, HDFFV-10392) - - - h5diff - - h5diff seg faulted if comparing VL strings against fixed strings. - - Reworked solution for HDFFV-8625 and HDFFV-8639. Implemented the check - for string objects of same type in the diff_can_type function by - adding an if(tclass1 == H5T_STRING) block. This "if block" moves the - same check that was added for attributes to this function, which is - used by all object types. This function handles complex type structures. - Also added a new test file in h5diffgenttest for testing this issue - and removed the temporary files used in the test scripts. - - (ADB - 2018/01/04, HDFFV-8745) - - - h5repack - - h5repack failed to copy a dataset with existing filter. - - Reworked code for h5repack and h5diff code in the tools library. Added - improved error handling, cleanup of resources and checks of calls. - Modified H5Zfilter_avail and private function, H5Z_filter_avail. - Moved check for plugin from public to private function. Updated - H5P__set_filter due to change in H5Z_filter_avail. Updated tests. - Note, h5repack output display has changed to clarify the individual - steps of the repack process. The output indicates if an operation - applies to all objects. Lines with notation and no information - have been removed. - - (ADB - 2017/10/10, HDFFV-10297, HDFFV-10319) - - - h5repack - - h5repack always set the User Defined filter flag to H5Z_FLAG_MANDATORY. - - Added another parameter to the 'UD=' option to set the flag by default - to '0' or H5Z_FLAG_MANDATORY, the other choice is '1' or H5Z_FLAG_OPTIONAL. - - (ADB - 2017/08/31, HDFFV-10269) - - - h5ls - - h5ls generated error on stack when it encountered a H5S_NULL - dataspace. 
- - Adding checks for H5S_NULL before calling H5Sis_simple (located - in the h5tools_dump_mem function) fixed the issue. - - (ADB - 2017/08/17, HDFFV-10188) - - - h5repack - - Added tests to h5repack.sh.in to verify options added for paged - aggregation work as expected. - - (VC - 2017/08/03) - - - h5dump - - h5dump segfaulted on output of XML file. - - Function that escape'd strings used the full buffer length - instead of just the length of the replacement string in a - strncpy call. Using the correct length fixed the issue. - - (ADB - 2017/08/01, HDFFV-10256) - - - h5diff - - h5diff segfaulted on compare of a NULL variable length string. - - Improved h5diff compare of strings by adding a check for - NULL strings and setting the lengths to zero. - - (ADB - 2017/07/25, HDFFV-10246) - - - h5import - - h5import crashed trying to import data from a subset of a dataset. - - Improved h5import by adding the SUBSET keyword. h5import understands - to use the Count times the Block as the size of the dimensions. - Added INPUT_B_ORDER keyword to old-style configuration files. - The import from h5dump function expects the binary files to use native - types (FILE '-b' option) in the binary file. - - (ADB - 2017/06/15, HDFFV-10219) - - - h5repack - - h5repack did not maintain the creation order flag of the root - group. - - Improved h5repack by reading the creation order and applying the - flag to the new root group. Also added arguments to set the - order and index direction, which applies to the traversing of the - original file, on the command line. - - (ADB - 2017/05/26, HDFFV-8611) - - - h5diff - - h5diff failed to account for strpad type and null terminators - of char strings. Also, h5diff failed to account for string length - differences and would give a different result depending on file - order in the command line. - - Improved h5diff compare of strings and arrays by adding a check for - string lengths and if the strpad was null filled. 
- - (ADB - 2017/05/18, HDFFV-9055, HDFFV-10128) - - High-Level APIs: - ------ - - H5DOwrite_chunk() problems when overwriting an existing chunk with - no filters enabled. - - When overwriting chunks and no filters were being used, the library would - fail (when asserts are enabled, e.g. debug builds) or incorrectly - insert additional chunks instead of overwriting (when asserts are not - enabled, e.g. production builds). - - This has been fixed and a test was added to the hl/test_dset_opt test. - - (DER - 2017/05/11, HDFFV-10187) - - C++ APIs - -------- - - Removal of memory leaks. - - A private function was inadvertently called, causing memory leaks. This - is now fixed. - - (BMR - 2018/03/12 - User's reported in email) - - Testing - ------- - - Memory for three variables in testphdf5's coll_write_test was malloced - but not freed, leaking memory when running the test. - - The variables' memory is now freed. - - (LRK - 2018/03/12, HDFFV-10397) - - - Refactored the testpar/t_bigio.c test to include ALARM macros - - Changed the test to include the ALARM_ON and ALARM_OFF macros which - are intended to prevent nightly test hangs that have been observed - with this particular parallel test example. The code was also modified to - simplify status reporting (only from MPI rank 0) and additional - status checking added. 
- - (RAW - 2017/11/08, HDFFV-10301) - - -Supported Platforms -=================== - - Linux 2.6.32-696.16.1.el6.ppc64 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-18) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.4.7 20120313 (Red Hat 4.4.7-18) - (ostrich) GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-18) - IBM XL C/C++ V13.1 - IBM XL Fortran V15.1 - - Linux 3.10.0-327.10.1.el7 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (kituo/moohan) Version 4.8.5 20150623 (Red Hat 4.8.5-4) - Version 4.9.3, Version 5.2.0, - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 17.0.0.098 Build 20160721 - MPICH 3.1.4 compiled with GCC 4.9.3 - - SunOS 5.11 32- and 64-bit Sun C 5.12 SunOS_sparc - (emu) Sun Fortran 95 8.6 SunOS_sparc - Sun C++ 5.12 SunOS_sparc - - Windows 7 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - Visual Studio 2015 w/ Intel Fortran 16 (cmake) - - Windows 7 x64 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - Visual Studio 2015 w/ Intel Fortran 16 (cmake) - Visual Studio 2015 w/ Intel C, Fortran 2017 (cmake) - Visual Studio 2015 w/ MSMPI 8 (cmake) - Cygwin(CYGWIN_NT-6.1 2.8.0(0.309/5/3) - gcc and gfortran compilers (GCC 5.4.0) - (cmake and autotools) - - Windows 10 Visual Studio 2015 w/ Intel Fortran 16 (cmake) - Cygwin(CYGWIN_NT-6.1 2.8.0(0.309/5/3) - gcc and gfortran compilers (GCC 5.4.0) - (cmake and autotools) - - Windows 10 x64 Visual Studio 2015 w/ Intel Fortran 16 (cmake) - - Mac OS X Yosemite 10.10.5 Apple clang/clang++ version 6.1 from Xcode 7.0 - 64-bit gfortran GNU Fortran (GCC) 4.9.2 - (osx1010dev/osx1010test) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X El Capitan 10.11.6 Apple clang/clang++ version 7.3.0 from Xcode 7.3 - 64-bit gfortran GNU Fortran (GCC) 5.2.0 - (osx1011dev/osx1011test) Intel icc/icpc/ifort version 16.0.2 - - Mac OS Sierra 10.12.6 Apple LLVM version 8.1.0 (clang/clang++-802.0.42) - 64-bit 
gfortran GNU Fortran (GCC) 7.1.0 - (swallow/kite) Intel icc/icpc/ifort version 17.0.2 - - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90/ F90 C++ zlib SZIP - parallel F2003 parallel -Solaris2.11 32-bit n y/y n y y y -Solaris2.11 64-bit n y/n n y y y -Windows 7 y y/y n y y y -Windows 7 x64 y y/y y y y y -Windows 7 Cygwin n y/n n y y y -Windows 7 x64 Cygwin n y/n n y y y -Windows 10 y y/y n y y y -Windows 10 x64 y y/y n y y y -Mac OS X Mountain Lion 10.8.5 64-bit n y/y n y y y -Mac OS X Mavericks 10.9.5 64-bit n y/y n y y y -Mac OS X Yosemite 10.10.5 64-bit n y/y n y y y -Mac OS X El Capitan 10.11.6 64-bit n y/y n y y y -CentOS 7.2 Linux 2.6.32 x86_64 PGI n y/y n y y y -CentOS 7.2 Linux 2.6.32 x86_64 GNU y y/y y y y y -CentOS 7.2 Linux 2.6.32 x86_64 Intel n y/y n y y y -Linux 2.6.32-573.18.1.el6.ppc64 n y/y n y y y - - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -Solaris2.11 32-bit y y y y -Solaris2.11 64-bit y y y y -Windows 7 y y y y -Windows 7 x64 y y y y -Windows 7 Cygwin n n n y -Windows 7 x64 Cygwin n n n y -Windows 10 y y y y -Windows 10 x64 y y y y -Mac OS X Mountain Lion 10.8.5 64-bit y n y y -Mac OS X Mavericks 10.9.5 64-bit y n y y -Mac OS X Yosemite 10.10.5 64-bit y n y y -Mac OS X El Capitan 10.11.6 64-bit y n y y -CentOS 7.2 Linux 2.6.32 x86_64 PGI y y y n -CentOS 7.2 Linux 2.6.32 x86_64 GNU y y y y -CentOS 7.2 Linux 2.6.32 x86_64 Intel y y y n -Linux 2.6.32-573.18.1.el6.ppc64 y y y n - -Compiler versions for each platform are listed in the preceding -"Supported Platforms" table. - - -More Tested Platforms -===================== -The following platforms are not supported but have been tested for this release. 
- - Linux 2.6.32-573.22.1.el6 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (mayll/platypus) Version 4.4.7 20120313 - Version 4.9.3, 5.3.0, 6.2.0 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 17.10-0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 17.0.4.196 Build 20170411 - MPICH 3.1.4 compiled with GCC 4.9.3 - - Linux 3.10.0-327.18.2.el7 GNU C (gcc) and C++ (g++) compilers - #1 SMP x86_64 GNU/Linux Version 4.8.5 20150623 (Red Hat 4.8.5-4) - (jelly) with NAG Fortran Compiler Release 6.1(Tozai) - GCC Version 7.1.0 - OpenMPI 3.0.0-GCC-7.2.0-2.29 - Intel(R) C (icc) and C++ (icpc) compilers - Version 17.0.0.098 Build 20160721 - with NAG Fortran Compiler Release 6.1(Tozai) - - Linux 3.10.0-327.10.1.el7 MPICH 3.2 compiled with GCC 5.3.0 - #1 SMP x86_64 GNU/Linux - (moohan) - - Linux 2.6.32-573.18.1.el6.ppc64 MPICH mpich 3.1.4 compiled with - #1 SMP ppc64 GNU/Linux IBM XL C/C++ for Linux, V13.1 - (ostrich) and IBM XL Fortran for Linux, V15.1 - - Debian 8.4 3.16.0-4-amd64 #1 SMP Debian 3.16.36-1 x86_64 GNU/Linux - gcc, g++ (Debian 4.9.2-10) 4.9.2 - GNU Fortran (Debian 4.9.2-10) 4.9.2 - (cmake and autotools) - - Fedora 24 4.7.2-201.fc24.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc, g++ (GCC) 6.1.1 20160621 - (Red Hat 6.1.1-3) - GNU Fortran (GCC) 6.1.1 20160621 - (Red Hat 6.1.1-3) - (cmake and autotools) - - Ubuntu 16.04.1 4.4.0-38-generic #57-Ubuntu SMP x86_64 GNU/Linux - gcc, g++ (Ubuntu 5.4.0-6ubuntu1~16.04.2) - 5.4.0 20160609 - GNU Fortran (Ubuntu 5.4.0-6ubuntu1~16.04.2) - 5.4.0 20160609 - (cmake and autotools) - - -Known Problems -============== - - At present, metadata cache images may not be generated by parallel - applications. Parallel applications can read files with metadata cache - images, but since this is a collective operation, a deadlock is possible - if one or more processes do not participate. 
- - Three tests fail with OpenMPI 3.0.0/GCC-7.2.0-2.29: - testphdf5 (ecdsetw, selnone, cchunk1, cchunk3, cchunk4, and actualio) - t_shapesame (sscontig2) - t_pflush1/fails on exit - The first two tests fail attempting collective writes. - - Known problems in previous releases can be found in the HISTORY*.txt files - in the HDF5 source. Please report any new problems found to - help@hdfgroup.org. - - -%%%%1.10.1%%%% - -HDF5 version 1.10.1 released on 2017-04-27 -================================================================================ - -INTRODUCTION - -This document describes the differences between HDF5-1.10.0-patch1 and -HDF5 1.10.1, and contains information on the platforms tested and known -problems in HDF5-1.10.1. For more details check the HISTORY*.txt files -in the HDF5 source. - -Links to HDF5 1.10.1 source code, documentation, and additional materials can -be found on The HDF5 web page at: - - https://support.hdfgroup.org/HDF5/ - -The HDF5 1.10.1 release can be obtained from: - - https://support.hdfgroup.org/HDF5/release/obtain5.html - -User documentation for the snapshot can be accessed directly at this location: - - https://support.hdfgroup.org/HDF5/doc/ - -New features in the HDF5-1.10.x release series, including brief general -descriptions of some new and modified APIs, are described in the "New Features -in HDF5 Release 1.10" document: - - https://support.hdfgroup.org/HDF5/docNewFeatures/index.html - -All new and modified APIs are listed in detail in the "HDF5 Software Changes -from Release to Release" document, in the section "Release 10.1 (current -release) versus Release 1.10.0 - - https://support.hdfgroup.org/HDF5/doc/ADGuide/Changes.html - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS - -- Major New Features Introduced in HDF5 1.10.1 -- Other New Features and Enhancements -- Support for New Platforms, Languages, and Compilers -- Bug Fixes since HDF5-1.10.0-patch1 -- 
Supported Platforms -- Tested Configuration Features Summary -- More Tested Platforms -- Known Problems - - -Major New Features Introduced in HDF5 1.10.1 -============================================ - -For links to the RFCs and documentation in this section please view -https://support.hdfgroup.org/HDF5/docNewFeatures in a web browser. - -________________________________________ -Metadata Cache Image -________________________________________ - - HDF5 metadata is typically small, and scattered throughout the HDF5 file. - This can affect performance, particularly on large HPC systems. The - Metadata Cache Image feature can improve performance by writing the - metadata cache in a single block on file close, and then populating the - cache with the contents of this block on file open, thus avoiding the many - small I/O operations that would otherwise be required on file open and - close. See the RFC for complete details regarding this feature. Also, - see the Fine Tuning the Metadata Cache documentation. - - At present, metadata cache images may not be generated by parallel - applications. Parallel applications can read files with metadata cache - images, but since this is a collective operation, a deadlock is possible - if one or more processes do not participate. - -________________________________________ -Metadata Cache Evict on Close -________________________________________ - - The HDF5 library's metadata cache is fairly conservative about holding on - to HDF5 object metadata (object headers, chunk index structures, etc.), - which can cause the cache size to grow, resulting in memory pressure on - an application or system. The "evict on close" property will cause all - metadata for an object to be evicted from the cache as long as metadata - is not referenced from any other open object. See the Fine Tuning the - Metadata Cache documentation for information on the APIs. - - At present, evict on close is disabled in parallel builds. 
- -________________________________________ -Paged Aggregation -________________________________________ - - The current HDF5 file space allocation accumulates small pieces of metadata - and raw data in aggregator blocks which are not page aligned and vary - widely in sizes. The paged aggregation feature was implemented to provide - efficient paged access of these small pieces of metadata and raw data. - See the RFC for details. Also, see the File Space Management documentation. - -________________________________________ -Page Buffering -________________________________________ - - Small and random I/O accesses on parallel file systems result in poor - performance for applications. Page buffering in conjunction with paged - aggregation can improve performance by giving an application control of - minimizing HDF5 I/O requests to a specific granularity and alignment. - See the RFC for details. Also, see the Page Buffering documentation. - - At present, page buffering is disabled in parallel builds. - - - -Other New Features and Enhancements -=================================== - - Library - ------- - - Added a mechanism for disabling the SWMR file locking scheme. - - The file locking calls used in HDF5 1.10.0 (including patch1) - will fail when the underlying file system does not support file - locking or where locks have been disabled. To disable all file - locking operations, an environment variable named - HDF5_USE_FILE_LOCKING can be set to the five-character string - 'FALSE'. This does not fundamentally change HDF5 library - operation (aside from initial file open/create, SWMR is lock-free), - but users will have to be more careful about opening files - to avoid problematic access patterns (i.e.: multiple writers) - that the file locking was designed to prevent. 
- - Additionally, the error message that is emitted when file lock - operations set errno to ENOSYS (typical when file locking has been - disabled) has been updated to describe the problem and potential - resolution better. - - (DER, 2016/10/26, HDFFV-9918) - - - The return type of H5Pget_driver_info() has been changed from void * - to const void *. - - The pointer returned by this function points to internal library - memory and should not be freed by the user. - - (DER, 2016/11/04, HDFFV-10017) - - - The direct I/O VFD has been removed from the list of VFDs that - support SWMR. - - This configuration was never officially tested and several SWMR - tests fail when this VFD is set. - - (DER, 2016/11/03, HDFFV-10169) - - Configuration: - -------------- - - The minimum version of CMake required to build HDF5 is now 3.2.2. - - (ADB, 2017/01/10) - - - An --enable/disable-developer-warnings option has been added to - configure. - - This disables warnings that do not indicate poor code quality such - as -Winline and gcc's -Wsuggest-attribute. Developer warnings are - disabled by default. - - (DER, 2017/01/10) - - - A bin/restore.sh script was added that reverts autogen.sh processing. - - (DER, 2016/11/08) - - - CMake: Added NAMESPACE hdf5:: to package configuration files to allow - projects using installed HDF5 binaries built with CMake to link with - them without specifying the HDF5 library location via IMPORTED_LOCATION. - - (ABD, 2016/10/17, HDFFV-10003) - - - CMake: Changed the CTEST_BUILD_CONFIGURATION option to - CTEST_CONFIGURATION_TYPE as recommended by the CMake documentation. - - (ABD, 2016/10/17, HDFFV-9971) - - - Fortran Library: - ---------------- - - - The HDF5 Fortran library can now be compiled with the NAG compiler. 
- - (MSB, 2017/2/10, HDFFV-9973) - - - C++ Library: - ------------ - - - The following C++ API wrappers have been added to the C++ Library: - - // Sets/Gets the strategy and the threshold value that the library - // will employ in managing file space. - FileCreatPropList::setFileSpaceStrategy - H5Pset_file_space_strategy - FileCreatPropList::getFileSpaceStrategy - H5Pget_file_space_strategy - - // Sets/Gets the file space page size for paged aggregation. - FileCreatPropList::setFileSpacePagesize - H5Pset_file_space_page_size - FileCreatPropList::getFileSpacePagesize - H5Pget_file_space_page_size - - // Checks if the given ID is valid. - IdComponent::isValid - H5Iis_valid - - // Sets/Gets the number of soft or user-defined links that can be - // traversed before a failure occurs. - LinkAccPropList::setNumLinks - H5Pset_nlinks - LinkAccPropList::getNumLinks - H5Pget_nlinks - - // Returns a copy of the creation property list of a datatype. - DataType::getCreatePlist - H5Tget_create_plist - - // Opens/Closes an object within a group or a file, regardless of object - // type - Group::getObjId - H5Oopen - Group::closeObjId - H5Oclose - - // Maps elements of a virtual dataset to elements of the source dataset. - DSetCreatPropList::setVirtual - H5Pset_virtual - - // Gets general information about this file. - H5File::getFileInfo - H5Fget_info2 - - // Returns the number of members in a type. - IdComponent::getNumMembers - H5Inmembers - - // Determines if an element type exists. - IdComponent::typeExists - H5Itype_exists - - // Determines if an object exists. - H5Location::exists - H5Lexists. - - // Returns the header version of an HDF5 object. - H5Object::objVersion - H5Oget_info for version - - (BMR, 2017/03/20, HDFFV-10004, HDFFV-10139, HDFFV-10145) - - - New exception: ObjHeaderIException for H5O interface. - - (BMR, 2017/03/15, HDFFV-10145) - - - New class LinkAccPropList for link access property list, to be used by - wrappers of H5Lexists. 
- - (BMR, 2017/01/04, HDFFV-10145) - - - New constructors to open datatypes in ArrayType, CompType, DataType, - EnumType, FloatType, IntType, StrType, and VarLenType. - - (BMR, 2016/12/26, HDFFV-10056) - - - New member functions: - - DSetCreatPropList::setNbit() to setup N-bit compression for a dataset. - - ArrayType::getArrayNDims() const - ArrayType::getArrayDims() const - both to replace the non-const versions. - - (BMR, 2016/04/25, HDFFV-8623, HDFFV-9725) - - - Tools: - ------ - - The following options have been added to h5clear: - -s: clear the status_flags field in the file's superblock - -m: Remove the metadata cache image from the file - - (QAK, 2017/03/22, PR#361) - - - High-Level APIs: - --------------- - - Added New Fortran 2003 API for h5tbmake_table_f. - - (MSB, 2017/02/10, HDFFV-8486) - - - -Support for New Platforms, Languages, and Compilers -=================================================== - - - Added NAG compiler - - - -Bug Fixes since HDF5-1.10.0-patch1 release -================================== - - Library - ------- - - Outdated data structure was used in H5D_CHUNK_DEBUG blocks, causing - compilation errors when H5D_CHUNK_DEBUG was defined. This is fixed. - - (BMR, 2017/04/04, HDFFV-8089) - - - SWMR implementation in the HDF5 1.10.0 and 1.10.0-patch1 releases has a - broken metadata flush dependency that manifested itself with the following - error at the end of the HDF5 error stack: - - H5Dint.c line 846 in H5D__swmr_setup(): dataspace chunk index must be 0 - for SWMR access, chunkno = 1 - major: Dataset - minor: Bad value - - It was also reported at https://github.com/areaDetector/ADCore/issues/203 - - The flush dependency is fixed in this release. 
- - - Changed the plugins dlopen option from RTLD_NOW to RTLD_LAZY - - (ABD, 2016/12/12, PR#201) - - - A number of issues were fixed when reading/writing from/to corrupted - files to ensure that the library fails gracefully in these cases: - - * Writing to a corrupted file that has an object message which is - incorrectly marked as shareable on disk results in a buffer overflow / - invalid write instead of a clean error message. - - * Decoding data from a corrupted file with a dataset encoded with the - H5Z_NBIT decoding can result in a code execution vulnerability under - the context of the application using the HDF5 library. - - * When decoding an array datatype from a corrupted file, the HDF5 library - fails to return an error in production if the number of dimensions - decoded is greater than the maximum rank. - - * When decoding an "old style" array datatype from a corrupted file, the - HDF5 library fails to return an error in production if the number of - dimensions decoded is greater than the maximum rank. - - (NAF, 2016/10/06, HDFFV-9950, HDFFV-9951, HDFFV-9992, HDFFV-9993) - - - Fixed an error that would occur when copying an object with an attribute - which is a compound datatype consisting of a variable length string. - - (VC, 2016/08/24, HDFFV-7991) - - - H5DOappend will no longer fail if a dataset has no append callback - registered. - - (VC, 2016/08/14, HDFFV-9960) - - - Fixed an issue where H5Pset_alignment could result in misaligned blocks - with some input combinations, causing an assertion failure in debug mode. - - (NAF, 2016/08/11, HDFFV-9948) - - - Fixed a problem where a plugin compiled into a DLL in the default plugin - directory could not be found by the HDF5 library at runtime on Windows - when the HDF5_PLUGIN_PATH environment variable was not set. 
- - (ABD, 2016/08/01, HDFFV-9706) - - - Fixed an error that would occur when calling H5Adelete on an attribute - which is attached to an externally linked object in the target file and - whose datatype is a committed datatype in the main file. - - (VC, 2016/07/06, HDFFV-9940) - - - (a) Throw an error instead of assertion when v1 btree level hits the 1 - byte limit. - (b) Modifications to better handle error recovery when conversion by - h5format_convert fails. - - (VC, 2016/05/29, HDFFV-9434) - - - Fixed a memory leak where an array used by the library to track SWMR - read retries was unfreed. - - The leaked memory was small (on the order of a few tens of ints) and - allocated per-file. The memory was allocated (and lost) only when a - file was opened for SWMR access. - - (DER, 2016/04/27, HDFFV-9786) - - - Fixed a memory leak that could occur when opening a file for the first - time (including creating) and the call fails. - - This occurred when the file-driver-specific info was not cleaned up. - The amount of memory leaked varied with the file driver, but would - normally be less than 1 kB. - - (DER, 2016/12/06, HDFFV-10168) - - - Fixed a failure in collective metadata writes. - - This failure only appeared when collective metadata writes - were enabled (via H5Pset_coll_metadata_write()). - - (JRM, 2017/04/10, HDFFV-10055) - - - Parallel Library - ---------------- - - Fixed a bug that could occur when allocating a chunked dataset in parallel - with an alignment set and an alignment threshold greater than the chunk - size but less than or equal to the raw data aggregator size. - - (NAF, 2016/08/11, HDFFV-9969) - - - Configuration - ------------- - - Configuration will check for the strtoll and strtoull functions - before using alternatives - - (ABD, 2017/03/17, PR#340) - - - CMake uses a Windows pdb directory variable if available and - will generate both static and shared pdb files. 
- - (ABD, 2017/02/06, HDFFV-9875) - - - CMake now builds shared versions of tools. - - (ABD, 2017/02/01, HDFFV-10123) - - - Makefiles and test scripts have been updated to correctly remove files - created when running "make check" and to avoid removing any files under - source control. In-source builds followed by "make clean" and "make - distclean" should result in the original source files. - (LRK, 2017/01/17, HDFFV-10099) - - - The tools directory has been divided into two separate source and test - directories. This resolves a build dependency and, as a result, - 'make check' will no longer fail in the tools directory if 'make' was - not executed first. - - (ABD, 2016/10/27, HDFFV-9719) - - - CMake: Fixed a timeout error that would occasionally occur when running - the virtual file driver tests simultaneously due to test directory - and file name collisions. - - (ABD, 2016/09/19, HDFFV-9431) - - - CMake: Fixed a command length overflow error by converting custom - commands inside CMakeTest.cmake files into regular dependencies and - targets. - - (ABD, 2016/07/12, HDFFV-9939) - - - Fixed a problem preventing HDF5 to be built on 32-bit CYGWIN by - condensing cygwin configuration files into a single file and - removing outdated compiler settings. - - (ABD, 2016/07/12, HDFFV-9946) - - - Fortran - -------- - - Changed H5S_ALL_F from INTEGER to INTEGER(HID_T) - - (MSB, 2016/10/14, HDFFV-9987) - - - Tools - ----- - - h5diff now correctly ignores strpad in comparing strings. - - (ABD, 2017/03/03, HDFFV-10128) - - - h5repack now correctly parses the command line filter options. - - (ABD, 2017/01/24, HDFFV-10046) - - - h5diff now correctly returns an error when it cannot read data due - to an unavailable filter plugin. - - (ADB 2017/01/18, HDFFV-9994 ) - - - Fixed an error in the compiler wrapper scripts (h5cc, h5fc, et al.) 
- in which they would erroneously drop the file argument specified via - the -o flag when the -o flag was specified before the -c flag on the - command line, resulting in a failure to compile. - - (LRK, 2016/11/04, HDFFV-9938, HDFFV-9530) - - - h5repack User Defined (UD) filter parameters were not parsed correctly. - - The UD filter parameters were not being parsed correctly. Reworked coding - section to parse the correct values and verify number of parameters. - - (ABD, 2016/10/19, HDFFV-9996, HDFFV-9974, HDFFV-9515, HDFFV-9039) - - - h5repack allows the --enable-error-stack option on the command line. - - (ADB, 2016/08/08, HDFFV-9775) - - - C++ APIs - -------- - - The member function H5Location::getNumObjs() is moved to - class Group because the objects are in a group or a file only, - and H5Object::getNumAttrs to H5Location to get the number of - attributes at a given location. - - (BMR, 2017/03/17, PR#466) - - - Due to the change in the C API, the overloaded functions of - PropList::setProperty now need const for some arguments. They are - planned for deprecation and are replaced by new versions with proper - consts. - - (BMR, 2017/03/17, PR#344) - - - The high-level API Packet Table (PT) did not write data correctly when - the datatype is a compound type that has string type as one of the - members. This problem started in 1.8.15, after the fix of HDFFV-9042 - was applied, which caused the Packet Table to use native type to access - the data. It should be up to the application to specify whether the - buffer to be read into memory is in the machine's native architecture. - Thus, the PT is fixed to not use native type but to make a copy of the - user's provided datatype during creation or the packet table's datatype - during opening. If an application wishes to use native type to read the - data, then the application will request that. However, the Packet Table - doesn't provide a way to specify memory datatype in this release. 
This - feature will be available in future releases. - - (BMR, 2016/10/27, HDFFV-9758) - - - The obsolete macros H5_NO_NAMESPACE and H5_NO_STD have been removed from - the HDF5 C++ API library. - - (BMR, 2016/10/23, HDFFV-9532) - - - The problem where a user-defined function cannot access both, attribute - and dataset, using only one argument is now fixed. - - (BMR, 2016/10/11, HDFFV-9920) - - - In-memory array information, ArrayType::rank and - ArrayType::dimensions, were removed. This is an implementation - detail and should not affect applications. - - (BMR, 2016/04/25, HDFFV-9725) - - - Testing - ------- - - Fixed a problem that caused tests using SWMR to occasionally fail when - running "make check" using parallel make. - - (LRK, 2016/03/22, PR#338, PR#346, PR#358) - - -Supported Platforms -=================== - - Linux 2.6.32-573.18.1.el6.ppc64 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-4) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.4.7 20120313 (Red Hat 4.4.7-4) - (ostrich) GNU Fortran (GCC) 4.4.7 20120313 - (Red Hat 4.4.7-4) - IBM XL C/C++ V13.1 - IBM XL Fortran V15.1 - - Linux 3.10.0-327.10.1.el7 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (kituo/moohan) Version 4.8.5 20150623 (Red Hat 4.8.5-4) - Version 4.9.3, Version 5.2.0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 15.0.3.187 Build 20150407 - MPICH 3.1.4 compiled with GCC 4.9.3 - - SunOS 5.11 32- and 64-bit Sun C 5.12 SunOS_sparc - (emu) Sun Fortran 95 8.6 SunOS_sparc - Sun C++ 5.12 SunOS_sparc - - Windows 7 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - Visual Studio 2015 w/ Intel Fortran 16 (cmake) - - Windows 7 x64 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - Visual Studio 2015 w/ Intel Fortran 16 (cmake) - Visual Studio 2015 w/ MSMPI 8 (cmake) - Cygwin(CYGWIN_NT-6.1 2.8.0(0.309/5/3) - gcc and gfortran compilers (GCC 5.4.0) - (cmake and 
autotools) - - Windows 10 Visual Studio 2015 w/ Intel Fortran 16 (cmake) - Cygwin(CYGWIN_NT-6.1 2.8.0(0.309/5/3) - gcc and gfortran compilers (GCC 5.4.0) - (cmake and autotools) - - Windows 10 x64 Visual Studio 2015 w/ Intel Fortran 16 (cmake) - - Mac OS X Mt. Lion 10.8.5 Apple clang/clang++ version 5.1 from Xcode 5.1 - 64-bit gfortran GNU Fortran (GCC) 4.8.2 - (swallow/kite) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X Mavericks 10.9.5 Apple clang/clang++ version 6.0 from Xcode 6.2 - 64-bit gfortran GNU Fortran (GCC) 4.9.2 - (wren/quail) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X Yosemite 10.10.5 Apple clang/clang++ version 6.1 from Xcode 7.0 - 64-bit gfortran GNU Fortran (GCC) 4.9.2 - (osx1010dev/osx1010test) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X El Capitan 10.11.6 Apple clang/clang++ version 7.3 from Xcode 7.3 - 64-bit gfortran GNU Fortran (GCC) 5.2.0 - (osx1010dev/osx1010test) Intel icc/icpc/ifort version 16.0.2 - - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90/ F90 C++ zlib SZIP - parallel F2003 parallel -Solaris2.11 32-bit n y/y n y y y -Solaris2.11 64-bit n y/n n y y y -Windows 7 y y/y n y y y -Windows 7 x64 y y/y y y y y -Windows 7 Cygwin n y/n n y y y -Windows 7 x64 Cygwin n y/n n y y y -Windows 10 y y/y n y y y -Windows 10 x64 y y/y n y y y -Mac OS X Mountain Lion 10.8.5 64-bit n y/y n y y y -Mac OS X Mavericks 10.9.5 64-bit n y/y n y y y -Mac OS X Yosemite 10.10.5 64-bit n y/y n y y y -Mac OS X El Capitan 10.11.6 64-bit n y/y n y y y -CentOS 7.2 Linux 2.6.32 x86_64 PGI n y/y n y y y -CentOS 7.2 Linux 2.6.32 x86_64 GNU y y/y y y y y -CentOS 7.2 Linux 2.6.32 x86_64 Intel n y/y n y y y -Linux 2.6.32-573.18.1.el6.ppc64 n y/y n y y y - - 
-Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -Solaris2.11 32-bit y y y y -Solaris2.11 64-bit y y y y -Windows 7 y y y y -Windows 7 x64 y y y y -Windows 7 Cygwin n n n y -Windows 7 x64 Cygwin n n n y -Windows 10 y y y y -Windows 10 x64 y y y y -Mac OS X Mountain Lion 10.8.5 64-bit y n y y -Mac OS X Mavericks 10.9.5 64-bit y n y y -Mac OS X Yosemite 10.10.5 64-bit y n y y -Mac OS X El Capitan 10.11.6 64-bit y n y y -CentOS 7.2 Linux 2.6.32 x86_64 PGI y y y n -CentOS 7.2 Linux 2.6.32 x86_64 GNU y y y y -CentOS 7.2 Linux 2.6.32 x86_64 Intel y y y n -Linux 2.6.32-573.18.1.el6.ppc64 y y y n - -Compiler versions for each platform are listed in the preceding -"Supported Platforms" table. - - -More Tested Platforms -===================== - -The following platforms are not supported but have been tested for this release. - - Linux 2.6.32-573.22.1.el6 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (mayll/platypus) Version 4.4.7 20120313 - Version 4.8.4 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 16.10-0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 15.0.3.187 (Build 20150407) - MPICH 3.1.4 compiled with GCC 4.9.3 - - Linux 3.10.0-327.18.2.el7 GNU C (gcc) and C++ (g++) compilers - #1 SMP x86_64 GNU/Linux Version 4.8.5 20150623 (Red Hat 4.8.5-4) - (jelly) with NAG Fortran Compiler Release 6.1(Tozai) - Intel(R) C (icc) and C++ (icpc) compilers - Version 15.0.3.187 (Build 20150407) - with NAG Fortran Compiler Release 6.1(Tozai) - - Linux 2.6.32-573.18.1.el6.ppc64 MPICH mpich 3.1.4 compiled with - #1 SMP ppc64 GNU/Linux IBM XL C/C++ for Linux, V13.1 - (ostrich) and IBM XL Fortran for Linux, V15.1 - - Debian 8.4 3.16.0-4-amd64 #1 SMP Debian 3.16.36-1 x86_64 GNU/Linux - gcc, g++ (Debian 4.9.2-10) 4.9.2 - GNU Fortran (Debian 4.9.2-10) 4.9.2 - (cmake and autotools) - - Fedora 24 4.7.2-201.fc24.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc, g++ (GCC) 6.1.1 20160621 - (Red Hat 
6.1.1-3) - GNU Fortran (GCC) 6.1.1 20160621 - (Red Hat 6.1.1-3) - (cmake and autotools) - - Ubuntu 16.04.1 4.4.0-38-generic #57-Ubuntu SMP x86_64 GNU/Linux - gcc, g++ (Ubuntu 5.4.0-6ubuntu1~16.04.2) - 5.4.0 20160609 - GNU Fortran (Ubuntu 5.4.0-6ubuntu1~16.04.2) - 5.4.0 20160609 - (cmake and autotools) - - -Known Problems -============== - - At present, metadata cache images may not be generated by parallel - applications. Parallel applications can read files with metadata cache - images, but since this is a collective operation, a deadlock is possible - if one or more processes do not participate. - - Known problems in previous releases can be found in the HISTORY*.txt files - in the HDF5 source. Please report any new problems found to - help@hdfgroup.org. - - -%%%%1.10.0-patch1%%%% - - -HDF5 version 1.10.0-patch1 released on 2016-05-23 -================================================================================ - -INTRODUCTION - -This document describes the differences between HDF5-1.8 series and -HDF5 1.10.0 releases, and contains information on the platforms -tested. 
- -Links to HDF5 1.10.0 source code can be found on The HDF Group's -development FTP server at the following location: - - https://www.hdfgroup.org/HDF5/release/obtain5110.html - -User documentation can be accessed directly at this location: - - https://www.hdfgroup.org/HDF5/docNewFeatures/ - -For more information, see the HDF5 home page: - - https://www.hdfgroup.org/HDF5/ - -If you have any questions or comments, please send them to the HDF -Help Desk: - - help@hdfgroup.org - - - -CONTENTS - -- New Features -- Issues Addressed in this Release -- Supported Platforms -- Tested Configuration Features Summary -- More Tested Platforms -- Known Problems and Limitations - - - -New Features -============ -This release supports the following features: - - Configuration - ------------- - - API Compatibility with HDF5 1.8 Flag Was Added - - The 1.10 version of the HDF5 Library can be configured to operate - identically to the 1.8 library with the --with-default-api-version=v18 - configure flag. This allows existing code to be compiled with the 1.10 - library without requiring immediate changes to the application source - code. For additional configuration options and other details, see - "API Compatibility Macros in HDF5" at - https://www.hdfgroup.org/HDF5/doc/RM/APICompatMacros.html. - - - Autotools Configuration Has Been Extensively Reworked - - The autotools configuration options have been updated to allow more - fine-grained control of the build options and to correct some bugs. - See configure --help for comprehensive information on each option. - - Specific changes: - - * --enable-debug and --enable-production are no longer accepted. - Use --enable-build-mode=(debug | production) instead. These set - appropriate defaults for symbols, optimizations, and other - configuration options. These defaults can be overridden by the - user. - - * Extra debug output messages are no longer enabled with - --enable-debug=. Use --enable-internal-debug= - instead. 
- - * A new --enable-symbols option allows symbols to be generated - independently of the build mode. --disable-symbols can be used - to strip symbols from the binary. - - * A new --enable-asserts option sets/unsets NDEBUG. This is - independent of the build mode. This also enables some extra - low-overhead debug checks in the library. - - * A new --enable-profiling option sets profiling flags. This is - independent of the build mode. - - * A new --enable-optimization option sets the optimization level. - This is independent of the build mode. - - * Many of these options can take a flags string that will be used - to build the library. This can be useful for specifying custom - optimization flags such as -Os and -Ofast. - - * gnu C++ and Fortran use configure sub-files that update the - build flags and turn on warnings. The increase in warnings when - building these wrapper libraries is due to these flag changes - and not to a decrease in code quality. - - * The option to clear file buffers has been removed. Any buffer that - will eventually be written to disk will now always be memset - to zero. This prevents the previous contents of the buffer from - being written to the disk if the buffer contents are not - completely overwritten, which has security implications. - - - LFS Changes - - The way the autotools handle large file support (LFS) has been - overhauled in this release. - - * We assume ftello and fseeko exist - - * We no longer explicitly use the *64 I/O functions. Instead, we - rely on a mapping provided by _FILE_OFFSET_BITS or its equivalent. - - * _LARGEFILE(64)_SOURCE is no longer exported via AM_CPPFLAGS. - - - - Parallel Library - ----------------- - - Collective Metadata I/O - - Calls for HDF5 metadata can result in many small reads and writes. 
- On metadata reads, collective metadata I/O can improve performance - by allowing the library to perform optimizations when reading the - metadata by having one rank read the data and broadcasting it to - all other ranks. - - Collective metadata I/O improves metadata write performance through - the construction of an MPI derived datatype that is then written - collectively in a single call. For more information, see - https://www.hdfgroup.org/HDF5/docNewFeatures/NewFeaturesCollectiveMetadataIoDocs.html. - - - - Library - -------- - - Concurrent Access to HDF5 Files - Single Writer/ Multiple Reader (SWMR) - - The Single Writer/ Multiple Reader or SWMR feature enables users to - read data concurrently while writing it. Communications between the - processes and file locking are not required. The processes can run - on the same or on different platforms as long as they share a common - file system that is POSIX compliant. For more information, see the - Single-Writer/Multiple-Reader (SWMR) documentation at - https://www.hdfgroup.org/HDF5/docNewFeatures/NewFeaturesSwmrDocs.html. - - - Virtual Dataset (VDS) - - The VDS feature enables data to be accessed across HDF5 files - using standard HDF5 objects such as groups and datasets without - rewriting or rearranging the data. An HDF5 virtual dataset (VDS) - is an HDF5 dataset that is composed of source HDF5 datasets in - a predefined mapping. VDS can be used with the SWMR feature. For - documentation, check - https://www.hdfgroup.org/HDF5/docNewFeatures/NewFeaturesVirtualDatasetDocs.html. - - - Persistent Free File Space Tracking - - Usage patterns when working with an HDF5 file sometimes result in - wasted space within the file. This can also impair access times - when working with the resulting files. The new file space management - feature provides strategies for managing space in a file to improve - performance in both of these areas. 
For more information, see - https://www.hdfgroup.org/HDF5/docNewFeatures/NewFeaturesFileSpaceMgmtDocs.html. - - - Version 3 Metadata Cache - - The version 3 metadata cache moves management of metadata I/O from - the clients to the metadata cache proper. This change is essential for - SWMR and other features that have yet to be released. - - - - C++ Library - ------------ - - New Member Function Added to H5::ArrayType - - The assignment operator ArrayType::operator= was added because - ArrayType has pointer data members. - - (BMR - 2016/03/07, HDFFV-9562) - - - - Tools - ------ - - h5watch - - The h5watch tool allows users to output new records appended to - a dataset under SWMR access as it grows. The functionality is - similar to the Unix user command "tail" with the follow option, - which outputs appended data as the file grows. For more - information, see - https://www.hdfgroup.org/HDF5/docNewFeatures/NewFeaturesSwmrDocs.html#Tools. - - - h5format_convert - - The h5format_convert tool allows users to convert the indexing - type of a chunked dataset made with a 1.10.x version of the HDF5 - Library when the latest file format is used to the 1.8.x version 1 B-tree indexing - type. For example, datasets created using SWMR access, can be - converted to be accessed by the HDF5 1.18 library and tools. The - tool does not rewrite raw data, but it does rewrite HDF5 metadata. - - - - High-Level APIs - ---------------- - - H5DOappend - - The function appends data to a dataset along a specified dimension. - - - C Packet Table API - ------------------ - - Replacement of a Public Function with H5PTcreate - - The existing function H5PTcreate_fl limits applications so they - can use the deflate compression only. The public function - H5PTcreate has been added to replace H5PTcreate_fl. H5PTcreate - takes a property list identifier to provide flexibility on - creation properties. 
- - (BMR - 2016/03/04, HDFFV-8623) - - - New Public Functions: H5PTget_dataset and H5PTget_type - - Two accessor functions have been added. H5PTget_dataset returns - the identifier of the dataset associated with the packet table, - and H5PTget_type returns the identifier of the datatype used by - the packet table. - - (BMR, 2016/03/04, HDFFV-8623) - - - Regarding #ifdef VLPT_REMOVED - - The #ifdef VLPT_REMOVED blocks have been removed from the packet - table (PT) library source except for the following functions: - + H5PTis_varlen() has been made available again - + H5PTfree_vlen_readbuff() is now H5PTfree_vlen_buff() - - (BMR - 2016/03/04, HDFFV-442) - - C++ Packet Table API - -------------------- - - New Constructor Added to FL_PacketTable - - An overloaded constructor has been added to FL_PacketTable and - takes a property list identifier to provide flexibility on - creation properties. - - (BMR - 2016/03/08, HDFFV-8623) - - - New Public Functions - - Two accessor wrappers are added to class PacketTable. - PacketTable::GetDataset() returns the identifier of the dataset - associated with the packet table, and PacketTable::GetDatatype() - returns the identifier of the datatype that the packet table uses. - - (BMR - 2016/03/04, HDFFV-8623) - - - Member Functions with "char*" as an Argument - - Overloaded functions were added to provide the "const char*" - argument; the existing version will be deprecated in future - releases. - - (BMR - 2016/03/04, HDFFV-8623) - - - Regarding #ifdef VLPT_REMOVED - - The #ifdef VLPT_REMOVED blocks have been removed from the packet - table library source code except for the following functions: - + VL_PacketTable::IsVariableLength() was moved to PacketTable - + VL_PacketTable::FreeReadBuff() is now PacketTable::FreeBuff() - - (BMR - 2016/03/04, HDFFV-442) - - - - Java Wrapper Library - -------------------- - - The Java HDF5 JNI library has been integrated into the HDF5 repository. 
- The configure option is "--enable-java", and the CMake option is - HDF5_BUILD_JAVA:BOOL=ON. The package hierarchy has changed from the - HDF5 1.8 JNI, which was "ncsa.hdf.hdflib.hdf5", to HDF5 1.10, - "hdf.hdflib.hdf5". - - A number of new APIs were added including some for VDS and SWMR. - - - - Other Important Changes - ----------------------- - - The hid_t type was changed from 32-bit to a 64-bit value. - - - -Issues Addressed in this Release Since 1.10.0 -============================================= - - - h5diff would return from a compare attributes abnormally if one of the datatypes - was a vlen. This resulted in a memory leak as well as an incorrect report of - attribute comparison. - - Fixed. - (ADB - 2016/04/26, HDFFV-9784) - - - The JUnit-interface test may fail on Solaris platforms. The result of - a test for verifying the content of the error stack to stdout is - in a different order on Solaris than on other platforms. - - This test is skipped on Solaris. - (ADB - 2016/04/21, HDFFV-9734) - - - When building HDF5 with Java using CMake and specifying Debug for CMAKE_BUILD_TYPE, - there was a missing command argument for the tests of the examples. - - Fixed. - (ADB - 2016/04/21, HDFFV-9743) - - - Changed h5diff to print a warning when a dataset is virtual, enabling - the data to be compared. In addition h5repack failed to copy the data - of a virtual dataset to the new file. Function H5D__get_space_status changed - to correctly determine the H5D_space_status_t allocation value. - - CMake added the Fixed Array indexing tests that were only in the autotools - test scripts. - - Fixed and tests added for vds issues. - (ADB,NAF - 2016/04/21, HDFFV-9756) - - - CMake added the h5format_convert tool and tests that were only in the autotools - build and test scripts. The autotools test script was reworked to allow CMake - to execute the test suite in parallel. - - Also, h5clear tool and tests were added to the misc folder. - - Fixed. 
- (ADB - 2016/04/21, HDFFV-9766) - - - CMake added the h5watch tool and argument tests that were only in the autotools - build and test scripts. The POSIX only tests were not added to CMake. - - CMake HL tools files were refactored to move the CMake test scripts into each tool folder. - - Fixed. - (ADB - 2016/04/21, HDFFV-9770) - - - Configure fails to detect valid real KINDs on FreeBSD 9.3 (i386) with Fortran enabled. - - Fixed. Added the exponential option to SELECTED_REAL_KIND to distinguish - KINDs of same precision - (MSB - 2016/05/14,HDFFV-9912) - - - - Corrected the f90 H5AWRITE_F integer interface's buf to be INTENT(IN). - (MSB - 2016/05/14) - - - Configure fails in sed command on FreeBSD 9.3 (i386) with Fortran enabled. - - Fixed. - (MSB - 2016/05/14,HDFFV-9912) - - - Compile time error in H5f90global.F90 with IBM XL Fortran 14.1.0.13 on BG/Q with Fortran - enabled. - - Fixed. - (MSB - 2016/05/16,HDFFV-9917) - - - A cmake build with Fortran enabled does not install module h5fortkit - - Fixed. - (MSB - 2016/05/23,HDFFV-9923) - - -Issues Addressed in this Release Since alpha1 -============================================= - - - H5Pget_virtual_printf_gap, H5Pget_virtual_view, H5Pget_efile_prefix - - The correct access property list settings from the - H5Pget_virtual_printf_gap, H5Pget_virtual_view, and - H5Pget_efile_prefix function calls could not be retrieved - using H5Dget_access_plist(). - - Fixed. - - (DER and NAF - 2016/03/14, HDFFV-9716) - - - h5dump - - When h5dump was provided with the name of a non-existing file or - when optional arguments were the last option on the command line, - h5dump would segfault. - - Fixed. - - (ADB 2016/02/28 HDFFV-9639, HDFFV-9684) - - - No Error Message for Corrupt Metadata - - The HDF5 Library did not propagate an error when it encountered - corrupt metadata in an HDF5 file. The issue was fixed for a - specific file provided by a user. If you still see the problem, - please contact help@hdfgroup.org - - Fixed. 
- - (MC - 2016/02/18, HDFFV-9670) - - - Problem Reading Chunked Datasets with a String Datatype Larger - Than the Chunk Size in Bytes - - When the latest file format was used and when a chunked dataset - was created with a datatype with the size bigger than a chunk - size, the data could not be read back. The issue was reported - for chunked datasets with a string datatype and was confirmed - for other datatypes with the sizes bigger than the chunk size in - bytes. - - Fixed. - - (JM - 2016/02/13, HDFFV-9672) - - - Control over the Location of External Files - - Users were unable to specify the locations of external files. - - Two APIs - H5Pget_efile_prefix and H5Pset_efile_prefix - were - added so that users could specify the locations of external files. - - (DER - 2016/02/04, HDFFV-8740) - - - -Issues Addressed in this Release Since alpha0 -============================================= - - h5format_convert - - The h5format_convert tool did not downgrade the version of the - superblock. - - Fixed. The tool now will downgrade the version of the superblock. - - (EIP 2016/01/11) - - - Crashes with multiple threads: invalid pointers - - It was reported that alpha0 crashed when used with multiple - threads. The issue exists in the HDF5 Library versions 1.8 and - 1.9. The problem is related to a shared file pointer used in some - miscellaneous data structures. The thread-safe library exposed - paths in the library where a file pointer became invalid. - - The alpha1 release contains the fixes for the specific use case - as described in HDFFV-9643. We will keep working on identifying - and fixing other paths in the library with similar problems. - - (EIP - 2016/01/15, HDFFV-9643) - - - -Supported Platforms -=================== -The following platforms are supported and have been tested for this release. -They are built with the configure process unless specified otherwise. 
- - AIX 6.1 xlc/xlc_r 10.1.0.5 - (NASA G-ADA) xlC/xlC_r 10.1.0.5 - xlf90/xlf90_r 12.1.0.6 - - Linux 2.6.32-573.22.1.el6 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (mayll/platypus) Version 4.4.7 20120313 (Red Hat 4.4.7-16) - Version 4.9.3, Version 5.2.0 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 15.7-0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 15.0.3.187 Build 20150407 - MPICH 3.1.4 compiled with GCC 4.9.3 - - Linux 2.6.32-573.18.1.el6.ppc64 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-11) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.4.7 20120313 (Red Hat 4.4.7-11) - (ostrich) GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-11) - IBM XL C/C++ V13.1 - IBM XL Fortran V15.1 - - Linux 3.10.0-327.10.1.el7 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (kituo/moohan) Version 4.8.5 20150623 (Red Hat 4.8.5-4) - Version 4.9.3, Version 5.2.0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 15.0.3.187 Build 20150407 - MPICH 3.1.4 compiled with GCC 4.9.3 - - SunOS 5.11 32- and 64-bit Sun C 5.12 SunOS_sparc - (emu) Sun Fortran 95 8.6 SunOS_sparc - Sun C++ 5.12 SunOS_sparc - - Windows 7 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - Visual Studio 2015 w/ Intel Fortran 16 (cmake) - Cygwin(CYGWIN_NT-6.1 2.2.1(0.289/5/3) gcc(4.9.3) compiler and gfortran) - (cmake and autotools) - - Windows 7 x64 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - Visual Studio 2015 w/ Intel Fortran 16 (cmake) - - Windows 8.1 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - - Windows 8.1 x64 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - - Mac OS X Mt. 
Lion 10.8.5 Apple clang/clang++ version 5.1 from Xcode 5.1 - 64-bit gfortran GNU Fortran (GCC) 4.8.2 - (swallow/kite) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X Mavericks 10.9.5 Apple clang/clang++ version 6.0 from Xcode 6.2 - 64-bit gfortran GNU Fortran (GCC) 4.9.2 - (wren/quail) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X Yosemite 10.10.5 Apple clang/clang++ version 6.0 from Xcode 7.0 - 64-bit gfortran GNU Fortran (GCC) 4.9.2 - (osx1010dev/osx1010test) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X El Capitan 10.11.4 Apple clang/clang++ version 7.3.0 from Xcode 7.3 - 64-bit gfortran GNU Fortran (GCC) 5.2.0 - (osx1010dev/osx1010test) Intel icc/icpc/ifort version 15.0.3 - - - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90/ F90 C++ zlib SZIP - parallel F2003 parallel -SunOS 5.11 32-bit n y/y n y y y -SunOS 5.11 64-bit n y/y n y y y -Windows 7 y y/y n y y y -Windows 7 x64 y y/y n y y y -Windows 7 Cygwin n y/y n y y n -Windows 8.1 n y/y n y y y -Windows 8.1 x64 n y/y n y y y -Mac OS X Mountain Lion 10.8.5 64-bit n y/y n y y y -Mac OS X Mavericks 10.9.5 64-bit n y/y n y y y -Mac OS X Yosemeti 10.10.5 64-bit n y/y n y y y -AIX 6.1 32- and 64-bit n y/n n y y y -CentOS 6.7 Linux 2.6.32 x86_64 GNU y y/y y y y y -CentOS 6.7 Linux 2.6.32 x86_64 Intel n y/y n y y y -CentOS 6.7 Linux 2.6.32 x86_64 PGI n y/y n y y y -CentOS 7.1 Linux 3.10.0 x86_64 GNU y y/y y y y y -CentOS 7.1 Linux 3.10.0 x86_64 Intel n y/y n y y y -Linux 2.6.32-431.11.2.el6.ppc64 n y/n n y y y - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -SunOS 5.11 32-bit y y y y -SunOS 5.11 64-bit y y y y -Windows 7 y y y y -Windows 7 x64 y y y y -Windows 7 Cygwin n n n y -Windows 8.1 y 
y y y -Windows 8.1 x64 y y y y -Mac OS X Mountain Lion 10.8.5 64-bit y n y y -Mac OS X Mavericks 10.9.5 64-bit y n y y -Mac OS X Yosemeti 10.10.5 64-bit y n y y -AIX 6.1 32- and 64-bit y n n y -CentOS 6.7 Linux 2.6.32 x86_64 GNU y y y y -CentOS 6.7 Linux 2.6.32 x86_64 Intel y y y y -CentOS 6.7 Linux 2.6.32 x86_64 PGI y y y y -CentOS 7.1 Linux 3.10.0 x86_64 GNU y y y y -CentOS 7.1 Linux 3.10.0 x86_64 Intel y y y y -Linux 2.6.32-431.11.2.el6.ppc64 y y y y - -Compiler versions for each platform are listed in the preceding -"Supported Platforms" table. - - - -More Tested Platforms -===================== -The following platforms are not supported but have been tested for this release. - - Linux 2.6.18-431.11.2.el6 g95 (GCC 4.0.3 (g95 0.94!) - #1 SMP x86_64 GNU/Linux - (platypus) - - Windows 7 Visual Studio 2008 (cmake) - - Windows 7 x64 Visual Studio 2008 (cmake) - - Windows 7 x64 Visual Studio 2010 (cmake) with SWMR using GPFS - - Windows 10 Visual Studio 2013 w/ Intel Fortran 15 (cmake) - - Windows 10 x64 Visual Studio 2013 w/ Intel Fortran 15 (cmake) - - Debian7.5.0 3.2.0-4-amd64 #1 SMP Debian 3.2.51-1 x86_64 GNU/Linux - gcc (Debian 4.7.2-5) 4.7.2 - GNU Fortran (Debian 4.7.2-5) 4.7.2 - (cmake and autotools) - - Fedora20 3.15.3-200.fc20.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc (GCC) 4.8.3 20140624 (Red Hat 4.8.3-1) - GNU Fortran (GCC) 4.8.3 20140624 (Red Hat 4.8.3-1) - (cmake and autotools) - - SUSE 13.1 3.11.10-17-desktop #1 SMP PREEMPT x86_64 x86_64 x86_64 GNU/Linux - gcc (SUSE Linux) 4.8.1 - GNU Fortran (SUSE Linux) 4.8.1 - (cmake and autotools) - - Ubuntu 14.04 3.13.0-35-generic #62-Ubuntu SMP x86_64 GNU/Linux - gcc (Ubuntu/Linaro 4.9.1-0ubuntu1) 4.9.1 - GNU Fortran (Ubuntu/Linaro 4.9.1-0ubuntu1) 4.9.1 - (cmake and autotools) - - hopper.nersc.gov PrgEnv-gnu/5.2.40 - gcc (GCC) 4.9.2 20141030 (Cray Inc.) - GNU Fortran (GCC) 4.9.2 20141030 (Cray Inc.) - g++ (GCC) 4.9.2 20141030 (Cray Inc.) 
- - - -Known Problems and Limitations -============================== -This section contains the list of known problems and limitations introduced -in this release of HDF5. - -Note: this list is not exhaustive of all known issues discovered in HDF5 -software to date. For a list of significant problems and known workarounds -identified in past releases, please refer to: - -https://www.hdfgroup.org/HDF5/release/known_problems/ - -The HDF Group also maintains a JIRA issue-tracking database which is used to -capture all known issues which are too numerous to reasonably list in this -document. The HDF Group is taking steps to make our JIRA issue database -open to the public, and this section will refer to that database in a future -release. In the meantime, please contact help@hdfgroup.org if you come across -an issue not listed here or at the link above, and we will provide any -information about known workarounds that we have or add it to our list of -known issues if it is a new issue. - - - The flush/refresh test occasionally fails on OS X platforms. This is - being investigated but no fix or workaround is available at this time. - (DER - 2016/03/22, HDFFV-9731) - - - The VDS/SWMR test will fail with a segmentation fault if the library - is built with --enable-using-memchecker. This is due to a VDS shutdown - procedure freeing a shared resource too early when the memory - checker changes are built. This problem does not arise when the - memory checker changes are not used since the internal library free - lists behave differently. The memory checker configure option should - normally only be used under special circumstances so this should not - affect most users. Users should be aware that the --enable-using-memchecker - + VDS combination may cause a segfault, however, so Valgrind et al. may - have to be used with an HDF5 library built without the feature if this - proves to be a problem. 
- (DER - 2016/03/21, HDFFV-9732) - - - SWMR feature limitations - The SWMR feature will only work if an HDF5 file under SWMR access resides - on a file system that obeys POSIX write() ordering semantics. Because of - this, SWMR will not work on network file systems such as NFS or SMB/Windows - file shares since those systems do not guarantee write ordering. SWMR - regression tests are likely to fail if run on a network file system. SWMR - is currently not tested on Windows though it can be tested manually - (some of the SWMR test programs are built by CMake), and there are no - obvious reasons for it to not work on NTFS or GPFS. - (EIP - 2016/03/20, HDFFV-9733) - - - VDS feature limitation - Currently, the path to a VDS source file is interpreted as relative to the - directory where the executable program runs and not to the HDF5 file with - the VDS dataset unless a full path to the source file is specified during - the mapping. - (EIP - 2016/03/20, HDFFV-9724) - - - The H5Lexists API changed behavior in HDF5-1.10 when used with a file handle - and root group name ("/"): - - H5Lexists(fileid, "/") - - In HDF5-1.8 it returns false (0) and in HDF5-1.10 it returns true (1). - The documentation will be updated with information regarding this change. - (LRK - 2016/03/30, HDFFV-8746) - - -%%%%1.10.0%%%% - -HDF5 version 1.10.0 released on 2016-03-30 -================================================================================ - - - -INTRODUCTION - -This document describes the differences between HDF5-1.8 series and -HDF5 1.10.0 releases, and contains information on the platforms -tested. 
- -Links to HDF5 1.10.0 source code can be found on The HDF Group's -development FTP server at the following location: - - https://www.hdfgroup.org/HDF5/release/obtain5110.html - -User documentation can be accessed directly at this location: - - https://www.hdfgroup.org/HDF5/docNewFeatures/ - -For more information, see the HDF5 home page: - - https://www.hdfgroup.org/HDF5/ - -If you have any questions or comments, please send them to the HDF -Help Desk: - - help@hdfgroup.org - - - -CONTENTS - -- New Features -- Issues Addressed in this Release -- Supported Platforms -- Tested Configuration Features Summary -- More Tested Platforms -- Known Problems and Limitations - - - -New Features -============ -This release supports the following features: - - Configuration - ------------- - - API Compatibility with HDF5 1.8 Flag Was Added - - The 1.10 version of the HDF5 Library can be configured to operate - identically to the 1.8 library with the --with-default-api-version=v18 - configure flag. This allows existing code to be compiled with the 1.10 - library without requiring immediate changes to the application source - code. For additional configuration options and other details, see - "API Compatibility Macros in HDF5" at - https://www.hdfgroup.org/HDF5/doc/RM/APICompatMacros.html. - - - Autotools Configuration Has Been Extensively Reworked - - The autotools configuration options have been updated to allow more - fine-grained control of the build options and to correct some bugs. - See configure --help for comprehensive information on each option. - - Specific changes: - - * --enable-debug and --enable-production are no longer accepted. - Use --enable-build-mode=(debug | production) instead. These set - appropriate defaults for symbols, optimizations, and other - configuration options. These defaults can be overridden by the - user. - - * Extra debug output messages are no longer enabled with - --enable-debug=. Use --enable-internal-debug= - instead. 
- - * A new --enable-symbols option allows symbols to be generated - independently of the build mode. --disable-symbols can be used - to strip symbols from the binary. - - * A new --enable-asserts option sets/unsets NDEBUG. This is - independent of the build mode. This also enables some extra - low-overhead debug checks in the library. - - * A new --enable-profiling option sets profiling flags. This is - independent of the build mode. - - * A new --enable-optimization option sets the optimization level. - This is independent of the build mode. - - * Many of these options can take a flags string that will be used - to build the library. This can be useful for specifying custom - optimization flags such as -Os and -Ofast. - - * gnu C++ and Fortran use configure sub-files that update the - build flags and turn on warnings. The increase in warnings when - building these wrapper libraries is due to these flag changes - and not to a decrease in code quality. - - * The option to clear file buffers has been removed. Any buffer that - will eventually be written to disk will now always be memset - to zero. This prevents the previous contents of the buffer from - being written to the disk if the buffer contents are not - completely overwritten, which has security implications. - - - LFS Changes - - The way the autotools handle large file support (LFS) has been - overhauled in this release. - - * We assume ftello and fseeko exist - - * We no longer explicitly use the *64 I/O functions. Instead, we - rely on a mapping provided by _FILE_OFFSET_BITS or its equivalent. - - * _LARGEFILE(64)_SOURCE is no longer exported via AM_CPPFLAGS. - - - - Parallel Library - ----------------- - - Collective Metadata I/O - - Calls for HDF5 metadata can result in many small reads and writes. 
- On metadata reads, collective metadata I/O can improve performance - by allowing the library to perform optimizations when reading the - metadata by having one rank read the data and broadcasting it to - all other ranks. - - Collective metadata I/O improves metadata write performance through - the construction of an MPI derived datatype that is then written - collectively in a single call. For more information, see - https://www.hdfgroup.org/HDF5/docNewFeatures/NewFeaturesCollectiveMetadataIoDocs.html. - - - - Library - -------- - - Concurrent Access to HDF5 Files - Single Writer/ Multiple Reader (SWMR) - - The Single Writer/ Multiple Reader or SWMR feature enables users to - read data concurrently while writing it. Communications between the - processes and file locking are not required. The processes can run - on the same or on different platforms as long as they share a common - file system that is POSIX compliant. For more information, see the - Single-Writer/Multiple-Reader (SWMR) documentation at - https://www.hdfgroup.org/HDF5/docNewFeatures/NewFeaturesSwmrDocs.html. - - - Virtual Dataset (VDS) - - The VDS feature enables data to be accessed across HDF5 files - using standard HDF5 objects such as groups and datasets without - rewriting or rearranging the data. An HDF5 virtual dataset (VDS) - is an HDF5 dataset that is composed of source HDF5 datasets in - a predefined mapping. VDS can be used with the SWMR feature. For - documentation, check - https://www.hdfgroup.org/HDF5/docNewFeatures/NewFeaturesVirtualDatasetDocs.html. - - - Persistent Free File Space Tracking - - Usage patterns when working with an HDF5 file sometimes result in - wasted space within the file. This can also impair access times - when working with the resulting files. The new file space management - feature provides strategies for managing space in a file to improve - performance in both of these areas. 
For more information, see - https://www.hdfgroup.org/HDF5/docNewFeatures/NewFeaturesFileSpaceMgmtDocs.html. - - - Version 3 Metadata Cache - - The version 3 metadata cache moves management of metadata I/O from - the clients to the metadata cache proper. This change is essential for - SWMR and other features that have yet to be released. - - - - C++ Library - ------------ - - New Member Function Added to H5::ArrayType - - The assignment operator ArrayType::operator= was added because - ArrayType has pointer data members. - - (BMR - 2016/03/07, HDFFV-9562) - - - - Tools - ------ - - h5watch - - The h5watch tool allows users to output new records appended to - a dataset under SWMR access as it grows. The functionality is - similar to the Unix user command "tail" with the follow option, - which outputs appended data as the file grows. For more - information, see - https://www.hdfgroup.org/HDF5/docNewFeatures/NewFeaturesSwmrDocs.html#Tools. - - - h5format_convert - - The h5format_convert tool allows users to convert the indexing - type of a chunked dataset made with a 1.10.x version of the HDF5 - Library when the latest file format is used to the 1.8.x version 1 B-tree indexing - type. For example, datasets created using SWMR access can be - converted to be accessed by the HDF5 1.8 library and tools. The - tool does not rewrite raw data, but it does rewrite HDF5 metadata. - - - - High-Level APIs - ---------------- - - H5DOappend - - The function appends data to a dataset along a specified dimension. - - - C Packet Table API - ------------------ - - Replacement of a Public Function with H5PTcreate - - The existing function H5PTcreate_fl limits applications so they - can use the deflate compression only. The public function - H5PTcreate has been added to replace H5PTcreate_fl. H5PTcreate - takes a property list identifier to provide flexibility on - creation properties. 
- - (BMR - 2016/03/04, HDFFV-8623) - - - New Public Functions: H5PTget_dataset and H5PTget_type - - Two accessor functions have been added. H5PTget_dataset returns - the identifier of the dataset associated with the packet table, - and H5PTget_type returns the identifier of the datatype used by - the packet table. - - (BMR, 2016/03/04, HDFFV-8623) - - - Regarding #ifdef VLPT_REMOVED - - The #ifdef VLPT_REMOVED blocks have been removed from the packet - table (PT) library source except for the following functions: - + H5PTis_varlen() has been made available again - + H5PTfree_vlen_readbuff() is now H5PTfree_vlen_buff() - - (BMR - 2016/03/04, HDFFV-442) - - C++ Packet Table API - -------------------- - - New Constructor Added to FL_PacketTable - - An overloaded constructor has been added to FL_PacketTable and - takes a property list identifier to provide flexibility on - creation properties. - - (BMR - 2016/03/08, HDFFV-8623) - - - New Public Functions - - Two accessor wrappers are added to class PacketTable. - PacketTable::GetDataset() returns the identifier of the dataset - associated with the packet table, and PacketTable::GetDatatype() - returns the identifier of the datatype that the packet table uses. - - (BMR - 2016/03/04, HDFFV-8623) - - - Member Functions with "char*" as an Argument - - Overloaded functions were added to provide the "const char*" - argument; the existing version will be deprecated in future - releases. - - (BMR - 2016/03/04, HDFFV-8623) - - - Regarding #ifdef VLPT_REMOVED - - The #ifdef VLPT_REMOVED blocks have been removed from the packet - table library source code except for the following functions: - + VL_PacketTable::IsVariableLength() was moved to PacketTable - + VL_PacketTable::FreeReadBuff() is now PacketTable::FreeBuff() - - (BMR - 2016/03/04, HDFFV-442) - - - - Java Wrapper Library - -------------------- - - The Java HDF5 JNI library has been integrated into the HDF5 repository. 
- The configure option is "--enable-java", and the CMake option is - HDF5_BUILD_JAVA:BOOL=ON. The package hierarchy has changed from the - HDF5 1.8 JNI, which was "ncsa.hdf.hdflib.hdf5", to HDF5 1.10, - "hdf.hdflib.hdf5". - - A number of new APIs were added including some for VDS and SWMR. - - - - Other Important Changes - ----------------------- - - The hid_t type was changed from 32-bit to a 64-bit value. - - - -Issues Addressed in this Release Since alpha1 -============================================= - - - H5Pget_virtual_printf_gap, H5Pget_virtual_view, H5Pget_efile_prefix - - The correct access property list settings from the - H5Pget_virtual_printf_gap, H5Pget_virtual_view, and - H5Pget_efile_prefix function calls could not be retrieved - using H5Dget_access_plist(). - - Fixed. - - (DER and NAF - 2016/03/14, HDFFV-9716) - - - h5dump - - When h5dump was provided with the name of a non-existing file or - when optional arguments were the last option on the command line, - h5dump would segfault. - - Fixed. - - (ADB 2016/02/28 HDFFV-9639, HDFFV-9684) - - - No Error Message for Corrupt Metadata - - The HDF5 Library did not propagate an error when it encountered - corrupt metadata in an HDF5 file. The issue was fixed for a - specific file provided by a user. If you still see the problem, - please contact help@hdfgroup.org - - Fixed. - - (MC - 2016/02/18, HDFFV-9670) - - - Problem Reading Chunked Datasets with a String Datatype Larger - Than the Chunk Size in Bytes - - When the latest file format was used and when a chunked dataset - was created with a datatype with the size bigger than a chunk - size, the data could not be read back. The issue was reported - for chunked datasets with a string datatype and was confirmed - for other datatypes with the sizes bigger than the chunk size in - bytes. - - Fixed. - - (JM - 2016/02/13, HDFFV-9672) - - - Control over the Location of External Files - - Users were unable to specify the locations of external files. 
- - Two APIs - H5Pget_efile_prefix and H5Pset_efile_prefix - were - added so that users could specify the locations of external files. - - (DER - 2016/02/04, HDFFV-8740) - - - -Issues Addressed in this Release Since alpha0 -============================================= - - h5format_convert - - The h5format_convert tool did not downgrade the version of the - superblock. - - Fixed. The tool now will downgrade the version of the superblock. - - (EIP 2016/01/11) - - - Crashes with multiple threads: invalid pointers - - It was reported that alpha0 crashed when used with multiple - threads. The issue exists in the HDF5 Library versions 1.8 and - 1.9. The problem is related to a shared file pointer used in some - miscellaneous data structures. The thread-safe library exposed - paths in the library where a file pointer became invalid. - - The alpha1 release contains the fixes for the specific use case - as described in HDFFV-9643. We will keep working on identifying - and fixing other paths in the library with similar problems. - - (EIP - 2016/01/15, HDFFV-9643) - - - -Supported Platforms -=================== -The following platforms are supported and have been tested for this release. -They are built with the configure process unless specified otherwise. 
- - AIX 6.1 xlc/xlc_r 10.1.0.5 - (NASA G-ADA) xlC/xlC_r 10.1.0.5 - xlf90/xlf90_r 12.1.0.6 - - Linux 2.6.32-573.18.1.el6 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (mayll/platypus) Version 4.4.7 20120313 (Red Hat 4.4.7-16) - Version 4.9.3, Version 5.2.0 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 15.7-0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 15.0.3.187 Build 20150407 - MPICH 3.1.4 compiled with GCC 4.9.3 - - Linux 2.6.32-504.8.1.el6.ppc64 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-11) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.4.7 20120313 (Red Hat 4.4.7-11) - (ostrich) GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-11) - IBM XL C/C++ V13.1 - IBM XL Fortran V15.1 - - Linux 3.10.0-327.10.1.el7 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (kituo/moohan) Version 4.8.5 20150623 (Red Hat 4.8.5-4) - Version 4.9.3, Version 5.2.0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 15.0.3.187 Build 20150407 - MPICH 3.1.4 compiled with GCC 4.9.3 - - SunOS 5.11 32- and 64-bit Sun C 5.12 SunOS_sparc - (emu) Sun Fortran 95 8.6 SunOS_sparc - Sun C++ 5.12 SunOS_sparc - - Windows 7 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - Visual Studio 2015 w/ Intel Fortran 16 (cmake) - Cygwin(CYGWIN_NT-6.1 2.2.1(0.289/5/3) gcc(4.9.3) compiler and gfortran) - (cmake and autotools) - - Windows 7 x64 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - Visual Studio 2015 w/ Intel Fortran 16 (cmake) - - Windows 8.1 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - - Windows 8.1 x64 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - - Mac OS X Mt. 
Lion 10.8.5 Apple clang/clang++ version 5.1 from Xcode 5.1 - 64-bit gfortran GNU Fortran (GCC) 4.8.2 - (swallow/kite) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X Mavericks 10.9.5 Apple clang/clang++ version 6.0 from Xcode 6.2.0 - 64-bit gfortran GNU Fortran (GCC) 4.9.2 - (wren/quail) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X Yosemite 10.10.5 Apple clang/clang++ version 6.0 from Xcode 7.0.0 - 64-bit gfortran GNU Fortran (GCC) 4.9.2 - (osx1010dev/osx1010test) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X El Capitan 10.11.3 Apple clang/clang++ version 7.0.2 from Xcode 7.0.2 - 64-bit gfortran GNU Fortran (GCC) 5.2.0 - (osx1010dev/osx1010test) Intel icc/icpc/ifort version 15.0.3 - - - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90/ F90 C++ zlib SZIP - parallel F2003 parallel -SunOS 5.11 32-bit n y/y n y y y -SunOS 5.11 64-bit n y/y n y y y -Windows 7 y y/y n y y y -Windows 7 x64 y y/y n y y y -Windows 7 Cygwin n y/y n y y n -Windows 8.1 n y/y n y y y -Windows 8.1 x64 n y/y n y y y -Mac OS X Mountain Lion 10.8.5 64-bit n y/y n y y y -Mac OS X Mavericks 10.9.5 64-bit n y/y n y y y -Mac OS X Yosemeti 10.10.5 64-bit n y/y n y y y -AIX 6.1 32- and 64-bit n y/n n y y y -CentOS 6.7 Linux 2.6.32 x86_64 GNU y y/y y y y y -CentOS 6.7 Linux 2.6.32 x86_64 Intel n y/y n y y y -CentOS 6.7 Linux 2.6.32 x86_64 PGI n y/y n y y y -CentOS 7.1 Linux 3.10.0 x86_64 GNU y y/y y y y y -CentOS 7.1 Linux 3.10.0 x86_64 Intel n y/y n y y y -Linux 2.6.32-431.11.2.el6.ppc64 n y/n n y y y - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -SunOS 5.11 32-bit y y y y -SunOS 5.11 64-bit y y y y -Windows 7 y y y y -Windows 7 x64 y y y y -Windows 7 Cygwin n n n y -Windows 
8.1 y y y y -Windows 8.1 x64 y y y y -Mac OS X Mountain Lion 10.8.5 64-bit y n y y -Mac OS X Mavericks 10.9.5 64-bit y n y y -Mac OS X Yosemeti 10.10.5 64-bit y n y y -AIX 6.1 32- and 64-bit y n n y -CentOS 6.7 Linux 2.6.32 x86_64 GNU y y y y -CentOS 6.7 Linux 2.6.32 x86_64 Intel y y y y -CentOS 6.7 Linux 2.6.32 x86_64 PGI y y y y -CentOS 7.1 Linux 3.10.0 x86_64 GNU y y y y -CentOS 7.1 Linux 3.10.0 x86_64 Intel y y y y -Linux 2.6.32-431.11.2.el6.ppc64 y y y y - -Compiler versions for each platform are listed in the preceding -"Supported Platforms" table. - - - -More Tested Platforms -===================== -The following platforms are not supported but have been tested for this release. - - Linux 2.6.18-431.11.2.el6 g95 (GCC 4.0.3 (g95 0.94!) - #1 SMP x86_64 GNU/Linux - (platypus) - - Windows 7 Visual Studio 2008 (cmake) - - Windows 7 x64 Visual Studio 2008 (cmake) - - Windows 7 x64 Visual Studio 2010 (cmake) with SWMR using GPFS - - Windows 10 Visual Studio 2013 w/ Intel Fortran 15 (cmake) - - Windows 10 x64 Visual Studio 2013 w/ Intel Fortran 15 (cmake) - - Debian7.5.0 3.2.0-4-amd64 #1 SMP Debian 3.2.51-1 x86_64 GNU/Linux - gcc (Debian 4.7.2-5) 4.7.2 - GNU Fortran (Debian 4.7.2-5) 4.7.2 - (cmake and autotools) - - Fedora20 3.15.3-200.fc20.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc (GCC) 4.8.3 20140624 (Red Hat 4.8.3-1) - GNU Fortran (GCC) 4.8.3 20140624 (Red Hat 4.8.3-1) - (cmake and autotools) - - SUSE 13.1 3.11.10-17-desktop #1 SMP PREEMPT x86_64 x86_64 x86_64 GNU/Linux - gcc (SUSE Linux) 4.8.1 - GNU Fortran (SUSE Linux) 4.8.1 - (cmake and autotools) - - Ubuntu 14.04 3.13.0-35-generic #62-Ubuntu SMP x86_64 GNU/Linux - gcc (Ubuntu/Linaro 4.9.1-0ubuntu1) 4.9.1 - GNU Fortran (Ubuntu/Linaro 4.9.1-0ubuntu1) 4.9.1 - (cmake and autotools) - - hopper.nersc.gov PrgEnv-gnu/5.2.40 - gcc (GCC) 4.9.2 20141030 (Cray Inc.) - GNU Fortran (GCC) 4.9.2 20141030 (Cray Inc.) - g++ (GCC) 4.9.2 20141030 (Cray Inc.) 
- - - -Known Problems and Limitations -============================== -This section contains the list of known problems and limitations introduced -in this release of HDF5. - -Note: this list is not exhaustive of all known issues discovered in HDF5 -software to date. For a list of significant problems and known workarounds -identified in past releases, please refer to: - -https://www.hdfgroup.org/HDF5/release/known_problems/ - -The HDF Group also maintains a JIRA issue-tracking database which is used to -capture all known issues which are too numerous to reasonably list in this -document. The HDF Group is taking steps to make our JIRA issue database -open to the public, and this section will refer to that database in a future -release. In the meantime, please contact help@hdfgroup.org if you come across -an issue not listed here or at the link above, and we will provide any -information about known workarounds that we have or add it to our list of -known issues if it is a new issue. - - - The JUnit-interface test may fail on Solaris platforms. The result of - a test for verifying the content of the error stack to stdout is - in a different order on Solaris then other platforms. Use make -i option - to test beyond the java/test folder. - (ADB - 2016/03/22, HDFFV-9734) - - - The flush/refresh test occasionally fails on OS X platforms. This is - being investigated but no fix or workaround is available at this time. - (DER - 2016/03/22, HDFFV-9731) - - - The VDS/SWMR test will fail with a segmentation fault if the library - is built with --enable-using-memchecker. The is due to a VDS shutdown - procedure freeing a shared resource too early when the memory - checker changes are built. This problem does not arise when the - memory checker changes are not used since the internal library free - lists behave differently. The memory checker configure option should - normally only be used under special circumstances so this should not - affect most users. 
Users should be aware that the --enable-using-memchecker - + VDS combination may cause a segfault, however, so Valgrind et al. may - have to be used with an HDF5 library built without the feature if this - proves to be a problem. - (DER - 2016/03/21, HDFFV-9732) - - - SWMR feature limitations - The SWMR feature will only work if an HDF5 file under SWMR access resides - on a file system that obeys POSIX write() ordering semantics. Because of - this, SWMR will not work on network file systems such as NFS or SMB/Windows - file shares since those systems do not guarantee write odering. SWMR - regression tests are likely to fail if run on a network file system. SWMR - is currently not tested on Windows though it can be tested manually - (some of the SWMR test programs are built by CMake), and there are no - obvious reasons for it to not work on NTFS or GPFS. - (EIP - 2016/03/20, HDFFV-9733) - - - VDS feature limitation - Currently, the path to a VDS source file is interpreted as relative to the - directory where the executable program runs and not to the HDF5 file with - the VDS dataset unless a full path to the source file is specified during - the mapping. - (EIP - 2016/03/20, HDFFV-9724) - - - When building HDF5 with Java using CMake and specifying Debug for CMAKE_BUILD_TYPE, - there is a missing command argument for the tests of the examples. - - This error can be avoided by not building Java with Debug, HDF5_BUILD_JAVA:BOOL=OFF, - or not building Examples, HDF5_BUILD_EXAMPLES:BOOL=OFF. - (LRK - 2016/03/30, HDFFV-9743) - - - The H5Lexists API changed behavior in HDF5-1.10 when used with a file handle - and root group name ("/"): - - H5Lexists(fileid, "/") - - In HDF5-1.8 it returns false (0) and in HDF5-1.10 it returns true (1). - The documentation will be updated with information regarding this change. 
- (LRK - 2016/03/30, HDFFV-8746) - diff --git a/release_docs/HISTORY-1_8.txt b/release_docs/HISTORY-1_8.txt deleted file mode 100644 index 461e0be7394..00000000000 --- a/release_docs/HISTORY-1_8.txt +++ /dev/null @@ -1,14439 +0,0 @@ -HDF5 History -============ - -This file contains development history of HDF5 1.8 branch - -23. Release Information for hdf5-1.8.21 -22. Release Information for hdf5-1.8.20 -21. Release Information for hdf5-1.8.19 -20. Release Information for hdf5-1.8.18 -19. Release Information for hdf5-1.8.17 -18. Release Information for hdf5-1.8.16 -17. Release Information for hdf5-1.8.15 -16. Release Information for hdf5-1.8.14 -15. Release Information for hdf5-1.8.13 -14. Release Information for hdf5-1.8.12 -13. Release Information for hdf5-1.8.11 -12. Release Information for hdf5-1.8.10-patch1 -11. Release Information for hdf5-1.8.10 -10. Release Information for hdf5-1.8.9 -09. Release Information for hdf5-1.8.8 -08. Release Information for hdf5-1.8.7 -07. Release Information for hdf5-1.8.6 -06. Release Information for hdf5-1.8.5 -05. Release Information for hdf5-1.8.4 -04. Release Information for hdf5-1.8.3 -03. Release Information for hdf5-1.8.2 -02. Release Information for hdf5-1.8.1 -01. Release Information for hdf5-1.8.0 - -[Search on the string '%%%%' for section breaks of each release.] - -%%%%1.8.21%%%% - - -HDF5 version 1.8.21 released on 2018-06-04 -================================================================================ - -INTRODUCTION -============ - -This document describes the differences between HDF5-1.8.20 and -HDF5-1.8.21, and contains information on the platforms tested and -known problems in HDF5-1.8.21. -For more details, see the files HISTORY-1_0-1_8_0_rc3.txt -and HISTORY-1_8.txt in the release_docs/ directory of the HDF5 source. 
- -Links to the HDF5 1.8.21 source code, documentation, and additional materials -can be found on the HDF5 web page at: - - https://support.hdfgroup.org/HDF5/ - -The HDF5 1.8.21 release can be obtained from: - - https://support.hdfgroup.org/HDF5/release/obtain518.html - -User documentation for 1.8.21 can be accessed directly at this location: - - https://support.hdfgroup.org/HDF5/doc1.8/ - -New features in the HDF5-1.8.x release series, including brief general -descriptions of some new and modified APIs, are described in the "What's New -in 1.8.0?" document: - - https://support.hdfgroup.org/HDF5/doc/ADGuide/WhatsNew180.html - -All new and modified APIs are listed in detail in the "HDF5 Software Changes -from Release to Release" document, in the section "Release 1.8.21 (current -release) versus Release 1.8.20 - - https://support.hdfgroup.org/HDF5/doc1.8/ADGuide/Changes.html - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS -======== - -- New Features -- Support for New Platforms, Languages, and Compilers -- Bug Fixes since HDF5-1.8.20 -- Supported Platforms -- Supported Configuration Features Summary -- More Tested Platforms -- Known Problems - - -New Features -============ - - Configuration - ------------- - - CMake - - Change minimum version to 3.10. - - This change removes the need to support a copy of the FindMPI.cmake module, - which has been removed, along with its subfolder in the config/cmake_ext_mod - location. - - (ADB - 2018/03/09) - - - CMake - - Add pkg-config file generation - - Added pkg-config file generation for the C, C++, HL, and HL C++ libraries. - In addition, builds on linux will create h5cXXX scripts that use the pkg-config - files. This is a limited implementation of a script like autotools h5cc. - - (ADB - 2018/03/08, HDFFV-4359) - - - CMake - - Refactor use of CMAKE_BUILD_TYPE for new variable, which understands - the type of generator in use. 
- - Added new configuration macros to use new HDF_BUILD_TYPE variable. This - variable is set correctly for the type of generator being used for the build. - - (ADB - 2018/01/08, HDFFV-10385, HDFFV-10296) - - C++ API - ------- - - The following C++ API wrappers have been added to class H5Location - + H5Lcreate_soft: - // Creates a soft link from link_name to target_name. - void link(const char *target_name, const char *link_name,...) - void link(const H5std_string& target_name,...) - - + H5Lcreate_hard: - // Creates a hard link from new_name to curr_name. - void link(const char *curr_name, const Group& new_loc,...) - void link(const H5std_string& curr_name, const Group& new_loc,...) - - // Creates a hard link from new_name to curr_name in the same location. - void link(const char *curr_name, const hid_t same_loc,...) - void link(const H5std_string& curr_name, const hid_t same_loc,...) - - Note: previous version CommonFG::link will be deprecated. - - + H5Lcopy: - // Copy an object from a group of file to another. - void copyLink(const char *src_name, const Group& dst,...) - void copyLink(const H5std_string& src_name, const Group& dst,...) - - // Copy an object from a group of file to the same location. - void copyLink(const char *src_name, const char *dst_name,...) - void copyLink(const H5std_string& src_name,...) - - + H5Lmove: - // Rename an object in a group or file to a new location. - void moveLink(const char* src_name, const Group& dst,...) - void moveLink(const H5std_string& src_name, const Group& dst,...) - - // Rename an object in a group or file to the same location. - void moveLink(const char* src_name, const char* dst_name,...) - void moveLink(const H5std_string& src_name,...) - - Note: previous version CommonFG::move will be deprecated. - - + H5Ldelete: - // Removes the specified link from this location. 
- void unlink(const char *link_name, - const LinkAccPropList& lapl = LinkAccPropList::DEFAULT) - void unlink(const H5std_string& link_name, - const LinkAccPropList& lapl = LinkAccPropList::DEFAULT) - - Note: An additional parameter is added to CommonFG::unlink and it - is moved to H5Location. - - (BMR - 2018/05/11 - HDFFV-10445) - - - New property list subclasses - - Property list subclasses StrCreatPropList, LinkCreatPropList, and - AttrCreatPropList are added for the C property list classes - H5P_STRING_CREATE, H5P_LINK_CREATE, and H5P_ATTRIBUTE_CREATE. - - (BMR - 2018/05/11 - HDFFV-10445) - - - Another argument, LinkCreatPropList& lcpl, is added to the following - functions for the use of link creation property list. - Group createGroup(const char* name, size_t size_hint = 0, - const LinkCreatPropList& lcpl = LinkCreatPropList::DEFAULT) - Group createGroup(const H5std_string& name, size_t size_hint = 0, - const LinkCreatPropList& lcpl = LinkCreatPropList::DEFAULT) - - (BMR - 2018/05/11 - HDFFV-10445) - - - -Support for New Platforms, Languages, and Compilers -=================================================== - - - Added support for Visual Studio 2017 w/ Intel Fortran 18 on Windows 10 x64. - - -Bug Fixes since HDF5-1.8.20 -=========================== - - - If an HDF5 file contains a filter pipeline message with a 'number of - filters' field that exceeds the maximum number of allowed filters, - the error handling code will attempt to dereference a NULL pointer. - - This issue was reported to The HDF Group as issue #CVE-2017-17505. - - NOTE: The HDF5 C library cannot produce such a file. This condition - should only occur in a corrupt (or deliberately altered) file - or a file created by third-party software. 
- - This problem arose because the error handling code assumed that - the 'number of filters' field implied that a dynamic array of that - size had already been created and that the cleanup code should - iterate over that array and clean up each element's resources. If - an error occurred before the array has been allocated, this will - not be true. - - This has been changed so that the number of filters is set to - zero on errors. Additionally, the filter array traversal in the - error handling code now requires that the filter array not be NULL. - - (DER - 2018/02/06, HDFFV-10354) - - - If an HDF5 file contains a filter pipeline message which contains - a 'number of filters' field that exceeds the actual number of - filters in the message, the HDF5 C library will read off the end of - the read buffer. - - This issue was reported to The HDF Group as issue #CVE-2017-17506. - - NOTE: The HDF5 C library cannot produce such a file. This condition - should only occur in a corrupt (or deliberately altered) file - or a file created by third-party software. - - The problem was fixed by passing the buffer size with the buffer - and ensuring that the pointer cannot be incremented off the end - of the buffer. A mismatch between the number of filters declared - and the actual number of filters will now invoke normal HDF5 - error handling. - - (DER - 2018/02/26, HDFFV-10355) - - - If an HDF5 file contains a malformed compound datatype with a - suitably large offset, the type conversion code can run off - the end of the type conversion buffer, causing a segmentation - fault. - - This issue was reported to The HDF Group as issue #CVE-2017-17507. - - NOTE: The HDF5 C library cannot produce such a file. This condition - should only occur in a corrupt (or deliberately altered) file - or a file created by third-party software. 
- - THE HDF GROUP WILL NOT FIX THIS BUG AT THIS TIME - - Fixing this problem would involve updating the publicly visible - H5T_conv_t function pointer typedef and versioning the API calls - which use it. We normally only modify the public API during - major releases, so this bug will not be fixed at this time. - - (DER - 2018/02/26, HDFFV-10356) - - - If an HDF5 file contains a malformed compound type which contains - a member of size zero, a division by zero error will occur while - processing the type. - - This issue was reported to The HDF Group as issue #CVE-2017-17508. - - NOTE: The HDF5 C library cannot produce such a file. This condition - should only occur in a corrupt (or deliberately altered) file - or a file created by third-party software. - - Checking for zero before dividing fixes the problem. Instead of the - division by zero, the normal HDF5 error handling is invoked. - - (DER - 2018/02/26, HDFFV-10357) - - - If an HDF5 file contains a malformed symbol table node that declares - it contains more symbols than it actually contains, the library - can run off the end of the metadata cache buffer while processing - the symbol table node. - - This issue was reported to The HDF Group as issue #CVE-2017-17509. - - NOTE: The HDF5 C library cannot produce such a file. This condition - should only occur in a corrupt (or deliberately altered) file - or a file created by third-party software. - - Performing bounds checks on the buffer while processing fixes the - problem. Instead of the segmentation fault, the normal HDF5 error - handling is invoked. - - (DER - 2018/03/12, HDFFV-10358) - - - Configuration - ------------- - - Library - - Moved the location of gcc attribute. - - The gcc attribute(no_sanitize), named as the macro HDF_NO_UBSAN, - was located after the function name. Builds with GCC 7 did not - indicate any problem, but GCC 8 issued errors. Moved the - attribute before the function name, as required. 
- - (ADB 2018/05/22, HDFFV-10473) - - - CMake - - Update CMake commands configuration. - - A number of improvements were made to the CMake commands. Most - changes simplify usage or eliminate unused constructs. Also, - some changes support better cross-platform support. - - (ADB - 2018/02/01, HDFFV-10398) - - - CMake - - Correct usage of CMAKE_BUILD_TYPE variable. - - The use of the CMAKE_BUILD_TYPE is incorrect for multi-config - generators (Visual Studio and XCode) and is optional for single - config generators. Created a new macro to check - GLOBAL PROPERTY -> GENERATOR_IS_MULTI_CONFIG - Created two new HDF variable, HDF_BUILD_TYPE and HDF_CFG_BUILD_TYPE. - Defaults for these variables is "Release". - - (ADB - 2018/01/10, HDFFV-10385) - - - CMake - - Add replacement of fortran flags if using static CRT. - - Added TARGET_STATIC_CRT_FLAGS call to HDFUseFortran.cmake file in - config/cmake_ext_mod folder. - - (ADB - 2018/01/08, HDFFV-10334) - - - Library - ------- - - Utility function can not handle lowercase Windows drive letters - - Added call to toupper function for drive letter. - - (ADB - 2017/12/18, HDFFV-10307) - - - Tools - ----- - - h5repack - - h5repack changes the chunk parameters when a change of layout is not - specified and a filter is applied. - - HDFFV-10297, HDFFV-10319 reworked code for h5repack and h5diff code - in the tools library. The check for an existing layout was incorrectly - placed into an if block and not executed. The check was moved into - the normal path of the function. - - (ADB - 2018/02/21, HDFFV-10412) - - - h5dump - - the tools library will hide the error stack during file open. - - While this is preferable almost always, there are reasons to enable - display of the error stack when a tool will not open a file. Adding an - optional argument to the --enable-error-stack will provide this use case. - As an optional argument it will not affect the operation of the - --enable-error-stack. 
h5dump is the only tool to implement this change. - - (ADB - 2018/02/15, HDFFV-10384) - - - h5dump - - h5dump would output an indented blank line in the filters section. - - h5dump overused the h5tools_simple_prefix function, which is a - function intended to account for the data index (x,y,z) option. - Removed the function call for header information. - - (ADB - 2018/01/25, HDFFV-10396) - - - h5repack - - h5repack incorrectly searched internal object table for name. - - h5repack would search the table of objects for a name, if the - name did not match it tried to determine if the name without a - leading slash would match. The logic was flawed! The table - stored names(paths) without a leading slash and did a strstr - of the table path to the name. - The assumption was that if there was a difference of one then - it was a match, however "pressure" would match "/pressure" as - well as "/pressure1", "/pressure2", etc. Changed logic to remove - any leading slash and then do a full compare of the name. - - (ADB - 2018/01/18, HDFFV-10393) - - - h5repack - - h5repack failed to handle command line parameters for customer filters. - - User defined filter parameter conversions would fail when integers - were represented on the command line with character strings - larger than 9 characters. Increased local variable array for storing - the current command line parameter to prevent buffer overflows. - - (ADB - 2018/01/17, HDFFV-10392) - - - h5diff - - h5diff seg faulted if comparing VL strings against fixed strings. - - Reworked solution for HDFFV-8625 and HDFFV-8639. Implemented the check - for string objects of same type in the diff_can_type function by - adding an if(tclass1 == H5T_STRING) block. This "if block" moves the - same check that was added for attributes to this function, which is - used by all object types. This function handles complex type structures. 
- Also added a new test file in h5diffgentest for testing this issue - and removed the temporary files used in the test scripts. - - (ADB - 2018/01/04, HDFFV-8745) - - - C++ API - ------- - - Removal of memory leaks - - A private function was inadvertently called, causing memory leaks. This - is now fixed. - - (BMR - 2018/04/12 - User reported in email) - - - Changes in exception classes - - Some exception classes are reorganized to reflect the HDF5 object - hierarchy and allow customization. - DataSetIException -> LocationException -> Exception - DataTypeIException -> LocationException -> Exception - GroupIException -> LocationException -> Exception - AttributeIException -> LocationException -> Exception - FileIException -> GroupIException -> LocationException -> Exception - Member functions in H5Location and H5Object now throw specific exceptions - associated with the invoking objects. - - (BMR - 2018/05/11) - - - H5Location::closeObjId is made static - (BMR - 2018/05/11) - - - H5A wrappers in H5Location are removed as they have been in H5Object. - (BMR - 2018/05/11) - - -Supported Platforms -=================== -The following platforms are supported and have been tested for this release. -They are built with the configure process unless specified otherwise. 
- - Linux 2.6.32-573.22.1.el6 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (platypus/mayll) Version 4.4.7 20120313 - Versions 4.9.3, 5.3.0, 6.2.0 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 17.10-0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 17.0.4.196 Build 20160721 - MPICH 3.1.4 compiled with GCC 4.9.3 - OpenMPI 2.0.1 compiled with GCC 4.9.3 - - Linux 2.6.32-573.18.1.el6 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-16) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.4.7 20120313 (Red Hat 4.4.7-16) - (ostrich) GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-16) - IBM XL C/C++ V13.1 - IBM XL Fortran V15.1 - - Linux 3.10.0-327.10.1.el7 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (kituo/moohan/jelly Version 4.8.5 20150623 (Red Hat 4.8.5-4) - Versions 4.9.3, 5.3.0, 6.2.0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 17.0.4.196 Build 20170411 - MPICH 3.1.4 compiled with GCC 4.9.3 - NAG Fortran Compiler Release 6.1(Tozai) Build 6116 - - SunOS 5.11 32- and 64-bit Sun C 5.12 SunOS_sparc - (emu) Sun Fortran 95 8.6 SunOS_sparc - Sun C++ 5.12 SunOS_sparc - - Windows 7 Visual Studio 2015 w/ Intel Fortran 16 (cmake) - - Windows 7 x64 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - Visual Studio 2015 w/ Intel Fortran 16 (cmake) - Visual Studio 2015 w/ Intel C, Fortran 2017 (cmake) - Visual Studio 2015 w/ MSMPI 8 (cmake) - - Windows 10 Visual Studio 2015 w/ Intel Fortran 16 (cmake) - - Windows 10 x64 Visual Studio 2015 w/ Intel Fortran 16 (cmake) - Visual Studio 2017 w/ Intel Fortran 18 (cmake) - - Mac OS X Mavericks 10.9.5 Apple LLVM version 6.0 (clang-600.0.57) - 64-bit gfortran GNU Fortran (GCC) 4.9.2 - (wren/quail) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X Yosemite 10.10.5 Apple LLVM version 6.1 (clang-602.0.53) - 64-bit gfortran GNU Fortran (GCC) 4.9.2 - (osx1010dev/osx1010test) Intel 
icc/icpc/ifort version 15.0.3 - - Mac OS X El Capitan 10.11.6 Apple LLVM version 7.3.0 (clang-703.0.29) - 64-bit gfortran GNU Fortran (GCC) 5.2.0 - (VM osx1011dev/osx1011test) Intel icc/icpc/ifort version 16.0.2 - - Mac OS Sierra 10.12.6 Apple LLVM version 8.1 (clang-802.0.42) - 64-bit gfortran GNU Fortran (GCC) 7.1.0 - (kite) Intel icc/icpc/ifort version 17.0.2 - - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90/ F90 C++ zlib SZIP - parallel F2003 parallel -SunOS 5.11 32-bit n y/y n y y y -SunOS 5.11 64-bit n y/y n y y y -Windows 7 y y/y n y y y -Windows 7 x64 y y/y n y y y -Windows 7 Cygwin n y/n n y y y -Windows 7 x64 Cygwin n y/n n y y y -Windows 10 y y/y n y y y -Windows 10 x64 y y/y n y y y -Mac OS X Yosemite 10.10.5 64-bit n y/y n y y y -Mac OS X El Capitan 10.11.6 64-bit n y/y n y y y -Mac OS Sierra 10.12.6 64-bit n y/y n y y y -AIX 6.1 32- and 64-bit n y/n n y y y -CentOS 6.7 Linux 2.6.32 x86_64 GNU y y/y y y y y -CentOS 6.7 Linux 2.6.32 x86_64 Intel n y/y n y y y -CentOS 6.7 Linux 2.6.32 x86_64 PGI n y/y n y y y -CentOS 7.1 Linux 3.10.0 x86_64 GNU y y/y y y y y -CentOS 7.1 Linux 3.10.0 x86_64 Intel n y/y n y y y -Linux 2.6.32-573.18.1.el6.ppc64 n y/n n y y y - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -SunOS 5.11 32-bit y y y y -SunOS 5.11 64-bit y y y y -Windows 7 y y y y -Windows 7 x64 y y y y -Windows 7 Cygwin n n n y -Windows 7 x64 Cygwin n n n y -Windows 10 y y y y -Windows 10 x64 y y y y -Mac OS X Yosemite 10.10.5 64-bit y n y y -Mac OS X El Capitan 10.11.6 64-bit y n y y -Mac OS Sierra 10.12.6 64-bit y n y y -AIX 6.1 32- and 64-bit y n n y -CentOS 6.7 Linux 2.6.32 x86_64 GNU y y y y -CentOS 6.7 Linux 2.6.32 x86_64 Intel y y 
y y -CentOS 6.7 Linux 2.6.32 x86_64 PGI y y y y -CentOS 7.1 Linux 3.10.0 x86_64 GNU y y y y -CentOS 7.1 Linux 3.10.0 x86_64 Intel y y y y -Linux 2.6.32-573.18.1.el6.ppc64 y y y y - -Compiler versions for each platform are listed in the preceding -"Supported Platforms" table. - - -More Tested Platforms -===================== -The following platforms are not supported but have been tested for this release. - - Linux 2.6.32-573.22.1.el6 g95 (GCC 4.0.3 (g95 0.94!) - #1 SMP x86_64 GNU/Linux - (mayll) - - Debian8.4.0 3.16.0-4-amd64 #1 SMP Debian 3.16.36-1 x86_64 GNU/Linux - gcc (Debian 4.9.2-10) 4.9.2 - GNU Fortran (Debian 4.9.2-10) 4.9.2 - (cmake and autotools) - - Fedora24 4.7.2-201.fc24.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc (GCC) 6.1.1 20160621 (Red Hat 6.1.1-3) - GNU Fortran (GCC) 6.1.1 20160621 (Red Hat 6.1.1-3) - (cmake and autotools) - - CentOS 7.2 3.10.0-327.28.2.el7.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc (GCC) 4.8.5 20150623 (Red Hat 4.8.5-4) - GNU Fortran (GCC) 4.8.5 20150623 (Red Hat 4.8.5-4) - (cmake and autotools) - - Ubuntu 16.04 4.4.0-38-generic #62-Ubuntu SMP x86_64 GNU/Linux - gcc (Ubuntu 5.4.0-6ubuntu1~16.04.2) 5.4.0 - GNU Fortran (Ubuntu 5.4.0-6ubuntu1~16.04.2) 5.4.0 - (cmake and autotools) - - -Known Problems -============== - - The dynamically loaded plugin test libraries require undefined references - to HDF5 functions to be resolved at runtime in order to function properly. - With autotools on CYGWIN this results in build errors, and we have not - found a solution that satisfies both. Therefore the dynamically loaded - plugin tests have been disabled on CYGWIN. - - Mac OS X 10.13 added additional subdirectory structure in .libs for shared - libraries. Consequently "make check" will fail testing java and dynamically - loaded plugin test libraries attempting to copy files from the previous - locations in .libs directories. This will be addressed in the next release - when support for the Mac OS X 10.13 platform is added. 
- - Known problems in previous releases can be found in the HISTORY*.txt files - in the HDF5 source. Please report any new problems found to - help@hdfgroup.org. - - -%%%%1.8.20%%%% - - -HDF5 version 1.8.20 released on 2017-11-28 -================================================================================ - -INTRODUCTION -============ - -This document describes the differences between HDF5-1.8.19 and -HDF5-1.8.20, and contains information on the platforms tested and -known problems in HDF5-1.8.20. -For more details, see the files HISTORY-1_0-1_8_0_rc3.txt -and HISTORY-1_8.txt in the release_docs/ directory of the HDF5 source. - -Links to the HDF5 1.8.20 source code, documentation, and additional materials -can be found on the HDF5 web page at: - - https://support.hdfgroup.org/HDF5/ - -The HDF5 1.8.20 release can be obtained from: - - https://support.hdfgroup.org/HDF5/release/obtain518.html - -User documentation for 1.8.20 can be accessed directly at this location: - - https://support.hdfgroup.org/HDF5/doc1.8/ - -New features in the HDF5-1.8.x release series, including brief general -descriptions of some new and modified APIs, are described in the "What's New -in 1.8.0?" document: - - https://support.hdfgroup.org/HDF5/doc/ADGuide/WhatsNew180.html - -All new and modified APIs are listed in detail in the "HDF5 Software Changes -from Release to Release" document, in the section "Release 1.8.20 (current -release) versus Release 1.8.19 - - https://support.hdfgroup.org/HDF5/doc1.8/ADGuide/Changes.html - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS -======== - -- New Features -- Support for New Platforms, Languages, and Compilers -- Bug Fixes since HDF5-1.8.20 -- Supported Platforms -- Supported Configuration Features Summary -- More Tested Platforms -- Known Problems - - -New Features -============ - - Tools - ----- - - h5diff - - h5diff has new option enable-error-stack. 
- - Updated h5diff with the --enable-error-stack argument, which - enables the display of the hdf5 error stack. This completes the - improvement to the main tools; h5copy, h5diff, h5dump, h5ls and - h5repack. - - (ADB - 2017/08/30, HDFFV-9774) - - - C++ API - ------- - - The following C++ API wrappers have been added to the C++ Library: - - // Creates a binary object description of this datatype. - void DataType::encode() - C API H5Tencode() - - // Returns the decoded type from the binary object description. - DataType::decode() - C API H5Tdecode() - ArrayType::decode() - C API H5Tdecode() - CompType::decode() - C API H5Tdecode() - DataType::decode() - C API H5Tdecode() - EnumType::decode() - C API H5Tdecode() - FloatType::decode() - C API H5Tdecode() - IntType::decode() - C API H5Tdecode() - StrType::decode() - C API H5Tdecode() - VarLenType::decode() - C API H5Tdecode() - - // Three overloaded functions to retrieve information about an object - H5Location::getObjectInfo() - H5Oget_info()/H5Oget_info_by_name() - - (BMR - 2017/10/17, HDFFV-10175) - - - New constructors to open existing datatypes added in ArrayType, - CompType, DataType, EnumType, FloatType, IntType, StrType, and - VarLenType. - - (BMR - 2017/10/17, HDFFV-10175) - - - A document is added to the HDF5 C++ API Reference Manual to show the - mapping from a C API to C++ wrappers. It can be found from the main - page of the C++ API Reference Manual. - - (BMR - 2017/10/17, HDFFV-10151) - - - High-Level APIs - --------------- - - H5DOread_chunk - - Users wanted to read compressed data directly from a file without any - processing by the HDF5 data transfer pipeline, just as they were able - to write it directly to a file with H5DOwrite_chunk. - - New API function, corresponding to existing function H5DOwrite_chunk. - H5DOread_chunk reads a raw data chunk directly from a chunked dataset - in the file into the application buffer, bypassing the library’s internal - data transfer pipeline, including filters. 
- - (VC - 2017/05/02, HDFFV-9934) - - -Support for New Platforms, Languages, and Compilers -=================================================== - - - Added NAG compiler - - -Bug Fixes since HDF5-1.8.19 -=========================== - - Configuration - ------------- - - cmake - - The hdf5 library used shared szip and zlib, which needlessly required - applications to link with the same szip and zlib libraries. - - Changed the target_link_libraries commands to use the static libs. - Removed improper link duplication of szip and zlib. - Adjusted the link dependencies and the link interface values of - the target_link_libraries commands. - - (ADB - 2017/11/14, HDFFV-10329) - - - cmake MPI - - CMake implementation for MPI was problematic and would create incorrect - MPI library references in the hdf5 libraries. - - Reworked the CMake MPI code to properly create CMake targets.Also merged - the latest CMake FindMPI.cmake changes to the local copy. This is necessary - until HDF changes the CMake minimum to 3.9 or greater. - - (ADB - 2017/11/02, HDFFV-10321) - - - - Fixed Fortran linker flags when using the NAG Fortran compiler (autotools). - - (HDFFV-10037, MSB, 2017/10/21) - - - cmake - - Too many commands for POST_BUILD step caused command line to be - too big on windows. - - Changed foreach of copy command to use a custom command with the - use of the HDFTEST_COPY_FILE macro. - - (ADB - 2017/07/12, HDFFV-10254) - - - Library - ------- - - filter plugin handling in H5PL.c and H5Z.c - - It was discovered that the dynamic loading process used by - filter plugins had issues with library dependencies. - - CMake build process changed to use LINK INTERFACE keywords, which - allowed HDF5 C library to make dependent libraries private. The - filter plugin libraries no longer require dependent libraries - (such as szip or zlib) to be available. 
- (ADB - 2017/11/16, HDFFV-10328) - - - Fix rare object header corruption bug - - In certain cases, such as when converting large attributes to dense - storage, an error could occur which would either fail an assertion or - cause file corruption. Fixed and added test. - - (NAF - 2017/11/14, HDFFV-10274) - - - H5Zfilter_avail in H5Z.c - - The public function checked for plugins, while the private - function did not. - - Modified H5Zfilter_avail and private function, H5Z_filter_avail. - Moved check for plugin from public to private function. Updated - H5P__set_filter due to change in H5Z_filter_avail. Updated tests. - - (ADB - 2017/10/10, HDFFV-10297, HDFFV-10319) - - - Fix H5Sencode bug when num points selected is >2^32 - - Modified to fail if the 32 bit limit is exceeded when encoding either - offsets or counts in the selection. - - (HDFFV-10323, VC, 2017/09/07) - - - Fix H5HL_offset_into() - - (1) Fix H5HL_offset_into() to return error when offset exceeds heap data - block size. - (2) Fix other places in the library that call this routine to detect - error routine. - - (HDFFV-10216, VC, 2017/09/05) - - - Tools - ----- - - h5repack - - h5repack failed to copy a dataset with existing filter. - - Reworked code for h5repack and h5diff code in tools library. Added - improved error handling, cleanup of resources and checks of calls. - Modified H5Zfilter_avail and private function, H5Z_filter_avail. - Moved check for plugin from public to private function. Updated - H5P__set_filter due to change in H5Z_filter_avail. Updated tests. - Note, h5repack output display has changed to clarify the individual - steps of the repack process. The output indicates if an operation - applies to all objects. Lines with notation and no information - have been removed. - - (ADB - 2017/10/10, HDFFV-10297, HDFFV-10319) - - - h5repack - - h5repack always set the User Defined filter flag to H5Z_FLAG_MANDATORY. 
- - Added another parameter to the 'UD=' option to set the flag by default - to '0' or H5Z_FLAG_MANDATORY, the other choice is '1' or H5Z_FLAG_OPTIONAL. - - (ADB - 2017/08/31, HDFFV-10269) - - - h5ls - - h5ls generated error on stack when it encountered a H5S_NULL - dataspace. - - Adding checks for H5S_NULL before calling H5Sis_simple (located - in the h5tools_dump_mem function) fixed the issue. - - (ADB - 2017/08/17, HDFFV-10188) - - - h5dump - - h5dump segfaulted on output of XML file. - - Function that escape'd strings used the full buffer length - instead of just the length of the replacement string in a - strncpy call. Using the correct length fixed the issue. - - (ADB - 2017/08/01, HDFFV-10256) - - - h5diff - - h5diff segfaulted on compare of a NULL variable length string. - - Improved h5diff compare of strings by adding a check for - NULL strings and setting the lengths to zero. - - (ADB - 2017/07/25, HDFFV-10246) - - - h5import - - h5import crashed trying to import data from a subset of a dataset. - - Improved h5import by adding the SUBSET keyword. h5import understands - to use the Count times the Block as the size of the dimensions. - Added INPUT_B_ORDER keyword to old-style configuration files. - The import from h5dump function expects the binary files to use native - types (FILE '-b' option) in the binary file. - - (ADB - 2017/06/15, HDFFV-10219) - - - C++ API - ------- - - Marked the following functions deprecated because they were moved to - class H5Object: - H5Location::createAttribute() - H5Location::openAttribute() - H5Location::attrExists() - H5Location::removeAttr() - H5Location::renameAttr() - H5Location::getNumAttrs() - - (BMR - 2017/10/17) - - -Supported Platforms -=================== -The following platforms are supported and have been tested for this release. -They are built with the configure process unless specified otherwise. 
- - Linux 2.6.32-573.22.1.el6 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (platypus/mayll) Version 4.4.7 20120313 - Versions 4.9.3, 5.3.0, 6.2.0 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 16.10-0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 17.0.0.196 Build 20160721 - MPICH 3.1.4 compiled with GCC 4.9.3 - OpenMPI 2.0.1 compiled with GCC 4.9.3 - - Linux 2.6.32-573.18.1.el6 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-16) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.4.7 20120313 (Red Hat 4.4.7-16) - (ostrich) GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-16) - IBM XL C/C++ V13.1 - IBM XL Fortran V15.1 - - Linux 3.10.0-327.10.1.el7 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (kituo/moohan/jelly Version 4.8.5 20150623 (Red Hat 4.8.5-4) - Versions 4.9.3, 5.3.0, 6.2.0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 17.0.4.196 Build 20170411 - MPICH 3.1.4 compiled with GCC 4.9.3 - NAG Fortran Compiler Release 6.1(Tozai) Build 6116 - - SunOS 5.11 32- and 64-bit Sun C 5.12 SunOS_sparc - (emu) Sun Fortran 95 8.6 SunOS_sparc - Sun C++ 5.12 SunOS_sparc - - Windows 7 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - Visual Studio 2015 w/ Intel Fortran 16 (cmake) - - Windows 7 x64 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - Visual Studio 2015 w/ Intel Fortran 16 (cmake) - Visual Studio 2015 w/ Intel C, Fortran 2017 (cmake) - Visual Studio 2015 w/ MSMPI 8 (cmake) - Cygwin(CYGWIN_NT-6.1 2.8.0(0.309/5/3) - gcc and gfortran compilers (GCC 5.4.0) - (cmake and autotools) - - Windows 10 Visual Studio 2015 w/ Intel Fortran 16 (cmake) - Cygwin(CYGWIN_NT-6.1 2.8.0(0.309/5/3) - gcc and gfortran compilers (GCC 5.4.0) - (cmake and autotools) - - Windows 10 x64 Visual Studio 2015 w/ Intel Fortran 16 (cmake) - - Mac OS X Mavericks 10.9.5 Apple LLVM version 6.0 
(clang-600.0.57) - 64-bit gfortran GNU Fortran (GCC) 4.9.2 - (wren/quail) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X Yosemite 10.10.5 Apple LLVM version 6.1 (clang-602.0.53) - 64-bit gfortran GNU Fortran (GCC) 4.9.2 - (osx1010dev/osx1010test) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X El Capitan 10.11.6 Apple LLVM version 7.3.0 (clang-703.0.29) - 64-bit gfortran GNU Fortran (GCC) 5.2.0 - (VM osx1011dev/osx1011test) Intel icc/icpc/ifort version 16.0.2 - - Mac OS Sierra 10.12.6 Apple LLVM version 8.1 (clang-802.0.42) - 64-bit gfortran GNU Fortran (GCC) 7.1.0 - (kite) Intel icc/icpc/ifort version 17.0.2 - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90/ F90 C++ zlib SZIP - parallel F2003 parallel -SunOS 5.11 32-bit n y/y n y y y -SunOS 5.11 64-bit n y/y n y y y -Windows 7 y y/y n y y y -Windows 7 x64 y y/y n y y y -Windows 7 Cygwin n y/n n y y y -Windows 7 x64 Cygwin n y/n n y y y -Windows 10 y y/y n y y y -Windows 10 x64 y y/y n y y y -Mac OS X Mavericks 10.9.5 64-bit n y/y n y y y -Mac OS X Yosemite 10.10.5 64-bit n y/y n y y y -Mac OS X El Capitan 10.11.6 64-bit n y/y n y y y -Mac OS Sierra 10.12.6 64-bit n y/y n y y y -AIX 6.1 32- and 64-bit n y/n n y y y -CentOS 6.7 Linux 2.6.32 x86_64 GNU y y/y y y y y -CentOS 6.7 Linux 2.6.32 x86_64 Intel n y/y n y y y -CentOS 6.7 Linux 2.6.32 x86_64 PGI n y/y n y y y -CentOS 7.1 Linux 3.10.0 x86_64 GNU y y/y y y y y -CentOS 7.1 Linux 3.10.0 x86_64 Intel n y/y n y y y -Linux 2.6.32-431.11.2.el6.ppc64 n y/n n y y y - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -SunOS 5.11 32-bit y y y y -SunOS 5.11 64-bit y y y y -Windows 7 y y y y -Windows 7 x64 y y y y -Windows 7 Cygwin n n n y -Windows 7 x64 
Cygwin n n n y -Windows 10 y y y y -Windows 10 x64 y y y y -Mac OS X Mavericks 10.9.5 64-bit y n y y -Mac OS X Yosemite 10.10.5 64-bit y n y y -Mac OS X El Capitan 10.11.6 64-bit y n y y -Mac OS Sierra 10.12.6 64-bit y n y y -AIX 6.1 32- and 64-bit y n n y -CentOS 6.7 Linux 2.6.32 x86_64 GNU y y y y -CentOS 6.7 Linux 2.6.32 x86_64 Intel y y y y -CentOS 6.7 Linux 2.6.32 x86_64 PGI y y y y -CentOS 7.1 Linux 3.10.0 x86_64 GNU y y y y -CentOS 7.1 Linux 3.10.0 x86_64 Intel y y y y -Linux 2.6.32-431.11.2.el6.ppc64 y y y y - -Compiler versions for each platform are listed in the preceding -"Supported Platforms" table. - - -More Tested Platforms -===================== -The following platforms are not supported but have been tested for this release. - - Linux 2.6.32-573.22.1.el6 g95 (GCC 4.0.3 (g95 0.94!) - #1 SMP x86_64 GNU/Linux - (mayll) - - Debian8.4.0 3.16.0-4-amd64 #1 SMP Debian 3.16.36-1 x86_64 GNU/Linux - gcc (Debian 4.9.2-10) 4.9.2 - GNU Fortran (Debian 4.9.2-10) 4.9.2 - (cmake and autotools) - - Fedora24 4.7.2-201.fc24.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc (GCC) 6.1.1 20160621 (Red Hat 6.1.1-3) - GNU Fortran (GCC) 6.1.1 20160621 (Red Hat 6.1.1-3) - (cmake and autotools) - - CentOS 7.2 3.10.0-327.28.2.el7.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc (GCC) 4.8.5 20150623 (Red Hat 4.8.5-4) - GNU Fortran (GCC) 4.8.5 20150623 (Red Hat 4.8.5-4) - (cmake and autotools) - - Ubuntu 16.04 4.4.0-38-generic #62-Ubuntu SMP x86_64 GNU/Linux - gcc (Ubuntu 5.4.0-6ubuntu1~16.04.2) 5.4.0 - GNU Fortran (Ubuntu 5.4.0-6ubuntu1~16.04.2) 5.4.0 - (cmake and autotools) - - -Known Problems -============== - - The dynamically loaded plugin test libraries require undefined references - to HDF5 functions to be resolved at runtime in order to function properly. - With autotools on CYGWIN this results in build errors, and we have not - found a solution that satisfies both. Therefore the dynamically loaded - plugin tests have been disabled on CYGWIN. 
- - Mac OS X 10.13 added additional subdirectory structure in .libs for shared - libraries. Consequently "make check" will fail testing java and dynamically - loaded plugin test libraries attempting to copy files from the previous - locations in .libs directories. This will be addressed in the next release - when support for the Mac OS X 10.13 platform is added. - - Known problems in previous releases can be found in the HISTORY*.txt files - in the HDF5 source. Please report any new problems found to - help@hdfgroup.org. - - -%%%%1.8.19%%%% - - -HDF5 version 1.8.19 released on 2017-06-15 -================================================================================ - -INTRODUCTION -============ - -This document describes the differences between HDF5-1.8.18 and -HDF5-1.8.19, and contains information on the platforms tested and -known problems in HDF5-1.8.19. -For more details, see the files HISTORY-1_0-1_8_0_rc3.txt -and HISTORY-1_8.txt in the release_docs/ directory of the HDF5 source. - -Links to the HDF5 1.8.19 source code, documentation, and additional materials -can be found on the HDF5 web page at: - - https://support.hdfgroup.org/HDF5/ - -The HDF5 1.8.19 release can be obtained from: - - https://support.hdfgroup.org/HDF5/release/obtain518.html - -User documentation for 1.8.19 can be accessed directly at this location: - - https://support.hdfgroup.org/HDF5/doc1.8/ - -New features in the HDF5-1.8.x release series, including brief general -descriptions of some new and modified APIs, are described in the "What's New -in 1.8.0?" 
document: - - https://support.hdfgroup.org/HDF5/doc/ADGuide/WhatsNew180.html - -All new and modified APIs are listed in detail in the "HDF5 Software Changes -from Release to Release" document, in the section "Release 1.8.19 (current -release) versus Release 1.8.18 - - https://support.hdfgroup.org/HDF5/doc1.8/ADGuide/Changes.html - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS -======== - -- New Features -- Support for New Platforms, Languages, and Compilers -- Bug Fixes since HDF5-1.8.19 -- Supported Platforms -- Supported Configuration Features Summary -- More Tested Platforms -- Known Problems - - -New Features -============ - - Configuration - -------------- - - CMake in the Tools - - User-defined filters on Windows require that tools be built - with shared libraries. - - CMake command code added to build tools with - shared libraries as well as with static libraries. - - (ADB - 2017/02/07, HDFFV-10123) - - - Library - ------- - - H5PL package - - Users would like to be able to set the plugin path programmatically - instead of through the environment variable. - - H5PL package obtained new APIs for manipulating the entries of - the plugin path table. The new APIs are; - H5PLappend - Insert a plugin path at the end of the list. - H5PLprepend - Insert a plugin path at the beginning of the list. - H5PLreplace - Replace the path at the specified index. - H5PLinsert - Insert a plugin path at the specified index, moving - other paths after the index. - H5PLremove - Remove the plugin path at the specified index and - compacting the list. - H5PLget - Query the plugin path at the specified index. - H5PLsize - Query the size of the current list of plugin paths. - - (ADB - 2017/04/04, HDFFV-10143) - - - H5Dget_chunk_storage_size - - The storage size of a chunk in the file is needed to determine the size - of the buffer to be allocated for reading a chunk directly from a file. 
- - New API function gets the size in bytes currently allocated within a - file for a raw data chunk in a dataset. This function was added to get - the chunk size in support of the implementation of H5DOread_chunks, but - may also be useful for other purposes. - - (VC - 2017/05/02, HDFFV-9934) - - C++ API - ------- - - The following C++ API wrappers have been added to the C++ Library: - // Determines if a name exists. - H5Location::nameExists() - C API H5Lexists() - - // Checks if an ID is valid. - IdComponent::isValid() - C API H5Iis_valid() - - // Sets the number of soft or user-defined links that can be - // traversed before a failure occurs. - LinkAccPropList::setNumLinks() - C API H5Pset_nlinks() - - // Gets the number of soft or user-defined link traversals allowed - LinkAccPropList::getNumLinks() - C API H5Pget_nlinks() - - // Returns a copy of the creation property list of a datatype. - DataType::getCreatePlist() - C API H5Tget_create_plist() - - // Opens an object within a location, regardless its type. - H5Location::openObjId() - C API H5Oopen() - H5Location::openObjId() - C API H5Oopen() - - // Closes an object opened by openObjId(). - H5Location::closeObjId() - C API H5Oclose() - - // Gets general information about a file. - H5File::getFileInfo() - C API H5Fget_info() - - // Returns the header version of an HDF5 object. - H5Object::objVersion() - C API H5Oget_info() - - (BMR, 2017/05/13, HDFFV-10004, HDFFV-10156) - - - New class LinkAccPropList is added for link access property list - - (BMR, 2017/05/13, HDFFV-10156) - - High-Level APIs - --------------- - - H5DOread_chunk - - Users wanted to read compressed data directly from a file without any - processing by the HDF5 data transfer pipeline, just as they were able - to write it directly to a file with H5DOwrite_chunk. - - New API function, corresponding to existing function H5DOwrite_chunk. 
- H5DOread_chunk reads a raw data chunk directly from a chunked dataset - in the file into the application buffer, bypassing the library’s internal - data transfer pipeline, including filters. - - (VC - 2017/05/02, HDFFV-9934) - - -Support for New Platforms, Languages, and Compilers -=================================================== - - - Added OpenMPI 2.0.1 compiled with GCC 4.9.3 - - -Bug Fixes since HDF5-1.8.18 -=========================== - - Configuration - ------------- - - Support for Fortran shared libraries on OS X with autotools now - works. Cmake builds still disables Fortran shared libraries on OS X. - (MSB - 2017/04/30, HDFFV-2772) - - - Library - ------- - - bitfield datatypes - - bitfield datatypes did not fully support endianness of the data. - - Improved the endianness of bitfield datatypes by adding missing functional - code. This code used integer types as a template. - - (ADB - 2017/05/12, HDFFV-10186) - - - Newly created datasets with H5D_COMPACT layout failed to open after - several H5Dopen/H5Dclose cycles. - - The layout "dirty" flag for a compact dataset is now properly reset - before flushing the message. - - (VC - 2017/05/11, HDFFV-10051) - - Missing #ifdef __cplusplus macros were added to the generated H5Epubgen.h file. - - (DER - 2017/04/25, HDFFV-9638) - - - Tools - ----- - - h5repack - - h5repack did not maintain the creation order flag of the root - group. - - Improved h5repack by reading the creation order and applying the - flag to the new root group. Also added arguments to set the - order and index direction, which applies to the traversing of the - original file, on the command line. - - (ADB - 2017/05/26, HDFFV-8611) - - - h5diff - - h5diff failed to account for strpad type and null terminators - of char strings. Also, h5diff failed to account for string length - differences and would give a different result depending on file - order in the command line. 
- - Improved h5diff compare of strings and arrays by adding a check for - string lengths and if the strpad was null filled. - - (ADB - 2017/05/18, HDFFV-9055, HDFFV-10128) - - - h5diff - - h5diff help text about epsilon comparison was confusing. - - Changed h5diff help text to indicate that the 'a' refers to the - datapoint in file1 and 'b' refers to the datapoint value in file2. - - (ADB - 2017/05/16, HDFFV-9995) - - - h5diff - - h5diff did not report user-defined filter data differences correctly. - - Improved h5diff compare of user-defined filter data by reporting an - error if the user-defined filter plugin cannot be found. - - (ADB - 2017/01/18, HDFFV-9994) - C++ API - ------- - - The class hierarchy is revised to better reflect the HDF5 model. - Class H5File is now derived from class Group instead of H5Location. - Class Attribute is now derived from H5Location instead of IdComponent. - Wrappers of H5A APIs in H5Location are now duplicated in H5Object, - the original wrappers in H5Location will be deprecated in future - releases. - - (BMR - 2017/05/15, HDFFV-10156) - - - -Supported Platforms -=================== -The following platforms are supported and have been tested for this release. -They are built with the configure process unless specified otherwise. 
- - Linux 2.6.32-573.22.1.el6 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (platypus/mayll) Version 4.4.7 20120313 - Versions 4.9.3, 5.2.0, 6.2.0 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 16.10-0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 17.0.0.098 Build 20160721 - MPICH 3.1.4 compiled with GCC 4.9.3 - OpenMPI 2.0.1 compiled with GCC 4.9.3 - - Linux 2.6.32-573.18.1.el6 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-16) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.4.7 20120313 (Red Hat 4.4.7-16) - (ostrich) GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-16) - IBM XL C/C++ V13.1 - IBM XL Fortran V15.1 - - Linux 3.10.0-327.10.1.el7 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (kituo/moohan) Version 4.8.5 20150623 (Red Hat 4.8.5-4) - Versions 4.9.3, 5.3.0, 6.2.0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 17.0.4.196 Build 20170411 - MPICH 3.1.4 compiled with GCC 4.9.3 - - SunOS 5.11 32- and 64-bit Sun C 5.12 SunOS_sparc - (emu) Sun Fortran 95 8.6 SunOS_sparc - Sun C++ 5.12 SunOS_sparc - Windows 7 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - Visual Studio 2015 w/ Intel Fortran 16 (cmake) - - Windows 7 x64 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - Visual Studio 2015 w/ Intel Fortran 16 (cmake) - Visual Studio 2015 w/ Intel C, Fortran 2017 (cmake) - Visual Studio 2015 w/ MSMPI 8 (cmake) - Cygwin(CYGWIN_NT-6.1 2.8.0(0.309/5/3) - gcc and gfortran compilers (GCC 5.4.0) - (cmake and autotools) - - Windows 10 Visual Studio 2015 w/ Intel Fortran 16 (cmake) - Cygwin(CYGWIN_NT-6.1 2.8.0(0.309/5/3) - gcc and gfortran compilers (GCC 5.4.0) - (cmake and autotools) - - Windows 10 x64 Visual Studio 2015 w/ Intel Fortran 16 (cmake) - - Mac OS X Mt. 
Lion 10.8.5 Apple LLVM version 5.1 (clang-503.0.40) - 64-bit gfortran GNU Fortran (GCC) 4.8.2 - (swallow/kite) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X Mavericks 10.9.5 Apple LLVM version 6.0 (clang-600.0.57) - 64-bit gfortran GNU Fortran (GCC) 4.9.2 - (wren/quail) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X Yosemite 10.10.5 Apple LLVM version 6.1 (clang-602.0.53) - 64-bit gfortran GNU Fortran (GCC) 4.9.2 - (osx1010dev/osx1010test) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X El Capitan 10.11.4 Apple LLVM version 7.3.0 (clang-703.0.29) - 64-bit gfortran GNU Fortran (GCC) 5.2.0 - (VM osx1011dev/osx1011test) Intel icc/icpc/ifort version 16.0.2 - - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform -Platform C F90/ F90 C++ zlib SZIP - parallel F2003 parallel -SunOS 5.11 32-bit n y/y n y y y -SunOS 5.11 64-bit n y/y n y y y -Windows 7 y y/y n y y y -Windows 7 x64 y y/y n y y y -Windows 7 Cygwin n y/n n y y y -Windows 7 x64 Cygwin n y/n n y y y -Windows 10 y y/y n y y y -Windows 10 x64 y y/y n y y y -Mac OS X Mountain Lion 10.8.5 64-bit n y/y n y y y -Mac OS X Mavericks 10.9.5 64-bit n y/y n y y y -Mac OS X Yosemite 10.10.5 64-bit n y/y n y y y -AIX 6.1 32- and 64-bit n y/n n y y y -CentOS 6.7 Linux 2.6.32 x86_64 GNU y y/y y y y y -CentOS 6.7 Linux 2.6.32 x86_64 Intel n y/y n y y y -CentOS 6.7 Linux 2.6.32 x86_64 PGI n y/y n y y y -CentOS 7.1 Linux 3.10.0 x86_64 GNU y y/y y y y y -CentOS 7.1 Linux 3.10.0 x86_64 Intel n y/y n y y y -Linux 2.6.32-431.11.2.el6.ppc64 n y/n n y y y - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -SunOS 5.11 32-bit y y y y -SunOS 5.11 64-bit y y y y -Windows 7 y y y y -Windows 7 x64 y y y y -Windows 7 Cygwin n n n y -Windows 
7 x64 Cygwin n n n y -Windows 10 y y y y -Windows 10 x64 y y y y -Mac OS X Mountain Lion 10.8.5 64-bit y n y y -Mac OS X Mavericks 10.9.5 64-bit y n y y -Mac OS X Yosemite 10.10.5 64-bit y n y y -AIX 6.1 32- and 64-bit y n n y -CentOS 6.7 Linux 2.6.32 x86_64 GNU y y y y -CentOS 6.7 Linux 2.6.32 x86_64 Intel y y y y -CentOS 6.7 Linux 2.6.32 x86_64 PGI y y y y -CentOS 7.1 Linux 3.10.0 x86_64 GNU y y y y -CentOS 7.1 Linux 3.10.0 x86_64 Intel y y y y -Linux 2.6.32-431.11.2.el6.ppc64 y y y y - -Compiler versions for each platform are listed in the preceding -"Supported Platforms" table. - - -More Tested Platforms -===================== -The following platforms are not supported but have been tested for this release. - - Linux 2.6.32-573.22.1.el6 g95 (GCC 4.0.3 (g95 0.94!) - #1 SMP x86_64 GNU/Linux - (mayll) - - Debian8.4.0 3.16.0-4-amd64 #1 SMP Debian 3.16.36-1 x86_64 GNU/Linux - gcc (Debian 4.9.2-10) 4.9.2 - GNU Fortran (Debian 4.9.2-10) 4.9.2 - (cmake and autotools) - - Fedora24 4.7.2-201.fc24.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc (GCC) 6.1.1 20160621 (Red Hat 6.1.1-3) - GNU Fortran (GCC) 6.1.1 20160621 (Red Hat 6.1.1-3) - (cmake and autotools) - - CentOS 7.2 3.10.0-327.28.2.el7.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc (GCC) 4.8.5 20150623 (Red Hat 4.8.5-4) - GNU Fortran (GCC) 4.8.5 20150623 (Red Hat 4.8.5-4) - (cmake and autotools) - - Ubuntu 16.04 4.4.0-38-generic #62-Ubuntu SMP x86_64 GNU/Linux - gcc (Ubuntu 5.4.0-6ubuntu1~16.04.2) 5.4.0 - GNU Fortran (Ubuntu 5.4.0-6ubuntu1~16.04.2) 5.4.0 - (cmake and autotools) - - -Known Problems -============== - - The dynamically loaded plugin test libraries require undefined references - to HDF5 functions to be resolved at runtime in order to function properly. - With autotools on CYGWIN this results in build errors, and we have not - found a solution that satisfies both. Therefore the dynamically loaded - plugin tests have been disabled on CYGWIN. 
- - Known problems in previous releases can be found in the HISTORY*.txt files - in the HDF5 source. Please report any new problems found to - help@hdfgroup.org. - - -%%%%1.8.18%%%% - - -HDF5 version 1.8.18 released on 2016-11-14 -================================================================================ - -INTRODUCTION -============ - -This document describes the differences between HDF5-1.8.17 and -HDF5-1.8.18, and contains information on the platforms tested and -known problems in HDF5-1.8.18. -For more details, see the files HISTORY-1_0-1_8_0_rc3.txt -and HISTORY-1_8.txt in the release_docs/ directory of the HDF5 source. - -Links to the HDF5 1.8.18 source code, documentation, and additional materials -can be found on the HDF5 web page at: - - https://support.hdfgroup.org/HDF5/ - -The HDF5 1.8.18 release can be obtained from: - - https://support.hdfgroup.org/HDF5/release/obtain518.html - -User documentation for 1.8.18 can be accessed directly at this location: - - https://support.hdfgroup.org/HDF5/doc1.8/ - -New features in the HDF5-1.8.x release series, including brief general -descriptions of some new and modified APIs, are described in the "What's New -in 1.8.0?" 
document: - - https://support.hdfgroup.org/HDF5/doc/ADGuide/WhatsNew180.html - -All new and modified APIs are listed in detail in the "HDF5 Software Changes -from Release to Release" document, in the section "Release 1.8.18 (current -release) versus Release 1.8.17 - - https://support.hdfgroup.org/HDF5/doc1.8/ADGuide/Changes.html - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS -======== - -- New Features -- Support for New Platforms, Languages, and Compilers -- Bug Fixes since HDF5-1.8.17 -- Supported Platforms -- Supported Configuration Features Summary -- More Tested Platforms -- Known Problems - - -New Features -============ - - Configuration - ------------- - - - CMake: Added NAMESPACE hdf5:: to package configuration files to allow - projects using installed HDF5 binaries built with CMake to link with - them without specifying the HDF5 library location via IMPORTED_LOCATION. - - (ADB, 2016/10/17, HDFFV-10003) - - - - CMake: Changed the CTEST_BUILD_CONFIGURATION option to - CTEST_CONFIGURATION_TYPE as recommended by the CMake documentation. - - (ADB, 2016/10/17, HDFFV-9971) - - - CMake: Added support for GIT - - (ADB, 2016/07/12) - - - Library - ------- - - None - - - Parallel Library - ---------------- - - None - - - Tools - ----- - - None - - - High-Level APIs - --------------- - - None - - - Fortran API - ----------- - - None - - - C++ API - ------- - - None - - -Support for New Platforms, Languages, and Compilers -=================================================== - - - -Bug Fixes since HDF5-1.8.17 -=========================== - - Configuration - ------------- - - - Fixed a problem preventing HDF5 to be built on 32-bit CYGWIN by - condensing cygwin configuration files into a single file and - removing outdated compiler settings. 
- - (ABD, 2016/07/12, HDFFV-9946) - - - - CMake: Fixed a command length overflow error by converting custom - commands inside CMakeTest.cmake files into regular dependencies and - targets. - - (ABD, 2016/07/12, HDFFV-9939) - - - - CMake: Fixed a timeout error that would occasionally occur when running - the virtual file driver tests simultaneously due to test directory and file - name collisions. - - (ABD, 2016/09/19, HDFFV-9431) - - - Library - ------- - - - Fixed a memory leak that would occur when the library allocated memory - for an external file prefix (H5Pset_efile_prefix) and failed to free it. - - (DER, 2016/04/29) - - - - Fixed an error that would occur when calling H5Adelete on an attribute - which is attached to an externally linked object in the target file and - whose datatype is a committed datatype in the main file. - - (VC, 2016-07-04, HDFFV-9940) - - - - Fixed a problem where a plugin compiled into a DLL in the default plugin - directory could not be found by the HDF5 library at runtime on Windows - when the HDF5_PLUGIN_PATH environment variable was not set. - - (ABD, 2016/08/01, HDFFV-9706) - - - - Fixed an issue where H5Pset_alignment could result in misaligned blocks - with some input combinations, causing an assertion failure in debug mode. - - (NAF, 2016/08/11, HDFFV-9948) - - - - A number of issues were fixed when reading/writing from/to corrupted - files to ensure that the library fails gracefully in these cases: - - * Writing to a corrupted file that has an object message which is - incorrectly marked as shareable on disk results in a buffer overflow / - invalid write instead of a clean error message. - - * Decoding data from a corrupted file with a dataset encoded with the - H5Z_NBIT decoding can result in a code execution vulnerability under - the context of the application using the HDF5 library. 
- - * When decoding an array datatype from a corrupted file, the HDF5 library - fails to return an error in production if the number of dimensions - decoded is greater than the maximum rank. - - * When decoding an "old style" array datatype from a corrupted file, the - HDF5 library fails to return an error in production if the number of - dimensions decoded is greater than the maximum rank. - - (NAF, 2016/10/06, HDFFV-9950, HDFFV-9951, HDFFV-9992, HDFFV-9993) - - - - Fixed an error that would occur when copying an object with an attribute - which is a compound datatype consisting of a variable length string. - - (VC, 2016-10-17, HDFFV-7991) - - - Parallel Library - ---------------- - - - Fixed a bug that could occur when allocating a chunked dataset in parallel - with an alignment set and an alignment threshold greater than the chunk - size but less than or equal to the raw data aggregator size. - - (NAF, 2016/08/11, HDFFV-9969) - - - Performance - ------------- - - None - - - Tools - ----- - - - Fixed an error in the compiler wrapper scripts (h5cc, h5fc, et al.) - in which they would erroneously drop the file argument specified via - the -o flag when the -o flag was specified before the -c flag on the - command line, resulting in a failure to compile. - - (LRK, 2016/06/08, HDFFV-9938, HDFFV-9530) - - - - h5repack User Defined (UD) filter parameters were not parsed correctly. - - The UD filter parameters were not being parsed correctly. Reworked coding - section to parse the correct values and verify number of parameters. - - (ADB, 2016/10/19, HDFFV-9996, HDFFV-9974, HDFFV-9515, HDFFV-9039) - - - Fortran API - ----------- - - - Fortran library fails to compile and fails tests with NAG compiler. - - * Removed the non-standard assumption that KIND=SIZEOF, in the HDF5 - configure programs. - * Removed Fortran 66 character/integer conversions from tests. 
- * Removed the use of C_SIZEOF in the test programs - * Changed to using STORAGE_SIZE in the test programs if available. Otherwise, - uses C_SIZEOF or SIZEOF. - - (MSB, 2016/9/22, HDFFV-9973) - - - - Fortran segfaults for F03 tests with NAG compiler - - * Removed INTENT(OUT) from 'fillvalue' in F2003 interface - for H5Pget_fill_value_f. - - (MSB, 2016/9/22, HDFFV-9980) - - - C++ API - ------- - - - The macro H5_NO_NAMESPACE is deprecated from the HDF5 C++ API library. - In future releases, the macros H5_NO_STD and OLD_HEADER_FILENAME may - also be removed. - - (BMR, 2016/10/27, HDFFV-9532) - - - High-Level APIs: - --------------- - - - The high-level API Packet Table (PT) did not write data correctly when - the datatype is a compound type that has string type as one of the - members. This problem started in 1.8.15, after the fix of HDFFV-9042 - was applied, which caused the Packet Table to use native type to access - the data. It should be up to the application to specify whether the - buffer is to be read into memory in the machine's native architecture. - Thus, the PT is fixed to not use native type but to make a copy of the - user's provided datatype during creation or the packet table's datatype - during opening. If an application wishes to use native type to read the - data, then the application will request that. However, the Packet Table - doesn't provide a way to specify memory datatype in this release. This - feature will be available in future releases, HDFFV-10023. - - (BMR, 2016/10/27, HDFFV-9758) - - - Fortran High-Level APIs: - ------------------------ - - None - - - Testing - ------- - - None - - -Supported Platforms -=================== -The following platforms are supported and have been tested for this release. -They are built with the configure process unless specified otherwise.
- - Linux 2.6.32-573.22.1.el6 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (platypus/mayll) Version 4.4.7 20120313 - Versions 4.8.4, 4.9.3, 5.2.0 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 15.7-0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 15.0.3.187 Build 20150407 - MPICH 3.1.4 compiled with GCC 4.9.3 - - Linux 2.6.32-573.18.1.el6 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-16) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.4.7 20120313 (Red Hat 4.4.7-16) - (ostrich) GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-16) - IBM XL C/C++ V13.1 - IBM XL Fortran V15.1 - - Linux 3.10.0-327.10.1.el7 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (kituo/moohan) Version 4.8.5 20150623 (Red Hat 4.8.5-4) - Versions 4.9.3, 5.2.0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 15.0.3.187 Build 20150407 - MPICH 3.1.4 compiled with GCC 4.9.3 - - SunOS 5.11 32- and 64-bit Sun C 5.12 SunOS_sparc - (emu) Sun Fortran 95 8.6 SunOS_sparc - Sun C++ 5.12 SunOS_sparc - - Windows 7 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - Visual Studio 2015 w/ Intel Fortran 16 (cmake) - Cygwin(CYGWIN_NT-6.1 2.2.1(0.289/5/3) gcc(4.9.3) compiler and gfortran) - (cmake and autotools) - - Windows 7 x64 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - Visual Studio 2015 w/ Intel Fortran 16 (cmake) - - Windows 10 Visual Studio 2015 w/ Intel Fortran 16 (cmake) - - Windows 10 x64 Visual Studio 2015 w/ Intel Fortran 16 (cmake) - - Mac OS X Mt. 
Lion 10.8.5 Apple LLVM version 5.1 (clang-503.0.40) - 64-bit gfortran GNU Fortran (GCC) 4.8.2 - (swallow/kite) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X Mavericks 10.9.5 Apple LLVM version 6.0 (clang-600.0.57) - 64-bit gfortran GNU Fortran (GCC) 4.9.2 - (wren/quail) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X Yosemite 10.10.5 Apple LLVM version 6.1 (clang-602.0.53) - 64-bit gfortran GNU Fortran (GCC) 4.9.2 - (osx1010dev/osx1010test) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X El Capitan 10.11.4 Apple LLVM version 7.3.0 (clang-703.0.29) - 64-bit gfortran GNU Fortran (GCC) 5.2.0 - (VM osx1011dev/osx1011test) Intel icc/icpc/ifort version 16.0.2 - - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90/ F90 C++ zlib SZIP - parallel F2003 parallel -SunOS 5.11 32-bit n y/y n y y y -SunOS 5.11 64-bit n y/y n y y y -Windows 7 y y/y n y y y -Windows 7 x64 y y/y n y y y -Windows 7 Cygwin n y/y n y y n -Windows 10 n y/y n y y y -Windows 10 x64 n y/y n y y y -Mac OS X Mountain Lion 10.8.5 64-bit n y/y n y y y -Mac OS X Mavericks 10.9.5 64-bit n y/y n y y y -Mac OS X Yosemite 10.10.5 64-bit n y/y n y y y -AIX 6.1 32- and 64-bit n y/n n y y y -CentOS 6.7 Linux 2.6.32 x86_64 GNU y y/y y y y y -CentOS 6.7 Linux 2.6.32 x86_64 Intel n y/y n y y y -CentOS 6.7 Linux 2.6.32 x86_64 PGI n y/y n y y y -CentOS 7.1 Linux 3.10.0 x86_64 GNU y y/y y y y y -CentOS 7.1 Linux 3.10.0 x86_64 Intel n y/y n y y y -Linux 2.6.32-431.11.2.el6.ppc64 n y/n n y y y - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -SunOS 5.11 32-bit y y y y -SunOS 5.11 64-bit y y y y -Windows 7 y y y y -Windows 7 x64 y y y y -Windows 7 Cygwin n n n y -Windows 10 y y y y -Windows 10 x64 y y y y 
-Mac OS X Mountain Lion 10.8.5 64-bit y n y y -Mac OS X Mavericks 10.9.5 64-bit y n y y -Mac OS X Yosemite 10.10.5 64-bit y n y y -AIX 6.1 32- and 64-bit y n n y -CentOS 6.7 Linux 2.6.32 x86_64 GNU y y y y -CentOS 6.7 Linux 2.6.32 x86_64 Intel y y y y -CentOS 6.7 Linux 2.6.32 x86_64 PGI y y y y -CentOS 7.1 Linux 3.10.0 x86_64 GNU y y y y -CentOS 7.1 Linux 3.10.0 x86_64 Intel y y y y -Linux 2.6.32-431.11.2.el6.ppc64 y y y y - -Compiler versions for each platform are listed in the preceding -"Supported Platforms" table. - - -More Tested Platforms -===================== -The following platforms are not supported but have been tested for this release. - - Linux 2.6.32-573.22.1.el6 g95 (GCC 4.0.3 (g95 0.94!) - #1 SMP x86_64 GNU/Linux - (platypus) - - Debian8.4.0 3.16.0-4-amd64 #1 SMP Debian 3.16.36-1 x86_64 GNU/Linux - gcc (Debian 4.9.2-10) 4.9.2 - GNU Fortran (Debian 4.9.2-10) 4.9.2 - (cmake and autotools) - - Fedora24 4.7.2-201.fc24.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc (GCC) 6.1.1 20160621 (Red Hat 6.1.1-3) - GNU Fortran (GCC) 6.1.1 20160621 (Red Hat 6.1.1-3) - (cmake and autotools) - - CentOS 7.2 3.10.0-327.28.2.el7.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc (GCC) 4.8.5 20150623 (Red Hat 4.8.5-4) - GNU Fortran (GCC) 4.8.5 20150623 (Red Hat 4.8.5-4) - (cmake and autotools) - - Ubuntu 16.04 4.4.0-38-generic #62-Ubuntu SMP x86_64 GNU/Linux - gcc (Ubuntu 5.4.0-6ubuntu1~16.04.2) 5.4.0 - GNU Fortran (Ubuntu 5.4.0-6ubuntu1~16.04.2) 5.4.0 - (cmake and autotools) - - -Known Problems -============== -* On windows platforms in debug configurations, the VFD flush1 tests will fail - with the split and multi VFD drivers. These tests will display a modal debug - dialog which must be answered or wait for the test timeout to expire. - (ADB - 2014/06/23 - HDFFV-8851) - -* CLANG compiler with the options -fcatch-undefined-behavior and -ftrapv - catches some undefined behavior in the alignment algorithm of the macro DETECT_I - in H5detect.c (Issue 8147). 
Since the algorithm is trying to detect the alignment - of integers, ideally the flag -fcatch-undefined-behavior shouldn't be used for - H5detect.c. In the future, we can separate flags for H5detect.c from the rest of - the library. (SLU - 2013/10/16) - -* Make provided by Solaris fails in "make check". Solaris users should use - gmake to build and install the HDF5 software. (AKC - 2013/10/08 - HDFFV-8534) - -* The C++ and FORTRAN bindings are not currently working on FreeBSD with the - native release 8.2 compilers (4.2.1), but are working with gcc 4.6 from the - ports (and probably gcc releases after that). - (QAK - 2012/10/19) - -* The following h5dump test case fails in BG/P machines (and potentially other - machines that use a command script to launch executables): - - h5dump --no-compact-subset -d "AHFINDERDIRECT::ah_centroid_t[0] it=0 tl=0" - tno-subset.h5 - - This is due to the embedded spaces in the dataset name being interpreted - by the command script launcher as meta-characters, thus passing three - arguments to h5dump's -d flag. The command passes if run by hand, just - not via the test script. - (AKC - 2012/05/03) - -* The STDIO VFD does not work on some architectures, possibly due to 32/64 - bit or large file issues. The basic STDIO VFD test is known to fail on - 64-bit SunOS 5.10 on SPARC when built with -m64 and 32-bit OS X/Darwin - 10.7.0. The STDIO VFD test has been disabled while we investigate and - a fix should appear in a future release. - (DER - 2011/10/14 - HDFFV-8235) - -* h5diff can report inconsistent results when comparing datasets of enum type - that contain invalid values. This is due to how enum types are handled in - the library and will be addressed in a future release. - (DER - 2011/10/14 - HDFFV-7527) - -* The links test can fail under the stdio VFD due to some issues with external - links. This will be investigated and fixed in a future release.
- (DER - 2011/10/14 - HDFFV-7768) - -* After the shared library support was fixed for some bugs, it was discovered - that "make prefix=XXX install" no longer works for shared libraries. It - still works correctly for static libraries. Therefore, if you want to - install the HDF5 shared libraries in a location such as /usr/local/hdf5, - you need to specify the location via the --prefix option during configure - time. E.g, ./configure --prefix=/usr/local/hdf5 ... - (AKC - 2011/05/07 - HDFFV-7583) - -* The parallel test, t_shapesame, in testpar/, may run for a long time and may - be terminated by the alarm signal. If that happens, one can increase the - alarm seconds (default is 1200 seconds = 20 minutes) by setting the - environment variable, $HDF5_ALARM_SECONDS, to a larger value such as 3600 - (60 minutes). Note that the t_shapesame test may fail in some systems - (see the "While working on the 1.8.6 release..." problem below). If - it does, it will waste more time if $HDF5_ALARM_SECONDS is set - to a larger value. - (AKC - 2011/05/07) - -* Shared Fortran libraries are not quite working on AIX. While they are - generated when --enable-shared is specified, the fortran and hl/fortran - tests fail. We are looking into the issue. HL and C++ shared libraries should now be - working as intended, however. - (MAM - 2011/04/20) - -* While working on the 1.8.6 release of HDF5, a bug was discovered that can - occur when reading from a dataset in parallel shortly after it has been - written to collectively. The issue was exposed by a new test in the parallel - HDF5 test suite, but had existed before that. We believe the problem lies with - certain MPI implementations and/or file systems. - - We have provided a pure MPI test program, as well as a standalone HDF5 - program, that can be used to determine if this is an issue on your system. - They should be run across multiple nodes with a varying number of processes.
- These programs can be found at: - http://www.hdfgroup.org/ftp/HDF5/examples/known_problems/ - (NAF - 2011/01/19) - -* All the VFL drivers aren't backward compatible. In H5FDpublic.h, the - structure H5FD_class_t changed in 1.8. There is new parameter added to - get_eoa and set_eoa callback functions. A new callback function - get_type_map was added in. The public function H5FDrealloc was taken - out in 1.8. The problem only happens when users define their own driver - for 1.6 and try to plug in 1.8 library. Because there's only one user - complaining about it, we (Elena, Quincey, and I) decided to leave it as - it is (see bug report #1279). Quincey will make a plan for 1.10. - (SLU - 2010/02/02) - -* The --enable-static-exec configure flag will only statically link libraries - if the static version of that library is present. If only the shared version - of a library exists (i.e., most system libraries on Solaris, AIX, and Mac, - for example, only have shared versions), the flag should still result in a - successful compilation, but note that the installed executables will not be - fully static. Thus, the only guarantee on these systems is that the - executable is statically linked with just the HDF5 library. - (MAM - 2009/11/04) - -* A dataset created or rewritten with a v1.6.3 library or after cannot be read - with the v1.6.2 library or before when the Fletcher32 EDC filter is enabled. - There was a bug in the calculation of the Fletcher32 checksum in the - library before v1.6.3; the checksum value was not consistent between big- - endian and little-endian systems. This bug was fixed in Release 1.6.3. - However, after fixing the bug, the checksum value was no longer the same as - before on little-endian system. Library releases after 1.6.4 can still read - datasets created or rewritten with an HDF5 library of v1.6.2 or before. 
- (SLU - 2005/06/30) - - -%%%%1.8.17%%%% - - -HDF5 version 1.8.17 released on 2016-05-10 -================================================================================ - -INTRODUCTION -============ - -This document describes the differences between HDF5-1.8.16 and -HDF5-1.8.17-*, and contains information on the platforms tested and -known problems in HDF5-1.8.17-*. -For more details, see the files HISTORY-1_0-1_8_0_rc3.txt -and HISTORY-1_8.txt in the release_docs/ directory of the HDF5 source. - -Links to the HDF5 1.8.17 source code, documentation, and additional materials -can be found on the HDF5 web page at: - - http://www.hdfgroup.org/products/hdf5/ - -The HDF5 1.8.17 release can be obtained from: - - http://www.hdfgroup.org/HDF5/release/obtain5.html - -User documentation for 1.8.17 can be accessed directly at this location: - - http://www.hdfgroup.org/HDF5/doc/ - -New features in the HDF5-1.8.x release series, including brief general -descriptions of some new and modified APIs, are described in the "What's New -in 1.8.0?" document: - - http://www.hdfgroup.org/HDF5/doc/ADGuide/WhatsNew180.html - -All new and modified APIs are listed in detail in the "HDF5 Software Changes -from Release to Release" document, in the section "Release 1.8.17 (current -release) versus Release 1.8.16 - - http://www.hdfgroup.org/HDF5/doc/ADGuide/Changes.html - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - - -CONTENTS -======== - -- New Features -- Support for New Platforms, Languages, and Compilers -- Bug Fixes since HDF5-1.8.16 -- Supported Platforms -- Supported Configuration Features Summary -- More Tested Platforms -- Known Problems - - - -New Features -============ - - Configuration - ------------- - - Cmakehdf5: Added Ability to Run Multiple Make Commands - - Added option --njobs to specify up to how many jobs to launch during - build (cmake) and testing (ctest). 
- - (AKC - 2015/12/13, HDFFV-9612) - - - Cmakehdf5: Added Szip Support and Verbose Option - - Added --with-szlib to support the Szip library; and - --enable/disable-verbose to display all CMake process output. - - (AKC - 2015/11/16, HDFFV-8932 and DAILYTEST-195) - - - CMake minimum is now 3.1.0. (ADB - 2015/11/14) - - - Large File System (LFS) Support has Changed in the Autotools - - We assume that fseeko and ftello exist. - - The *64 I/O functions and types are no longer explicitly used. - We now rely on a mapping provided by _FILE_OFFSET_BITS (or its - equivalent). - - _LARGEFILE(64)_SOURCE is no longer exposed via AM_CPPFLAGS. - - (DER - 2016/03/29, HDFFV-9626 and HDFFV-9541) - - - - Library - ------- - - New API Calls for Searching for External Dataset Storage - - API calls that determine the search path for dataset external - storage were added. H5Pset/get_efile_prefix() API calls were added - to the library. These functions give control over the search path - for dataset external storage that has been configured with - H5Pset_external(). - - Additionally, the HDF5_EXTFILE_PREFIX environment variable can be - used to control the search path. - - (DER - 2016/04/20, HDFFV-8740) - - - - Parallel Library - ---------------- - - None - - - - Tools - ----- - - None - - - - High-Level APIs - --------------- - - C Packet Table API - ------------------ - - Replacement of a Public Function with H5PTcreate - - The existing function H5PTcreate_fl limits applications so they - can use the deflate compression only. The public function - H5PTcreate has been added to replace H5PTcreate_fl. H5PTcreate - takes a property list identifier to provide flexibility on - creation properties. This also removes the following warning: - "deprecated conversion from string constant to "char*" - [-Wwrite-strings]". - - (BMR - 2016/04/25, HDFFV-9708, HDFFV-8615) - - - New Public Functions: H5PTget_dataset and H5PTget_type - - Two accessor functions have been added. 
H5PTget_dataset returns - the identifier of the dataset associated with the packet table, - and H5PTget_type returns the identifier of the datatype used by - the packet table. - - (BMR - 2016/04/25, HDFFV-8623 patch 3) - - - Regarding #ifdef VLPT_REMOVED - - The #ifdef VLPT_REMOVED blocks have been removed from the packet - table (PT) library source except for the following functions: - + H5PTis_varlen() has been made available again - + H5PTfree_vlen_readbuff() is now H5PTfree_vlen_buff() - - (BMR - 2016/04/25, HDFFV-442) - - C++ Packet Table API - -------------------- - - New Constructor in FL_PacketTable - - An overloaded constructor has been added to FL_PacketTable and - takes a property list identifier to provide flexibility on - creation properties such as compression. - - FL_PacketTable(hid_t fileID, const char* name, hid_t dtypeID, - hsize_t chunkSize = 0, hid_t plistID = H5P_DEFAULT) - - (BMR - 2016/04/25, HDFFV-8623 patch 5) - - - New Member Functions in PacketTable - - Two accessor wrappers were added to class PacketTable. - - PacketTable::GetDataset() returns the identifier of the dataset - associated with the packet table, and PacketTable::GetDatatype() - returns the identifier of the datatype that the packet table uses. - - (BMR - 2016/04/25, HDFFV-8623 patch 4) - - - New Member Functions with "char*" as an Argument - - Overloaded functions were added to provide the "const char*" - argument; the existing version will be deprecated in future - releases. This also removes the following warning: - "deprecated conversion from string constant to "char*" - [-Wwrite-strings]". 
- - (BMR - 2016/04/25, HDFFV-8623 patch 1, HDFFV-8615) - - - Regarding #ifdef VLPT_REMOVED - - The #ifdef VLPT_REMOVED blocks have been removed from the packet - table library source code except for the following functions: - + VL_PacketTable::IsVariableLength() was moved to PacketTable - + VL_PacketTable::FreeReadBuff() is now PacketTable::FreeBuff() - - (BMR - 2016/04/25, HDFFV-442) - - - - Fortran API - ----------- - - None - - - - C++ API - ------- - - New Member Function in DSetCreatPropList - - DSetCreatPropList::setNbit() was added to setup N-bit compression for - a dataset. - - (BMR - 2016/04/25, HDFFV-8623 patch 7) - - - New Overloaded "const" Member Functions in ArrayType - - The two following functions were added: - ArrayType::getArrayNDims() const - ArrayType::getArrayDims() const - to provide const version, and the non-const version was marked - deprecated. In-memory array information, ArrayType::rank and - ArrayType::dimensions, were removed. This is an implementation - detail and should not affect applications. - - (BMR, 2016/04/25, HDFFV-9725) - - - New member function added - - The assignment operator ArrayType::operator= is added because ArrayType - has pointer data members. - - (BMR, 2016/03/07, HDFFV-9562) - - -Support for New Platforms, Languages, and Compilers -=================================================== - - Mac OS X El Capitan 10.11.4 with compilers Apple clang/clang++ - version 7.3.0 from Xcode 7.3, gfortran GNU Fortran (GCC) 5.2.0 - and Intel icc/icpc/ifort version 16.0.2 - - - -Bug Fixes since HDF5-1.8.16 -=========================== - - Configuration - ------------- - - Updated Linux Language Level Flags to Match the Autotools. Removed - Linux-specific Flags from OS X. - - An addition to the flags simply being out of sync with the Autotools, - the Linux flags were used on OS X builds which led to symbols not being - found. 
Although this was non-fatal and compilation continued (implicit - definitions were used by the compiler and the symbols resolved at link - time), a large number of warnings were raised. - - Linux changes: - - * CHANGED: _POSIX_C_SOURCE (from 199605 to 200112L) - * ADDED: _GNU_SOURCE - * REMOVED: _BSD_SOURCE - * REMOVED: _DEFAULT_SOURCE - - (DER - 2015/12/08, HDFFV-9627) - - - The --enable-clear-file-buffers configure Option was Non-functional - so the Feature was Always Enabled (its default value). - - Regardless of the configure flag, the setting was always enabled when - the Autotools were used to configure HDF5. This was due to the "no" - option being processed after the "*" option in configure.ac so "*" - matched first. CMake was unaffected. - - The option now works correctly. - - NOTE that builders are always advised to leave this option enabled. - When disabled, buffers that are written to disk may contain the - memory's previous contents, which may include secure information. - The performance overhead of the feature (a single memset call per - allocation) is minimal. - - (DER - 2016/02/03, HDFFV-9676) - - - Added a patch to remove '"'s from arguments for MPI compilers that - were causing errors compiling H5lib_settings.c with SGI MPT. - - (LRK - 2016/04/20, HDFFV-9439) - - Library - ------- - - Fixed shared file pointer problem which caused a crash when running a - program provided by a user. - - (VC - 2016/04/01, HDFFV-9469) - - - Fixed some format string warnings that prevent compiling with - -Werror=format-security on gcc. - - These only appeared in error messages and would not cause problems - under normal operation. - - (DER - 2016/01/13, HDFFV-9640) - - - Fixed a library segmentation fault when accessing a corrupted - file provided by a user. 
- - (MSC - 2016/02/19, HDFFV-9670) - - - - Parallel Library - ---------------- - - None - - - - Performance - ------------- - - None - - - - Tools - ----- - - h5dump: Sub-setting Fixed for Dimensions Greater than Two - - When a dataset has more than two dimensions, sub-setting would - incorrectly calculate the data that needed to be displayed. - Added in block and stride calculations that account for dimensions - greater than two. NOTE: lines that have line breaks inserted - because of display length calculations may have index info that - is incorrect until the next dimension break. - - (ADB - 2016/03/07, HDFFV-9698) - - - h5dump: Issue with Argument Segmentation Fault - - When an argument with an optional value was at the end of the command - line with a value, h5dump would crash. Reworked check for remaining - arguments. - - (ADB - 2016/03/07, HDFFV-9570, HDFFV-9684) - - - h5dump: Issue with Default Fill Value - - Added all default cases of fill value to the display of fill value. - - (ADB - 2016/03/07, HDFFV-9241) - - - h5dump: Clarified Help - - Clarified usage of -O F option in h5dump utility help. - - (ADB - 2016/03/07, HDFFV-9066) - - - h5dump: Issue with Double Free Fault - - Added a check for filename not null before calling free(). - - (ADB - 2016/01/27, HDFFV-9639) - - - VS2015 Release Changed how Timezone was Handled - - Created a function, HDget_timezone, in H5system.c. Replaced - timezone variable usage with function call. - - (ADB - 2015/11/02, HDFFV-9550) - - - - Fortran API - ----------- - - None - - - - C++ API - ------- - - Removal of Obsolete Methods - - The overloaded methods which had parameters that should be const - but were not have been removed. - - (BMR - 2016/01/13, HDFFV-9789) - - - - High-Level APIs: - --------------- - - Fixed Memory Leak in Packet Table API - - Applied user's patch to fix memory leak in the creation of a - packet table.
- - (BMR - 2016/04/25, HDFFV-9700) - - - - Fortran High-Level APIs: - ------------------------ - - None - - - - Testing - ------- - - None - - - -Supported Platforms -=================== -The following platforms are supported and have been tested for this release. -They are built with the configure process unless specified otherwise. - - AIX 6.1 xlc/xlc_r 10.1.0.5 - (NASA G-ADA) xlC/xlC_r 10.1.0.5 - xlf90/xlf90_r 12.1.0.6 - - Linux 2.6.32-573.22.1.el6 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (platypus) Version 4.4.7 20120313 - Version 4.9.3, Version 5.2.0 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 15.7-0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 15.0.3.187 Build 20150407 - MPICH 3.1.4 compiled with GCC 4.9.3 - - Linux 2.6.32-504.8.1.el6.ppc64 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-16) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.4.7 20120313 (Red Hat 4.4.7-16) - (ostrich) GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-16) - IBM XL C/C++ V13.1 - IBM XL Fortran V15.1 - - Linux 3.10.0-229.14.1.el7 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (kituo/moohan) Version 4.8.5 20150623 (Red Hat 4.8.5-4) - Version 5.2.0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 15.0.3.187 Build 20150407 - MPICH 3.1.4 compiled with GCC 4.9.3 - - SunOS 5.11 32- and 64-bit Sun C 5.12 SunOS_sparc - (emu) Sun Fortran 95 8.6 SunOS_sparc - Sun C++ 5.12 SunOS_sparc - - Windows 7 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - Visual Studio 2015 (cmake) - Cygwin(CYGWIN_NT-6.1 2.2.1(0.289/5/3) gcc(4.9.3) compiler and gfortran) - (cmake and autotools) - - Windows 7 x64 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - Visual Studio 2015 (cmake) - - Windows 8.1 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - - 
Windows 8.1 x64 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - - Mac OS X Mt. Lion 10.8.5 Apple clang/clang++ version 5.1 from Xcode 5.1 - 64-bit gfortran GNU Fortran (GCC) 4.8.2 - (swallow/kite) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X Mavericks 10.9.5 Apple clang/clang++ version 6.0 from Xcode 6.2 - 64-bit gfortran GNU Fortran (GCC) 4.9.2 - (wren/quail) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X Yosemite 10.10.5 Apple clang/clang++ version 6.1 from Xcode 7.0 - 64-bit gfortran GNU Fortran (GCC) 4.9.2 - (osx1010dev/osx1010test) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X El Capitan 10.11.4 Apple clang/clang++ version 7.3.0 from Xcode 7.3 - 64-bit gfortran GNU Fortran (GCC) 5.2.0 - (osx1011dev/osx1011test) Intel icc/icpc/ifort version 16.0.2 - - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90/ F90 C++ zlib SZIP - parallel F2003 parallel -SunOS 5.11 32-bit n y/y n y y y -SunOS 5.11 64-bit n y/y n y y y -Windows 7 y y/y n y y y -Windows 7 x64 y y/y n y y y -Windows 7 Cygwin n y/y n y y n -Windows 8.1 n y/y n y y y -Windows 8.1 x64 n y/y n y y y -Mac OS X Mountain Lion 10.8.5 64-bit n y/y n y y y -Mac OS X Mavericks 10.9.5 64-bit n y/y n y y y -Mac OS X Yosemite 10.10.5 64-bit n y/y n y y y -AIX 6.1 32- and 64-bit n y/n n y y y -CentOS 6.7 Linux 2.6.32 x86_64 GNU y y/y y y y y -CentOS 6.7 Linux 2.6.32 x86_64 Intel n y/y n y y y -CentOS 6.7 Linux 2.6.32 x86_64 PGI n y/y n y y y -CentOS 7.1 Linux 3.10.0 x86_64 GNU y y/y y y y y -CentOS 7.1 Linux 3.10.0 x86_64 Intel n y/y n y y y -Linux 2.6.32-431.11.2.el6.ppc64 n y/n n y y y - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -SunOS
5.11 32-bit y y y y -SunOS 5.11 64-bit y y y y -Windows 7 y y y y -Windows 7 x64 y y y y -Windows 7 Cygwin n n n y -Windows 8.1 y y y y -Windows 8.1 x64 y y y y -Mac OS X Mountain Lion 10.8.5 64-bit y n y y -Mac OS X Mavericks 10.9.5 64-bit y n y y -Mac OS X Yosemite 10.10.5 64-bit y n y y -AIX 6.1 32- and 64-bit y n n y -CentOS 6.7 Linux 2.6.32 x86_64 GNU y y y y -CentOS 6.7 Linux 2.6.32 x86_64 Intel y y y y -CentOS 6.7 Linux 2.6.32 x86_64 PGI y y y y -CentOS 7.1 Linux 3.10.0 x86_64 GNU y y y y -CentOS 7.1 Linux 3.10.0 x86_64 Intel y y y y -Linux 2.6.32-431.11.2.el6.ppc64 y y y y - -Compiler versions for each platform are listed in the preceding -"Supported Platforms" table. - - -More Tested Platforms -===================== -The following platforms are not supported but have been tested for this release. - - Linux 2.6.18-431.11.2.el6 g95 (GCC 4.0.3 (g95 0.94!) - #1 SMP x86_64 GNU/Linux - (platypus) - - Windows 7 Visual Studio 2008 (cmake) - - Windows 7 x64 Visual Studio 2008 (cmake) - - Windows 10 Visual Studio 2013 w/ Intel Fortran 15 (cmake) - - Windows 10 x64 Visual Studio 2013 w/ Intel Fortran 15 (cmake) - - Debian7.5.0 3.2.0-4-amd64 #1 SMP Debian 3.2.51-1 x86_64 GNU/Linux - gcc (Debian 4.7.2-5) 4.7.2 - GNU Fortran (Debian 4.7.2-5) 4.7.2 - (cmake and autotools) - - Fedora20 3.15.3-200.fc20.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc (GCC) 4.8.3 20140624 (Red Hat 4.8.3-1) - GNU Fortran (GCC) 4.8.3 20140624 (Red Hat 4.8.3-1) - (cmake and autotools) - - SUSE 13.1 3.11.10-17-desktop #1 SMP PREEMPT x86_64 x86_64 x86_64 GNU/Linux - gcc (SUSE Linux) 4.8.1 - GNU Fortran (SUSE Linux) 4.8.1 - (cmake and autotools) - - Ubuntu 14.04 3.13.0-35-generic #62-Ubuntu SMP x86_64 GNU/Linux - gcc (Ubuntu/Linaro 4.9.1-0ubuntu1) 4.9.1 - GNU Fortran (Ubuntu/Linaro 4.9.1-0ubuntu1) 4.9.1 - (cmake and autotools) - - hopper.nersc.gov PrgEnv-gnu/5.2.40 - gcc (GCC) 4.9.2 20141030 (Cray Inc.) - GNU Fortran (GCC) 4.9.2 20141030 (Cray Inc.) - g++ (GCC) 4.9.2 20141030 (Cray Inc.)
- - -Known Problems -============== -* On windows platforms in debug configurations, the VFD flush1 tests will fail - with the split and multi VFD drivers. These tests will display a modal debug - dialog which must be answered or wait for the test timeout to expire. - (ADB - 2014/06/23 - HDFFV-8851) - -* CLANG compiler with the options -fcatch-undefined-behavior and -ftrapv - catches some undefined behavior in the alignment algorithm of the macro DETECT_I - in H5detect.c (Issue 8147). Since the algorithm is trying to detect the alignment - of integers, ideally the flag -fcatch-undefined-behavior shouldn't be used for - H5detect.c. In the future, we can separate flags for H5detect.c from the rest of - the library. (SLU - 2013/10/16) - -* Make provided by Solaris fails in "make check". Solaris users should use - gmake to build and install the HDF5 software. (AKC - 2013/10/08 - HDFFV-8534) - -* The C++ and FORTRAN bindings are not currently working on FreeBSD with the - native release 8.2 compilers (4.2.1), but are working with gcc 4.6 from the - ports (and probably gcc releases after that). - (QAK - 2012/10/19) - -* The following h5dump test case fails in BG/P machines (and potentially other - machines that use a command script to launch executables): - - h5dump --no-compact-subset -d "AHFINDERDIRECT::ah_centroid_t[0] it=0 tl=0" - tno-subset.h5 - - This is due to the embedded spaces in the dataset name being interpreted - by the command script launcher as meta-characters, thus passing three - arguments to h5dump's -d flag. The command passes if run by hand, just - not via the test script. - (AKC - 2012/05/03) - -* The STDIO VFD does not work on some architectures, possibly due to 32/64 - bit or large file issues. The basic STDIO VFD test is known to fail on - 64-bit SunOS 5.10 on SPARC when built with -m64 and 32-bit OS X/Darwin - 10.7.0. The STDIO VFD test has been disabled while we investigate and - a fix should appear in a future release.
- (DER - 2011/10/14 - HDFFV-8235) - -* h5diff can report inconsistent results when comparing datasets of enum type - that contain invalid values. This is due to how enum types are handled in - the library and will be addressed in a future release. - (DER - 2011/10/14 - HDFFV-7527) - -* The links test can fail under the stdio VFD due to some issues with external - links. This will be investigated and fixed in a future release. - (DER - 2011/10/14 - HDFFV-7768) - -* After the shared library support was fixed for some bugs, it was discovered - that "make prefix=XXX install" no longer works for shared libraries. It - still works correctly for static libraries. Therefore, if you want to - install the HDF5 shared libraries in a location such as /usr/local/hdf5, - you need to specify the location via the --prefix option during configure - time. E.g, ./configure --prefix=/usr/local/hdf5 ... - (AKC - 2011/05/07 - HDFFV-7583) - -* The parallel test, t_shapesame, in testpar/, may run for a long time and may - be terminated by the alarm signal. If that happens, one can increase the - alarm seconds (default is 1200 seconds = 20 minutes) by setting the - environment variable, $HDF5_ALARM_SECONDS, to a larger value such as 3600 - (60 minutes). Note that the t_shapesame test may fail in some systems - (see the "While working on the 1.8.6 release..." problem below). If - it does, it will waste more time if $HDF5_ALARM_SECONDS is set - to a larger value. - (AKC - 2011/05/07) - -* Shared Fortran libraries are not quite working on AIX. While they are - generated when --enable-shared is specified, the fortran and hl/fortran - tests fail. the issue. HL and C++ shared libraries should now be - working as intended, however. - (MAM - 2011/04/20) - -* While working on the 1.8.6 release of HDF5, a bug was discovered that can - occur when reading from a dataset in parallel shortly after it has been - written to collectively. 
The issue was exposed by a new test in the parallel - HDF5 test suite, but had existed before that. We believe the problem lies with - certain MPI implementations and/or file systems. - - We have provided a pure MPI test program, as well as a standalone HDF5 - program, that can be used to determine if this is an issue on your system. - They should be run across multiple nodes with a varying number of processes. - These programs can be found at: - http://www.hdfgroup.org/ftp/HDF5/examples/known_problems/ - (NAF - 2011/01/19) - -* All the VFL drivers aren't backward compatible. In H5FDpublic.h, the - structure H5FD_class_t changed in 1.8. There is new parameter added to - get_eoa and set_eoa callback functions. A new callback function - get_type_map was added in. The public function H5FDrealloc was taken - out in 1.8. The problem only happens when users define their own driver - for 1.6 and try to plug in 1.8 library. Because there's only one user - complaining about it, we (Elena, Quincey, and I) decided to leave it as - it is (see bug report #1279). Quincey will make a plan for 1.10. - (SLU - 2010/02/02) - -* The --enable-static-exec configure flag will only statically link libraries - if the static version of that library is present. If only the shared version - of a library exists (i.e., most system libraries on Solaris, AIX, and Mac, - for example, only have shared versions), the flag should still result in a - successful compilation, but note that the installed executables will not be - fully static. Thus, the only guarantee on these systems is that the - executable is statically linked with just the HDF5 library. - (MAM - 2009/11/04) - -* A dataset created or rewritten with a v1.6.3 library or after cannot be read - with the v1.6.2 library or before when the Fletcher32 EDC filter is enabled. 
- There was a bug in the calculation of the Fletcher32 checksum in the - library before v1.6.3; the checksum value was not consistent between big- - endian and little-endian systems. This bug was fixed in Release 1.6.3. - However, after fixing the bug, the checksum value was no longer the same as - before on little-endian system. Library releases after 1.6.4 can still read - datasets created or rewritten with an HDF5 library of v1.6.2 or before. - (SLU - 2005/06/30) - - -%%%%1.8.16%%%% - - -HDF5 version 1.8.16 released on 2015-11-10 -================================================================================ - -INTRODUCTION -============ - -This document describes the differences between HDF5-1.8.15 and -HDF5-1.8.16, and contains information on the platforms tested and -known problems in HDF5-1.8.16. -For more details, see the files HISTORY-1_0-1_8_0_rc3.txt -and HISTORY-1_8.txt in the release_docs/ directory of the HDF5 source. - -Links to the HDF5 1.8.16 source code, documentation, and additional materials -can be found on the HDF5 web page at: - - http://www.hdfgroup.org/products/hdf5/ - -The HDF5 1.8.16 release can be obtained from: - - http://www.hdfgroup.org/HDF5/release/obtain5.html - -User documentation for 1.8.16 can be accessed directly at this location: - - http://www.hdfgroup.org/HDF5/doc/ - -New features in the HDF5-1.8.x release series, including brief general -descriptions of some new and modified APIs, are described in the "What's New -in 1.8.0?" 
document: - - http://www.hdfgroup.org/HDF5/doc/ADGuide/WhatsNew180.html - -All new and modified APIs are listed in detail in the "HDF5 Software Changes -from Release to Release" document, in the section "Release 1.8.16 (current -release) versus Release 1.8.15": - - http://www.hdfgroup.org/HDF5/doc/ADGuide/Changes.html - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS -======== - -- New Features -- Support for New Platforms, Languages, and Compilers -- Bug Fixes since HDF5-1.8.15 -- Supported Platforms -- Supported Configuration Features Summary -- More Tested Platforms -- Known Problems - -New Features -============ - - Configuration and Build - ------------- - - The thread-safety + high-level library combination has been marked - as "unsupported" in the Autotools - - The global lock used by the thread-safety feature has never been - raised to the high-level library level, making it possible that - the library state could change if a context switch were to occur in - a high-level library call. Because of this, the combination of - thread-safety and high-level library is officially unsupported by - The HDF Group. - - In the past, although this combination has never been supported, this - was not enforced by the build systems. These changes will cause an - Autotools configure step to fail if --enable-threadsafe and - --enable-hl are combined unless additional options are specified. - Since the high-level library is built by default, this means that - these extra configuration options will need to be used any time - --enable-threadsafe is selected. - - To build with --enable-threadsafe, either: - - 1) Use --disable-hl to disable the high-level library (recommended) - - 2) Use --enable-unsupported to build the high-level library with - the thread-safety feature. - - (DER - 2015/09/10 HDFFV-8719) - - - Using CMake now builds both static and shared libraries. 
- - The CMake files have been updated to build both static and shared - libraries, with tools only built statically. The packaging of the - libraries and tools will include cmake-config files that allows - projects to choose either shared or static (default) libraries - to be found with the find_package command using the COMPONENTS - keyword and a list of components. The imported libraries will - include any interface specific settings and dependent libraries. - - The default setting for BUILD_SHARED_LIBS has changed from OFF - to ON, which builds both static and shared libraries. The static - libraries are always built because of tools requirements. - - (ADB - 2015/08/24 HDFFV-5881) - - - Inline functions now correctly annotated with Autotools builds. - - The method used to detect the appropriate inline markup scheme was - nonfunctional in Autotools builds. The Autotools have been modified - to correctly detect the compiler's inline markup symbol and apply it - to the source. Note that only a very small number of internal - functions are marked inline so this was not a very big change or - likely to significantly affect performance. - - As a part of this change, the H5_inline symbol no longer appears in - H5pubconf.h. - - (DER - 2015/08/13 HDFFV-9119, HDFFV-9421) - - - Removed obsolete/unmaintained files from config/ - - Several files were removed from the config directory. These files - represent old operating systems, were no longer necessary, and/or - were no longer maintained. configure.ac was updated to reflect the - removed files. - - Removed: - - craynv - dec-flags - hpux11.23 - ia64-linux-gnu - nec-superux14.1 - sv1-cray - x86_64-redstorm-linux-gnu - powerpc-ibm-aix5.x - - As a part of this work, a few lines that deal with locating the - sys/fpu.h header on SGI machines and some OSF/1 configure lines - were also removed. 
The Solaris config was also renamed to not have - a version number since the version number was ignored by configure - as it applies to all Solaris versions. - - (DER - 2015/09/04 HDFFV-9116) - - - Removed the FP_TO_INTEGER_OVERFLOW_WORKS macro/defines from the library - - This was for working around bugs in the Cray X1 compiler, which is no - longer supported. - - (DER - 2015/09/09 HDFFV-9191) - - - Removed the H5_SW_ULONG_TO_FP_BOTTOM_BIT_WORKS and - H5_FP_TO_ULLONG_BOTTOM_BIT_WORKS symbols and associated code. - - H5_SW_ULONG_TO_FP_BOTTOM_BIT_WORKS was a work-around on old 64-bit - SGI and Solaris systems. - - H5_FP_TO_ULLONG_BOTTOM_BIT_WORKS was a work-around for old PGI - compilers on Linux. - - Neither of these were used in any current library code and only appeared - in the dt_arith test. - - (DER - 2015/09/09 HDFFV-9187) - - - Removed CONVERT_DENORMAL_FLOAT symbol and associated code from the - library. - - This was only set in configure files for Cray and NEC computers. These - config files no longer exist so there is no effect on currently - supported platforms. - - (DER - 2015/09/09 HDFFV-9188) - - - Removed _BSD_SOURCE and _DEFAULT_SOURCE from configure.ac - - These are old BSD-compatibility symbols that are no longer needed by - the library. - - (DER - 2015/09/10 HDFFV-9079) - - - Removed HW_FP_TO_LLONG_NOT_WORKS symbol and associated code from the - library. - - This was part of a work-around for the VS.NET 2003 compiler, which is - no longer supported. - - (DER - 2015/09/10 HDFFV-9189) - - - Removed the BAD_LOG2_CODE_GENERATED symbol and associated code from the - library. - - This was an IRIX work-around. - - (DER - 2015/09/11 HDFFV-9195) - - - Decoupled shared object version numbers for wrapper libraries from the - shared object version number for the HDF5 library. These will be - maintained on an individual basis according to the interface changes - specific to these wrapper libraries. 
- - For HDF5 1.8.16 the shared object version numbers were changed from - 10.0.1 to 10.1.0 for the HDF5 library due to added APIs. For the C++ - wrapper library they were changed from 10.0.1 to 11.0.0 due to changes - in existing APIs. For all other wrapper libraries the versions were - changed from 10.0.1 to 10.0.2 because while the APIs had no changes - there have been changes in code that did not result in changes to their - interfaces. - - (LRK - 2015/10/28) - - Library - ------- - - - H5F_ACC_DEBUG flag for H5Fopen/create: functionality removed - - The symbol was used to emit some extra debugging information - for HDF Group developers in the multi VFD. The underlying - functionality has been removed due to disuse. The symbol - remains defined since it was visible in H5Fpublic.h but it - has been set to zero and has no effect anywhere in the library. - - (DER - 2015-05-02, HDFFV-1074) - - - New public API call: H5is_library_threadsafe() - - This API call indicates if the library was built with thread- - safety enabled. - - (DER - 2015-09-01, HDFFV-9496) - - Parallel Library - ---------------- - - None - - Tools - ----- - - None - - High-Level APIs - --------------- - - None - - Fortran API - ----------- - - None - - - C++ API - ------- - - Class H5::ObjCreatPropList is added for the object creation property - list class. - - Class H5::ObjCreatPropList is derived from H5::PropList and is a - baseclass of H5::DSetCreatPropList. Additional property list classes - will be derived from H5::ObjCreatPropList when they are added to the - library in future releases. - - (BMR, 2015/10/13, Part of HDFFV-9169) - - - New Wrappers for C Functions H5P[s/g]et_attr_phase_change and - H5P[s/g]et_attr_creation_order. 
- - Wrappers were added to class H5::ObjCreatPropList for the C Functions - H5Pset_attr_phase_change: H5::ObjCreatPropList::setAttrPhaseChange - H5Pget_attr_phase_change: H5::ObjCreatPropList::getAttrPhaseChange - H5Pset_attr_creation_order: H5::ObjCreatPropList::setAttrCrtOrder - H5Pget_attr_creation_order: H5::ObjCreatPropList::getAttrCrtOrder - - (BMR, 2015/10/13, Part of HDFFV-9167 and HDFFV-9169) - - -Support for New Platforms, Languages, and Compilers -=================================================== - - Added VS2015 with Intel Fortran 16 to supported Windows 7 platforms - - -Bug Fixes since HDF5-1.8.15 -=========================== - - Configuration - ------------- - - - CMake test for long long printf format improved - - The CMake configuration test for determining the printf format string - for printing a long long integer was fixed. The test would crash - when executed with VS2015. - - (ADB - 2015-10-21 HDFFV-9488) - - Library - ------- - - VS2015 removed global variable timezone - - The usage of the global variable timezone was modified for VS2015 by - adding an alias to Windows builds. - - (ADB - 2015-10-23 HDFFV-9550) - - - Fix potential error in H5Iclear_type - - If the ID type's close callback could close another ID of the same type, - H5Iclear_type could occasionally run into problems due to the inner - workings of the skip list package. This could potentially cause an - error on library shutdown without calling H5Iclear_type directly. This - issue has been fixed. - - (NAF - 2015-08-12) - - - Fix uninitialized memory in dataspace selection code - - When creating a dataspace with H5Screate and setting the extent with - H5Sextent_copy, the selection offset was not initialized, potentially - causing invalid I/O. There may be other cases where this happened. - Modified the library to always initialize the offset. 
- - (NAF - 2015-09-08) - - - Truncate file in H5Fflush() if EOA != EOF to avoid file - corruption in certain scenarios - - In the following scenario, the resulting HDF5 file would be - incorrectly corrupted because the truncate operation - was at some point wrongly moved out of the flush operation: - - Create a new file with a single dataset. - - Write parts of the dataset (make sure that some values at - the end of the dataset are not initialized). - - Flush the file. - - Crash the program. - - Try to open the file with h5dump or h5debug, but the - resulting file is corrupted. - - (MSC - 2015-06-15 HDFFV-9418) - - - Parallel Library - ---------------- - - - (XYZ - YYYY/MM/DD HDFFV-####) - - Performance - ------------- - - None - - Tools - ----- - - VS2015 changed the default format for printing of exponents - - VS2015 default format for exponents changed with the elimination - of the leading '0'. CMake now tests for the VS2015 compiler and - adjusts which reference files are used by tests. - - (ADB - 2015-10-23 HDFFV-9550) - - - Fixed h5repack with user-defined filters - - h5repack would throw a buffer overrun exception on Windows when - parsing a user-defined filter ID of 5 digits. A local variable in - the parse routine was not of sufficient size. - - (ADB - 2015/09/01 HDFFV-9515) - - Fortran API - ------------ - - None - - - C++ API - ------ - - Removed memory leaks - - The static global constant objects were changed to constant references - referencing dynamically allocated objects. This ensures that the clean-up - process in the C++ library occurs before the termination of the C library - and prevents memory leaks because the previous global constants were not - properly deleted before the C library termination. - - (BMR, 2015/10/13, HDFFV-9529) - - - Fixed the problem about identifiers being closed prematurely. 
- - The C++ library needs to increment the ID's reference counter when it is - duplicated in the form of C++ objects, but not when the ID is obtained - from a C function. With this approach, both problems, prematurely - closing ID's and memory leaks due to ID's not being closed, should be - eliminated. - - (BMR, 2015/10/15, HDFFV-7947) - - - High-Level APIs: - ------ - - None - - - Fortran High-Level APIs: - ------------------------ - - None - - - Testing - ------- - - None - - -Supported Platforms -=================== -The following platforms are supported and have been tested for this release. -They are built with the configure process unless specified otherwise. - - AIX 6.1 xlc/xlc_r 10.1.0.5 - (NASA G-ADA) xlC/xlC_r 10.1.0.5 - xlf90/xlf90_r 12.1.0.6 - - Linux 2.6.32-573.3.1.el6 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (platypus) Version 4.4.7 20120313 - Version 4.8.4, Version 5.2.0 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 15.7-0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 15.0.3.187 Build 20150407 - MPICH 3.1.4 compiled with GCC 4.9.3 - - Linux 2.6.32-504.8.1.el6.ppc64 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-11) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.4.7 20120313 (Red Hat 4.4.7-11) - (ostrich) GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-11) - IBM XL C/C++ V13.1 - IBM XL Fortran V15.1 - - Linux 3.10.0-229.14.1.el7 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (kituo/moohan) Version 4.8.3 20140911 (Red Hat 4.8.3-9) - Version 5.2.0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 15.0.3.187 Build 20150407 - MPICH 3.1.4 compiled with GCC 4.9.3 - - SunOS 5.11 32- and 64-bit Sun C 5.12 SunOS_sparc - (emu) Sun Fortran 95 8.6 SunOS_sparc - Sun C++ 5.12 SunOS_sparc - - Windows 7 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - Visual Studio 2015 w/ Intel Fortran 16 (cmake) - 
Cygwin(CYGWIN_NT-6.1 2.2.1(0.289/5/3) gcc(4.9.3) compiler and gfortran) - (cmake and autotools) - - Windows 7 x64 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - Visual Studio 2015 w/ Intel Fortran 16 (cmake) - - Windows 8.1 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - - Windows 8.1 x64 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - - Mac OS X Mt. Lion 10.8.5 Apple clang/clang++ version 5.1 from Xcode 5.1 - 64-bit gfortran GNU Fortran (GCC) 4.8.2 - (swallow/kite) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X Mavericks 10.9.5 Apple clang/clang++ version 6.0 from Xcode 6.2.0 - 64-bit gfortran GNU Fortran (GCC) 4.9.2 - (wren/quail) Intel icc/icpc/ifort version 15.0.3 - - Mac OS X Yosemite 10.10.5 Apple clang/clang++ version 6.0 from Xcode 7.0.0 - 64-bit gfortran GNU Fortran (GCC) 4.9.2 - (osx1010dev/osx1010test) Intel icc/icpc/ifort version 15.0.3 - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90/ F90 C++ zlib SZIP - parallel F2003 parallel -SunOS 5.11 32-bit n y/y n y y y -SunOS 5.11 64-bit n y/y n y y y -Windows 7 y y/y n y y y -Windows 7 x64 y y/y n y y y -Windows 7 Cygwin n y/y n y y n -Windows 8.1 n y/y n y y y -Windows 8.1 x64 n y/y n y y y -Mac OS X Mountain Lion 10.8.5 64-bit n y/y n y y y -Mac OS X Mavericks 10.9.5 64-bit n y/y n y y y -Mac OS X Yosemeti 10.10.5 64-bit n y/y n y y y -AIX 6.1 32- and 64-bit n y/n n y y y -CentOS 6.7 Linux 2.6.32 x86_64 GNU y y/y y y y y -CentOS 6.7 Linux 2.6.32 x86_64 Intel n y/y n y y y -CentOS 6.7 Linux 2.6.32 x86_64 PGI n y/y n y y y -CentOS 7.1 Linux 3.10.0 x86_64 GNU y 
y/y y y y y -CentOS 7.1 Linux 3.10.0 x86_64 Intel n y/y n y y y -Linux 2.6.32-431.11.2.el6.ppc64 n y/n n y y y - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -SunOS 5.11 32-bit y y y y -SunOS 5.11 64-bit y y y y -Windows 7 y y y y -Windows 7 x64 y y y y -Windows 7 Cygwin n n n y -Windows 8.1 y y y y -Windows 8.1 x64 y y y y -Mac OS X Mountain Lion 10.8.5 64-bit y n y y -Mac OS X Mavericks 10.9.5 64-bit y n y y -Mac OS X Yosemeti 10.10.5 64-bit y n y y -AIX 6.1 32- and 64-bit y n n y -CentOS 6.7 Linux 2.6.32 x86_64 GNU y y y y -CentOS 6.7 Linux 2.6.32 x86_64 Intel y y y y -CentOS 6.7 Linux 2.6.32 x86_64 PGI y y y y -CentOS 7.1 Linux 3.10.0 x86_64 GNU y y y y -CentOS 7.1 Linux 3.10.0 x86_64 Intel y y y y -Linux 2.6.32-431.11.2.el6.ppc64 y y y y - -Compiler versions for each platform are listed in the preceding -"Supported Platforms" table. - - -More Tested Platforms -===================== -The following platforms are not supported but have been tested for this release. - - Linux 2.6.18-431.11.2.el6 g95 (GCC 4.0.3 (g95 0.94!) 
- #1 SMP x86_64 GNU/Linux - (platypus) - - Mac OS X El Capitan 10.11 Apple clang/clang++ version 7.0.0 from Xcode 7.0.1 - 64-bit gfortran GNU Fortran (GCC) 5.2.0 - (VM) - - Windows 7 Visual Studio 2008 (cmake) - - Windows 7 x64 Visual Studio 2008 (cmake) - - Windows 10 Visual Studio 2013 w/ Intel Fortran 15 (cmake) - - Windows 10 x64 Visual Studio 2013 w/ Intel Fortran 15 (cmake) - - Debian7.5.0 3.2.0-4-amd64 #1 SMP Debian 3.2.51-1 x86_64 GNU/Linux - gcc (Debian 4.7.2-5) 4.7.2 - GNU Fortran (Debian 4.7.2-5) 4.7.2 - (cmake and autotools) - - Fedora20 3.15.3-200.fc20.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc (GCC) 4.8.3 20140624 (Red Hat 4.8.3-1) - GNU Fortran (GCC) 4.8.3 20140624 (Red Hat 4.8.3-1) - (cmake and autotools) - - SUSE 13.1 3.11.10-17-desktop #1 SMP PREEMPT x86_64 x86_64 x86_64 GNU/Linux - gcc (SUSE Linux) 4.8.1 - GNU Fortran (SUSE Linux) 4.8.1 - (cmake and autotools) - - Ubuntu 14.04 3.13.0-35-generic #62-Ubuntu SMP x86_64 GNU/Linux - gcc (Ubuntu/Linaro 4.9.1-0ubuntu1) 4.9.1 - GNU Fortran (Ubuntu/Linaro 4.9.1-0ubuntu1) 4.9.1 - (cmake and autotools) - - hopper.nersc.gov PrgEnv-gnu/5.2.40 - gcc (GCC) 4.9.2 20141030 (Cray Inc.) - GNU Fortran (GCC) 4.9.2 20141030 (Cray Inc.) - g++ (GCC) 4.9.2 20141030 (Cray Inc.) - - -Known Problems -============== -* On Windows platforms in debug configurations, the VFD flush1 tests will fail - with the split and multi VFD drivers. These tests will display a modal debug - dialog which must be answered or wait for the test timeout to expire. - (ADB - 2014/06/23 - HDFFV-8851) - -* CLANG compiler with the options -fcatch-undefined-behavior and -ftrapv - catches some undefined behavior in the alignment algorithm of the macro DETECT_I - in H5detect.c (Issue 8147). Since the algorithm is trying to detect the alignment - of integers, ideally the flag -fcatch-undefined-behavior shouldn't to be used for - H5detect.c. In the future, we can separate flags for H5detect.c from the rest of - the library. 
(SLU - 2013/10/16) - -* Make provided by Solaris fails in "make check". Solaris users should use - gmake to build and install the HDF5 software. (AKC - 2013/10/08 - HDFFV-8534) - -* The C++ and FORTRAN bindings are not currently working on FreeBSD with the - native release 8.2 compilers (4.2.1), but are working with gcc 4.6 from the - ports (and probably gcc releases after that). - (QAK - 2012/10/19) - -* The following h5dump test case fails in BG/P machines (and potentially other - machines that use a command script to launch executables): - - h5dump --no-compact-subset -d "AHFINDERDIRECT::ah_centroid_t[0] it=0 tl=0" - tno-subset.h5 - - This is due to the embedded spaces in the dataset name being interpreted - by the command script launcher as meta-characters, thus passing three - arguments to h5dump's -d flag. The command passes if run by hand, just - not via the test script. - (AKC - 2012/05/03) - -* The STDIO VFD does not work on some architectures, possibly due to 32/64 - bit or large file issues. The basic STDIO VFD test is known to fail on - 64-bit SunOS 5.10 on SPARC when built with -m64 and 32-bit OS X/Darwin - 10.7.0. The STDIO VFD test has been disabled while we investigate and - a fix should appear in a future release. - (DER - 2011/10/14 - HDFFV-8235) - -* h5diff can report inconsistent results when comparing datasets of enum type - that contain invalid values. This is due to how enum types are handled in - the library and will be addressed in a future release. - (DER - 2011/10/14 - HDFFV-7527) - -* The links test can fail under the stdio VFD due to some issues with external - links. This will be investigated and fixed in a future release. - (DER - 2011/10/14 - HDFFV-7768) - -* After the shared library support was fixed for some bugs, it was discovered - that "make prefix=XXX install" no longer works for shared libraries. It - still works correctly for static libraries. 
Therefore, if you want to - install the HDF5 shared libraries in a location such as /usr/local/hdf5, - you need to specify the location via the --prefix option during configure - time. E.g, ./configure --prefix=/usr/local/hdf5 ... - (AKC - 2011/05/07 - HDFFV-7583) - -* The parallel test, t_shapesame, in testpar/, may run for a long time and may - be terminated by the alarm signal. If that happens, one can increase the - alarm seconds (default is 1200 seconds = 20 minutes) by setting the - environment variable, $HDF5_ALARM_SECONDS, to a larger value such as 3600 - (60 minutes). Note that the t_shapesame test may fail in some systems - (see the "While working on the 1.8.6 release..." problem below). If - it does, it will waste more time if $HDF5_ALARM_SECONDS is set - to a larger value. - (AKC - 2011/05/07) - -* Shared Fortran libraries are not quite working on AIX. While they are - generated when --enable-shared is specified, the Fortran and HL/Fortran - tests fail. HL and C++ shared libraries should now be working as intended, - however. - (MAM - 2011/04/20) - -* While working on the 1.8.6 release of HDF5, a bug was discovered that can - occur when reading from a dataset in parallel shortly after it has been - written to collectively. The issue was exposed by a new test in the parallel - HDF5 test suite, but had existed before that. We believe the problem lies with - certain MPI implementations and/or file systems. - - We have provided a pure MPI test program, as well as a standalone HDF5 - program, that can be used to determine if this is an issue on your system. - They should be run across multiple nodes with a varying number of processes. - These programs can be found at: - http://www.hdfgroup.org/ftp/HDF5/examples/known_problems/ - (NAF - 2011/01/19) - -* All of the VFL drivers aren't backward compatible. In H5FDpublic.h, the - structure H5FD_class_t changed in 1.8. There is new parameter added to - get_eoa and set_eoa callback functions. 
A new callback function - get_type_map was added in. The public function H5FDrealloc was taken - out in 1.8. The problem only happens when users define their own driver - for 1.6 and try to plug in 1.8 library. Because there's only one user - complaining about it, we (Elena, Quincey, and I) decided to leave it as - it is (see bug report #1279). Quincey will make a plan for 1.10. - (SLU - 2010/02/02) - -* The --enable-static-exec configure flag will only statically link libraries - if the static version of that library is present. If only the shared version - of a library exists (i.e., most system libraries on Solaris, AIX, and Mac, - for example, only have shared versions), the flag should still result in a - successful compilation, but note that the installed executables will not be - fully static. Thus, the only guarantee on these systems is that the - executable is statically linked with just the HDF5 library. - (MAM - 2009/11/04) - -* A dataset created or rewritten with a v1.6.3 library or after cannot be read - with the v1.6.2 library or before when the Fletcher32 EDC filter is enabled. - There was a bug in the calculation of the Fletcher32 checksum in the - library before v1.6.3; the checksum value was not consistent between big- - endian and little-endian systems. This bug was fixed in Release 1.6.3. - However, after fixing the bug, the checksum value was no longer the same as - before on little-endian systems. Library releases after 1.6.4 can still read - datasets created or rewritten with an HDF5 library of v1.6.2 or before. - (SLU - 2005/06/30) - - -%%%%1.8.15%%%% - - -HDF5 version 1.8.15 released on 2015-05-04 -================================================================================ - -INTRODUCTION -============ - -This document describes the differences between HDF5-1.8.14 and -HDF5-1.8.15, and contains information on the platforms tested and -known problems in HDF5-1.8.15. 
- -Links to the HDF5 source code, documentation, and additional materials -can be found on the HDF5 web page at: - - http://www.hdfgroup.org/products/hdf5/ - -The HDF5 release can be obtained from: - - http://www.hdfgroup.org/HDF5/release/obtain5.html - -User documentation for HDF5 can be accessed directly at this location: - - http://www.hdfgroup.org/HDF5/doc/ - -All new and modified APIs are listed in detail in the "HDF5 Software Changes -from Release to Release" document at this location: - - http://www.hdfgroup.org/HDF5/doc/ADGuide/Changes.html - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS -======== - -- New Features -- Support for New Platforms, Languages, and Compilers -- Bug Fixes since HDF5-1.8.14 -- Supported Platforms -- Supported Configuration Features Summary -- More Tested Platforms -- Known Problems - - -New Features -============ - - Configuration - ------------- - - CMake - - Improvements made to the CMake build system. - - The default options were changed to align with the Autotools configure - defaults. CMake configure files now support components when packaged - with CPack. Windows CPack supports WiX packaging, and will look for - WiX and NSIS in the standard locations. - - The CMake minimum has been changed to 3.1. - - (ADB - 2015/04/01 HDFFV-8074, 8968, 9006) - - - cmakehdf5 for Cmake building. - Added configure options to support the building of Fortran or CXX API, - to enable/disable testings. Use "cmakehdf5 --help" for details. - (AKC - 2014/12/09 HDFFV-8932) - - - Building Shared and Parallel Made Explicit - - When --enable-parallel is specified, configure used to disable - shared by default. - - Removed the restriction for building shared when parallel is - enabled. --disable-shared has to be used explicitly if user - wishes to disable shared libraries. 
- - (MSC - 2015/02/19 HDFFV-9069) - - - Inferring Parallel Compilers - - configure was trying to infer if a compiler is a parallel - compiler with MPI support and enable parallel even if the user - did not explicitly enable parallel. This should not happen. - - Disabled inferring parallel compilers to enable parallel HDF5 - build. --enable-parallel has to be used explicitly to build - parallel HDF5 regardless of the compiler type being used. - - (MSC - 2015/02/19 HDFFV-9068) - - - Large File Support Configuration Option - - Removed the option to enable or disable large file support. It will - always be enabled. - - (MSC - 2015/02/19 HDFFV-9097) - - - Removed Configuration Feature - - When configure detected that the CodeWarrior compiler was being used it - would define a symbol that caused a test in test/tfile.c to be skipped - due to a broken CodeWarrior open() command. - - Since this only masks the problem instead of fixing it and we don't - support CodeWarrior anyway, this functionality was removed. - - (DER - 2015/02/21, HDFFV-9080) - - - VMS Build/Test Files Have Been Removed - - HDF5 no longer supports VMS, and the files were getting out of date. - Since we have no access to a VMS machine, there is no way for us to - maintain them. - - A Subversion tag was created at: - - https://svn.hdfgroup.uiuc.edu/tags/vms_last_support_1_8 - - immediately before removing the files. - - (DER - 2015-02-26, HDFFV-9147) - - - Removal of --with-default-vfd configure Option - - In theory, this option was intended to allow setting a default - VFD that would be used by the library. In practice, the feature - only accepted the POSIX (SEC2) VFD (already the default) and - the stdio VFD (a demo VFD not intended for production use). The - inability to pass key VFD parameters at configure time limits the - full implementation of this feature, so it was retired. 
- - (DER - 2015-02-26, HDFFV-9081) - - - Direct VFD configure Behavior - - The configure options for Linux now allow the Direct VFD to build - without passing additional compiler options/defines like _GNU_SOURCE. - Passing --enable-direct-vfd is now all that is needed to enable - the feature. - - The Direct VFD is now disabled by default since it is intended for - specialized audiences. It was previously enabled by default, but the - configure script did not set correct POSIX levels, etc. making this - a moot point. - - Note that the Direct VFD can only be configured on Linux when - the O_DIRECT flag to open()/create() and posix_memalign() function - are available. This is unchanged from previous behavior. - - (DER - 2015-02-26, HDFFV-9057, 7567, 9088, 7566) - - - _POSIX_C_SOURCE, _GNU_SOURCE, and _BSD_SOURCE No Longer Exported - to h5cc and Other Compiler Wrappers - - The _POSIX_C_SOURCE, _GNU_SOURCE, and _BSD_SOURCE definitions are - not required for using API functions and may conflict with user - code requirements. - - (DER - 2015-03-08, HDFFV-9152) - - - Removed the --enable-filters Option from configure - - This option allowed the user to disable selected internal filters, - presumably to make the library smaller. It has been removed since - it saved little space (the internal filters are small with respect - to the overall library size) and was not generally extendible to - the library at large due to the large number of #ifdefs that would - be required. - - Note that this features applied to internal filters such as shuffle - and n-bit and not external filters like gzip or Szip. Those are still - enabled or disabled via their own configure options. - - (DER - 2015-03-08, HDFFV-9086) - - - Removed Obsolete Time Functionality from configure and the C Library - - The library contained some residual functionality from obsolete - time zone handling code. This has been removed, and the configure - checks for the time functions have been cleaned up. 
- - * Lumped all the time functionality together in configure.ac. - This was previously more spread out due to Solaris issues - with the ordering of certain checks. - - * Removed processing that handles __tm_gmtoff members of struct - tm. (libc-4) - - * Removed BSDgettimeofday(). (IRIX 5.3) - - * Removed timezone struct handling in gettimeofday() (considered - harmful). - - Note that the HDF5 Library stores timestamps in a platform-independent - manner, so old files can still be read. This only affects converting - system time to HDF5 timestamps. - - The library currently uses the tm_gmtoff member of the tm struct - (preferred, if available) or the timezone global variable to - construct HDF5 timestamps. - - (DER - 2015-03-09, HDFFV-9083 and 9085) - - - Added -D_DEFAULT_SOURCE to CPPFLAGS on Linux Systems - - This is the replacement for -D_BSD_SOURCE in versions of glibc since 2.19. - Since both are defined, it should work for all versions of glibc. Defining - both suppresses the warning about defining _BSD_SOURCE. - - (NAF - 2015-04-02, HDFFV-9079) - - Library - ------- - - Added Memory Allocation Functions that Use the Library's Allocator - - HDF5 filters may need to allocate or resize the buffer that is passed - to them from the library. If the filter has been compiled separately - from the library, it and the library may use different memory - allocation libraries for the (re)allocation and free calls. This can - cause heap corruption and crashes. This is particularly a problem on - Windows since each C run-time library is implemented as a separate - shared library, but can also show up on POSIX systems when debug or - high-performance allocation libraries are in use. - - Two new functions (H5allocate_memory() and H5resize_memory()) were - added to the HDF5 C library. These functions have the same semantics as - malloc/calloc and realloc, respectively. 
Their primary purpose is to - allow filter authors to allocate or resize memory using the same - memory allocation library as the HDF5 library. Filter authors are - highly encouraged to use these new functions in place of malloc, - calloc, and realloc. They should also use the H5free_memory() call when - freeing memory. - - Note that the filters provided with the library (zlib, szip, etc.) do - not experience the problems that these new functions are intended to - fix. This work only applies to third-party filters that are compiled - separately from the library. - - (DER - 2015-04-01, HDFFV-9100) - - - H5Pset_istore_k and H5Pset_sym_k - - These two functions didn't check the value of the input parameter "ik". - When 2*ik exceeded 2 bytes of storage, data was lost in the file; - for example, some chunks would be overwritten. - - Added validation of "ik" to not exceed the max v1 btree entries (2 bytes) - to these two routines. - - (VC - 2015-03-24, HDFFV-9173) - - - Added Functions to Control the Value of H5PL_no_plugin_g without - Using an Environment Variable - - Sometimes it is necessary for an application to disable the use of - dynamically loaded plugin libraries without requiring the library to - be built with plugin support disabled or to set an environment - variable to disable plugin support globally. - - Two new functions (H5PLset_loading_state() and H5PLget_loading_state()) - were added to the HDF5 C Library. These functions require a parameter - that indicates which type of dynamically loaded plugin is enabled or - disabled. - - (ADB - 2015-03-17, HDFFV-8520) - - Parallel Library - ---------------- - - MPI_Finalize and HDF5 Library Shutdown - - Calling HDF5 routines after MPI_Finalize has been closed should - not be done, since those routines might call MPI functions that - would not be possible to do after finalizing the MPI library. 
- - Attached an attribute destroy callback to MPI_COMM_SELF that - shuts down the HDF5 library when MPI_COMM_SELF is destroyed, - in other words, on MPI_Finalize. This should fix several issues - that users see when they forget to close HDF5 objects before - calling MPI_Finalize(). - - (MSC - 2015/02/25, HDFFV-883) - - Tools - ----- - - None - - High-Level APIs - --------------- - - None - - Fortran API - ----------- - - Added Global Variables - - These new global variables are equivalent to the C definitions - without the '_F': - - H5G_UDLINK_F - H5G_SAME_LOC_F - H5O_TYPE_UNKNOWN_F - H5O_TYPE_GROUP_F - H5O_TYPE_DATASET_F - H5O_NAMED_DATATYPE_F - H5O_TYPE_NTYPES_F - - (MSB - 2015/02/03, HDFFV-9040) - - - C++ API - ------- - - New Wrappers for C Functions H5P[s/g]et_libver_bounds - - Wrappers were added to class H5::FileAccPropList for the - C Functions H5Pget_libver_bounds and H5Pset_libver_bounds. - - (BMR, 2015/04/06, Part of HDFFV-9167) - - - New Wrappers to Get the Object Header's Version - - The following wrappers are added to class H5::CommonFG - Returns the object header version of an object in a file or group, - given the object's name. - - unsigned childObjVersion(const char* objname) const; - unsigned childObjVersion(const H5std_string& objname) const; - - (BMR, 2015/04/06) - - - New DataType Constructor - - Added a DataType constructor that takes a PredType object, and this - constructor will invoke H5Tcopy to generate another datatype id - from a predefined datatype. - - (BMR, 2015/04/06) - - -Support for New Platforms, Languages, and Compilers -=================================================== - - Support for Linux 3.10.0-123.20.1.el7 added (LK - 2015/04/01) - - Support for Mac OS X Yosemite 10.10 added (AKC - 2015/03/04, HDFFV-9007) - - Support for AIX 6.1 added and AIX 5.3 is retired. 
(AKC - 2015/01/09) - -Bug Fixes since HDF5-1.8.14 -=========================== - - Configuration - ------------- - - Make uninstall generated "test: argument expected". - The error is due to $EXAMPLETOPDIR is used without setting a value first. - - Fixed by assign it with the proper value. - - (AKC - 2015/04/29, HDFFV-9298) - - - Windows Installer Incorrect Display of PATH Environment Variable - - In the Windows installer, the dialog box where the user can elect to - add the product's bin path to the %PATH% environment variable displayed - an incorrect path. This path was missing the C:\Program Files part - and used the POSIX file separator '/' before the bin (/bin, - instead of \bin). - - The dialog box text was changed to simply say that the product's bin - path would be added instead of explicitly displaying the path. - This is in line with most installers. The reason for not fixing the - displayed path instead is that it is difficult to pass the correct - path from CPack to the NSIS installer for display. - - Note that this was never a code issue - it was just a display - problem. The installer always did the right thing when updating the - environment variable. - - (DER - 2014/11/14, HDFFV-9016) - - Library - ------- - - Incorrect Usage of List in CMake COMPILE_DEFINITIONS set_property - - The CMake command set_property with COMPILE_DEFINITIONS property - needs a quoted semi-colon separated list of values. CMake will - transform the list to a series of -D{value} for the compile. - - (ADB - 2014/12/09, HDFV-9041) - - - Fixed Compile Errors on Windows w/ Visual Studio and CMake When - UNICODE is Defined - - The HDF5 Library could not be built on Windows with Visual Studio when - UNICODE was defined. This was due to the incorrect use of the TEXT() - macro and some Win32 API functions that take TCHAR parameters. The faulty - code was a part of the filter plugin functionality. 
This was a - compile-time error that only affected users who build HDF5 from source - and define UNICODE, usually when HDF5 is being built as a part of a - larger product. There were no run-time effects. - - These errors caused no problems when UNICODE was not defined. HDF5 is - normally not built with UNICODE defined and the binaries were - unaffected. - - The fix was to remove the TEXT() macro and explicitly use the - 'A' form of the Win32 API calls, which expect char strings instead of - wchar_t strings. - - Note that HDF5 currently does not support Unicode file paths on Windows. - - (DER - 2015/02/22, HDFFV-8927) - - - Addition of Error Tracing Functionality to Several C API Calls - - A bug in a text processing script caused API calls that return a - pointer to not receive error tracing macros/functionality. - - The bug has been corrected and error tracing functionality has been - added to the affected API calls. These functions will now correctly - print trace information when library errors are encountered. - - (DER - 2015/02/26, HDFFV-9141) - - - H5Rdereference Now Checks for HADDR_UNDEF or Uninitialized References - - When passed HADDR_UNDEF or uninitialized references, the previous - behavior of H5Rdereference was to continue to process the reference - as a valid address. - - H5Rdereference was changed to return immediately (with an error - message) if the references are HADDR_UNDEF or uninitialized. - - (MSB - 2015/3/10, HDFFV-7959) - - - Fixed Bugs in H5Sextent_copy - - H5Sextent_copy would not free the previous extent, resulting in a memory - leak. Also, H5Sextent_copy would not update the number of elements - selected if the selection was "all", causing various problems. These - issues have been fixed. - - (NAF - 2015/04/02) - - - Parallel Library - ---------------- - - Fixed a Potential Memory Error - - Fixed a potential memory error when performing parallel I/O on a - dataset with a single chunk, and at least one process has nothing - to do. 
- - (NAF - 2015/02/16) - - - Parallel Test Problem Fixed - - Fixed problem with parallel tests where they failed beyond a - certain number of ranks. All tests should work for any arbitrary - number of ranks. - - (MSC - 2014/11/06, HDFFV-1027,8962,8963) - - - MPE Support - - Enabling MPE was causing HDF5 build to fail. Support for it was - dropped at some point in time. - - Fixed problem with enabling MPE. Users should use the community - maintained MPE on github (http://git.mpich.org/mpe.git/). - - (MSC - 2015/02/20, HDFFV-9135) - - Performance - ------------- - - None - - Tools - ----- - - h5repack crashed on enumerated 8-bit type. - - Previous version 1.8.14 introduced an error that caused the reading - of enumerated 8-bit type nested in compound type to fail. - - Fixed library code responsible for reading the particular type. - (AKC - 2015.03/31, HDFFV-8667) - - - h52gif crashed non-8bit images. - - h52gif crashed if instructed to convert images other than 8bit images. - - h52gif could handle only 8bit images. Added code to detect non-8bit - images and flag them as failure. Update tool document page to reflect - the limit. - (AKC - 2015/03/31, HDFFV-8957) - - - perform/benchpar.c retired. - - benchpar.c has not been built for a long time and its original purpose - is not needed any more. - (AKC - 2014/12/19, HDFFV-8156) - - - Source perform/ directory moved to tools/perform. - The perform directory is moved to tools/perform for easier maintenance. - (AKC - 2014/12/17, HDFFV-9046) - - Fortran API - ------------ - - Fortran Fails with --enable-fortran2003 and Intel 15.x Compilers - - Added BIND(C) to the offending APIs. - - The Fortran Library (--enable-fortran2003) now works using Intel 15.x - without the need for any additional compilers flags. - - (MSB - 2015/1/26, HDFFV-9049) - - - h5tenum_insert_f Does Not Work with Default 8 Byte Integers - (xlf compiler) - - In the Fortran 90 API, 'value' is no longer cast into the C int type. 
- Therefore, if h5tenum_insert_f is passed an 8 byte integer (via -i8) - then 'value' is written as the same type as the default Fortran - integer type (which can be 8 bytes). - - A new Fortran 2003 API was added which is more in line with the C - API and users are strongly encouraged to use the Fortran 2003 API - instead of the Fortran 90 API. - - SUBROUTINE h5tenum_insert_f(type_id, name, value, hdferr) - INTEGER(HID_T) , INTENT(IN) :: type_id - CHARACTER(LEN=*), INTENT(IN) :: name - TYPE(C_PTR) , INTENT(IN) :: value - INTEGER, INTENT(OUT) :: hdferr - - (MSB - 2015/2/19, HDFFV-8908) - - - Some Fortran APIs Never Returned the Error State - - Some Fortran APIs never returned the error state: they - would always return a positive number. The APIs include - the following: - - h5fget_file_image_f - h5lget_name_by_idx_f - h5oget_comment_by_name_f - - They were corrected to return a negative number as described in - the Reference Manual if an error occurred. - - (MSB - 2015/3/19, HDF5-239) - - - Fixed h5pget_class_f - - h5pget_class_f never correlated the class identifier to the property - list class name as indicated in the HDF5 Reference Manual; it instead - returned a property list class identifier as an INTEGER. The INTEGER - needed to be of type INTEGER(HID_T) to be correct. - - The h5pget_class_f API was changed to return an INTEGER(HID_T) - property list class identifier instead of an INTEGER. This mimics the - intended behavior of the C API. - - (MSB - 2015/3/16, HDFFV5-9162) - - C++ API - ------ - - Combined Two H5File::getObjCount Overloaded Methods - - The following two methods - - ssize_t getObjCount(unsigned types) const; - ssize_t getObjCount() const; - - were combined into one: - - ssize_t getObjCount(unsigned types = H5F_OBJ_ALL) const; - - (BMR - 2015/04/06) - - - Many Warnings Were Removed - - Many warnings such as conversion, unused variables, missing base - class initialization, and initializing base classes in wrong order - were removed. 
- - (BMR, 2015/04/06) - - - Functionality Deprecation - - The following two constructors of classes AbstractDs, IdComponent, - H5Location, and H5Object are no longer appropriate after the data member - "id" had been moved from IdComponent to the sub-classes in previous - releases. - - (const hid_t h5_id); - (const & original); - - The copy constructors were no-op and removed in 1.8.15. The other - constructors will be removed from 1.10 release, and then from 1.8.17 - if their removal does not cause any problems. - - (BMR, 2015-04-06) - - - High-Level APIs: - ------ - - Suppress Warnings from Flex/Bison-generated Code - - Warning suppression #pragmas, etc. have been added to H5LTparse.c and - H5LTanalyze.c. We have no control over this code since it's created by - a generator. - - (DER - 2015/03/08 - HDFFV-9149) - - - Changed hdf5_hl.h to Include the HDF5 Main Library "hdf5.h" - - User's no longer need to include both hdf5_hl.h and hdf5.h - - (MSB - 2015/2/14, HDFFV-8685) - - - - H5PTcreate_fl Does Not Convert to Memory Datatype - - H5PTcreate_fl now converts to the table's native memory datatype - to fix the problem of handling BE and LE packet tables. - - (MSB - 2015/2/26 - HDFFV-9042) - - - Fix for H5LT Attribute Functions - - H5LT attribute functions fail to create attributes whose name - is a substring of an existing attribute. - - H5LT attribute functions can now create attributes whose name - is a substring of an existing attribute. - - (MSB - 2015/2/24, HDFFV-9132) - - - Fortran High-Level APIs: - ------------------------ - - - Internal Library Fix for Missing Argument Declaration - - In Interface block for h5tbmake_table_c, "max_char_size_field_names" - is listed as an input, but in the argument definitions it is - "INTEGER :: max_char_size". This caused no known problems with the - Fortran HL API. - - Fixed missing argument definition. 
- - (MSB - 2015/2/18, HDFFV-8559) - - - Testing - ------- - - None - - -Supported Platforms -=================== -The following platforms are supported and have been tested for this release. -They are built with the configure process unless specified otherwise. - - AIX 6.1 xlc/xlc_r 10.1.0.5 - (NASA G-ADA) xlC/xlC_r 10.1.0.5 - xlf90/xlf90_r 12.1.0.6 - - Linux 2.6.18-308.13.1.el5PAE GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP i686 i686 i386 compilers for 32-bit applications; - (jam) Version 4.1.2 20080704 (Red Hat 4.1.2-55) - Version 4.8.4, 4.9.2 - PGI C, Fortran, C++ Compilers for 32-bit - applications; - Version 14.10-0 - Intel(R) C, C++, Fortran Compiler for 32-bit - applications; - Version 15.0.1.133 (Build 20141023) - - Linux 2.6.18-371.6.1.el5 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers for 64-bit applications; - (koala) Version 4.1.2 20080704 (Red Hat 4.1.2-55) - Version 4.8.4, 4.9.2 - Intel(R) C, C++, Fortran Compilers for - applications running on Intel(R) 64; - Version 15.0.1.133 Build 20141023 - - Linux 2.6.32-431.11.2.el6 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (platypus) Version 4.4.7 20120313 - Version 4.8.2, Version 4.9.2 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 14.10-0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 15.0.1.133 Build 20141023 - - Linux 3.10.0-123.20.1.el7 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (moohan) Version 4.8.2 20140120 (Red Hat 4.8.2-16) - Intel(R) C Intel(R) 64 Compiler XE for - applications running on Intel(R) 64, - Version 15.0.1.133 Build 20141023 - - Linux 2.6.32-431.29.2.el6.ppc64 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-4) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.4.7 20120313 (Red Hat 4.4.7-4) - (ostrich) GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-4) - IBM XL C/C++ V13.1 - IBM XL Fortran V15.1 - - SunOS 5.11 32- and 64-bit Sun C 5.12 SunOS_sparc - (emu) 
Sun Fortran 95 8.6 SunOS_sparc - Sun C++ 5.12 SunOS_sparc - - Windows 7 Visual Studio 2008 (cmake) - Visual Studio 2010 w/ Intel Fortran 14 (cmake) - Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - Cygwin(CYGWIN_NT-6.1 1.7.34(0.285/5/3) gcc(4.9.2) compiler and gfortran) - (cmake and autotools) - - Windows 7 x64 Visual Studio 2008 (cmake) - Visual Studio 2010 w/ Intel Fortran 14 (cmake) - Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - - Windows 8.1 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - - Windows 8.1 x64 Visual Studio 2012 w/ Intel Fortran 15 (cmake) - Visual Studio 2013 w/ Intel Fortran 15 (cmake) - - Mac OS X Mt. Lion 10.8.5 Apple clang/clang++ version 5.1 from Xcode 5.1 - 64-bit gfortran GNU Fortran (GCC) 4.8.2 - (swallow/kite) Intel icc/icpc/ifort version 14.0.2 - - Mac OS X Mavericks 10.9.5 Apple clang/clang++ version 6.0 from Xcode 6.1.1 - 64-bit gfortran GNU Fortran (GCC) 4.8.2 - (wren/quail) Intel icc/icpc/ifort version 14.0.2 - - Mac OS X Yosemite 10.10.2 Apple clang/clang++ version 6.0 from Xcode 6.1.1 - 64-bit gfortran GNU Fortran (GCC) 4.9.2 - (osx1010dev/osx1010test) Intel icc/icpc/ifort version 15.0.1 - - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90/ F90 C++ zlib SZIP - parallel F2003 parallel -SunOS 5.11 32-bit n y/y n y y y -SunOS 5.11 64-bit n y/y n y y y -Windows 7 y y/y n y y y -Windows 7 x64 y y/y n y y y -Windows 7 Cygwin n y/y n y y n -Windows 8.1 n y/y n y y y -Windows 8.1 x64 n y/y n y y y -Mac OS X Mountain Lion 10.8.5 64-bit n y/y n y y y -Mac OS X Mavericks 10.9.5 64-bit n y/y n y y 
y -Mac OS X Yosemeti 10.10.2 64-bit n y/y n y y y -AIX 6.1 32- and 64-bit n y/n n y y y -CentOS 5.9 Linux 2.6.18-308 i686 GNU y y/y y y y y -CentOS 5.9 Linux 2.6.18-308 i686 Intel n y/y n y y y -CentOS 5.9 Linux 2.6.18-308 i686 PGI n y/y n y y y -CentOS 5.9 Linux 2.6.18 x86_64 GNU n y/y n y y y -CentOS 5.9 Linux 2.6.18 x86_64 Intel n y/y n y y y -CentOS 6.4 Linux 2.6.32 x86_64 GNU y y/y y y y y -CentOS 6.4 Linux 2.6.32 x86_64 Intel n y/y n y y y -CentOS 6.4 Linux 2.6.32 x86_64 PGI n y/y n y y y -CentOS 7.0 Linux 3.10.0 x86_64 GNU y y/y y y y y -CentOS 7.0 Linux 3.10.0 x86_64 Intel n y/y n y y y -Linux 2.6.32-431.11.2.el6.ppc64 n y/n n y y y - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -SunOS 5.11 32-bit y y y y -SunOS 5.11 64-bit y y y y -Windows 7 y y y y -Windows 7 x64 y y y y -Windows 7 Cygwin n n n y -Windows 8.1 y y y y -Windows 8.1 x64 y y y y -Mac OS X Mountain Lion 10.8.5 64-bit y n y y -Mac OS X Mavericks 10.9.5 64-bit y n y y -Mac OS X Yosemeti 10.10.2 64-bit y n y y -AIX 6.1 32- and 64-bit y n n y -CentOS 5.9 Linux 2.6.18-308 i686 GNU y y y y -CentOS 5.9 Linux 2.6.18-308 i686 Intel y y y n -CentOS 5.9 Linux 2.6.18-308 i686 PGI y y y n -CentOS 5.9 Linux 2.6.18 x86_64 GNU y y y y -CentOS 5.9 Linux 2.6.18 x86_64 Intel y y y n -CentOS 6.4 Linux 2.6.32 x86_64 GNU y y y n -CentOS 6.4 Linux 2.6.32 x86_64 Intel y y y n -CentOS 6.4 Linux 2.6.32 x86_64 PGI y y y n -CentOS 7.0 Linux 3.10.0 x86_64 GNU y y y n -CentOS 7.0 Linux 3.10.0 x86_64 Intel y y y n -Linux 2.6.32-431.11.2.el6.ppc64 y y y n - -Compiler versions for each platform are listed in the preceding -"Supported Platforms" table. - - -More Tested Platforms -===================== -The following platforms are not supported but have been tested for this release. - - Linux 2.6.18-308.13.1.el5PAE MPICH mpich 3.1.3 compiled with - #1 SMP i686 i686 i386 gcc 4.9.2 and gfortran 4.9.2 - (jam) g95 (GCC 4.0.3 (g95 0.94!) 
- - Linux 2.6.18-431.11.2.el6 MPICH mpich 3.1.3 compiled with - #1 SMP x86_64 GNU/Linux gcc 4.9.2 and gfortran 4.9.2 - (platypus) g95 (GCC 4.0.3 (g95 0.94!) - - FreeBSD 8.2-STABLE i386 gcc 4.5.4 [FreeBSD] 20110526 - (loyalty) gcc 4.6.1 20110527 - g++ 4.6.1 20110527 - gfortran 4.6.1 20110527 - - FreeBSD 8.2-STABLE amd64 gcc 4.5.4 [FreeBSD] 20110526 - (freedom) gcc 4.6.1 20110527 - g++ 4.6.1 20110527 - gfortran 4.6.1 20110527 - - Debian7.5.0 3.2.0-4-amd64 #1 SMP Debian 3.2.51-1 x86_64 GNU/Linux - gcc (Debian 4.7.2-5) 4.7.2 - GNU Fortran (Debian 4.7.2-5) 4.7.2 - (cmake and autotools) - - Fedora20 3.15.3-200.fc20.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc (GCC) 4.8.3 20140624 (Red Hat 4.8.3-1) - GNU Fortran (GCC) 4.8.3 20140624 (Red Hat 4.8.3-1) - (cmake and autotools) - - SUSE 13.1 3.11.10-17-desktop #1 SMP PREEMPT x86_64 x86_64 x86_64 GNU/Linux - gcc (SUSE Linux) 4.8.1 - GNU Fortran (SUSE Linux) 4.8.1 - (cmake and autotools) - - Ubuntu 14.04 3.13.0-35-generic #62-Ubuntu SMP x86_64 GNU/Linux - gcc (Ubuntu/Linaro 4.9.1-0ubuntu1) 4.9.1 - GNU Fortran (Ubuntu/Linaro 4.9.1-0ubuntu1) 4.9.1 - (cmake and autotools) - - hopper.nersc.gov PrgEnv-gnu/5.2.40 - gcc (GCC) 4.9.2 20141030 (Cray Inc.) - GNU Fortran (GCC) 4.9.2 20141030 (Cray Inc.) - g++ (GCC) 4.9.2 20141030 (Cray Inc.) - - -Known Problems -============== -* On Windows platforms in debug configurations, the VFD flush1 tests will fail - with the split and multi VFD drivers. These tests will display a modal debug - dialog which must be answered or wait for the test timeout to expire. - (ADB - 2014/06/23 - HDFFV-8851) - -* CLANG compiler with the options -fcatch-undefined-behavior and -ftrapv - catches some undefined behavior in the alignment algorithm of the macro DETECT_I - in H5detect.c (Issue 8147). Since the algorithm is trying to detect the alignment - of integers, ideally the flag -fcatch-undefined-behavior shouldn't to be used for - H5detect.c. 
In the future, we can separate flags for H5detect.c from the rest of - the library. (SLU - 2013/10/16) - -* Make provided by Solaris fails in "make check". Solaris users should use - gmake to build and install the HDF5 software. (AKC - 2013/10/08 - HDFFV-8534) - -* The C++ and FORTRAN bindings are not currently working on FreeBSD with the - native release 8.2 compilers (4.2.1), but are working with gcc 4.6 from the - ports (and probably gcc releases after that). - (QAK - 2012/10/19) - -* The following h5dump test case fails in BG/P machines (and potentially other - machines that use a command script to launch executables): - - h5dump --no-compact-subset -d "AHFINDERDIRECT::ah_centroid_t[0] it=0 tl=0" - tno-subset.h5 - - This is due to the embedded spaces in the dataset name being interpreted - by the command script launcher as meta-characters, thus passing three - arguments to h5dump's -d flag. The command passes if run by hand, just - not via the test script. - (AKC - 2012/05/03) - -* The STDIO VFD does not work on some architectures, possibly due to 32/64 - bit or large file issues. The basic STDIO VFD test is known to fail on - 64-bit SunOS 5.10 on SPARC when built with -m64 and 32-bit OS X/Darwin - 10.7.0. The STDIO VFD test has been disabled while we investigate and - a fix should appear in a future release. - (DER - 2011/10/14 - HDFFV-8235) - -* h5diff can report inconsistent results when comparing datasets of enum type - that contain invalid values. This is due to how enum types are handled in - the library and will be addressed in a future release. - (DER - 2011/10/14 - HDFFV-7527) - -* The links test can fail under the stdio VFD due to some issues with external - links. This will be investigated and fixed in a future release. - (DER - 2011/10/14 - HDFFV-7768) - -* After the shared library support was fixed for some bugs, it was discovered - that "make prefix=XXX install" no longer works for shared libraries. It - still works correctly for static libraries. 
Therefore, if you want to - install the HDF5 shared libraries in a location such as /usr/local/hdf5, - you need to specify the location via the --prefix option during configure - time. E.g, ./configure --prefix=/usr/local/hdf5 ... - (AKC - 2011/05/07 - HDFFV-7583) - -* The parallel test, t_shapesame, in testpar/, may run for a long time and may - be terminated by the alarm signal. If that happens, one can increase the - alarm seconds (default is 1200 seconds = 20 minutes) by setting the - environment variable, $HDF5_ALARM_SECONDS, to a larger value such as 3600 - (60 minutes). Note that the t_shapesame test may fail in some systems - (see the "While working on the 1.8.6 release..." problem below). If - it does, it will waste more time if $HDF5_ALARM_SECONDS is set - to a larger value. - (AKC - 2011/05/07) - -* Shared Fortran libraries are not quite working on AIX. While they are - generated when --enable-shared is specified, the fortran and hl/fortran - tests fail. the issue. HL and C++ shared libraries should now be - working as intended, however. - (MAM - 2011/04/20) - -* While working on the 1.8.6 release of HDF5, a bug was discovered that can - occur when reading from a dataset in parallel shortly after it has been - written to collectively. The issue was exposed by a new test in the parallel - HDF5 test suite, but had existed before that. We believe the problem lies with - certain MPI implementations and/or file systems. - - We have provided a pure MPI test program, as well as a standalone HDF5 - program, that can be used to determine if this is an issue on your system. - They should be run across multiple nodes with a varying number of processes. - These programs can be found at: - http://www.hdfgroup.org/ftp/HDF5/examples/known_problems/ - (NAF - 2011/01/19) - -* All the VFL drivers aren't backward compatible. In H5FDpublic.h, the - structure H5FD_class_t changed in 1.8. There is new parameter added to - get_eoa and set_eoa callback functions. 
A new callback function - get_type_map was added in. The public function H5FDrealloc was taken - out in 1.8. The problem only happens when users define their own driver - for 1.6 and try to plug in 1.8 library. Because there's only one user - complaining about it, we (Elena, Quincey, and I) decided to leave it as - it is (see bug report #1279). Quincey will make a plan for 1.10. - (SLU - 2010/02/02) - -* The --enable-static-exec configure flag will only statically link libraries - if the static version of that library is present. If only the shared version - of a library exists (i.e., most system libraries on Solaris, AIX, and Mac, - for example, only have shared versions), the flag should still result in a - successful compilation, but note that the installed executables will not be - fully static. Thus, the only guarantee on these systems is that the - executable is statically linked with just the HDF5 library. - (MAM - 2009/11/04) - -* A dataset created or rewritten with a v1.6.3 library or after cannot be read - with the v1.6.2 library or before when the Fletcher32 EDC filter is enabled. - There was a bug in the calculation of the Fletcher32 checksum in the - library before v1.6.3; the checksum value was not consistent between big- - endian and little-endian systems. This bug was fixed in Release 1.6.3. - However, after fixing the bug, the checksum value was no longer the same as - before on little-endian system. Library releases after 1.6.4 can still read - datasets created or rewritten with an HDF5 library of v1.6.2 or before. - (SLU - 2005/06/30) - - -%%%%1.8.14%%%% - - -HDF5 version 1.8.14 released on 2014-11-12 -================================================================================ - -INTRODUCTION -============ - -This document describes the differences between HDF5-1.8.13 and -HDF5-1.8.14, and contains information on the platforms tested and -known problems in HDF5-1.8.14. 
- -All new and modified APIs are listed in the "HDF5 Software Changes -from Release to Release" document along with details about previous -releases at: - - http://www.hdfgroup.org/HDF5/doc/ADGuide/Changes.html - -Links to the HDF5 1.8.14 source code, documentation, and additional materials -can be found on the HDF5 web page at: - - http://www.hdfgroup.org/products/hdf5/ - -The HDF5 1.8.14 release can be obtained from: - - http://www.hdfgroup.org/HDF5/release/obtain5.html - -User documentation for 1.8.14 can be accessed directly at this location: - - http://www.hdfgroup.org/HDF5/doc/ - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS -======== - -- New Features -- Support for New Platforms, Languages, and Compilers -- Bug Fixes since HDF5-1.8.13 -- Supported Platforms -- Supported Configuration Features Summary -- More Tested Platforms -- Known Problems - - -New Features -============ - - Configuration - ------------- - - bin/cmakehdf5 configures, builds and installs C, C++, Fortran and High - level API's. (It used to build the C API only). - (AKC 2014/10/17 HDFFV-8932). - - Library - ------- - - None - - Parallel Library - ---------------- - - Chunk Fill Writes Changed to Collective - - Slow performance in chunk fill writes. Chunk fills - in the past were written independently by rank 0 one block - at a time. - - Optimized the chunk fill write algorithm so that all - chunk fill values will be written collectively in a single MPI-IO - call. This should show a great performance improvement when - creating chunked datasets in parallel when the chunk dimensions - are fairly small. - - (MSC - 2014/08/22, HDFFV-8878) - - Tools - ----- - - None - - High-level APIs - --------------- - - None - - Fortran API - ----------- - - None - - C++ API - ------- - - Initialization of Object IDs - - The data member "id" in classes that represent HDF5 objects were - initialized to 0, which caused problem for some users. 
- - Replaced 0 with H5I_INVALID_HID to initialize these "id"s. For the - PropList class, H5P_DEFAULT is used instead of H5I_INVALID_HID. - - (BMR - 2014/09/30, HDFFV-4259) - - - -Support for New Platforms, Languages, and Compilers -=================================================== - - None - -Bug Fixes since HDF5-1.8.13 -=========================== - - Configuration - ------------- - - CMake and SVN URLs - - The SVN URLs will be different when the HDF Group domain name changes. - - Removed the SVN URL references in the cacheinit.cmake and release_docs files. - - (ADB - 2014/10/27, HDFFV-8953) - - - CMake Packaging - - A Fortran module was not generated if the compiler was not F2003 - compliant. - - Removed the module name from the package list of Fortran modules because - that module was never generated. This was only an issue for Fortran - compilers that are not F2003 compatible. - - (ADB - 2014/10/16, HDFFV-8932) - - - Shared Library Interface Version Number (soname) - - In order to increase the maintainability of HDF5, an architectural - change was made which required the renaming of several public symbols in - H5Ppublic.h. - - The shared library interface version number ("soname") has been increased - on account of these changes. For a full list of the changed symbols, see - the interface compatibility report, which is available as a link off of - the 'HDF5 Software Changes from Release to Release' document: - - http://www.hdfgroup.org/HDF5/doc/ADGuide/Changes.html - - (AKC - 2014/10/03, HDFFV-8937) - - - Configure Settings for Mac OSX Need Defaults for PROD_XXX, DEBUG_XXX, - and PROFILE_XXX - - The configure setting files for Mac OSX (config/apple) did not - have the default settings of PROD_XXX, DEBUG_XXX, PROFILE_XXX. - - Added the default settings. Mac platforms now builds library with - "-O3" optimization when the default clang compiler is used. 
- - (AKC - 2014/10/01, HDFFV-8933) - - - CMake ConfigureChecks - - Two include files were missing from two C tests. - - Propagated the configure test changes to H5_LDOUBLE_TO_INTEGER_WORKS_TEST - and H5_ULLONG_TO_LDOUBLE_PRECISION_TEST to ConfigureChecks.cmake (added - stdlib.h and string.h in the HDFTests.c file). - - (ADB - 2014/09/02 HDFFV-8845) - - - CMake Parallel Test Missing - - The source file was removed in the previous release but the parallel - test t_posix_compliant was not. - - Removed the t_posix_compliant parallel test from the library. - - (ADB - 2014/8/14 HDFFV-8880) - - - Autotools Reconfigure. Bison. Flex. - - The Bison and Flex files were out of date. - - Bison was upgraded to 2.7, and Flex was upgraded to 2.5.37. The - bin/reconfigure script now will execute Bison and Flex and update - the hl/src files. - - (ADB - 2014/06/16 HDFFV-8709) - - - Autotools Reconfigure. m4. - - The m4 macro processor was out of date. - - Reconfigured Autotools with m4 upgraded to 1.4.17. - - (ADB - 2014/06/12 HDFFV-8743) - - - Autotools: Modified configure to add an entry at the beginning of AM_LDFLAGS - for the hdf5 install directory. Without this entry the relink commands - invoked by "make install" to create libraries dependent on libhdf5.so added - a dependency on the first libhdf5.so found in any directory in AM_LDFLAGS - regardless of its version. (LRK - 2014/10/17 HDFFV-8944) - - - Changed Autotools Build Behavior. Fortran High-level Library. - - The Fortran high-level (HL) library did not compile if the default - size of a REAL is DOUBLE PRECISION; the build would fail during - compilation. - - Configure now checks to see if REAL is DOUBLE PRECISION, Fortran is - enabled, and HL library is enabled. If this is true, then configure - will stop with an error message. 
- - (MSB - 2014/8/11, HDFFV-8883/HDFFV-889) - - - - Library - ------- - - Fixed Identifier Management Code - - Opening an object returns an identifier; closing the object should - free up the identifier. A problem was found where the identifiers - were not being freed up correctly. - - Fixed the problem so that identifiers that have been used can be - used again after their object has been closed. - - (QAK - 2014/10/16, HDFFV-8930) - - - Removal of DllMain() from Static Windows Builds - - A DllMain() function was added in HDF5 1.8.13 in order to handle - win32 thread cleanup. The preprocessor #ifdefs around the DllMain - function allowed it to be compiled when the static library is built, - which is incorrect behavior that can cause linkage problems in - clients. - - The fix was to change the preprocessor #ifdefs to exclude compiling - DllMain() in static builds. Our DllMain function is now only - compiled when the shared, thread-safe library is built on Windows. - - (DER - 2014/06/13, HDFFV-8837) - - - Enforce Constraint on page_size Parameter in H5Pset_core_write_tracking() - - The reference manual states that the page_size parameter cannot be - zero. - - This change checks the page_size parameter to see it is zero and - returns an error code if it is. - - (DER - 2014/08/11, HDFFV-8891) - - - H5Ldelete_by_idx() fails on non-existent group name. - (MSC - 2014/07/31, HDFFV-8888) - - - H5Ldelete_by_idx() Seg Fault on Non-existent Group Name - - If a non-existent group name was used by H5Ldelete_by_idx(), a - segmentation fault would result. - - Bug was fixed. - - (MSC - 2014/07/31, HDFFV-8888) - - - Bug in Test When Building Parallel HDF5 on PVFS2 - - There was a bug in a test when building Parallel HDF5 on PVFS2. - - The build now uses MPI_File_get_size() instead of stat(). - - (MSC - 2014/07/14, HDFFV-8856) - - - MPI-IO Driver Tried to Allocate Space for Zero-length Dataset - - MPI-IO driver tried to allocate space for zero-length dataset - and asserts. 
- - Fixed driver and added a regression test. - - (MSC - 2014/07/03, HDFFV-8761) - - - Parallel Library - ---------------- - - None - - Performance - ------------- - - None - - Tools - ----- - - None - - Fortran API - ------- - - SIZEOF Replaced by C_SIZEOF and STORAGE_SIZE. - - The intrinsic function SIZEOF is non-standard and should be replaced with a - standard intrinsic function. - - If the F2008 intrinsic C_SIZEOF and STORAGE_SIZE are available, then they will - be used instead of the non-standard SIZEOF intrinsic, even when the SIZEOF - function is available. - - (MSB - 2014/6/16, HDFFV-8653) - - - Non-functional API: h5pget_fill_value_f - - The Fortran wrapper h5pget_fill_value_f was calling the wrong C API. - - The correct C API, H5Pget_fill_value, is now called by the Fortran - wrapper. - - (MSB - 2014/9/25, HDFFV-8879) - - - Interoperability with C HDF5: H5Literate and h5literate_f - - h5literate_f assumes the return value for the callback function to - be of type int (or int_f in C). However, in the C wrapper the return - value of H5Literate is type herr_t, and this could cause - interoperability issues. - - The callback function should be declared INTEGER(C_INT) for - portability. The tests were updated accordingly. - - (MSB - 2014/9/26, HDFFV-8909) - - - Interoperability with C HDF5: Constant INTEGER Parameters with the - H5FD Interface - - Wrong type cast of constant Fortran INTEGER parameters was used. - - The following parameter constant types were changed from INTEGER to - INTEGER(HID_T) to match the C types: H5FD_CORE, H5FD_FAMILY, H5FD_LOG, - H5FD_MPIO, H5FD_MULTI, H5FD_SEC2, and H5FD_STDIO. - - Other internal 'int' types where changed to 'hid_t'; these are - transparent to the user. - - (MSB - 2014/7/18, HDFFV-8748) - - C++ API - ------ - - Memory Leaks - - There were several potential memory leaks in the library due to - dynamically allocated strings not being freed when failure occurs. 
- - Applied user's patches to remove these potential memory leaks. - - (BMR - 2014/09/30, HDFFV-8928) - - - Disallow H5F_ACC_CREAT - - H5F_ACC_CREAT was included in the C++ API but the C library does not - allow it at this time. - - Removed this flag from the functions in H5File class. - - (BMR - 2014/09/29, HDFFV-8852) - - - Missing Flags in Documentation: H5F_ACC_RDONLY and H5F_ACC_RDWR - - The H5F_ACC_RDONLY and H5F_ACC_RDWR flags were missing from the - documentation of the H5File constructors. - - These two flags are now included in the documentation for opening - files. - - (BMR - 2014/09/29, HDFFV-8852) - - High-level APIs: - ------ - - Seg Faults in H5TBread_field_name and H5TBread_field_name_f - - When H5TBread_field_name or H5TBread_field_name_f were used to read a - field and if the name of the field was wrong, a segmentation fault - would result. - - Both C and Fortran APIs were fixed so they no longer seg fault if - the name of the field is wrong, and both APIs return a negative - value if the name of the field is wrong. - - (MSB - 2014/09/29, HDFFV-8912) - - - Possible Buffer Overflow in High-level (HL) APIs - - Multiple HL APIs (H5DSis_scale is one example) had issues: - (1) The datatype from the file was re-used as the memory datatype, - and - (2) No effort was made to ensure that strings were actually - null-terminated. - - All of the HL routines now check for NULL pointers, for null-terminated - strings, and to see if string buffers are short enough not to overflow - the buffer. The minimum length of the buffers is now used in strncmp - to avoid overflow. - - (MSB - 2014/9/29, HDFFV-8670) - - - Behavior Change of H5LTdtype_to_text - - If a user buffer was passed in to H5LTdtype_to_text along with the - length, then the function would not truncate at the end of the - buffer, but would exceed the end of the user buffer. - - H5LTdtype_to_text was changed to truncate the string if the user - buffer is too small. 
- - (MSB - 2014/9/29, HDFFV-8855) - - Fortran High-level APIs: - ------ - - See entry for HDFFV-8912 above. - - Testing - ------- - - A subtest in parallel h5diff (ph5diff) testing was bypassed for the - local Linux 32 machine due to unknown issue in the previous version of - Mpich. The failure no long exists in the current Mpich. Therefore the - bypass is removed. (AKC - 2014/11/03 HDFFV-8954) - - - Fixed incorrect exit code values (was -1) in testframe which is commonly - used by several test programs. (AKC - 2014/07/22 HDFFV-8881) - - - Fixed Incorrect Exit Code Values in Testframe - The testframe which is commonly used by several test programs - had some incorrect exit code values. Fixed the incorrect exit code - values. (AKC - 2014/07/22, HDFFV-8881) - -Supported Platforms -=================== -The following platforms are supported and have been tested for this release. -They are built with the configure process unless specified otherwise. - - AIX 5.3 xlc 10.1.0.5 - (NASA G-ADA) xlC 10.1.0.5 - xlf90 12.1.0.6 - - Linux 2.6.18-308.13.1.el5PAE GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP i686 i686 i386 compilers for 32-bit applications; - (jam) Version 4.1.2 20080704 (Red Hat 4.1.2-54) - Version 4.8.2 - PGI C, Fortran, C++ Compilers for 32-bit - applications; - Version 13.7-0 - Intel(R) C, C++, Fortran Compiler for 32-bit - applications; - Version 14.0.2 (Build 20140120) - - Linux 2.6.18-371.6.1.el5 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers for 64-bit applications; - (koala) Version 4.1.2 20080704 (Red Hat 4.1.2-54) - Version 4.8.2 - Intel(R) C, C++, Fortran Compilers for - applications running on Intel(R) 64; - Version 14.0.2 (Build 20140120) - - Linux 2.6.32-431.11.2.el6 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (platypus) Version 4.4.7 20120313 - Version 4.8.2 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 13.7-0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - 
compilers: - Version 14.0.2 (Build 20140120) - - Linux 2.6.32-431.29.2.el6.ppc64 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-4) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.4.7 20120313 (Red Hat 4.4.7-4) - (ostrich) GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-4) - IBM XL C/C++ V13.1 - IBM XL Fortran V15.1 - - SunOS 5.11 32- and 64-bit Sun C 5.12 SunOS_sparc - (emu) Sun Fortran 95 8.6 SunOS_sparc - Sun C++ 5.12 SunOS_sparc - - Windows 7 Visual Studio 2008 (cmake) - Visual Studio 2010 w/ Intel Fortran 14 (cmake) - Visual Studio 2012 w/ Intel Fortran 14 (cmake) - Visual Studio 2013 w/ Intel Fortran 14 (cmake) - Cygwin(CYGWIN_NT-6.1 1.7.32(0.274/5/3) gcc(4.8.3) compiler and gfortran) - (cmake and autotools) - - Windows 7 x64 Visual Studio 2008 (cmake) - Visual Studio 2010 w/ Intel Fortran 14 (cmake) - Visual Studio 2012 w/ Intel Fortran 14 (cmake) - Visual Studio 2013 w/ Intel Fortran 14 (cmake) - - Windows 8.1 Visual Studio 2012 w/ Intel Fortran 14 (cmake) - Visual Studio 2013 w/ Intel Fortran 14 (cmake) - - Windows 8.1 x64 Visual Studio 2012 w/ Intel Fortran 14 (cmake) - Visual Studio 2013 w/ Intel Fortran 14 (cmake) - - Mac OS X Lion 10.7.5 Apple clang/clang++ version 3.0 from Xcode 4.6.1 - 64-bit gfortran GNU Fortran (GCC) 4.8.2 - (duck) Intel icc/icpc/ifort version 13.0.3 - - Mac OS X Mt. 
Lion 10.8.5 Apple clang/clang++ version 5.1 from Xcode 5.1 - 64-bit gfortran GNU Fortran (GCC) 4.8.2 - (swallow/kite) Intel icc/icpc/ifort version 14.0.2 - - Mac OS X Mavericks 10.9.5 Apple clang/clang++ version 6.0 from Xcode 6.0.1 - 64-bit gfortran GNU Fortran (GCC) 4.8.2 - (wren) Intel icc/icpc/ifort version 14.0.2 - - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90/ F90 C++ zlib SZIP - parallel F2003 parallel -Solaris2.11 32-bit n y/y n y y y -Solaris2.11 64-bit n y/y n y y y -Windows 7 y y/y n y y y -Windows 7 x64 y y/y n y y y -Windows 7 Cygwin n y/y n y y n -Windows 8.1 n y/y n y y y -Windows 8.1 x64 n y/y n y y y -Mac OS X Lion 10.7.5 64-bit n y/y n y y y -Mac OS X Mountain Lion 10.8.5 64-bit n y/y n y y y -Mac OS X Mavericks 10.9.5 64-bit n y/y n y y ? 
-AIX 5.3 32- and 64-bit n y/n n y y y -CentOS 5.9 Linux 2.6.18-308 i686 GNU y y/y y y y y -CentOS 5.9 Linux 2.6.18-308 i686 Intel n y/y n y y y -CentOS 5.9 Linux 2.6.18-308 i686 PGI n y/y n y y y -CentOS 5.9 Linux 2.6.18 x86_64 GNU n y/y n y y y -CentOS 5.9 Linux 2.6.18 x86_64 Intel n y/y n y y y -CentOS 6.4 Linux 2.6.32 x86_64 GNU y y/y y y y y -CentOS 6.4 Linux 2.6.32 x86_64 Intel n y/y n y y y -CentOS 6.4 Linux 2.6.32 x86_64 PGI n y/y n y y y -Linux 2.6.32-431.11.2.el6.ppc64 n y/n n y y y - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -Solaris2.11 32-bit y y y y -Solaris2.11 64-bit y y y y -Windows 7 y y y y -Windows 7 x64 y y y y -Windows 7 Cygwin n n n y -Windows 8.1 y y y y -Windows 8.1 x64 y y y y -Mac OS X Lion 10.7.5 64-bit y n y y -Mac OS X Mountain Lion 10.8.5 64-bit y n y y -Mac OS X Mavericks 10.9.5 64-bit y n y y -AIX 5.3 32- and 64-bit y n n y -CentOS 5.9 Linux 2.6.18-308 i686 GNU y y y y -CentOS 5.9 Linux 2.6.18-308 i686 Intel y y y n -CentOS 5.9 Linux 2.6.18-308 i686 PGI y y y n -CentOS 5.9 Linux 2.6.18 x86_64 GNU y y y y -CentOS 5.9 Linux 2.6.18 x86_64 Intel y y y n -CentOS 6.4 Linux 2.6.32 x86_64 GNU y y y n -CentOS 6.4 Linux 2.6.32 x86_64 Intel y y y n -CentOS 6.4 Linux 2.6.32 x86_64 PGI y y y n -Linux 2.6.32-431.11.2.el6.ppc64 y y y n - -Compiler versions for each platform are listed in the preceding -"Supported Platforms" table. - - -More Tested Platforms -===================== -The following platforms are not supported but have been tested for this release. - - Linux 2.6.18-308.13.1.el5PAE MPICH mpich 3.1.2 compiled with - #1 SMP i686 i686 i386 gcc 4.9.1 and gfortran 4.9.1 - (jam) g95 (GCC 4.0.3 (g95 0.94!) - - Linux 2.6.18-431.11.2.el6 MPICH mpich 3.1.2 compiled with - #1 SMP x86_64 GNU/Linux gcc 4.9.1 and gfortran 4.9.1 - (platypus) g95 (GCC 4.0.3 (g95 0.94!) 
- - FreeBSD 8.2-STABLE i386 gcc 4.5.4 [FreeBSD] 20110526 - (loyalty) gcc 4.6.1 20110527 - g++ 4.6.1 20110527 - gfortran 4.6.1 20110527 - - FreeBSD 8.2-STABLE amd64 gcc 4.5.4 [FreeBSD] 20110526 - (freedom) gcc 4.6.1 20110527 - g++ 4.6.1 20110527 - gfortran 4.6.1 20110527 - - Debian7.5.0 3.2.0-4-amd64 #1 SMP Debian 3.2.51-1 x86_64 GNU/Linux - gcc (Debian 4.7.2-5) 4.7.2 - GNU Fortran (Debian 4.7.2-5) 4.7.2 - (cmake and autotools) - - Fedora20 3.15.3-200.fc20.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc (GCC) 4.8.3 20140624 (Red Hat 4.8.3-1) - GNU Fortran (GCC) 4.8.3 20140624 (Red Hat 4.8.3-1) - (cmake and autotools) - - SUSE 13.1 3.11.10-17-desktop #1 SMP PREEMPT x86_64 x86_64 x86_64 GNU/Linux - gcc (SUSE Linux) 4.8.1 - GNU Fortran (SUSE Linux) 4.8.1 - (cmake and autotools) - - Ubuntu 14.04 3.13.0-35-generic #62-Ubuntu SMP x86_64 GNU/Linux - gcc (Ubuntu/Linaro 4.9.1-0ubuntu1) 4.9.1 - GNU Fortran (Ubuntu/Linaro 4.9.1-0ubuntu1) 4.9.1 - (cmake and autotools) - - Cray Linux Environment (CLE) PrgEnv-pgi/4.2.34 - hopper.nersc.gov pgcc 13.6-0 64-bit target on x86-64 Linux -tp istanbul - pgf90 13.6-0 64-bit target on x86-64 Linux -tp istanbul - pgCC 13.6-0 64-bit target on x86-64 Linux -tp istanbul - - -Known Problems -============== -* On cygwin platforms the feature to load dynamic filter libraries only looks - for libraries with the .so extension. Support for cygwin cygxxx.dll libraries - is planned for the next release. - (ADB - 2014/11/04 - HDFFV-8736) - -* On windows platforms in debug configurations, the VFD flush1 tests will fail - with the split and multi VFD drivers. These tests will display a modal debug - dialog which must be answered or wait for the test timeout to expire. - The flush1 and flush2 tests will be skipped under debug for this release. 
- (ADB - 2014/06/23 - HDFFV-8851) - -* CLANG compiler with the options -fcatch-undefined-behavior and -ftrapv - catches some undefined behavior in the alignment algorithm of the macro DETECT_I - in H5detect.c (Issue 8147). Since the algorithm is trying to detect the alignment - of integers, ideally the flag -fcatch-undefined-behavior shouldn't to be used for - H5detect.c. In the future, we can separate flags for H5detect.c from the rest of - the library. (SLU - 2013/10/16) - -* Make provided by Solaris fails in "make check". Solaris users should use - gmake to build and install the HDF5 software. (AKC - 2013/10/08 - HDFFV-8534) - -* The h5dump and h5diff utilities occasionally produce different output - between Linux and Windows systems. This is caused by lower-level library - routines that fail to write fill values to the user's buffer when reading - unallocated chunks from datasets that have a fill value set to - H5D_FILL_VALUE_DEFAULT. Due to platform differences the return of - spurious data values has only been encountered on Windows 32-bit systems. - (Issue HDFFV-8247; JP - 2013/03/27) - -* The C++ and FORTRAN bindings are not currently working on FreeBSD with the - native release 8.2 compilers (4.2.1), but are working with gcc 4.6 from the - ports (and probably gcc releases after that). - (QAK - 2012/10/19) - -* The STDIO VFD does not work on some architectures, possibly due to 32/64 - bit or large file issues. The basic STDIO VFD test is known to fail on - 64-bit SunOS 5.10 on SPARC when built with -m64 and 32-bit OS X/Darwin - 10.7.0. The STDIO VFD test has been disabled while we investigate and - a fix should appear in a future release. - (DER - 2011/10/14 - HDFFV-8235) - -* h5diff can report inconsistent results when comparing datasets of enum type - that contain invalid values. This is due to how enum types are handled in - the library and will be addressed in a future release. 
- (DER - 2011/10/14 - HDFFV-7527) - -* The links test can fail under the stdio VFD due to some issues with external - links. This will be investigated and fixed in a future release. - (DER - 2011/10/14 - HDFFV-7768) - -* After the shared library support was fixed for some bugs, it was discovered - that "make prefix=XXX install" no longer works for shared libraries. It - still works correctly for static libraries. Therefore, if you want to - install the HDF5 shared libraries in a location such as /usr/local/hdf5, - you need to specify the location via the --prefix option during configure - time. E.g, ./configure --prefix=/usr/local/hdf5 ... - (AKC - 2011/05/07 - HDFFV-7583) - -* The parallel test, t_shapesame, in testpar/, may run for a long time and may - be terminated by the alarm signal. If that happens, one can increase the - alarm seconds (default is 1200 seconds = 20 minutes) by setting the - environment variable, $HDF5_ALARM_SECONDS, to a larger value such as 3600 - (60 minutes). Note that the t_shapesame test may fail in some systems - (see the "While working on the 1.8.6 release..." problem below). If - it does, it will waste more time if $HDF5_ALARM_SECONDS is set - to a larger value. - (AKC - 2011/05/07) - -* Shared Fortran libraries are not quite working on AIX. While they are - generated when --enable-shared is specified, the fortran and hl/fortran - tests fail. We are looking into the issue. HL and C++ shared libraries - should now be working as intended, however. - (MAM - 2011/04/20) - -* The --with-mpe configure option does not work with Mpich2. - (AKC - 2011/03/10) - -* While working on the 1.8.6 release of HDF5, a bug was discovered that can - occur when reading from a dataset in parallel shortly after it has been - written to collectively. The issue was exposed by a new test in the parallel - HDF5 test suite, but had existed before that. We believe the problem lies with - certain MPI implementations and/or file systems. 
- - We have provided a pure MPI test program, as well as a standalone HDF5 - program, that can be used to determine if this is an issue on your system. - They should be run across multiple nodes with a varying number of processes. - These programs can be found at: - http://www.hdfgroup.org/ftp/HDF5/examples/known_problems/ - (NAF - 2011/01/19) - -* All the VFL drivers aren't backward compatible. In H5FDpublic.h, the - structure H5FD_class_t changed in 1.8. There is new parameter added to - get_eoa and set_eoa callback functions. A new callback function - get_type_map was added in. The public function H5FDrealloc was taken - out in 1.8. The problem only happens when users define their own driver - for 1.6 and try to plug in 1.8 library. Because there's only one user - complaining about it, we (Elena, Quincey, and I) decided to leave it as - it is (see bug report #1279). Quincey will make a plan for 1.10. - (SLU - 2010/02/02) - -* The --enable-static-exec configure flag will only statically link libraries - if the static version of that library is present. If only the shared version - of a library exists (i.e., most system libraries on Solaris, AIX, and Mac, - for example, only have shared versions), the flag should still result in a - successful compilation, but note that the installed executables will not be - fully static. Thus, the only guarantee on these systems is that the - executable is statically linked with just the HDF5 library. - (MAM - 2009/11/04) - -* Parallel tests failed with 16 processes with data inconsistency at testphdf5 - / dataset_readAll. Parallel tests also failed with 32 and 64 processes with - collective abort of all ranks at t_posix_compliant / allwrite_allread_blocks - with MPI IO. - (CMC - 2009/04/28) - -* A dataset created or rewritten with a v1.6.3 library or after cannot be read - with the v1.6.2 library or before when the Fletcher32 EDC filter is enabled. 
- There was a bug in the calculation of the Fletcher32 checksum in the - library before v1.6.3; the checksum value was not consistent between big- - endian and little-endian systems. This bug was fixed in Release 1.6.3. - However, after fixing the bug, the checksum value was no longer the same as - before on little-endian system. Library releases after 1.6.4 can still read - datasets created or rewritten with an HDF5 library of v1.6.2 or before. - (SLU - 2005/06/30) - - -%%%%1.8.13%%%% - - -HDF5 version 1.8.13 released on 2014-05-05 -================================================================================ - -INTRODUCTION -============ - -This document describes the differences between HDF5-1.8.12 and -HDF5-1.8.13, and contains information on the platforms tested and -known problems in HDF5-1.8.13. -For more details, see the files HISTORY-1_0-1_8_0_rc3.txt -and HISTORY-1_8.txt in the release_docs/ directory of the HDF5 source. - -Links to the HDF5 1.8.13 source code, documentation, and additional materials -can be found on the HDF5 web page at: - - http://www.hdfgroup.org/products/hdf5/ - -The HDF5 1.8.13 release can be obtained from: - - http://www.hdfgroup.org/HDF5/release/obtain5.html - -User documentation for 1.8.13 can be accessed directly at this location: - - http://www.hdfgroup.org/HDF5/doc/ - -New features in the HDF5-1.8.x release series, including brief general -descriptions of some new and modified APIs, are described in the "What's New -in 1.8.0?" 
document: - - http://www.hdfgroup.org/HDF5/doc/ADGuide/WhatsNew180.html - -All new and modified APIs are listed in detail in the "HDF5 Software Changes -from Release to Release" document, in the section "Release 1.8.13 (current -release) versus Release 1.8.12": - - http://www.hdfgroup.org/HDF5/doc/ADGuide/Changes.html - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS -======== - -- New Features -- Support for New Platforms, Languages, and Compilers -- Bug Fixes since HDF5-1.8.12 -- Supported Platforms -- Supported Configuration Features Summary -- More Tested Platforms -- Known Problems - - -New Features -============ - - Configuration - ------------- - - Autotools: Automake updated to 1.14.1 (ADB - 2014/04/08) - - - CMake: Moved minimum CMake version to 2.8.11 which enables better library - include processing. (ADB - 2014/03/26) - - - When configuring a thread-safe HDF5 Library it is no longer necessary - to specify --enable-threadsafe with --with-pthreads if the Pthreads - library is in a standard location. (DER - 2014/04/11 HDFFV-8693) - - Library - ------- - - Added an H5free_memory API call. This should be used to free memory - that has been allocated by HDF5 API calls. H5Tget_member_name and - H5Pget_class_name are two examples. The main motivation for this call - is Windows, where it is common for application code and the HDF5 Library - to be using different C run-time libraries (CRT). Using the new call - ensures that the same CRT handles both the allocation and free. This - new function can also be useful in any case where the library uses a - different memory manager than the application, such as when a debug - memory manager is in use or when the HDF5 Library is wrapped for use - in a managed language like Python or Java. Fixes HDFFV-7710, 8519, - and 8851. 
(DER - 2014/04/11) - - - The Core VFD (aka Memory VFD) can now be configured to track dirty - regions in the file and only write out the changed regions on - flush/close. Additionally, a "page aggregation" size can be set that - will aggregate small writes into larger writes. For example, setting - a 1 MiB page aggregation size will logically partition the - in-memory file into 1 MiB pages that will be written out in their - entirety if even a single byte is dirtied. The feature is controlled - via the new H5Pset/get_core_write_tracking() API call. A new - "core_paged" target has been added to the check-vfd target in - test/Makefile.am that exercises the feature over all HDF5 VFD-aware - tests. (DER - 2014/04/12) - - Parallel Library - ---------------- - - Removed MPI-POSIX VFD, as it wasn't helping anyone and was just - generating support questions. Application developers performing - parallel I/O should always use the MPI-IO VFD. - (QAK - 2014/03/28 HDFFV-8659) - - - Improved parallel I/O support to allow collective I/O on point - selections. (QAK - 2014/03/15) - - Tools - ----- - - None - - High-Level APIs - --------------- - - None - - Fortran API - ----------- - - Wrappers h5pset_file_image_f and h5pget_file_image_f were added to the - library. (MSB - 2014/1/2014) - - C++ API - ------- - - The following new features are added: - + Wrappers to class H5Object to get an object's name (HDFFV-8548). - ssize_t getObjName(char *obj_name, size_t buf_size = 0) - ssize_t getObjName(H5std_string& obj_name, size_t len = 0) - H5std_string getObjName() - + Wrappers to class H5CommonFG to get a child object's type from a - group or file (HDFFV-8367). 
- H5O_type_t childObjType(const H5std_string& objname) - H5O_type_t childObjType(const char* objname) - H5O_type_t childObjType(hsize_t index, - H5_index_t index_type=H5_INDEX_NAME, - H5_iter_order_t order=H5_ITER_INC, const char* objname=".") - + Wrappers to class DSetMemXferPropList for setting/getting a transform - property list (HDFFV-7907). - DSetMemXferPropList(const char* expression); - void setDataTransform(const char* expression) - void setDataTransform(const H5std_string& expression) - ssize_t getDataTransform(char* exp, size_t buf_size=0) - H5std_string getDataTransform() - + Wrapper to CompType for setting size to compound datatype (HDFFV-8642). - void setSize(size_t size) - + Overloaded functions to provide prototypes that declare constant - arguments const (HDFFV-3384). These include: - DataSet::fillMemBuf - DataSet::getVlenBufSize - DataSpace::extentCopy - DataType::commit - FileAccPropList::setSplit - H5File::getVFDHandle - + Additional overload to class H5Location to get a comment as a char* - ssize_t getComment(const char* name, size_t buf_size, char* comment) - + Additional overloads to class Attribute to get an attribute's name for - convenience: - ssize_t getName(char* attr_name, size_t buf_size=0) - ssize_t getName(H5std_string& attr_name, size_t buf_size=0) - (BMR, 2014/04/15) - + A static wrapper to Exception for printing the error stack without an - instance of Exception - static void printErrorStack(FILE* stream = stderr, - hid_t err_stack = H5E_DEFAULT); - (BMR, 2014/04/25) - - -Support for New Platforms, Languages, and Compilers -=================================================== - Mac OS X 10.6 Snow Leopard is not supported by Apple any more. In view of - the added support of Mac OS X 10.9, Mac OS X 10.6 is retired from HDF5 - supported platforms. (AKC - 2014/03/14 HDFFV-8704) - - Mac OS X 10.9 Mavericks is supported. 
(AKC - 2014/03/04 HDFFV-8694) - - -Bug Fixes since HDF5-1.8.12 -=========================== - - Configuration - ------------- - - CMake: When CMake commands are executed individually on the command line - and the external filters are being built, the CMAKE_BUILD_TYPE define - must be set to the same value as the configuration - (-DCMAKE_BUILD_TYPE:STRING=Release if using -C Release). This is needed - by the szip and zlib filter build commands. (ADB - HDFFV-8695) - - - CMake: Removed use of the XLATE_UTILITY program. - (ADB - 2014/03/28 HDFFV-8640) - - - CMake: Added missing quotes in setting the CMAKE_EXE_LINKER_FLAGS for the - MPI option. (ADB - 2014/02/27 HDFFV-8674) - - - CMake: Configuration of the HDF5 C++ or Fortran libraries with the - thread-safety feature. - - C++ and/or Fortran + thread-safe is enforced as a non-supported - configuration. This matches the autotools. (DER - 2014/04/11) - - - CMake: Configuration of static HDF5 C library with the thread-safety - feature. - - Static + thread-safe + Win32 threads is not a supported configuration - due to the inability to automatically clean up thread-local storage. - This is expected to be fixed in a future release. In the meantime, a - work-around that uses internal functionality may allow the combination - to be used without resource leaks. Contact the help desk for more - information. (DER - 2014/04/11) - - - Autotools: Several changes were done to configure and installcheck. - - An export of LD_LIBRARY_PATH= was - removed from configure; make installcheck was revised to run - scripts installed in share/hdf5_examples to use the installed h5cc, etc. - to compile and run example source files also installed there. - - Make installcheck will now fail when a shared szip or other external lib - file cannot be found in the same manner that executables compiled and - linked with h5cc will fail to run when those lib files cannot be found - after install. 
Make installcheck should pass after setting - LD_LIBRARY_PATH to the szip location. (LRK - 2014/04/16) - - Library - ------- - - A Gnu Make directive (.NOTPARALLEL) is added to fortran/test/Makefile. - - AIX native make does not support this directive and would fail if - parallel make (e.g. make -j4) is used to build the library. AIX users - either do not use parallel make or install Gnu Make to build the library. - (AKC 2014/04/08 HDFFV-8738) - - - H5R.c: H5Rget_name gave an assertion failure if the "name" parameter - was NULL. - - Fixed H5Rget_name to return the size of the buffer needed to read a - name of the referenced object in this case. The size doesn't include - the NULL terminator. H5Rget_name returns negative on failure. - (MSB - 2014/01/22 HDFFV-8620) - - - H5Z.c: H5Zfilter_avail didn't check if a filter was available as a - dynamically loaded filter. The error manifested itself in the h5repack - tool when removing user-defined dynamically loaded filter. - - Added a code to find the filter among the dynamically loaded filters - after the function fails to find it among the registered filters. - (ADB - 2014/03/03 HDFFV-8629) - - - Memory leak: a memory leak was observed in conjunction to the - H5TS_errstk_key_g thread-local variable allocated in the H5E_get_stack - function in H5E.c. - - The shared HDF5 thread-safe library now no longer leaks thread-local - storage resources on Windows with Win32 threads. Currently, there is - no solution for this problem when HDF5 is statically built. We - disabled the build of the static HDF5 thread-safe library with - Win32 threads. (DER - 2014/04/11 HDFFV-8518) - - - H5Dio.c: Improved handling of NULL pointers to H5Dread/H5Dwrite - calls. Credit to Jason Newton (nevion@gmail.com) for the original patch. - - H5Dwrite/read failed when a NULL pointer was passed for a data buffer - and 0 elements were selected. Fixed. 
(QAK - 2014/04/16 HDFFV-8705) - - - Deprecated API (1_6 API): Improved handling of closing the library and - re-accessing it with a deprecated routine. - - When a program used a deprecated API (for example, H5Gcreate1), - closed the library, and reopened it again to access a group, dataset, - datatype, dataspace, attribute, or property list, HDF5 failed to - provide an identifier for the object. Fixed. - (NAF, QAK - 2014/04/16 HDFFV-8232) - - Parallel Library - ---------------- - - Fixed a missing H5F_Provisional module in HDF5mpio.f90 - (MSB - 2014/2/7 HDFFV-8651) - - Performance - ------------- - - None - - Tools - ----- - - The h5diff tool would report that a datafile compared with an exact - copy of the same datafile had differences. This was due to the issue - below of reading un-written chunks. This problem is also fixed. - (AKC - 2014/05/01 HDFFV-8637) - - - The h5dump and h5diff utilities occasionally produced different output - between Linux and Windows systems. This has been fixed. - - This happened to datasets that used chunked storage, with default fill - values, and some of the chunks had not been written. - When the dataset was read, the library failed to write the default fill - values to parts of the user buffer that were associated with the unwritten - chunks. (JP - 2014/05/01 HDFFV-8247) - - - The compress option is retired from bin/release. - (AKC - 2014/04/25 HDFFV-8755) - - - bin/release has a new option "zip" that produces a release zip file for - the Windows platform. (AKC - 2014/04/24 HDFFV-8433) - - - h5diff: Several failures relating to handling of string attributes - are fixed. - - The tool crashed or gave an error message when one of the strings had - fixed size type and another variable-length size type. h5diff now flags such - strings as "not comparable". We plan to enhance the tool to handle - strings of different types in future releases. 
- (AKC - 2014/04/18 HDFFV-8625, 8639, 8745) - - - h5repack: h5repack would not remove user-defined filters. - Fixed by modifying h5repack to check if the filter is registered or - can be dynamically loaded. (ADB - 2014/03/03 HDFFV-8629) - - F90 API - ------- - - H5D_CHUNK_CACHE_NSLOTS_DFLT_F and H5D_CHUNK_CACHE_NBYTES_DFLT_F were - changed from the default KIND for INTEGER to INTEGER of KIND size_t. - (MSB - 2014/3/31 HDFFV-8689) - - C++ API - ------ - - Added throw() to all exception destructors. Credit to Jason Newton - (nevion@gmail.com) for the patch. (BMR - 2014/4/15 HDFFV-8623) - - Changed the default value for H5Location::getComment from 256 to 0 - to conform to C function and because it makes more sense. - (BMR - 2014/4/15) - - High-Level APIs: - ------ - - None - - Fortran High-Level APIs: - ------ - - None - - Testing - ------- - - testhdf5 now exits with EXIT_SUCCESS(0) if no errors, else - EXIT_FAILURE(1). (AKC - 2014/01/27 HDFFV-8572) - - - The big test now pays attention to the HDF5_DRIVER environment variable. - Previously, it would run all tests with the family, stdio, and sec2 - virtual file drivers (VFDs) for each VFD in the check-vfd make target, - regardless of the variable setting. It now checks the variable and - either runs the appropriate VFD-specific tests or skips as needed. - This saves much testing time. Fixes HDFFV-8554. (DER - 2014/04/11) - -Supported Platforms -=================== -The following platforms are supported and have been tested for this release. -They are built with the configure process unless specified otherwise. 
- - AIX 5.3 xlc 10.1.0.5 - (NASA G-ADA) xlC 10.1.0.5 - xlf90 12.1.0.6 - gmake v3.82 - - Linux 2.6.18-308.13.1.el5PAE GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP i686 i686 i386 compilers for 32-bit applications; - (jam) Version 4.1.2 20080704 (Red Hat 4.1.2-54) - Version 4.8.2 - PGI C, Fortran, C++ Compilers for 32-bit - applications; - Version 13.7-0 - Intel(R) C, C++, Fortran Compiler for 32-bit - applications; - Version 14.0.2 (Build 20140120) - - Linux 2.6.18-371.6.1.el5 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers for 64-bit applications; - (koala) Version 4.1.2 20080704 (Red Hat 4.1.2-54) - Version 4.8.2 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 13.7-0 - Intel(R) C, C++, Fortran Compilers for - applications running on Intel(R) 64; - Version 14.0.2 (Build 20140120) - - Linux 2.6.32-431.11.2.el6 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (platypus) Version 4.4.7 20120313 - Version 4.8.2 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 13.7-0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 14.0.2 (Build 20140120) - - Linux 2.6.32-431.11.2.el6.ppc64 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-3) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.4.7 20120313 (Red Hat 4.4.7-3) - (ostrich) GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-3) - IBM XL C/C++ V11.1 - IBM XL Fortran V13.1 - - SunOS 5.11 32- and 64-bit Sun C 5.12 SunOS_sparc - (emu) Sun Fortran 95 8.6 SunOS_sparc - Sun C++ 5.12 SunOS_sparc - - Windows 7 Visual Studio 2008 w/ Intel Fortran 14 (cmake) - Visual Studio 2010 w/ Intel Fortran 14 (cmake) - Visual Studio 2012 w/ Intel Fortran 14 (cmake) - Cygwin(CYGWIN_NT-6.1 1.7.28(0.271/5/3) gcc(4.8.2) compiler and gfortran) - (cmake and autotools) - - Windows 7 x64 Visual Studio 2008 w/ Intel Fortran 14 (cmake) - Visual Studio 2010 w/ Intel Fortran 14 (cmake) - Visual Studio 2012 w/ Intel Fortran 14 (cmake) - - Windows 8.1 Visual Studio 2012 w/ 
Intel Fortran 14 (cmake) - - Windows 8.1 x64 Visual Studio 2012 w/ Intel Fortran 14 (cmake) - - Mac OS X Lion 10.7.3 Apple clang/clang++ version 3.0 from Xcode 4.6.1 - 64-bit gfortran GNU Fortran (GCC) 4.6.2 - (duck) Intel icc/icpc/ifort version 13.0.3 - - Mac OS X Mt. Lion 10.8.5 Apple clang/clang++ version 5.0 from Xcode 5.0.2 - 64-bit gfortran GNU Fortran (GCC) 4.8.2 - (swallow/kite) Intel icc/icpc/ifort version 14.0.2 - - Mac OS X Mavericks 10.9.2 Apple clang/clang++ version 5.1 from Xcode 5.1 - 64-bit gfortran GNU Fortran (GCC) 4.8.2 - (wren/quail) Intel icc/icpc/ifort version 14.0.2 - - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90/ F90 C++ zlib SZIP - parallel F2003 parallel -Solaris2.11 32-bit n y/y n y y y -Solaris2.11 64-bit n y/y n y y y -Windows 7 y y/y n y y y -Windows 7 x64 y y/y n y y y -Windows 7 Cygwin n y/y n y y y -Windows 8.1 n y/y n y y y -Windows 8.1 x64 n y/y n y y y -Mac OS X Lion 10.7.3 64-bit n y/y n y y y -Mac OS X Mountain Lion 10.8.1 64-bit n y/y n y y y -Mac OS X Mavericks 10.9.1 64-bit n y/y n y y y -AIX 5.3 32- and 64-bit n y/n n y y y -CentOS 5.9 Linux 2.6.18-308 i686 GNU y y/y y y y y -CentOS 5.9 Linux 2.6.18-308 i686 Intel n y/y n y y y -CentOS 5.9 Linux 2.6.18-308 i686 PGI n y/y n y y y -CentOS 5.9 Linux 2.6.18 x86_64 GNU n y/y n y y y -CentOS 5.9 Linux 2.6.18 x86_64 Intel n y/y n y y y -CentOS 6.4 Linux 2.6.32 x86_64 GNU y y/y y y y y -CentOS 6.4 Linux 2.6.32 x86_64 Intel n y/y n y y y -CentOS 6.4 Linux 2.6.32 x86_64 PGI n y/y n y y y -Linux 2.6.32-431.11.2.el6.ppc64 n y/n n y y y -OpenVMS IA64 V8.4 n y/n n y y n - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -Solaris2.11 32-bit y y y y -Solaris2.11 
64-bit y y y y -Windows 7 y y y y -Windows 7 x64 y y y y -Windows 7 Cygwin n n n y -Windows 8.1 y y y y -Windows 8.1 x64 y y y y -Mac OS X Lion 10.7.3 64-bit y n y y -Mac OS X Mountain Lion 10.8.1 64-bit y n y y -Mac OS X Mavericks 10.9.1 64-bit y n y y -AIX 5.3 32- and 64-bit y n n y -CentOS 5.9 Linux 2.6.18-308 i686 GNU y y y y -CentOS 5.9 Linux 2.6.18-308 i686 Intel y y y n -CentOS 5.9 Linux 2.6.18-308 i686 PGI y y y n -CentOS 5.9 Linux 2.6.18 x86_64 GNU y y y y -CentOS 5.9 Linux 2.6.18 x86_64 Intel y y y n -CentOS 6.4 Linux 2.6.32 x86_64 GNU y y y n -CentOS 6.4 Linux 2.6.32 x86_64 Intel y y y n -CentOS 6.4 Linux 2.6.32 x86_64 PGI y y y n -Linux 2.6.32-431.11.2.el6.ppc64 y y y n -OpenVMS IA64 V8.4 n n n n - -Compiler versions for each platform are listed in the preceding -"Supported Platforms" table. - - -More Tested Platforms -===================== -The following platforms are not supported but have been tested for this release. - - Linux 2.6.18-308.13.1.el5PAE MPICH mpich 3.1 compiled with - #1 SMP i686 i686 i386 gcc 4.8.2 and gfortran 4.8.2 - (jam) g95 (GCC 4.0.3 (g95 0.94!) - - Linux 2.6.18-431.11.2.el6 MPICH mpich 3.1 compiled with - #1 SMP x86_64 GNU/Linux gcc 4.8.2 and gfortran 4.8.2 - (platypus) g95 (GCC 4.0.3 (g95 0.94!) 
- - FreeBSD 8.2-STABLE i386 gcc 4.5.4 [FreeBSD] 20110526 - (loyalty) gcc 4.6.1 20110527 - g++ 4.6.1 20110527 - gfortran 4.6.1 20110527 - - FreeBSD 8.2-STABLE amd64 gcc 4.5.4 [FreeBSD] 20110526 - (freedom) gcc 4.6.1 20110527 - g++ 4.6.1 20110527 - gfortran 4.6.1 20110527 - - Debian7.1.0 3.2.0-4-amd64 #1 SMP Debian 3.2.51-1 x86_64 GNU/Linux - gcc (Debian 4.7.2-5) 4.7.2 - GNU Fortran (Debian 4.7.2-5) 4.7.2 - (cmake and autotools) - - Fedora20 3.11.10-301.fc20.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc (GCC) 4.8.2 20131212 (Red Hat 4.8.2-7) - GNU Fortran (GCC) 4.8.2 20130603 (Red Hat 4.8.2-7) - (cmake and autotools) - - SUSE 13.1 3.11.6-4-desktop #1 SMP PREEMPT x86_64 x86_64 x86_64 GNU/Linux - gcc (SUSE Linux) 4.8.1 - GNU Fortran (SUSE Linux) 4.8.1 - (cmake and autotools) - - Ubuntu 13.10 3.11.0-13-generic #20-Ubuntu SMP x86_64 GNU/Linux - gcc (Ubuntu/Linaro 4.8.1-10ubuntu8) 4.8.1 - GNU Fortran (Ubuntu/Linaro 4.8.1-10ubuntu8) 4.8.1 - (cmake and autotools) - - Cray Linux Environment (CLE) PrgEnv-pgi/4.0.46 - hopper.nersc.gov pgcc 12.5-0 64-bit target on x86-64 Linux -tp shanghai - pgf90 12.5-0 64-bit target on x86-64 Linux -tp shanghai - pgCC 12.5-0 64-bit target on x86-64 Linux -tp shanghai - - -Known Problems -============== -* When reading or writing a dataset (H5Dread/H5Dwrite) with a large selection - size (e.g., 2GB ~= 500 million of 4 bytes integers or floating point - numbers), some I/O systems may not be able to process it correctly. - We advise users to find out system limits before using large selections. If - I/O size limits exist, application should use HDF5 partial I/O capabilities - (e.g., H5Sselect_hyperslab(...)) to divide large requests into smaller sizes. - In this case we also advise users not to use chunk storage sizes larger that - 2GB since the HDF5 library performs I/O on the entire chunk. We will work - on the HDF5 library to divide large data requests to smaller I/O requests. 
- (AKC 2014/05/02 HDFFV-8479) - -* Due to an Intel compiler bug introduced in version 14.0.1, the HDF5 FORTRAN - wrappers do not work with configure option --enable-fortran2003. - However, the option --enable-fortran works with Intel 14.0.1. The compiler - bug was fixed in Intel version 14.0.2 and resolved the issue. - (MSB - 2014/4/15) - -* Due to a PGI compiler bug introduced in versions before 13.3 and versions - after 14.2, the FORTRAN test 'Testing get file image' will fail. - (MSB - 2014/4/15) - -* On CYGWIN, when building the library dynamically, testing will fail on - dynamically loaded filters. The test process will build dynamic filter - libraries with the *.dll.a extension, and the HDF5 Library will be looking - for *.so libraries. Entered as issue HDFFV-8736. (ADB - 2014/04/14) - -* A Gnu Make directive (.NOTPARALLEL) is added to fortran/test/Makefile. - AIX native make does not support this directive and would fail if - parallel make (e.g. make -j4) is used to build the library. AIX users - either do not use parallel make or install Gnu Make to build the library. - (AKC 2014/04/08 HDFFV-8738) - -* CLANG compiler with the options -fcatch-undefined-behavior and -ftrapv - catches some undefined behavior in the alignment algorithm of the macro - DETECT_I in H5detect.c. Since the algorithm is trying to detect the alignment - of integers, ideally the flag -fcatch-undefined-behavior should not be - used for H5detect.c. In the future, we can separate flags for H5detect.c - from the rest of the library. (SLU - 2013/10/16 HDFFV-8147) - -* Make provided by Solaris fails in "make check". Solaris users should use - gmake to build and install the HDF5 software. (AKC - 2013/10/08 - HDFFV-8534) - -* On OpenVMS, two soft conversion functions (H5T__conv_i_f and H5T__conv_f_i) - have bugs. They convert data between floating-point numbers and integers. - But the library's default is hard conversion. 
The user should avoid - explicitly enabling soft conversion between floating-point numbers and - integers. (Issue VMS-8; SLU - 2013/09/19) - -* On OpenVMS, ZLIB 1.2.8 library doesn't work properly. ZLIB 1.2.5 works - fine. So please use ZLIB 1.2.5 to build HDF5 library. (Issue VMS-5; - SLU 2013/09/19) - -* When building using the Cray compilers on Cray machines, HDF5 - configure mistakenly thinks the compiler is an intel compiler and - sets the -std=c99 flag which breaks configure on Cray. To build HDF5 - properly on a Cray machine, please consult with the instructions in - INSTALL_parallel for building on Hopper. - (MSC - 2013/04/26 - HDFFV-8429) - -* The 5.9 C++ compiler on Sun failed to compile a C++ test ttypes.cpp. It - complains with this message: - "/home/hdf5/src/H5Vprivate.h", line 130: Error: __func__ is not defined. - - The reason is that __func__ is a predefined identifier in C99 standard. The - HDF5 C library uses it in H5private.h. The test ttypes.cpp includes - H5private.h (H5Tpkg.h<-H5Fprivate.h<-H5Vprivate.h<-H5private.h). Sun's 5.9 - C++ compiler doesn't support __func__, thus fails to compile the C++ test. - But Sun's 5.11 C++ compiler does. To check whether your Sun C++ compiler - knows this identifier, try to compile the following simple C++ program: - #include <stdio.h> - - int main(void) - { - printf("%s\n", __func__); - return 0; - } - (SLU - 2012/11/5) - -* The C++ and FORTRAN bindings are not currently working on FreeBSD with the - native release 8.2 compilers (4.2.1), but are working with gcc 4.6 from the - ports (and probably gcc releases after that). 
- (QAK - 2012/10/19) - -* The following h5dump test case fails in BG/P machines (and potentially other - machines that use a command script to launch executables): - - h5dump --no-compact-subset -d "AHFINDERDIRECT::ah_centroid_t[0] it=0 tl=0" - tno-subset.h5 - - This is due to the embedded spaces in the dataset name being interpreted - by the command script launcher as meta-characters, thus passing three - arguments to h5dump's -d flag. The command passes if run by hand, just - not via the test script. - (AKC - 2012/05/03) - -* On hopper, the build failed when RUNSERIAL and RUNPARALLEL are set - to aprun -np X, because the H5lib_settings.c file was not generated - properly. Not setting those environment variables works, because - configure was able to automatically detect that it's a Cray system - and used the proper launch commands when necessary. - (MSC - 2012/04/18) - -* The data conversion test dt_arith.c fails in "long double" to integer - conversion on Ubuntu 11.10 (3.0.0.13 kernel) with GCC 4.6.1 if the library - is built with optimization -O3 or -O2. The older GCC (4.5) or newer kernel - (3.2.2 on Fedora) doesn't have the problem. Users should lower the - optimization level (-O1 or -O0) by defining CFLAGS in the command line of - "configure" like: - - CFLAGS=-O1 ./configure - - This will overwrite the library's default optimization level. - (SLU - 2012/02/07 - HDFFV-7829) - This issue is no longer present on Ubuntu 12.10 (3.5.0 kernel) with - gcc 4.7.2. - -* The STDIO VFD does not work on some architectures, possibly due to 32/64 - bit or large file issues. The basic STDIO VFD test is known to fail on - 64-bit SunOS 5.10 on SPARC when built with -m64 and 32-bit OS X/Darwin - 10.7.0. The STDIO VFD test has been disabled while we investigate and - a fix should appear in a future release. - (DER - 2011/10/14 - HDFFV-8235) - -* h5diff can report inconsistent results when comparing datasets of enum type - that contain invalid values. 
This is due to how enum types are handled in - the library and will be addressed in a future release. - (DER - 2011/10/14 - HDFFV-7527) - -* The links test can fail under the stdio VFD due to some issues with external - links. This will be investigated and fixed in a future release. - (DER - 2011/10/14 - HDFFV-7768) - -* After the shared library support was fixed for some bugs, it was discovered - that "make prefix=XXX install" no longer works for shared libraries. It - still works correctly for static libraries. Therefore, if you want to - install the HDF5 shared libraries in a location such as /usr/local/hdf5, - you need to specify the location via the --prefix option during configure - time. E.g, ./configure --prefix=/usr/local/hdf5 ... - (AKC - 2011/05/07 - HDFFV-7583) - -* The parallel test, t_shapesame, in testpar/, may run for a long time and may - be terminated by the alarm signal. If that happens, one can increase the - alarm seconds (default is 1200 seconds = 20 minutes) by setting the - environment variable, $HDF5_ALARM_SECONDS, to a larger value such as 3600 - (60 minutes). Note that the t_shapesame test may fail in some systems - (see the "While working on the 1.8.6 release..." problem below). If - it does, it will waste more time if $HDF5_ALARM_SECONDS is set - to a larger value. - (AKC - 2011/05/07) - -* The C++ and FORTRAN bindings are not currently working on FreeBSD. - (QAK - 2011/04/26) - -* Shared Fortran libraries are not quite working on AIX. While they are - generated when --enable-shared is specified, the fortran and hl/fortran - tests fail. We are looking into the issue. HL and C++ shared libraries - should now be working as intended, however. - (MAM - 2011/04/20) - -* The --with-mpe configure option does not work with Mpich2. - (AKC - 2011/03/10) - -* While working on the 1.8.6 release of HDF5, a bug was discovered that can - occur when reading from a dataset in parallel shortly after it has been - written to collectively. 
The issue was exposed by a new test in the parallel - HDF5 test suite, but had existed before that. We believe the problem lies with - certain MPI implementations and/or file systems. - - We have provided a pure MPI test program, as well as a standalone HDF5 - program, that can be used to determine if this is an issue on your system. - They should be run across multiple nodes with a varying number of processes. - These programs can be found at: - http://www.hdfgroup.org/ftp/HDF5/examples/known_problems/ - (NAF - 2011/01/19) - -* All the VFL drivers aren't backward compatible. In H5FDpublic.h, the - structure H5FD_class_t changed in 1.8. There is new parameter added to - get_eoa and set_eoa callback functions. A new callback function - get_type_map was added in. The public function H5FDrealloc was taken - out in 1.8. The problem only happens when users define their own driver - for 1.6 and try to plug in 1.8 library. Because there's only one user - complaining about it, we (Elena, Quincey, and I) decided to leave it as - it is (see bug report #1279). Quincey will make a plan for 1.10. - (SLU - 2010/02/02) - -* The --enable-static-exec configure flag will only statically link libraries - if the static version of that library is present. If only the shared version - of a library exists (i.e., most system libraries on Solaris, AIX, and Mac, - for example, only have shared versions), the flag should still result in a - successful compilation, but note that the installed executables will not be - fully static. Thus, the only guarantee on these systems is that the - executable is statically linked with just the HDF5 library. - (MAM - 2009/11/04) - -* Parallel tests failed with 16 processes with data inconsistency at testphdf5 - / dataset_readAll. Parallel tests also failed with 32 and 64 processes with - collective abort of all ranks at t_posix_compliant / allwrite_allread_blocks - with MPI IO. 
- (CMC - 2009/04/28) - -* On an Intel 64 Linux cluster (RH 4, Linux 2.6.9) with Intel 10.0 compilers, - use -mp -O1 compilation flags to build the libraries. A higher level of - optimization causes failures in several HDF5 library tests. - -* A dataset created or rewritten with a v1.6.3 library or after cannot be read - with the v1.6.2 library or before when the Fletcher32 EDC filter is enabled. - There was a bug in the calculation of the Fletcher32 checksum in the - library before v1.6.3; the checksum value was not consistent between big- - endian and little-endian systems. This bug was fixed in Release 1.6.3. - However, after fixing the bug, the checksum value was no longer the same as - before on little-endian system. Library releases after 1.6.4 can still read - datasets created or rewritten with an HDF5 library of v1.6.2 or before. - (SLU - 2005/06/30) - - -%%%%1.8.12%%%% - - -HDF5 version 1.8.12 released on 2013-11-04 -================================================================================ - -INTRODUCTION -============ - -This document describes the differences between HDF5-1.8.11 and -HDF5-1.8.12, and contains information on the platforms tested and -known problems in HDF5-1.8.12. -For more details, see the files HISTORY-1_0-1_8_0_rc3.txt -and HISTORY-1_8.txt in the release_docs/ directory of the HDF5 source. - -Links to the HDF5 1.8.12 source code, documentation, and additional materials -can be found on the HDF5 web page at: - - http://www.hdfgroup.org/products/hdf5/ - -The HDF5 1.8.12 release can be obtained from: - - http://www.hdfgroup.org/HDF5/release/obtain5.html - -User documentation for 1.8.12 can be accessed directly at this location: - - http://www.hdfgroup.org/HDF5/doc/ - -New features in the HDF5-1.8.x release series, including brief general -descriptions of some new and modified APIs, are described in the "What's New -in 1.8.0?" 
document: - - http://www.hdfgroup.org/HDF5/doc/ADGuide/WhatsNew180.html - -All new and modified APIs are listed in detail in the "HDF5 Software Changes -from Release to Release" document, in the section "Release 1.8.12 (current -release) versus Release 1.8.11": - - http://www.hdfgroup.org/HDF5/doc/ADGuide/Changes.html - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS -======== - -- New Features -- Support for New Platforms, Languages, and Compilers -- Bug Fixes since HDF5-1.8.11 -- Supported Platforms -- Supported Configuration Features Summary -- More Tested Platforms -- Known Problems - - -New Features -============ - - Configuration - ------------- - - Added a configuration option to change the default plugin path. - The configure option is --with-default-plugindir=location. - The cmake option is -DH5_DEFAULT_PLUGINDIR:PATH=location. - HDFFV-8513. (ADB 2013/09/04) - - Renamed FFLAGS to FCFLAGS in configure. (ADB 2013/08/13) - - CMake can now package a compressed examples file, the default for - Windows binaries from HDF Group. (ADB - 2013/07/22) - - Library - ------- - - None - - Parallel Library - ---------------- - - None - - Tools - ----- - - h5repack: Added the ability to use plugin filters to read and write - files. The option uses the filter number. HDFFV-8345 - (ADB - 2013/09/04). - - h5dump: Added the option -N --any_path, which searches the file for - paths that match the search path. HDFFV-7989 (ADB - 2013/08/12). - - h5dump: Added the optional arg 0 to -A, which excludes attributes - from display. HDFFV-8134 (ADB - 2013/08/01). - - High-Level APIs - --------------- - - None - - Fortran API - ----------- - - None - - C++ API - ------- - - Added tutorial examples to C++/examples. They can be installed by - "make install-examples" and, in the installed directory, they can be - executed by running the script file run-c++-ex.sh. 
(BMR - 2013/09/28) - - A new class, H5::H5Location, is added to represent the location concept - in the C library. It is a base class to H5::H5File and H5::H5Object, - whose member functions are moved into H5::H5Location. H5::H5File can - now inherit those functions. As a result, an H5::H5File object can have - an attribute. (BMR - 2013/09/27) - - Added wrappers for H5Rget_obj_type2 to retrieve the type of the object - that an object reference points to. (BMR - 2013/09/27) - H5O_type_t H5Location::getRefObjType(void *ref, H5R_type_t ref_type) - - Added wrappers for H5Aexist to check whether an attribute exists given - a name. (BMR - 2013/09/27) - bool H5::H5Location::attrExists(const char* name) - bool H5::H5Location::attrExists(const H5std_string& name) - - Added a number of overloaded functions for convenience. (BMR - 2013/09/27) - - -Support for New Platforms, Languages, and Compilers -=================================================== - - None - -Bug Fixes since HDF5-1.8.11 -=========================== - - Configuration - ------------- - - Modified H5detect.c to scan floating point types for padding bits before - analyzing the type further. This should fix problems with gcc 4.8. - (NAF - 2013/09/19 - HDFFV-8523/HDFFV-8500) - - HDF5 rpaths are no longer encoded in the library files when configured - with --disable-sharedlib-rpath. (LRK-2013-09-23 - HDFFV-8276) - - Library - ------- - - Added const qualifier to source buffer parameters in H5Dgather and - H5D_scatter_func_t (H5Dscatter callback). (NAF - 2013/7/09) - - - CMake now creates *.so.{lt_version} files with the same version as - configure. (ADB - 2013/06/05 HDFFV-8437) - - Parallel Library - ---------------- - - None - - Performance - ------------- - - None - - Tools - ----- - - h5dump: Added the option -N --any_path, which searches the file for - paths that match the search path. HDFFV-7989 (ADB - 2013/08/12). - - h5dump: Added the optional arg 0 to -A, which excludes attributes - from display. 
HDFFV-8134 (ADB - 2013/08/01). - - h5dump correctly exports subsetted data to a file, using the --output - option. (ADB - 2013/06/07 HDFFV-8447) - - h5cc and other compile scripts now default to linking shared libraries - when HDF5 is configured with the --disable-static option. - (LRK - 2013-09-23 - HDFFV-8141) - - F90 API - ------- - - None - - C++ API - ------ - - None - - High-Level APIs: - ------ - - None - - Fortran High-Level APIs: - ------ - - None - - Testing - ------- - - test/big sometimes failed with the message of "file selection+offset not - within extent". This has been fixed. (AKC - 2013/09/28 HDFFV-8271). - - tools/h5diff/testh5diff.sh is run in every "make check", even after it - has passed in the previous run. It should not run again if there are no - code changes. Fixed. (AKC - 2013/07/19 HDFFV-8392) - -Supported Platforms -=================== -The following platforms are supported and have been tested for this release. -They are built with the configure process unless specified otherwise. 
- - AIX 5.3 xlc 10.1.0.5 - (NASA G-ADA) xlC 10.1.0.5 - xlf90 12.1.0.6 - - Linux 2.6.18-308.13.1.el5PAE GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP i686 i686 i386 compilers for 32-bit applications; - (jam) Version 4.1.2 20080704 (Red Hat 4.1.2-54) - Version 4.8.1 - PGI C, Fortran, C++ Compilers for 32-bit - applications; - Version 13.7-0 - Intel(R) C, C++, Fortran Compiler for 32-bit - applications; - Version 13.1.3 (Build 20130607) - - Linux 2.6.18-308.16.1.el5 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers for 64-bit applications; - (koala) Version 4.1.2 20080704 (Red Hat 4.1.2-54) - Version 4.8.1 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 13.7-0 - Intel(R) C, C++, Fortran Compilers for - applications running on Intel(R) 64; - Version 13.1.3 (Build 20130607) - - Linux 2.6.32-358.18.1.el6 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers: - (platypus) Version 4.4.7 20120313 - Version 4.8.1 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 13.7-0 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 13.1.3 (Build 20130607) - - Linux 2.6.32-358.18.1.el6.ppc64 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-3) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.4.7 20120313 (Red Hat 4.4.7-3) - (ostrich) GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-3) - IBM XL C/C++ V11.1 - IBM XL Fortran V13.1 - - SunOS 5.11 32- and 64-bit Sun C 5.12 SunOS_sparc - (emu) Sun Fortran 95 8.6 SunOS_sparc - Sun C++ 5.12 SunOS_sparc - - Windows 7 Visual Studio 2008 w/ Intel Fortran 11 (cmake) - Visual Studio 2010 w/ Intel Fortran 12 (cmake) - Visual Studio 2012 w/ Intel Fortran 13 (cmake) - Cygwin(CYGWIN_NT-6.1 1.7.15(0.260/5/3) gcc(4.5.3) compiler and gfortran) - (cmake and autotools) - - Windows 7 x64 Visual Studio 2008 w/ Intel Fortran 11 (cmake) - Visual Studio 2010 w/ Intel Fortran 12 (cmake) - Visual Studio 2012 w/ Intel Fortran 13 (cmake) - - Windows 8.1 Visual Studio 2012 w/ Intel 
Fortran 13 (cmake) - - Windows 8.1 x64 Visual Studio 2012 w/ Intel Fortran 13 (cmake) - - Mac OS X Snow Leopard 10.6.8 gcc i686-apple-darwin11-llvm-gcc-4.2 (GCC) 4.2.1 from Xcode 3.2.6 - Darwin Kernel Version 10.8.0 g++ i686-apple-darwin11-llvm-g++-4.2 (GCC) 4.2.1 from Xcode 3.2.6 - 64-bit gfortran GNU Fortran (GCC) 4.6.2 - (fred) Intel C (icc), Fortran (ifort), C++ (icpc) - 12.1 Build 20120928 - - Mac OS X Lion 10.7.3 Apple clang/clang++ version 3.0 from Xcode 4.6.1 - 64-bit gfortran GNU Fortran (GCC) 4.6.2 - (duck) Intel icc/icpc/ifort version 13.0.3 - - Mac OS X Mountain Lion 10.8.1 Apple clang/clang++ version 4.2 from Xcode 4.6.1 - 64-bit gfortran GNU Fortran (GCC) 4.6.2 - (wren) Intel icc/icpc/ifort version 13.0.3 - - OpenVMS IA64 V8.4 HP C V7.3-018 - HP Fortran V8.2-104939-50H96 - HP C++ V7.4-004 - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90/ F90 C++ zlib SZIP - parallel F2003 parallel -Solaris2.11 32-bit n y/y n y y y -Solaris2.11 64-bit n y/y n y y y -Windows 7 y y/y n y y y -Windows 7 x64 y y/y n y y y -Windows 7 Cygwin n y/n n y y y -Windows 8.1 n y/y n y y y -Windows 8.1 x64 n y/y n y y y -Mac OS X Snow Leopard 10.6.8 64-bit n y/y n y y y -Mac OS X Lion 10.7.3 64-bit n y/y n y y y -Mac OS X Mountain Lion 10.8.1 64-bit n y/y n y y y -AIX 5.3 32- and 64-bit n y/n n y y y -CentOS 5.9 Linux 2.6.18-308 i686 GNU y y/y y y y y -CentOS 5.9 Linux 2.6.18-308 i686 Intel n y/y n y y y -CentOS 5.9 Linux 2.6.18-308 i686 PGI n y/y n y y y -CentOS 5.9 Linux 2.6.18 x86_64 GNU y y/y y y y y -CentOS 5.9 Linux 2.6.18 x86_64 Intel n y/y n y y y -CentOS 5.9 Linux 2.6.18 x86_64 PGI n y/y n y y y -CentOS 6.4 Linux 2.6.32 x86_64 GNU n y/y n y y y -CentOS 6.4 Linux 2.6.32 x86_64 
Intel n y/y n y y y -Linux 2.6.32-358.2.1.el6.ppc64 n y/n n y y y -OpenVMS IA64 V8.4 n y/n n y y n - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -Solaris2.11 32-bit y y y y -Solaris2.11 64-bit y y y y -Windows 7 y y y y -Windows 7 x64 y y y y -Windows 7 Cygwin n n n y -Windows 8.1 y y y y -Windows 8.1 x64 y y y y -Mac OS X Snow Leopard 10.6.8 64-bit y n y n -Mac OS X Lion 10.7.3 64-bit y n y y -Mac OS X Mountain Lion 10.8.1 64-bit y n y y -AIX 5.3 32- and 64-bit y n n y -CentOS 5.9 Linux 2.6.18-308 i686 GNU y y y y -CentOS 5.9 Linux 2.6.18-308 i686 Intel y y y n -CentOS 5.9 Linux 2.6.18-308 i686 PGI y y y n -CentOS 5.9 Linux 2.6.18 x86_64 GNU y y y y -CentOS 5.9 Linux 2.6.18 x86_64 Intel y y y n -CentOS 5.9 Linux 2.6.18 x86_64 PGI y y y n -CentOS 6.4 Linux 2.6.32 x86_64 GNU y y y n -CentOS 6.4 Linux 2.6.32 x86_64 Intel y y y n -Linux 2.6.32-358.2.1.el6.ppc64 y y y n -OpenVMS IA64 V8.4 n n n n - -Compiler versions for each platform are listed in the preceding -"Supported Platforms" table. - - -More Tested Platforms -===================== -The following platforms are not supported but have been tested for this release. - - Linux 2.6.18-308.13.1.el5PAE MPICH mpich2-1.4.1p1 compiled with - #1 SMP i686 i686 i386 gcc 4.1.2 and gfortran 4.1.2 - (jam) g95 (GCC 4.0.3 (g95 0.94!) - - Linux 2.6.18-308.16.1.el5 MPICH mpich2-1.4.1p1 compiled with - #1 SMP x86_64 GNU/Linux gcc 4.1.2 and gfortran 4.1.2 - (koala) g95 (GCC 4.0.3 (g95 0.94!) 
- - FreeBSD 8.2-STABLE i386 gcc 4.5.4 [FreeBSD] 20110526 - (loyalty) gcc 4.6.1 20110527 - g++ 4.6.1 20110527 - gfortran 4.6.1 20110527 - - FreeBSD 8.2-STABLE amd64 gcc 4.5.4 [FreeBSD] 20110526 - (freedom) gcc 4.6.1 20110527 - g++ 4.6.1 20110527 - gfortran 4.6.1 20110527 - - Debian7.1.0 3.2.0-4-amd64 #1 SMP Debian 3.2.46-1 x86_64 GNU/Linux - gcc (Debian 4.7.2-5) 4.7.2 - GNU Fortran (Debian 4.7.2-5) 4.7.2 - (cmake and autotools) - - Fedora19 3.11.1-200.fc19.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc (GCC) 4.8.1 20130603 (Red Hat 4.8.1-1) - GNU Fortran (GCC) 4.8.1 20130603 (Red Hat 4.8.1-1) - (cmake and autotools) - - SUSE 12.3 3.7.10-1.16-desktop #1 SMP PREEMPT x86_64 x86_64 x86_64 GNU/Linux - gcc (SUSE Linux) 4.7.2 - GNU Fortran (SUSE Linux) 4.7.2 - (cmake and autotools) - - Ubuntu 13.04 3.8.0-30-generic #44-Ubuntu SMP x86_64 GNU/Linux - gcc (Ubuntu/Linaro 4.7.3-1ubuntu1) 4.7.3 - GNU Fortran (Ubuntu/Linaro 4.7.3-1ubuntu1) 4.7.3 - (cmake and autotools) - - Cray Linux Environment (CLE) PrgEnv-pgi/4.0.46 - hopper.nersc.gov pgcc 12.5-0 64-bit target on x86-64 Linux -tp shanghai - pgf90 12.5-0 64-bit target on x86-64 Linux -tp shanghai - pgCC 12.5-0 64-bit target on x86-64 Linux -tp shanghai - - -Known Problems -============== -* Several HDF5 command-line tools and tests leave behind generated files - that are not cleaned up with "make clean" or "make distclean" when software - is built in place. The issue will be addressed in the 1.8.13 release. We - recommend to use build directory to compile and test HDF5 as described - in the INSTALL file, section 4.2. - -* Source directory names with spaces in them will cause failures in configure - or make on Mac (HDFFV-8152), Linux, and probably all other platforms. If a - configure command with a space is run from a build directory, it will exit - with an error message: "checking whether build environment is sane... - configure: error: unsafe srcdir value: '/scr/lrknox/hdf5 v1.8.12'". 
If - configure is run inside or below the directory with the space in the name, - libtool will get the directory path from the system, put the part of the - path before the space in the libdir variable in .../src/libhdf5.la, and - then fail to find the nonexistent directory. This is a known libtool issue - and the suggested workaround is to rename the directory without spaces. - (LRK - 2013/10/22) - -* CLANG compiler with the options -fcatch-undefined-behavior and -ftrapv - catches some undefined behavior in the alignment algorithm of the macro - DETECT_I in H5detect.c (HDFFV-8147). This issue will be addressed in the - next release. (SLU - 2013/10/16) - -* Running make check for the tools can fail in the tools tests if make was not - run prior. The tests for the tools use other tools in the tests, therefore - all the tools should be built before testing the tools. (ADB - 2013/10/09) - -* Make provided by Solaris fails in "make check". Solaris users should use - gmake to build and install HDF5 software. (AKC - 2013/10/08 - HDFFV-8534) - -* On OpenVMS, two soft conversion functions (H5T__conv_i_f and H5T__conv_f_i) - have bugs. They convert data between floating-point numbers and integers. - But the library's default is hard conversion. The user should avoid - explicitly enabling soft conversion between floating-point numbers and - integers. (Issue VMS-8; SLU - 2013/09/19) - -* On OpenVMS, ZLIB 1.2.8 library doesn't work properly. ZLIB 1.2.5 works - fine. So please use ZLIB 1.2.5 to build HDF5 library. (Issue VMS-5; - SLU 2013/09/19) - -* When building using the Cray compilers on Cray machines, HDF5 - configure mistakenly thinks the compiler is an intel compiler and - sets the -std=c99 flag which breaks configure on Cray. To build HDF5 - properly on a Cray machine, please consult with the instructions in - INSTALL_parallel for building on Hopper. 
- (MSC - 2013/04/26 - HDFFV-8429) - -* The h5dump and h5diff utilities occasionally produce different output - between Linux and Windows systems. This is caused by lower-level library - routines that fail to write fill values to the user's buffer when reading - unallocated chunks from datasets that have a fill value set to - H5D_FILL_VALUE_DEFAULT. Due to platform differences the return of - spurious data values has only been encountered on Windows 32-bit systems. - (Issue HDFFV-8247; JP - 2013/03/27) - -* The 5.9 C++ compiler on Sun failed to compile a C++ test ttypes.cpp. It - complains with this message: - "/home/hdf5/src/H5Vprivate.h", line 130: Error: __func__ is not defined. - - The reason is that __func__ is a predefined identifier in C99 standard. The - HDF5 C library uses it in H5private.h. The test ttypes.cpp includes - H5private.h (H5Tpkg.h<-H5Fprivate.h<-H5Vprivate.h<-H5private.h). Sun's 5.9 - C++ compiler doesn't support __func__, thus fails to compile the C++ test. - But Sun's 5.11 C++ compiler does. To check whether your Sun C++ compiler - knows this identifier, try to compile the following simple C++ program: - #include <stdio.h> - - int main(void) - { - printf("%s\n", __func__); - return 0; - } - (SLU - 2012/11/5) - -* The C++ and FORTRAN bindings are not currently working on FreeBSD with the - native release 8.2 compilers (4.2.1), but are working with gcc 4.6 from the - ports (and probably gcc releases after that). - (QAK - 2012/10/19) - -* The following h5dump test case fails in BG/P machines (and potentially other - machines that use a command script to launch executables): - - h5dump --no-compact-subset -d "AHFINDERDIRECT::ah_centroid_t[0] it=0 tl=0" - tno-subset.h5 - - This is due to the embedded spaces in the dataset name being interpreted - by the command script launcher as meta-characters, thus passing three - arguments to h5dump's -d flag. The command passes if run by hand, just - not via the test script. 
- (AKC - 2012/05/03) - -* On hopper, the build failed when RUNSERIAL and RUNPARALLEL are set - to aprun -np X, because the H5lib_settings.c file was not generated - properly. Not setting those environment variables works, because - configure was able to automatically detect that it is a Cray system - and used the proper launch commands when necessary. - (MSC - 2012/04/18) - -* The data conversion test dt_arith.c fails in "long double" to integer - conversion on Ubuntu 11.10 (3.0.0.13 kernel) with GCC 4.6.1 if the library - is built with optimization -O3 or -O2. The older GCC (4.5) or newer kernel - (3.2.2 on Fedora) do not have the problem. Users should lower the - optimization level (-O1 or -O0) by defining CFLAGS in the command line of - "configure" like: - - CFLAGS=-O1 ./configure - - This will overwrite the library's default optimization level. - (SLU - 2012/02/07 - HDFFV-7829) - This issue is no longer present on Ubuntu 12.10 (3.5.0 kernel) with - gcc 4.7.2. - -* The STDIO VFD does not work on some architectures, possibly due to 32/64 - bit or large file issues. The basic STDIO VFD test is known to fail on - 64-bit SunOS 5.10 on SPARC when built with -m64 and 32-bit OS X/Darwin - 10.7.0. The STDIO VFD test has been disabled while we investigate and - a fix should appear in a future release. - (DER - 2011/10/14 - HDFFV-8235) - -* h5diff can report inconsistent results when comparing datasets of enum type - that contain invalid values. This is due to how enum types are handled in - the library and will be addressed in a future release. - (DER - 2011/10/14 - HDFFV-7527) - -* The links test can fail under the stdio VFD due to some issues with external - links. This will be investigated and fixed in a future release. - (DER - 2011/10/14 - HDFFV-7768) - -* After the shared library support was fixed for some bugs, it was discovered - that "make prefix=XXX install" no longer works for shared libraries. It - still works correctly for static libraries. 
Therefore, if you want to - install the HDF5 shared libraries in a location such as /usr/local/hdf5, - you need to specify the location via the --prefix option during configure - time. E.g, ./configure --prefix=/usr/local/hdf5 ... - (AKC - 2011/05/07 - HDFFV-7583) - -* The parallel test, t_shapesame, in testpar/, may run for a long time and may - be terminated by the alarm signal. If that happens, one can increase the - alarm seconds (default is 1200 seconds = 20 minutes) by setting the - environment variable, $HDF5_ALARM_SECONDS, to a larger value such as 3600 - (60 minutes). Note that the t_shapesame test may fail in some systems - (see the "While working on the 1.8.6 release..." problem below). If - it does, it will waste more time if $HDF5_ALARM_SECONDS is set - to a larger value. - (AKC - 2011/05/07) - -* The C++ and FORTRAN bindings are not currently working on FreeBSD. - (QAK - 2011/04/26) - -* Shared Fortran libraries are not quite working on AIX. While they are - generated when --enable-shared is specified, the fortran and hl/fortran - tests fail. We are looking into the issue. HL and C++ shared libraries - should now be working as intended, however. - (MAM - 2011/04/20) - -* The --with-mpe configure option does not work with Mpich2. - (AKC - 2011/03/10) - -* While working on the 1.8.6 release of HDF5, a bug was discovered that can - occur when reading from a dataset in parallel shortly after it has been - written to collectively. The issue was exposed by a new test in the parallel - HDF5 test suite, but had existed before that. We believe the problem lies with - certain MPI implementations and/or file systems. - - We have provided a pure MPI test program, as well as a standalone HDF5 - program, that can be used to determine if this is an issue on your system. - They should be run across multiple nodes with a varying number of processes. 
- These programs can be found at: - http://www.hdfgroup.org/ftp/HDF5/examples/known_problems/ - (NAF - 2011/01/19) - -* All the VFL drivers aren't backward compatible. In H5FDpublic.h, the - structure H5FD_class_t changed in 1.8. There is new parameter added to - get_eoa and set_eoa callback functions. A new callback function - get_type_map was added in. The public function H5FDrealloc was taken - out in 1.8. The problem only happens when users define their own driver - for 1.6 and try to plug in 1.8 library. Because there's only one user - complaining about it, we (Elena, Quincey, and I) decided to leave it as - it is (see bug report #1279). Quincey will make a plan for 1.10. - (SLU - 2010/02/02) - -* The --enable-static-exec configure flag will only statically link libraries - if the static version of that library is present. If only the shared version - of a library exists (i.e., most system libraries on Solaris, AIX, and Mac, - for example, only have shared versions), the flag should still result in a - successful compilation, but note that the installed executables will not be - fully static. Thus, the only guarantee on these systems is that the - executable is statically linked with just the HDF5 library. - (MAM - 2009/11/04) - -* Parallel tests failed with 16 processes with data inconsistency at testphdf5 - / dataset_readAll. Parallel tests also failed with 32 and 64 processes with - collective abort of all ranks at t_posix_compliant / allwrite_allread_blocks - with MPI IO. - (CMC - 2009/04/28) - -* On an Intel 64 Linux cluster (RH 4, Linux 2.6.9) with Intel 10.0 compilers, - use -mp -O1 compilation flags to build the libraries. A higher level of - optimization causes failures in several HDF5 library tests. - -* A dataset created or rewritten with a v1.6.3 library or after cannot be read - with the v1.6.2 library or before when the Fletcher32 EDC filter is enabled. 
- There was a bug in the calculation of the Fletcher32 checksum in the - library before v1.6.3; the checksum value was not consistent between big- - endian and little-endian systems. This bug was fixed in Release 1.6.3. - However, after fixing the bug, the checksum value was no longer the same as - before on little-endian system. Library releases after 1.6.4 can still read - datasets created or rewritten with an HDF5 library of v1.6.2 or before. - (SLU - 2005/06/30) - - -%%%%1.8.11%%%% - - -HDF5 version 1.8.11 released on 2013-05-08 -================================================================================ - -INTRODUCTION -============ - -This document describes the differences between HDF5-1.8.10 and -HDF5-1.8.11-*, and contains information on the platforms tested and -known problems in HDF5-1.8.11-*. -For more details, see the files HISTORY-1_0-1_8_0_rc3.txt -and HISTORY-1_8.txt in the release_docs/ directory of the HDF5 source. - -Links to the HDF5 1.8.11 source code, documentation, and additional materials -can be found on the HDF5 web page at: - - http://www.hdfgroup.org/products/hdf5/ - -The HDF5 1.8.11 release can be obtained from: - - http://www.hdfgroup.org/HDF5/release/obtain5.html - -User documentation for 1.8.11 can be accessed directly at this location: - - http://www.hdfgroup.org/HDF5/doc/ - -New features in the HDF5-1.8.x release series, including brief general -descriptions of some new and modified APIs, are described in the "What's New -in 1.8.0?" 
document: - - http://www.hdfgroup.org/HDF5/doc/ADGuide/WhatsNew180.html - -All new and modified APIs are listed in detail in the "HDF5 Software Changes -from Release to Release" document, in the section "Release 1.8.11 (current -release) versus Release 1.8.10": - - http://www.hdfgroup.org/HDF5/doc/ADGuide/Changes.html - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS -======== - -- New Features -- Support for New Platforms, Languages, and Compilers -- Bug Fixes since HDF5-1.8.10 -- Supported Platforms -- Supported Configuration Features Summary -- More Tested Platforms -- Known Problems - - -New Features -============ - - Configuration - ------------- - - Libtool version number is changed to 8.0.0 because there are API - changes. See below for details. (AKC - 2013/05/07 HDFFV-8435) - - Mac OS X 10.7 (Lion) and 10.8 (Mountain Lion) uses clang/clang++ as the - default C and C++ compilers. (AKC - 2013/04/19 HDFFV-8245) - - CMake minimum is now 2.8.10. (ADB 2013/1/14) - - A new tool, cmakehdf5, which is a build command script similar to - buildhdf5 is added and is available in the bin directory. - (AKC - 2013/01/16 HDFFV-8336) - - Library - ------- - - The library can load filter libraries dynamically during runtime. Users - can set the search path through environment variable HDF5_PLUGIN_PATH - and call H5Pset_filter to enable a dynamic filter. (SLU - 2013/04/08) - - Added new API functions H5Dscatter and H5Dgather to scatter data to and - and gather data from a selection within a memory buffer. - (NAF - 2013/02/05) - - The library now supports the data conversion from enumeration to numeric - (integer and floating-point number) datatypes. See Issue HDFFV-8221. - (SLU - 2012/10/23) - - Parallel Library - ---------------- - - None - - Tools - ----- - - h5dump: added new option -O or -ddl to output the ddl text to a file. 
This - is a complement to the -o or --output option, which redirects the data to - a file. HDFFV-8229 (ADB - 2013/2/25) - - High-Level APIs - --------------- - - A new API function, H5DOwrite_chunk. This function writes a data chunk - directly into a file, bypassing hyperslab selection, data conversion, - and the filter pipeline. The user must be careful with the function and - clearly understand the I/O process of the library. (SLU - 2013/2/11) - - Fortran API - ----------- - - New API functions added (MSB - 2013/3/23): - - h5odecr_refcount_f, h5oexists_by_name_f, h5oget_comment_f, - h5oget_comment_by_name_f, h5oincr_refcount_f, h5oopen_by_idx_f, - h5oset_comment_f, h5oset_comment_by_name_f, h5oset_comment_by_name_f - - F2003: h5oget_info_f, h5oget_info_by_idx_f, h5ovisit_by_name_f - - - C++ API - ------- - - None - - -Support for New Platforms, Languages, and Compilers -=================================================== - - SunOS 5.11 (emu) 32-bit and 64-bit with Sun C/C++ 5.12 compiler and - Sun Fortran 95 8.6 compiler. - - Visual Studio 2012 w/ Intel Fortran 13 on Windows 7 - - g95 released new version recently and is tested in this release. - -Bug Fixes since HDF5-1.8.10 -========================== - - Configuration - ------------- - - Fixed Thread-safe configure failure for the AIX platform. - (AKC - 2013/04/19 HDFFV-8390) - - Configure will check the result of header searches before searching for - the library. - Fixes HDFFV-8257 (ADB 2013/03/04) - - HDF does not support building SHARED Fortran libraries on OSX. Added - CMake code to check for this condition. - Fixes HDFFV-8227 (ADB 2013/03/04) - - CMake builds on Windows will no longer use legacy naming for libraries. - The "dll" tag will no longer be added to the name of *.lib and *.dll. - The option HDF_LEGACY_NAMING is now OFF by default. - Fixes HDFFV-8292 (ADB 2013/01/30) - - Library - ------- - - The library now behaves correctly when performing large I/O operations - on Mac OS-X. 
Previously, single I/O operations > 2 GB would fail - since the Darwin read/write calls cannot handle the number of bytes - that their parameter types imply. - Fixes HDFFV-7975 and HDFFV-8240 (DER 2013/01/07) - - Fixed a bug in the core VFD that causes failures when opening files - > 2 GB. - Fixes HDFFV-8124 and HDFFV-8158 (DER 2013/01/07) - - Fixed a bug where uninitialized memory was read during variable-length - type conversion. This caused segfaults in netCDF. Fixes HDFFV-8159. - (DER 2013/03/30) - - Removed the H5Pset/get_dxpl_multi functions from the library. The - intended functionality for them was never fully implemented, and they - have always been fundamentally broken. NOTE: This does not affect - setting the multi VFD or any expected VFD functionality. Multi VFD - usage remains unchanged. - Fixes HDFFV-8296. (DER 2013/03/30) - - Parallel Library - ---------------- - - None - - Performance - ------------- - - None - - Tools - ----- - - h5redeploy is changed to do this by default: - Installation directories: - prefix architecture-independent files. - exec_prefix architecture-dependent files, default is . - libdir libraries, default is /lib. - includedir header files, default is . - This allows users to just change the first line of prefix=<...> and the - effect will change libdir and includedir too. (AKC 2013/04/05 HDFFV-8358) - - h5repack: Fixed failure to convert the layout of a small chunked - dataset (size < 1K) to contiguous layout. HDFFV-8214 (JKM 2013/03/26) - - h5dump: Fixed displaying compression ratio for unknown or user-defined - filters. HDFFV-8344 (XCAO 2013/03/19) - - h5dump: Changed UNKNOWN_FILTER to USER_DEFINED_FILTER for user defined - filter. HDFFV-8346 (XCAO 2013/03/19) - - h5diff: Fixed to return the correct exit code 1 when the program - detects a unique extra attribute. Prior to this fix, h5diff returned - exit code 0 indicating the two files are identical. 
- HDFFV-7643 (JKM 2013/02/15) - - h5dump: Fixed writing nulls to a binary file when exporting a dataset - with compound string datatype. HDFFV-8169 (ADB 2013/1/31) - - The following h5stat test case failed in BG/P machines (and potentially - other machines that display extra output if an MPI task returns with a - non-zero code.) - Testing h5stat notexist.h5 - The test script was fixed to ignore the extra output. HDFFV-8233 - (AKC - 2012/11/30) - - h5diff: Improved speed when comparing HDF5 files with lots of - attributes. Much slower performance was identified with release versions - from 1.8.7 to 1.8.10 compared to 1.8.6. (JKM 2012/10/19) - - F90 API - ------- - - The integer type of the 'offset' argument in h5pset_external_f and - h5pget_external_f was changed to INTEGER(KIND=OFF_T) to support 8-byte - integers, matching the C type definition of off_t. (MSB - 2013/3/23) - - h5fc updated to recognize .f95, .f03 and .f08 file extensions. - - C++ API - ------ - - The C++ wrappers DSetMemXferPropList::setMulti/getMulti were removed - because the C functions H5Pset/get_dxpl_multi are removed - from the library. Fixes HDFFV-8296 by DER. (BMR 2013/03/30) - - An exception thrown by an internal function was not propagating to the - test program during stack unwinding, so it couldn't be caught by the - test, and the program terminated "without an active exception." It - seemed that the problem happened when c_str() was used to generate - an equivalent const char* from a std::string and the resulting string - was passed to the internal function. As a work-around, we added a - try/catch around the call to the internal function and when the - exception is caught there, it is re-thrown. Fixes HDFFV-8067. - (BMR 2013/03/30) - - High-Level APIs: - ------ - - Fixed a problem with H5DSget_scale_name including the NULL terminator - in the size calculation returned by the function. 
The API was changed - to NOT include the NULL terminator in the size of name returned - (MSB- 2013/2/10) - - Fortran High-Level APIs: - ------ - - None - - Testing - ------- - - In some Mac systems, testlibinfo.sh failed with this error: - Check file ../src/.libs/libhdf5.7.dylib - strings: object: ../src/.libs/libhdf5.7.dylib malformed object \ - (unknown load command 15) - The strings command of Mac systems inspects library files, and older - versions of strings may not know newer library formats, resulting - in errors. Fixed by sending the library file as stdin to the strings - command to avoid this problem. (AKC - 2013/03/08 HDFFV-8305) - - Fixed a typo in the ERROR macro in test/testhdf5.h. It segmentation - faulted when used before. (AKC - 2013/02/12 HDFFV-8267) - -Supported Platforms -=================== -The following platforms are supported and have been tested for this release. -They are built with the configure process unless specified otherwise. - - AIX 5.3 xlc 10.1.0.5 - (NASA G-ADA) xlC 10.1.0.5 - xlf90 12.1.0.6 - - Linux 2.6.18-308.13.1.el5PAE GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP i686 i686 i386 compilers for 32-bit applications; - (jam) Version 4.1.2 20080704 (Red Hat 4.1.2-54) - Version 4.6.3 - PGI C, Fortran, C++ Compilers for 32-bit - applications; - Version 11.9-0 - Version 12.5-0 - Intel(R) C, C++, Fortran Compiler for 32-bit - applications; - Version 12.1 (Build 20110811) - Version 12.1 (Build 20120212) - - Linux 2.6.18-308.16.1.el5 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers for 64-bit applications; - (koala) Version 4.1.2 20080704 (Red Hat 4.1.2-52) - Version 4.6.3 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 11.9-0 - Version 12.5-0 - Intel(R) C, C++, Fortran Compilers for - applications running on Intel(R) 64; - Version 12.1 (Build 20110811) - Version 12.1 (Build 20120212) - - Linux 2.6.32-358.2.1.el6 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux 
compilers: - (platypus) Version 4.4.7 20120313 - Intel(R) C (icc), C++ (icpc), Fortran (icc) - compilers: - Version 12.1 20120212 - - Linux 2.6.32-358.2.1.el6.ppc64 gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-3) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.4.7 20120313 (Red Hat 4.4.7-3) - (ostrich) GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-3) - IBM XL C/C++ V11.1 - IBM XL Fortran V13.1 - - SunOS 5.11 32- and 64-bit Sun C 5.12 SunOS_sparc - (emu) Sun Fortran 95 8.6 SunOS_sparc - Sun C++ 5.12 SunOS_sparc - - Windows 7 Visual Studio 2008 w/ Intel Fortran 11 (cmake) - Visual Studio 2010 w/ Intel Fortran 12 (cmake) - Visual Studio 2012 w/ Intel Fortran 13 (cmake) - Cygwin(CYGWIN_NT-6.1 1.7.15(0.260/5/3) gcc(4.5.3) compiler and gfortran) - (cmake and autotools) - - Windows 7 x64 Visual Studio 2008 w/ Intel Fortran 11 (cmake) - Visual Studio 2010 w/ Intel Fortran 12 (cmake) - Visual Studio 2012 w/ Intel Fortran 13 (cmake) - Cygwin(CYGWIN_NT-6.1 1.7.15(0.260/5/3) gcc(4.5.3) compiler and gfortran) - (cmake and autotools) - - Mac OS X Snow Leopard 10.6.8 gcc i686-apple-darwin11-llvm-gcc-4.2 (GCC) 4.2.1 from Xcode 3.2.6 - Darwin Kernel Version 10.8.0 g++ i686-apple-darwin11-llvm-g++-4.2 (GCC) 4.2.1 from Xcode 3.2.6 - 64-bit gfortran GNU Fortran (GCC) 4.6.2 - (fred) Intel C (icc), Fortran (ifort), C++ (icpc) - 12.1 Build 20120928 - - Mac OS X Lion 10.7.3 Apple clang/clang++ version 3.0 from Xcode 4.6.1 - 64-bit gfortran GNU Fortran (GCC) 4.6.2 - (duck) Intel icc/icpc/ifort version 13.0 - - Mac OS X Mountain Lion 10.8.1 Apple clang/clang++ version 4.2 from Xcode 4.6.1 - 64-bit gfortran GNU Fortran (GCC) 4.6.2 - (wren) Intel icc/icpc/ifort version 13.0.1.119 - - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or 
platform - -Platform C F90/ F90 C++ zlib SZIP - parallel F2003 parallel -Solaris2.11 32-bit n y/y n y y y -Solaris2.11 64-bit n y/n n y y y -Windows 7 y y/y n y y y -Windows 7 x64 y y/y n y y y -Windows 7 Cygwin n y/n n y y y -Windows 7 x64 Cygwin n y/n n y y y -Mac OS X Snow Leopard 10.6.8 64-bit n y/y n y y y -Mac OS X Lion 10.7.3 64-bit n y/y n y y y -Mac OS X Mountain Lion 10.8.1 64-bit n y/y n y y y -AIX 5.3 32- and 64-bit n y/n n y y y -CentOS 5.9 Linux 2.6.18-308 i686 GNU y y/y y y y y -CentOS 5.9 Linux 2.6.18-308 i686 Intel n y/y n y y y -CentOS 5.9 Linux 2.6.18-308 i686 PGI n y/y n y y y -CentOS 5.9 Linux 2.6.18 x86_64 GNU y y/y y y y y -CentOS 5.9 Linux 2.6.18 x86_64 Intel n y/y n y y y -CentOS 5.9 Linux 2.6.18 x86_64 PGI n y/y n y y y -CentOS 6.4 Linux 2.6.32 x86_64 GNU n y/n n y y y -CentOS 6.4 Linux 2.6.32 x86_64 Intel n y/y n y y y -Linux 2.6.32-358.2.1.el6.ppc64 n y/n n y y y - - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -Solaris2.11 32-bit y y y y -Solaris2.11 64-bit y y y y -Windows 7 y y y y -Windows 7 x64 y y y y -Windows 7 Cygwin n n n y -Windows 7 x64 Cygwin n n n y -Mac OS X Snow Leopard 10.6.8 64-bit y n y n -Mac OS X Lion 10.7.3 64-bit y n y y -Mac OS X Mountain Lion 10.8.1 64-bit y n y y -AIX 5.3 32- and 64-bit y n n y -CentOS 5.9 Linux 2.6.18-308 i686 GNU y y y y -CentOS 5.9 Linux 2.6.18-308 i686 Intel y y y n -CentOS 5.9 Linux 2.6.18-308 i686 PGI y y y n -CentOS 5.9 Linux 2.6.18 x86_64 GNU y y y y -CentOS 5.9 Linux 2.6.18 x86_64 Intel y y y n -CentOS 5.9 Linux 2.6.18 x86_64 PGI y y y n -CentOS 6.4 Linux 2.6.32 x86_64 GNU y y y n -CentOS 6.4 Linux 2.6.32 x86_64 Intel y y y n -Linux 2.6.32-358.2.1.el6.ppc64 y y y n - -Compiler versions for each platform are listed in the preceding -"Supported Platforms" table. - - -More Tested Platforms -===================== -The following platforms are not supported but have been tested for this release. 
- - Linux 2.6.18-308.13.1.el5PAE MPICH mpich2-1.4.1p1 compiled with - #1 SMP i686 i686 i386 gcc 4.1.2 and gfortran 4.1.2 - (jam) g95 (GCC 4.0.3 (g95 0.94!) - - Linux 2.6.18-308.16.1.el5 MPICH mpich2-1.4.1p1 compiled with - #1 SMP x86_64 GNU/Linux gcc 4.1.2 and gfortran 4.1.2 - (koala) g95 (GCC 4.0.3 (g95 0.94!) - - FreeBSD 8.2-STABLE i386 gcc 4.2.1 [FreeBSD] 20070719 - (loyalty) gcc 4.6.1 20110422 - g++ 4.6.1 20110422 - gfortran 4.6.1 20110422 - - FreeBSD 8.2-STABLE amd64 gcc 4.2.1 [FreeBSD] 20070719 - (freedom) gcc 4.6.1 20110422 - g++ 4.6.1 20110422 - gfortran 4.6.1 20110422 - - Debian6.0.7 2.6.32-5-amd64 #1 SMP x86_64 GNU/Linux - gcc (Debian 4.4.5-8) 4.4.5 - GNU Fortran (Debian 4.4.5-8) 4.4.5 - (cmake and autotools) - - Fedora18 3.7.9-205.fc18.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc (GCC) 4.7.2 20121109 (Red Hat 4.7.2-8) - GNU Fortran (GCC) 4.7.2 20120507 (Red Hat 4.7.2-8) - (cmake and autotools) - - SUSE 12.3 3.7.10-1.1-desktop #1 SMP PREEMPT x86_64 x86_64 x86_64 GNU/Linux - gcc (SUSE Linux) 4.7.2 - GNU Fortran (SUSE Linux) 4.7.2 - (cmake and autotools) - - Ubuntu 12.10 3.5.0-25-generic #39-Ubuntu SMP x86_64 GNU/Linux - gcc (Ubuntu/Linaro 4.7.2-2ubuntu1) 4.7.2 - GNU Fortran (Ubuntu/Linaro 4.7.2-2ubuntu1) 4.7.2 - (cmake and autotools) - - Cray Linux Environment (CLE) PrgEnv-pgi/4.0.46 - hopper.nersc.gov pgcc 12.5-0 64-bit target on x86-64 Linux -tp shanghai - pgf90 12.5-0 64-bit target on x86-64 Linux -tp shanghai - pgCC 12.5-0 64-bit target on x86-64 Linux -tp shanghai - - -Known Problems -============== - -* When building using the Cray compilers on Cray machines, HDF5 - configure mistakenly thinks the compiler is an intel compiler and - sets the -std=c99 flag which breaks configure on Cray. To build HDF5 - properly on a Cray machine, please consult with the instructions in - INSTALL_parallel for building on Hopper. 
- (MSC - 2013/04/26 - HDFFV-8429) - -* The h5dump and h5diff utilities occasionally produce different output - between Linux and Windows systems. This is caused by lower-level library - routines that fail to write fill values to the user's buffer when reading - unallocated chunks from datasets that have a fill value set to - H5D_FILL_VALUE_DEFAULT. Due to platform differences the return of - spurious data values has only been encountered on Windows 32-bit systems. - (Issue HDFFV-8247; JP - 2013/03/27) - -* The 5.9 C++ compiler on Sun failed to compile a C++ test ttypes.cpp. It - complains with this message: - "/home/hdf5/src/H5Vprivate.h", line 130: Error: __func__ is not defined. - - The reason is that __func__ is a predefined identifier in C99 standard. The - HDF5 C library uses it in H5private.h. The test ttypes.cpp includes - H5private.h (H5Tpkg.h<-H5Fprivate.h<-H5Vprivate.h<-H5private.h). Sun's 5.9 - C++ compiler doesn't support __func__, thus fails to compile the C++ test. - But Sun's 5.11 C++ compiler does. To check whether your Sun C++ compiler - knows this identifier, try to compile the following simple C++ program: - #include <stdio.h> - - int main(void) - { - printf("%s\n", __func__); - return 0; - } - (SLU - 2012/11/5) - -* The C++ and FORTRAN bindings are not currently working on FreeBSD with the - native release 8.2 compilers (4.2.1), but are working with gcc 4.6 from the - ports (and probably gcc releases after that). - (QAK - 2012/10/19) - -* The following h5dump test case fails in BG/P machines (and potentially other - machines that use a command script to launch executables): - - h5dump --no-compact-subset -d "AHFINDERDIRECT::ah_centroid_t[0] it=0 tl=0" - tno-subset.h5 - - This is due to the embedded spaces in the dataset name being interpreted - by the command script launcher as meta-characters, thus passing three - arguments to h5dump's -d flag. The command passes if run by hand, just - not via the test script. 
- (AKC - 2012/05/03) - -* On hopper, the build failed when RUNSERIAL and RUNPARALLEL are set - to aprun -np X, because the H5lib_settings.c file was not generated - properly. Not setting those environment variables works, because - configure was able to automatically detect that it's a Cray system - and used the proper launch commands when necessary. - (MSC - 2012/04/18) - -* The data conversion test dt_arith.c fails in "long double" to integer - conversion on Ubuntu 11.10 (3.0.0.13 kernel) with GCC 4.6.1 if the library - is built with optimization -O3 or -O2. The older GCC (4.5) or newer kernel - (3.2.2 on Fedora) doesn't have the problem. Users should lower the - optimization level (-O1 or -O0) by defining CFLAGS in the command line of - "configure" like: - - CFLAGS=-O1 ./configure - - This will overwrite the library's default optimization level. - (SLU - 2012/02/07 - HDFFV-7829) - This issue is no longer present on Ubuntu 12.10 (3.5.0 kernel) with - gcc 4.7.2. - -* The STDIO VFD does not work on some architectures, possibly due to 32/64 - bit or large file issues. The basic STDIO VFD test is known to fail on - 64-bit SunOS 5.10 on SPARC when built with -m64 and 32-bit OS X/Darwin - 10.7.0. The STDIO VFD test has been disabled while we investigate and - a fix should appear in a future release. - (DER - 2011/10/14 - HDFFV-8235) - -* h5diff can report inconsistent results when comparing datasets of enum type - that contain invalid values. This is due to how enum types are handled in - the library and will be addressed in a future release. - (DER - 2011/10/14 - HDFFV-7527) - -* The links test can fail under the stdio VFD due to some issues with external - links. This will be investigated and fixed in a future release. - (DER - 2011/10/14 - HDFFV-7768) - -* After the shared library support was fixed for some bugs, it was discovered - that "make prefix=XXX install" no longer works for shared libraries. It - still works correctly for static libraries. 
Therefore, if you want to - install the HDF5 shared libraries in a location such as /usr/local/hdf5, - you need to specify the location via the --prefix option during configure - time. E.g, ./configure --prefix=/usr/local/hdf5 ... - (AKC - 2011/05/07 - HDFFV-7583) - -* The parallel test, t_shapesame, in testpar/, may run for a long time and may - be terminated by the alarm signal. If that happens, one can increase the - alarm seconds (default is 1200 seconds = 20 minutes) by setting the - environment variable, $HDF5_ALARM_SECONDS, to a larger value such as 3600 - (60 minutes). Note that the t_shapesame test may fail in some systems - (see the "While working on the 1.8.6 release..." problem below). If - it does, it will waste more time if $HDF5_ALARM_SECONDS is set - to a larger value. - (AKC - 2011/05/07) - -* The C++ and FORTRAN bindings are not currently working on FreeBSD. - (QAK - 2011/04/26) - -* Shared Fortran libraries are not quite working on AIX. While they are - generated when --enable-shared is specified, the fortran and hl/fortran - tests fail. We are looking into the issue. HL and C++ shared libraries - should now be working as intended, however. - (MAM - 2011/04/20) - -* The --with-mpe configure option does not work with Mpich2. - (AKC - 2011/03/10) - -* While working on the 1.8.6 release of HDF5, a bug was discovered that can - occur when reading from a dataset in parallel shortly after it has been - written to collectively. The issue was exposed by a new test in the parallel - HDF5 test suite, but had existed before that. We believe the problem lies with - certain MPI implementations and/or file systems. - - We have provided a pure MPI test program, as well as a standalone HDF5 - program, that can be used to determine if this is an issue on your system. - They should be run across multiple nodes with a varying number of processes. 
- These programs can be found at: - http://www.hdfgroup.org/ftp/HDF5/examples/known_problems/ - (NAF - 2011/01/19) - -* The library's test dt_arith.c showed a compiler's rounding problem on - Cygwin when converting from unsigned long long to long double. The - library's own conversion works fine. We defined a macro for Cygwin to - skip this test until we can solve the problem. - (SLU - 2010/05/05 - HDFFV-1264) - -* All the VFL drivers aren't backward compatible. In H5FDpublic.h, the - structure H5FD_class_t changed in 1.8. There is new parameter added to - get_eoa and set_eoa callback functions. A new callback function - get_type_map was added in. The public function H5FDrealloc was taken - out in 1.8. The problem only happens when users define their own driver - for 1.6 and try to plug in 1.8 library. Because there's only one user - complaining about it, we (Elena, Quincey, and I) decided to leave it as - it is (see bug report #1279). Quincey will make a plan for 1.10. - (SLU - 2010/02/02) - -* The --enable-static-exec configure flag will only statically link libraries - if the static version of that library is present. If only the shared version - of a library exists (i.e., most system libraries on Solaris, AIX, and Mac, - for example, only have shared versions), the flag should still result in a - successful compilation, but note that the installed executables will not be - fully static. Thus, the only guarantee on these systems is that the - executable is statically linked with just the HDF5 library. - (MAM - 2009/11/04) - -* Parallel tests failed with 16 processes with data inconsistency at testphdf5 - / dataset_readAll. Parallel tests also failed with 32 and 64 processes with - collective abort of all ranks at t_posix_compliant / allwrite_allread_blocks - with MPI IO. - (CMC - 2009/04/28) - -* On an Intel 64 Linux cluster (RH 4, Linux 2.6.9) with Intel 10.0 compilers, - use -mp -O1 compilation flags to build the libraries. 
A higher level of - optimization causes failures in several HDF5 library tests. - -* A dataset created or rewritten with a v1.6.3 library or after cannot be read - with the v1.6.2 library or before when the Fletcher32 EDC filter is enabled. - There was a bug in the calculation of the Fletcher32 checksum in the - library before v1.6.3; the checksum value was not consistent between big- - endian and little-endian systems. This bug was fixed in Release 1.6.3. - However, after fixing the bug, the checksum value was no longer the same as - before on little-endian system. Library releases after 1.6.4 can still read - datasets created or rewritten with an HDF5 library of v1.6.2 or before. - (SLU - 2005/06/30) - - -%%%%1.8.10-patch1%%%% - - -HDF5 version 1.8.10-patch1 released on 2013-01-22 -================================================================================ - -INTRODUCTION -============ - -This document describes the differences between HDF5-1.8.9 and -HDF5 1.8.10, and contains information on the platforms tested and -known problems in HDF5-1.8.10. -For more details, see the files HISTORY-1_0-1_8_0_rc3.txt -and HISTORY-1_8.txt in the release_docs/ directory of the HDF5 source. - -Links to the HDF5 1.8.10 source code, documentation, and additional materials -can be found on the HDF5 web page at: - - http://www.hdfgroup.org/products/hdf5/ - -The HDF5 1.8.10 release can be obtained from: - - http://www.hdfgroup.org/HDF5/release/obtain5.html - -User documentation for 1.8.10 can be accessed directly at this location: - - http://www.hdfgroup.org/HDF5/doc/ - -New features in the HDF5-1.8.x release series, including brief general -descriptions of some new and modified APIs, are described in the "What's New -in 1.8.0?" 
document: - - http://www.hdfgroup.org/HDF5/doc/ADGuide/WhatsNew180.html - -All new and modified APIs are listed in detail in the "HDF5 Software Changes -from Release to Release" document, in the section "Release 1.8.10 (current -release) versus Release 1.8.9": - - http://www.hdfgroup.org/HDF5/doc/ADGuide/Changes.html - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS -======== - -- New Features -- Support for New Platforms, Languages, and Compilers -- Bug Fixes since HDF5-1.8.10 -- Bug Fixes since HDF5-1.8.9 -- Supported Platforms -- Supported Configuration Features Summary -- More Tested Platforms -- Known Problems - - -New Features -============ - - Configuration - ------------- - - A new tool, cmakehdf5, which is a build command script similar to - buildhdf5 is added and is available in the bin directory. - (AKC - 2013/01/21) - - Library - ------- - - Updated to latest autotools and changed all hard *.sh scripts to - configure managed *.sh.in files. Removed overloading of autotools - TESTS variable by examples and tests. Renamed configure.in to - configure.ac. (ADB - 2012/08/23 - HDFFV-8129) - - The data sieve buffer size was set for all the datasets in the file. It - could waste memory if any dataset size is smaller than the sieve buffer - size. Now the library picks the smaller one between the dataset size - and the sieve buffer size from the file access property. See Issue 7934. - (SLU - 2012/4/11) - - Parallel Library - ---------------- - - Added the H5Pget_mpio_no_collective_cause() function that retrieves - reasons why the collective I/O was broken during read/write IO access. - (JKM - 2012/08/30 HDFFV-8143) - - - Added H5Pget_mpio_actual_io_mode_f (MSB - 2012/09/27) - - Tools - ----- - - h5import: Changed to allow the use of h5dump output as input files to - h5import. h5dump must include the "-p" option to print the properties; - configuration file is captured output of h5dump. 
The restrictions are - that only one dataset with a simple datatype (integer, floating-point, - or string) can be processed. Integers and floating-point imports from - h5dump must use the "binary" option for the data file. The string version - uses the h5dump "-y --width=1" options to disable the indexing printouts, - print single columns, and obviously NOT use the "binary" option. - (ADB - 2012/07/19 HDFFV-721) - - High-Level APIs - --------------- - - None - - Fortran API - ----------- - - Fixed a typo in return value of the nh5dread_f_c function (was 1 - instead of 0 on success); fixed the return value to make it consistent - with other Fortran functions; cleaned debug statements from the code. - (EIP - 2012/06/23) - - C++ API - ------- - - None - - -Support for New Platforms, Languages, and Compilers -=================================================== - - None - -Bug Fixes since HDF5-1.8.10 -=========================== - Library - ------- - - The library now behaves correctly when performing large I/O operations on - Mac OS-X. Previously, single I/O operations > 2 GB would fail since the - Darwin read/write calls cannot handle the number of bytes that their - parameter types imply. - Fixes HDFFV-7975 and HDFFV-8240 (DER - 07 JAN 2013) - - Fixed a bug in the core VFD that cause failures when opening files > 2 GB. - Fixes HDFFV-8124 and HDFFV-8158 (DER - 07 JAN 2013) - - Tools - ----- - - The following h5stat test case failed in BG/P machines (and potentially - other machines that display extra output if an MPI task returns with a - non-zero code.) - Testing h5stat notexist.h5 - The test script was fixed to ignore the extra output. - HDFFV-8233 (AKC - 2012/12/17) - - h5diff: Fixed slowness when comparing HDF5 files with many attributes. - Much slower performance was identified with later release version - (from 1.8.7 to 1.8.10) compared to 1.8.6. The issue was introduced - from fixing an attribute related bug for 1.8.7 release in the past. 
- HDFFV-8145 (JKM 2012/12/13) - - Testing - ------- - - None - -Bug Fixes since HDF5-1.8.9 -========================== - - Configuration - ------------- - - Fixed configure --enable-production to not use -O optimization for Lion - and Mountain Lion systems when gcc (i686-apple-darwin11-llvm-gcc-4.2 - (GCC) 4.2.1) is used. Somehow the -O optimization will cause some of - the hard conversion code in test/dt_arith.c to fail. HDFFV-8017. - (AKC - 2012/10/10) - - Fixed AIX Fortran compiler flags to use appropriate settings for - debugging, profiling, and optimization situations. HDFFV-8069. - (AKC 2012/09/27) - - Library - ------- - - Fixed a memory leak exposed when inserting/removing a property - from a property list several times. HDFFV-8022. (MSC 2012/05/18) - - The file_image test will fail in the "initial file image and callbacks in - the core VFD" sub-test if the source directory is read-only as the test - fails to create its test files in the build directory. This has been - fixed. HDFFV-8009 (AKC - 2012/07/06) - - - Parallel Library - ---------------- - - The MPI-POSIX VFD was updated to include the POSIX and Windows - correctness features that had already been added to the other VFDs. - HDFFV-8058/7845. (DER 2012/09/17) - - Performance - ------------- - - Removed program perform/benchpar from the enable-build-all list. The - program will be retired or moved to another location. HDFFV-8156 - (AKC 2012/10/01) - - Retired program perform/mpi-perf. Its purpose has been incorporated - into h5perf. (AKC 2012/09/21) - - Tools - ----- - - h5repack: "h5repack -f NONE file1.h5 out.h5" command failed if - source file contains chunked dataset and a chunk dim is bigger than - the dataset dim. Another issue is that the command changed max dims - if chunk dim is smaller than the dataset dim. These issues occurred - when dataset size is smaller than 64k (compact size limit). Fixed both. 
- HDFFV-8012 (JKM 2012/09/24) - - h5diff: Fixed the counter in verbose mode (-v, -r) so that it will no - longer add together the differences between datasets and the differences - between attributes of those datasets. This change makes the output of - verbose mode consistent for datasets, groups, and committed datatypes. - HDFFV-5919 (JKM 2012/09/10) - - h5diff: Fixed the incorrect result when comparing attribute data - values and the data type has the same class but different sizes. - HDFFV-7942 (JKM 2012/08/15) - - h5dump: Replaced single element fwrite with block writes. - HDFFV-1208 (ADB 2012/08/13) - - h5diff: Fixed test failure for "make check" due to failure of - copying test files when performed in HDF5 source tree. Also applied - to other tools. HDFFV-8107 (JKM 2012/08/01) - - ph5diff: Fixed intermittent hang issue on a certain operation in - parallel mode. It was detected by daily test for comparing - non-comparable objects, but it could have occurred in other - operations depending on machine condition. HDFFV-8003 (JKM 2012/08/01) - - h5diff: Fixed the function COPY_TESTFILES_TO_TESTDIR() of testh5diff.sh - to better report when there is an error in the file copying. - HDFFV-8105 (AKC 2012/07/22) - - h5dump: Fixed the sort by name display to maintain correct parent/child - relationships between ascending/descending order. - HDFFV-8095 (ADB 2012/07/12) - - h5dump: Fixed the display by creation order when using option -n - (print contents). - HDFFV-5942 (ADB 2012/07/09) - - h5dump: Changed to allow H5T_CSET_UTF8 to be displayed in h5dump output. - Used technique similar to what was done in h5ls (matches library - options). - HDFFV-7999 (ADB 2012/05/23) - - h5diff: Fixed the tool so that it will not check and display the status - of dangling links without setting the --follow-symlinks option. This - also improved performance when comparing lots of external links without - the --follow-symlinks option. 
- HDFFV-7998 (JKM 2012/04/26) - - F90 API - ------- - - - Fixed a typo in return value of the nh5dread_f_c function (was 1 - instead of 0 on success); fixed the return value to make it consistent - with other Fortran functions; cleaned debug statements from the code. - (EIP - 2012/06/23) - - - Fixed a problem writing/reading control characters to a dataset; writing - a string containing alerts, backspace, carriage_return, form_feed, - horizontal_tab, vertical_tab, or new_line is now tested and working. - (MSB - 2012/09/01) - - - Corrected the integer type of H5S_UNLIMITED_F to HSIZE_T (MSB - 2012/09/01) - - - Corrected the number of continuation lines in the src files - to be less than 32 lines for F95 compliance. (MSB - 2012/10/01) - - C++ API - ------ - - None - - High-Level APIs: - ------ - - - Fixed problem with H5TBdelete_record destroying all data following the - deletion of a row. (MSB- 2012/7/26) - - - Fixed H5LTget_attribute_string not closing an object identifier when an - error occurs. (MSB- 2012/7/21) - - - Corrected the return type of H5TBAget_fill from herr_t to htri_t to - reflect that a return value of 1 indicates that a fill value is - present, 0 indicates a fill value is not present, and <0 indicates an - error. 
- - Fortran High-Level APIs: - ------ - - None - -Supported Platforms -=================== - AIX 5.3 xlc 10.1.0.5 - (NASA G-ADA) xlC 10.1.0.5 - xlf90 12.1.0.6 - - Linux 2.6.18-308.13.1.el5PAE GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP i686 i686 i386 compilers for 32-bit applications; - (jam) Version 4.1.2 20080704 (Red Hat 4.1.2-52) - Version 4.6.3 - PGI C, Fortran, C++ Compilers for 32-bit - applications; - Version 11.9-0 - Intel(R) C, C++, Fortran Compiler for 32-bit - applications; - Version 12.1 - MPICH mpich2-1.4.1p1 compiled with - gcc 4.1.2 and gfortran 4.1.2 - - Linux 2.6.18-308.16.1.el5 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers for 32-bit applications; - (koala) Version 4.1.2 20080704 (Red Hat 4.1.2-52) - Version 4.6.3 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 11.9-0 - Version 12.5-0 - Intel(R) C, C++, Fortran Compilers for - applications running on Intel(R) 64; - Version 12.1 (Build 20110811) - Version 12.1 (Build 20120212) - MPICH mpich2-1.4.1p1 compiled with - gcc 4.1.2 and gfortran 4.1.2 - - Linux 2.6.32-220.7.1.el6.ppc64 gcc (GCC) 4.4.6 20110731 (Red Hat 4.4.6-3) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.4.6 20110731 - (ostrich) GNU Fortran (GCC) 4.4.6 20110731 (Red Hat 4.4.6-3) - - Linux 2.6.32-220.23.1.1chaos Intel C, C++, Fortran Compilers - ch5.x86_64 GNU/Linux Version 12.1.5.339 - (LLNL Aztec) - - IBM Blue Gene/P XL C for Blue Gene/P, bgxlc V9.0 - (LLNL uDawn) XL C++ for Blue Gene/P, bgxlC V9.0 - XL Fortran for Blue Gene/P, bgxlf90 V11.1 - - SunOS 5.10 32- and 64-bit Sun C 5.9 Sun OS_sparc Patch 124867-16 - (linew) Sun Fortran 95 8.3 Sun OS_sparc Patch 127000-13 - Sun C++ 5.9 Sun OS_sparc Patch 124863-26 - Sun C 5.11 SunOS_sparc - Sun Fortran 95 8.5 SunOS_sparc - Sun C++ 5.11 SunOS_sparc - - Windows XP Visual Studio 2008 w/ Intel Fortran 10.1 (project files) - - Windows XP x64 Visual Studio 2008 w/ Intel Fortran 10.1 (project files) - - Windows 7 Visual Studio 2008 w/ Intel Fortran 
11.1 (cmake) - Visual Studio 2010 w/ Intel Fortran 12 (cmake) - Cygwin(CYGWIN_NT-6.1 1.7.15(0.260/5/3) gcc(4.5.3) compiler and gfortran) - (cmake and autotools) - - Windows 7 x64 Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - Visual Studio 2010 w/ Intel Fortran 12 (cmake) - Cygwin(CYGWIN_NT-6.1 1.7.15(0.260/5/3) gcc(4.5.3) compiler and gfortran) - (cmake and autotools) - - Mac OS X Snow Leopard 10.6.8 gcc i686-apple-darwin11-llvm-gcc-4.2 (GCC) 4.2.1 from Xcode 3.2.6 - Darwin Kernel Version 10.8.0 g++ i686-apple-darwin11-llvm-g++-4.2 (GCC) 4.2.1 from Xcode 3.2.6 - (fred) gfortran GNU Fortran (GCC) 4.6.2 - Intel C (icc), Fortran (ifort), C++ (icpc) - 12.1.0.038 Build 20110811 - - Mac OS X Snow Leopard 10.6.8 gcc i686-apple-darwin11-llvm-gcc-4.2 (GCC) 4.2.1 from Xcode 3.2.6 - Darwin Kernel Version 10.8.0 g++ i686-apple-darwin11-llvm-g++-4.2 (GCC) 4.2.1 from Xcode 3.2.6 - Intel 32-bit gfortran GNU Fortran (GCC) 4.6.1 - (tejeda) Intel C (icc), Fortran (ifort), C++ (icpc) - 12.1.0.038 Build 20110811 - - Mac OS X Lion 10.7.3 gcc i686-apple-darwin11-llvm-gcc-4.2 (GCC) 4.2.1 from Xcode 4.2.1 - 32- and 64-bit g++ i686-apple-darwin11-llvm-g++-4.2 (GCC) 4.2.1 from Xcode 4.2.1 - (duck) gfortran GNU Fortran (GCC) 4.6.2 - - Mac OS X Mountain Lion 10.8.1 cc Apple clang version 4.0 from Xcode 4.5.1 - (owl) c++ Apple clang version 4.0 from Xcode 4.5.1 - gcc i686-apple-darwin11-llvm-gcc-4.2 (GCC) 4.2.1 from Xcode 4.5.1 - g++ i686-apple-darwin11-llvm-g++-4.2 (GCC) 4.2.1 from Xcode 4.5.1 - gfortran GNU Fortran (GCC) 4.6.2 - - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90/ F90 C++ zlib SZIP - parallel F2003 parallel -Solaris2.10 32-bit n y/y n y y y -Solaris2.10 64-bit n y/n n y y y 
-Windows 7 y y/n n y y y -Windows 7 x64 y y/n n y y y -Mac OS X Snow Leopard 10.6.8 32-bit n y/y n y y n -Mac OS X Snow Leopard 10.6.8 64-bit n y/y n y y y -Mac OS X Lion 10.7.3 32-bit n y/y n y y n -Mac OS X Lion 10.7.3 64-bit n y/y n y y y -Mac OS X Mountain Lion 10.8.1 64-bit n y/n n y y n -AIX 5.3 32- and 64-bit y y/n y y y y -CentOS 5.5 Linux 2.6.18-308 i686 GNU y y/y y y y y -CentOS 5.5 Linux 2.6.18-308 i686 Intel n y/y n y y y -CentOS 5.5 Linux 2.6.18-308 i686 PGI n y/y n y y y -CentOS 5.5 Linux 2.6.18 x86_64 GNU y y/y y y y y -CentOS 5.5 Linux 2.6.18 x86_64 Intel n y/y n y y y -CentOS 5.5 Linux 2.6.18 x86_64 PGI n y/y n y y y -Linux 2.6.32-220.7.1.el6.ppc64 n y/n n y y y - - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -Solaris2.10 32-bit y y y y -Solaris2.10 64-bit n n n n -Windows 7 y y y y -Windows 7 x64 y y y y -Mac OS X Snow Leopard 10.6.8 32-bit y n y n -Mac OS X Snow Leopard 10.6.8 64-bit y n y n -Mac OS X Lion 10.7.3 32-bit y n y y -Mac OS X Lion 10.7.3 64-bit y n y y -Mac OS X Mountain Lion 10.8.1 64-bit y n y y -AIX 5.3 32- and 64-bit n n n y -CentOS 5.5 Linux 2.6.18-308 i686 GNU y y y y -CentOS 5.5 Linux 2.6.18-308 i686 Intel y y y n -CentOS 5.5 Linux 2.6.18-308 i686 PGI y y y n -CentOS 5.5 Linux 2.6.18 x86_64 GNU y y y y -CentOS 5.5 Linux 2.6.18 x86_64 Intel y y y n -CentOS 5.5 Linux 2.6.18 x86_64 PGI y y y n -Linux 2.6.32-220.7.1.el6.ppc64 y y y n - -Compiler versions for each platform are listed in the preceding -"Supported Platforms" table. - - -More Tested Platforms -===================== -The following platforms are not supported but have been tested for this release. 
- - FreeBSD 8.2-STABLE i386 gcc 4.2.1 [FreeBSD] 20070719 - (loyalty) gcc 4.6.1 20110422 - g++ 4.6.1 20110422 - gfortran 4.6.1 20110422 - - FreeBSD 8.2-STABLE amd64 gcc 4.2.1 [FreeBSD] 20070719 - (freedom) gcc 4.6.1 20110422 - g++ 4.6.1 20110422 - gfortran 4.6.1 20110422 - - Debian6.0.3 2.6.32-5-686 #1 SMP i686 GNU/Linux - gcc (Debian 4.4.5-8) 4.4.5 - GNU Fortran (Debian 4.4.5-8) 4.4.5 - (cmake and autotools) - - Debian6.0.3 2.6.32-5-amd64 #1 SMP x86_64 GNU/Linux - gcc (Debian 4.4.5-8) 4.4.5 - GNU Fortran (Debian 4.4.5-8) 4.4.5 - (cmake and autotools) - - Fedora17 3.5.2-1.fc17.i6866 #1 SMP i686 i686 i386 GNU/Linux - gcc (GCC) 4.7.0 20120507 (Red Hat 4.7.0-5) - GNU Fortran (GCC) 4.7.0 20120507 (Red Hat 4.7.0-5) - (cmake and autotools) - - Fedora17 3.5.2-1.fc17.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc (GCC) 4.7.0 20120507 (Red Hat 4.7.0-5) - GNU Fortran (GCC) 4.7.0 20120507 (Red Hat 4.7.0-5) - (cmake and autotools) - - SUSE 12.2 3.4.6-2.10-desktop #1 SMP PREEMPT i686 i686 i386 GNU/Linux - gcc (SUSE Linux) 4.7.1 - GNU Fortran (SUSE Linux) 4.7.1 - (cmake and autotools) - - SUSE 12.2 3.4.6-2.10-desktop #1 SMP PREEMPT x86_64 x86_64 x86_64 GNU/Linux - gcc (SUSE Linux) 4.7.1 - GNU Fortran (SUSE Linux) 4.7.1 - (cmake and autotools) - - Ubuntu 12.04 3.2.0-29-generic #46-Ubuntu SMP i686 GNU/Linux - gcc (Ubuntu/Linaro 4.6.3-1ubuntu5) 4.6.3 - GNU Fortran (Ubuntu/Linaro 4.6.3-1ubuntu5) 4.6.3 - (cmake and autotools) - - Ubuntu 12.04 3.2.0-29-generic #46-Ubuntu SMP x86_64 GNU/Linux - gcc (Ubuntu/Linaro 4.6.3-1ubuntu5) 4.6.3 - GNU Fortran (Ubuntu/Linaro 4.6.3-1ubuntu5) 4.6.3 - (cmake and autotools) - (Use optimization level -O1) - - Cray Linux Environment (CLE) PrgEnv-pgi/4.0.46 - hopper.nersc.gov pgcc 12.5-0 64-bit target on x86-64 Linux -tp shanghai - pgf90 12.5-0 64-bit target on x86-64 Linux -tp shanghai - pgCC 12.5-0 64-bit target on x86-64 Linux -tp shanghai - - -Known Problems -============== -* The C++ and FORTRAN bindings are not currently working on FreeBSD with 
the - native release 8.2 compilers (4.2.1), but are working with gcc 4.6 from the - ports (and probably gcc releases after that). - (QAK - 2012/10/19) - -* The following h5dump test case fails in BG/P machines (and potentially other - machines that use a command script to launch executables): - - h5dump --no-compact-subset -d "AHFINDERDIRECT::ah_centroid_t[0] it=0 tl=0" - tno-subset.h5 - - This is due to the embedded spaces in the dataset name being interpreted - by the command script launcher as meta-characters, thus passing three - arguments to h5dump's -d flag. The command passes if run by hand, just - not via the test script. - (AKC - 2012/05/03) - -* On hopper, the build failed when RUNSERIAL and RUNPARALLEL are set - to aprun -np X, because the H5lib_settings.c file was not generated - properly. Not setting those environment variables works, because - configure was able to automatically detect that it's a Cray system - and used the proper launch commands when necessary. - (MSC - 2012/04/18) - -* The data conversion test dt_arith.c fails in "long double" to integer - conversion on Ubuntu 11.10 (3.0.0.13 kernel) with GCC 4.6.1 if the library - is built with optimization -O3 or -O2. The older GCC (4.5) or newer kernel - (3.2.2 on Fedora) doesn't have the problem. Users should lower the - optimization level (-O1 or -O0) by defining CFLAGS in the command line of - "configure" like: - - CFLAGS=-O1 ./configure - - This will overwrite the library's default optimization level. - (SLU - 2012/02/07 - HDFFV-7829) - -* The STDIO VFD does not work on some architectures, possibly due to 32/64 - bit or large file issues. The basic STDIO VFD test is known to fail on - 64-bit SunOS 5.10 on SPARC when built with -m64 and 32-bit OS X/Darwin - 10.7.0. The STDIO VFD test has been disabled while we investigate and - a fix should appear in a future release. 
- (DER - 2011/10/14 - HDFFV-8235) - -* h5diff can report inconsistent results when comparing datasets of enum type - that contain invalid values. This is due to how enum types are handled in - the library and will be addressed in a future release. - (DER - 2011/10/14 - HDFFV-7527) - -* The links test can fail under the stdio VFD due to some issues with external - links. This will be investigated and fixed in a future release. - (DER - 2011/10/14 - HDFFV-7768) - -* After the shared library support was fixed for some bugs, it was discovered - that "make prefix=XXX install" no longer works for shared libraries. It - still works correctly for static libraries. Therefore, if you want to - install the HDF5 shared libraries in a location such as /usr/local/hdf5, - you need to specify the location via the --prefix option during configure - time. E.g, ./configure --prefix=/usr/local/hdf5 ... - (AKC - 2011/05/07 - HDFFV-7583) - -* The parallel test, t_shapesame, in testpar/, may run for a long time and may - be terminated by the alarm signal. If that happens, one can increase the - alarm seconds (default is 1200 seconds = 20 minutes) by setting the - environment variable, $HDF5_ALARM_SECONDS, to a larger value such as 3600 - (60 minutes). Note that the t_shapesame test may fail in some systems - (see the "While working on the 1.8.6 release..." problem below). If - it does, it will waste more time if $HDF5_ALARM_SECONDS is set - to a larger value. - (AKC - 2011/05/07) - -* The C++ and FORTRAN bindings are not currently working on FreeBSD. - (QAK - 2011/04/26) - -* Shared Fortran libraries are not quite working on AIX. While they are - generated when --enable-shared is specified, the fortran and hl/fortran - tests fail. We are looking into the issue. HL and C++ shared libraries - should now be working as intended, however. - (MAM - 2011/04/20) - -* The --with-mpe configure option does not work with Mpich2. 
- (AKC - 2011/03/10) - -* While working on the 1.8.6 release of HDF5, a bug was discovered that can - occur when reading from a dataset in parallel shortly after it has been - written to collectively. The issue was exposed by a new test in the parallel - HDF5 test suite, but had existed before that. We believe the problem lies with - certain MPI implementations and/or file systems. - - We have provided a pure MPI test program, as well as a standalone HDF5 - program, that can be used to determine if this is an issue on your system. - They should be run across multiple nodes with a varying number of processes. - These programs can be found at: - http://www.hdfgroup.org/ftp/HDF5/examples/known_problems/ - (NAF - 2011/01/19) - -* The library's test dt_arith.c showed a compiler's rounding problem on - Cygwin when converting from unsigned long long to long double. The - library's own conversion works fine. We defined a macro for Cygwin to - skip this test until we can solve the problem. - (SLU - 2010/05/05 - HDFFV-1264) - -* All the VFL drivers aren't backward compatible. In H5FDpublic.h, the - structure H5FD_class_t changed in 1.8. There is new parameter added to - get_eoa and set_eoa callback functions. A new callback function - get_type_map was added in. The public function H5FDrealloc was taken - out in 1.8. The problem only happens when users define their own driver - for 1.6 and try to plug in 1.8 library. Because there's only one user - complaining about it, we (Elena, Quincey, and I) decided to leave it as - it is (see bug report #1279). Quincey will make a plan for 1.10. - (SLU - 2010/02/02) - -* The --enable-static-exec configure flag will only statically link libraries - if the static version of that library is present. 
If only the shared version - of a library exists (i.e., most system libraries on Solaris, AIX, and Mac, - for example, only have shared versions), the flag should still result in a - successful compilation, but note that the installed executables will not be - fully static. Thus, the only guarantee on these systems is that the - executable is statically linked with just the HDF5 library. - (MAM - 2009/11/04) - -* Parallel tests failed with 16 processes with data inconsistency at testphdf5 - / dataset_readAll. Parallel tests also failed with 32 and 64 processes with - collective abort of all ranks at t_posix_compliant / allwrite_allread_blocks - with MPI IO. - (CMC - 2009/04/28) - -* On an Intel 64 Linux cluster (RH 4, Linux 2.6.9) with Intel 10.0 compilers, - use -mp -O1 compilation flags to build the libraries. A higher level of - optimization causes failures in several HDF5 library tests. - -* A dataset created or rewritten with a v1.6.3 library or after cannot be read - with the v1.6.2 library or before when the Fletcher32 EDC filter is enabled. - There was a bug in the calculation of the Fletcher32 checksum in the - library before v1.6.3; the checksum value was not consistent between big- - endian and little-endian systems. This bug was fixed in Release 1.6.3. - However, after fixing the bug, the checksum value was no longer the same as - before on little-endian system. Library releases after 1.6.4 can still read - datasets created or rewritten with an HDF5 library of v1.6.2 or before. - (SLU - 2005/06/30) - - -%%%%1.8.10%%%% - - -HDF5 version 1.8.10 released on 2012-10-26 -================================================================================ - -INTRODUCTION -============ - -This document describes the differences between HDF5-1.8.9 and -HDF5 1.8.10, and contains information on the platforms tested and -known problems in HDF5-1.8.10. 
-For more details, see the files HISTORY-1_0-1_8_0_rc3.txt -and HISTORY-1_8.txt in the release_docs/ directory of the HDF5 source. - -Links to the HDF5 1.8.10 source code, documentation, and additional materials -can be found on the HDF5 web page at: - - http://www.hdfgroup.org/products/hdf5/ - -The HDF5 1.8.10 release can be obtained from: - - http://www.hdfgroup.org/HDF5/release/obtain5.html - -User documentation for 1.8.10 can be accessed directly at this location: - - http://www.hdfgroup.org/HDF5/doc/ - -New features in the HDF5-1.8.x release series, including brief general -descriptions of some new and modified APIs, are described in the "What's New -in 1.8.0?" document: - - http://www.hdfgroup.org/HDF5/doc/ADGuide/WhatsNew180.html - -All new and modified APIs are listed in detail in the "HDF5 Software Changes -from Release to Release" document, in the section "Release 1.8.10 (current -release) versus Release 1.8.9": - - http://www.hdfgroup.org/HDF5/doc/ADGuide/Changes.html - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS -======== - -- New Features -- Support for New Platforms, Languages, and Compilers -- Bug Fixes since HDF5-1.8.9 -- Supported Platforms -- Supported Configuration Features Summary -- More Tested Platforms -- Known Problems - - -New Features -============ - - Configuration - ------------- - - None - - Library - ------- - - Updated to latest autotools and changed all hard *.sh scripts to - configure managed *.sh.in files. Removed overloading of autotools - TESTS variable by examples and tests. Renamed configure.in to - configure.ac. (ADB - 2012/08/23 - HDFFV-8129) - - The data sieve buffer size was set for all the datasets in the file. It - could waste memory if any dataset size is smaller than the sieve buffer - size. Now the library picks the smaller one between the dataset size - and the sieve buffer size from the file access property. See Issue 7934. 
- (SLU - 2012/4/11) - - Parallel Library - ---------------- - - Added the H5Pget_mpio_no_collective_cause() function that retrieves - reasons why the collective I/O was broken during read/write IO access. - (JKM - 2012/08/30 HDFFV-8143) - - - Added H5Pget_mpio_actual_io_mode_f (MSB - 2012/09/27) - - Tools - ----- - - h5import: Changed to allow the use of h5dump output as input files to - h5import. h5dump must include the "-p" option to print the properties; - configuration file is captured output of h5dump. The restrictions are - that only one dataset with a simple datatype (integer, floating-point, - or string) can be processed. Integers and floating-point imports from - h5dump must use the "binary" option for the data file. The string version - uses the h5dump "-y --width=1" options to disable the indexing printouts, - print single columns, and obviously NOT use the "binary" option. - (ADB - 2012/07/19 HDFFV-721) - - High-Level APIs - --------------- - - None - - Fortran API - ----------- - - Fixed a typo in return value of the nh5dread_f_c function (was 1 - instead of 0 on success); fixed the return value to make it consistent - with other Fortran functions; cleaned debug statements from the code. - (EIP - 2012/06/23) - - C++ API - ------- - - None - - -Support for New Platforms, Languages, and Compilers -=================================================== - - None - -Bug Fixes since HDF5-1.8.9 -========================== - - Configuration - ------------- - - Fixed configure --enable-production to not use -O optimization for Lion - and Mountain Lion systems when gcc (i686-apple-darwin11-llvm-gcc-4.2 - (GCC) 4.2.1) is used. Somehow the -O optimization will cause some of - the hard conversion code in test/dt_arith.c to fail. HDFFV-8017. - (AKC - 2012/10/10) - - Fixed AIX Fortran compiler flags to use appropriate settings for - debugging, profiling, and optimization situations. HDFFV-8069. 
- (AKC 2012/09/27) - - Library - ------- - - Fixed a memory leak exposed when inserting/removing a property - from a property list several times. HDFFV-8022. (MSC 2012/05/18) - - The file_image test will fail in the "initial file image and callbacks in - the core VFD" sub-test if the source directory is read-only as the test - fails to create its test files in the build directory. This has been - fixed. HDFFV-8009 (AKC - 2012/07/06) - - - Parallel Library - ---------------- - - The MPI-POSIX VFD was updated to include the POSIX and Windows - correctness features that had already been added to the other VFDs. - HDFFV-8058/7845. (DER 2012/09/17) - - Performance - ------------- - - Removed program perform/benchpar from the enable-build-all list. The - program will be retired or moved to another location. HDFFV-8156 - (AKC 2012/10/01) - - Retired program perform/mpi-perf. Its purpose has been incorporated - into h5perf. (AKC 2012/09/21) - - Tools - ----- - - h5repack: "h5repack -f NONE file1.h5 out.h5" command failed if - source file contains chunked dataset and a chunk dim is bigger than - the dataset dim. Another issue is that the command changed max dims - if chunk dim is smaller than the dataset dim. These issues occurred - when dataset size is smaller than 64k (compact size limit). Fixed both. - HDFFV-8012 (JKM 2012/09/24) - - h5diff: Fixed the counter in verbose mode (-v, -r) so that it will no - longer add together the differences between datasets and the differences - between attributes of those datasets. This change makes the output of - verbose mode consistent for datasets, groups, and committed datatypes. - HDFFV-5919 (JKM 2012/09/10) - - h5diff: Fixed the incorrect result when comparing attribute data - values and the data type has the same class but different sizes. - HDFFV-7942 (JKM 2012/08/15) - - h5dump: Replaced single element fwrite with block writes. 
- HDFFV-1208 (ADB 2012/08/13) - - h5diff: Fixed test failure for "make check" due to failure of - copying test files when performed in HDF5 source tree. Also applied - to other tools. HDFFV-8107 (JKM 2012/08/01) - - ph5diff: Fixed intermittent hang issue on a certain operation in - parallel mode. It was detected by daily test for comparing - non-comparable objects, but it could have occurred in other - operations depending on machine condition. HDFFV-8003 (JKM 2012/08/01) - - h5diff: Fixed the function COPY_TESTFILES_TO_TESTDIR() of testh5diff.sh - to better report when there is an error in the file copying. - HDFFV-8105 (AKC 2012/07/22) - - h5dump: Fixed the sort by name display to maintain correct parent/child - relationships between ascending/descending order. - HDFFV-8095 (ADB 2012/07/12) - - h5dump: Fixed the display by creation order when using option -n - (print contents). - HDFFV-5942 (ADB 2012/07/09) - - h5dump: Changed to allow H5T_CSET_UTF8 to be displayed in h5dump output. - Used technique similar to what was done in h5ls (matches library - options). - HDFFV-7999 (ADB 2012/05/23) - - h5diff: Fixed the tool so that it will not check and display the status - of dangling links without setting the --follow-symlinks option. This - also improved performance when comparing lots of external links without - the --follow-symlinks option. - HDFFV-7998 (JKM 2012/04/26) - - F90 API - ------- - - - Fixed a typo in return value of the nh5dread_f_c function (was 1 - instead of 0 on success); fixed the return value to make it consistent - with other Fortran functions; cleaned debug statements from the code. - (EIP - 2012/06/23) - - - Fixed a problem writing/reading control characters to a dataset; writing - a string containing alerts, backspace, carriage_return, form_feed, - horizontal_tab, vertical_tab, or new_line is now tested and working. 
- (MSB - 2012/09/01) - - - Corrected the integer type of H5S_UNLIMITED_F to HSIZE_T (MSB - 2012/09/01) - - - Corrected the number of continuation lines in the src files - to be less than 32 lines for F95 compliance. (MSB - 2012/10/01) - - C++ API - ------ - - None - - High-Level APIs: - ------ - - - Fixed problem with H5TBdelete_record destroying all data following the - deletion of a row. (MSB- 2012/7/26) - - - Fixed H5LTget_attribute_string not closing an object identifier when an - error occurs. (MSB- 2012/7/21) - - - Corrected the return type of H5TBAget_fill from herr_t to htri_t to - reflect that a return value of 1 indicates that a fill value is - present, 0 indicates a fill value is not present, and <0 indicates an - error. - - Fortran High-Level APIs: - ------ - - None - -Supported Platforms -=================== - AIX 5.3 xlc 10.1.0.5 - (NASA G-ADA) xlC 10.1.0.5 - xlf90 12.1.0.6 - - Linux 2.6.18-308.13.1.el5PAE GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP i686 i686 i386 compilers for 32-bit applications; - (jam) Version 4.1.2 20080704 (Red Hat 4.1.2-52) - Version 4.6.3 - PGI C, Fortran, C++ Compilers for 32-bit - applications; - Version 11.9-0 - Intel(R) C, C++, Fortran Compiler for 32-bit - applications; - Version 12.1 - MPICH mpich2-1.4.1p1 compiled with - gcc 4.1.2 and gfortran 4.1.2 - - Linux 2.6.18-308.16.1.el5 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers for 32-bit applications; - (koala) Version 4.1.2 20080704 (Red Hat 4.1.2-52) - Version 4.6.3 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 11.9-0 - Version 12.5-0 - Intel(R) C, C++, Fortran Compilers for - applications running on Intel(R) 64; - Version 12.1 (Build 20110811) - Version 12.1 (Build 20120212) - MPICH mpich2-1.4.1p1 compiled with - gcc 4.1.2 and gfortran 4.1.2 - - Linux 2.6.32-220.7.1.el6.ppc64 gcc (GCC) 4.4.6 20110731 (Red Hat 4.4.6-3) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.4.6 20110731 - (ostrich) GNU Fortran (GCC) 4.4.6 20110731 
(Red Hat 4.4.6-3) - - Linux 2.6.32-220.23.1.1chaos Intel C, C++, Fortran Compilers - ch5.x86_64 GNU/Linux Version 12.1.5.339 - (LLNL Aztec) - - IBM Blue Gene/P XL C for Blue Gene/P, bgxlc V9.0 - (LLNL uDawn) XL C++ for Blue Gene/P, bgxlC V9.0 - XL Fortran for Blue Gene/P, bgxlf90 V11.1 - - SunOS 5.10 32- and 64-bit Sun C 5.9 Sun OS_sparc Patch 124867-16 - (linew) Sun Fortran 95 8.3 Sun OS_sparc Patch 127000-13 - Sun C++ 5.9 Sun OS_sparc Patch 124863-26 - Sun C 5.11 SunOS_sparc - Sun Fortran 95 8.5 SunOS_sparc - Sun C++ 5.11 SunOS_sparc - - Windows XP Visual Studio 2008 w/ Intel Fortran 10.1 (project files) - - Windows XP x64 Visual Studio 2008 w/ Intel Fortran 10.1 (project files) - - Windows 7 Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - Visual Studio 2010 w/ Intel Fortran 12 (cmake) - Cygwin(CYGWIN_NT-6.1 1.7.15(0.260/5/3) gcc(4.5.3) compiler and gfortran) - (cmake and autotools) - - Windows 7 x64 Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - Visual Studio 2010 w/ Intel Fortran 12 (cmake) - Cygwin(CYGWIN_NT-6.1 1.7.15(0.260/5/3) gcc(4.5.3) compiler and gfortran) - (cmake and autotools) - - Mac OS X Snow Leopard 10.6.8 gcc i686-apple-darwin11-llvm-gcc-4.2 (GCC) 4.2.1 from Xcode 3.2.6 - Darwin Kernel Version 10.8.0 g++ i686-apple-darwin11-llvm-g++-4.2 (GCC) 4.2.1 from Xcode 3.2.6 - (fred) gfortran GNU Fortran (GCC) 4.6.2 - Intel C (icc), Fortran (ifort), C++ (icpc) - 12.1.0.038 Build 20110811 - - Mac OS X Snow Leopard 10.6.8 gcc i686-apple-darwin11-llvm-gcc-4.2 (GCC) 4.2.1 from Xcode 3.2.6 - Darwin Kernel Version 10.8.0 g++ i686-apple-darwin11-llvm-g++-4.2 (GCC) 4.2.1 from Xcode 3.2.6 - Intel 32-bit gfortran GNU Fortran (GCC) 4.6.1 - (tejeda) Intel C (icc), Fortran (ifort), C++ (icpc) - 12.1.0.038 Build 20110811 - - Mac OS X Lion 10.7.3 gcc i686-apple-darwin11-llvm-gcc-4.2 (GCC) 4.2.1 from Xcode 4.2.1 - 32- and 64-bit g++ i686-apple-darwin11-llvm-g++-4.2 (GCC) 4.2.1 from Xcode 4.2.1 - (duck) gfortran GNU Fortran (GCC) 4.6.2 - - Mac OS X Mountain Lion 
10.8.1 cc Apple clang version 4.0 from Xcode 4.5.1 - (owl) c++ Apple clang version 4.0 from Xcode 4.5.1 - gcc i686-apple-darwin11-llvm-gcc-4.2 (GCC) 4.2.1 from Xcode 4.5.1 - g++ i686-apple-darwin11-llvm-g++-4.2 (GCC) 4.2.1 from Xcode 4.5.1 - gfortran GNU Fortran (GCC) 4.6.2 - - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90/ F90 C++ zlib SZIP - parallel F2003 parallel -Solaris2.10 32-bit n y/y n y y y -Solaris2.10 64-bit n y/n n y y y -Windows 7 y y/n n y y y -Windows 7 x64 y y/n n y y y -Mac OS X Snow Leopard 10.6.8 32-bit n y/y n y y n -Mac OS X Snow Leopard 10.6.8 64-bit n y/y n y y y -Mac OS X Lion 10.7.3 32-bit n y/y n y y n -Mac OS X Lion 10.7.3 64-bit n y/y n y y y -Mac OS X Mountain Lion 10.8.1 64-bit n y/n n y y n -AIX 5.3 32- and 64-bit y y/n y y y y -CentOS 5.5 Linux 2.6.18-308 i686 GNU y y/y y y y y -CentOS 5.5 Linux 2.6.18-308 i686 Intel n y/y n y y y -CentOS 5.5 Linux 2.6.18-308 i686 PGI n y/y n y y y -CentOS 5.5 Linux 2.6.18 x86_64 GNU y y/y y y y y -CentOS 5.5 Linux 2.6.18 x86_64 Intel n y/y n y y y -CentOS 5.5 Linux 2.6.18 x86_64 PGI n y/y n y y y -Linux 2.6.32-220.7.1.el6.ppc64 n y/n n y y y - - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -Solaris2.10 32-bit y y y y -Solaris2.10 64-bit n n n n -Windows 7 y y y y -Windows 7 x64 y y y y -Mac OS X Snow Leopard 10.6.8 32-bit y n y n -Mac OS X Snow Leopard 10.6.8 64-bit y n y n -Mac OS X Lion 10.7.3 32-bit y n y y -Mac OS X Lion 10.7.3 64-bit y n y y -Mac OS X Mountain Lion 10.8.1 64-bit y n y y -AIX 5.3 32- and 64-bit n n n y -CentOS 5.5 Linux 2.6.18-308 i686 GNU y y y y -CentOS 5.5 Linux 2.6.18-308 i686 Intel y y y n -CentOS 5.5 Linux 2.6.18-308 i686 PGI y y y n -CentOS 5.5 
Linux 2.6.18 x86_64 GNU y y y y -CentOS 5.5 Linux 2.6.18 x86_64 Intel y y y n -CentOS 5.5 Linux 2.6.18 x86_64 PGI y y y n -Linux 2.6.32-220.7.1.el6.ppc64 y y y n - -Compiler versions for each platform are listed in the preceding -"Supported Platforms" table. - - -More Tested Platforms -===================== -The following platforms are not supported but have been tested for this release. - - FreeBSD 8.2-STABLE i386 gcc 4.2.1 [FreeBSD] 20070719 - (loyalty) gcc 4.6.1 20110422 - g++ 4.6.1 20110422 - gfortran 4.6.1 20110422 - - FreeBSD 8.2-STABLE amd64 gcc 4.2.1 [FreeBSD] 20070719 - (freedom) gcc 4.6.1 20110422 - g++ 4.6.1 20110422 - gfortran 4.6.1 20110422 - - Debian6.0.3 2.6.32-5-686 #1 SMP i686 GNU/Linux - gcc (Debian 4.4.5-8) 4.4.5 - GNU Fortran (Debian 4.4.5-8) 4.4.5 - (cmake and autotools) - - Debian6.0.3 2.6.32-5-amd64 #1 SMP x86_64 GNU/Linux - gcc (Debian 4.4.5-8) 4.4.5 - GNU Fortran (Debian 4.4.5-8) 4.4.5 - (cmake and autotools) - - Fedora17 3.5.2-1.fc17.i6866 #1 SMP i686 i686 i386 GNU/Linux - gcc (GCC) 4.7.0 20120507 (Red Hat 4.7.0-5) - GNU Fortran (GCC) 4.7.0 20120507 (Red Hat 4.7.0-5) - (cmake and autotools) - - Fedora17 3.5.2-1.fc17.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc (GCC) 4.7.0 20120507 (Red Hat 4.7.0-5) - GNU Fortran (GCC) 4.7.0 20120507 (Red Hat 4.7.0-5) - (cmake and autotools) - - SUSE 12.2 3.4.6-2.10-desktop #1 SMP PREEMPT i686 i686 i386 GNU/Linux - gcc (SUSE Linux) 4.7.1 - GNU Fortran (SUSE Linux) 4.7.1 - (cmake and autotools) - - SUSE 12.2 3.4.6-2.10-desktop #1 SMP PREEMPT x86_64 x86_64 x86_64 GNU/Linux - gcc (SUSE Linux) 4.7.1 - GNU Fortran (SUSE Linux) 4.7.1 - (cmake and autotools) - - Ubuntu 12.04 3.2.0-29-generic #46-Ubuntu SMP i686 GNU/Linux - gcc (Ubuntu/Linaro 4.6.3-1ubuntu5) 4.6.3 - GNU Fortran (Ubuntu/Linaro 4.6.3-1ubuntu5) 4.6.3 - (cmake and autotools) - - Ubuntu 12.04 3.2.0-29-generic #46-Ubuntu SMP x86_64 GNU/Linux - gcc (Ubuntu/Linaro 4.6.3-1ubuntu5) 4.6.3 - GNU Fortran (Ubuntu/Linaro 4.6.3-1ubuntu5) 4.6.3 - (cmake and 
autotools) - (Use optimization level -O1) - - Cray Linux Environment (CLE) PrgEnv-pgi/4.0.46 - hopper.nersc.gov pgcc 12.5-0 64-bit target on x86-64 Linux -tp shanghai - pgf90 12.5-0 64-bit target on x86-64 Linux -tp shanghai - pgCC 12.5-0 64-bit target on x86-64 Linux -tp shanghai - - -Known Problems -============== -* The following h5stat test case fails in BG/P machines (and potentially other - machines that display extra output if an MPI task returns with a non-zero - code.) - Testing h5stat notexist.h5 - - The test actually runs and passes as expected. It is the extra output from - the MPI process that causes the test script to fail. This will be fixed - in the next release. (AKC - 2012/10/25 - HDFFV-8233) - -* The C++ and FORTRAN bindings are not currently working on FreeBSD with the - native release 8.2 compilers (4.2.1), but are working with gcc 4.6 from the - ports (and probably gcc releases after that). - (QAK - 2012/10/19) - -* The following h5dump test case fails in BG/P machines (and potentially other - machines that use a command script to launch executables): - - h5dump --no-compact-subset -d "AHFINDERDIRECT::ah_centroid_t[0] it=0 tl=0" - tno-subset.h5 - - This is due to the embedded spaces in the dataset name being interpreted - by the command script launcher as meta-characters, thus passing three - arguments to h5dump's -d flag. The command passes if run by hand, just - not via the test script. - (AKC - 2012/05/03) - -* On hopper, the build failed when RUNSERIAL and RUNPARALLEL are set - to aprun -np X, because the H5lib_settings.c file was not generated - properly. Not setting those environment variables works, because - configure was able to automatically detect that it's a Cray system - and used the proper launch commands when necessary. 
- (MSC - 2012/04/18) - -* The data conversion test dt_arith.c fails in "long double" to integer - conversion on Ubuntu 11.10 (3.0.0.13 kernel) with GCC 4.6.1 if the library - is built with optimization -O3 or -O2. The older GCC (4.5) or newer kernel - (3.2.2 on Fedora) doesn't have the problem. Users should lower the - optimization level (-O1 or -O0) by defining CFLAGS in the command line of - "configure" like: - - CFLAGS=-O1 ./configure - - This will overwrite the library's default optimization level. - (SLU - 2012/02/07 - HDFFV-7829) - -* The STDIO VFD does not work on some architectures, possibly due to 32/64 - bit or large file issues. The basic STDIO VFD test is known to fail on - 64-bit SunOS 5.10 on SPARC when built with -m64 and 32-bit OS X/Darwin - 10.7.0. The STDIO VFD test has been disabled while we investigate and - a fix should appear in a future release. - (DER - 2011/10/14 - HDFFV-8235) - -* h5diff can report inconsistent results when comparing datasets of enum type - that contain invalid values. This is due to how enum types are handled in - the library and will be addressed in a future release. - (DER - 2011/10/14 - HDFFV-7527) - -* The links test can fail under the stdio VFD due to some issues with external - links. This will be investigated and fixed in a future release. - (DER - 2011/10/14 - HDFFV-7768) - -* After the shared library support was fixed for some bugs, it was discovered - that "make prefix=XXX install" no longer works for shared libraries. It - still works correctly for static libraries. Therefore, if you want to - install the HDF5 shared libraries in a location such as /usr/local/hdf5, - you need to specify the location via the --prefix option during configure - time. E.g, ./configure --prefix=/usr/local/hdf5 ... - (AKC - 2011/05/07 - HDFFV-7583) - -* The parallel test, t_shapesame, in testpar/, may run for a long time and may - be terminated by the alarm signal. 
If that happens, one can increase the - alarm seconds (default is 1200 seconds = 20 minutes) by setting the - environment variable, $HDF5_ALARM_SECONDS, to a larger value such as 3600 - (60 minutes). Note that the t_shapesame test may fail in some systems - (see the "While working on the 1.8.6 release..." problem below). If - it does, it will waste more time if $HDF5_ALARM_SECONDS is set - to a larger value. - (AKC - 2011/05/07) - -* The C++ and FORTRAN bindings are not currently working on FreeBSD. - (QAK - 2011/04/26) - -* Shared Fortran libraries are not quite working on AIX. While they are - generated when --enable-shared is specified, the fortran and hl/fortran - tests fail. We are looking into the issue. HL and C++ shared libraries - should now be working as intended, however. - (MAM - 2011/04/20) - -* The --with-mpe configure option does not work with Mpich2. - (AKC - 2011/03/10) - -* While working on the 1.8.6 release of HDF5, a bug was discovered that can - occur when reading from a dataset in parallel shortly after it has been - written to collectively. The issue was exposed by a new test in the parallel - HDF5 test suite, but had existed before that. We believe the problem lies with - certain MPI implementations and/or file systems. - - We have provided a pure MPI test program, as well as a standalone HDF5 - program, that can be used to determine if this is an issue on your system. - They should be run across multiple nodes with a varying number of processes. - These programs can be found at: - http://www.hdfgroup.org/ftp/HDF5/examples/known_problems/ - (NAF - 2011/01/19) - -* The library's test dt_arith.c showed a compiler's rounding problem on - Cygwin when converting from unsigned long long to long double. The - library's own conversion works fine. We defined a macro for Cygwin to - skip this test until we can solve the problem. - (SLU - 2010/05/05 - HDFFV-1264) - -* All the VFL drivers aren't backward compatible. 
In H5FDpublic.h, the - structure H5FD_class_t changed in 1.8. There is a new parameter added to - get_eoa and set_eoa callback functions. A new callback function - get_type_map was added. The public function H5FDrealloc was taken - out in 1.8. The problem only happens when users define their own driver - for 1.6 and try to plug in 1.8 library. Because there's only one user - complaining about it, we (Elena, Quincey, and I) decided to leave it as - it is (see bug report #1279). Quincey will make a plan for 1.10. - (SLU - 2010/02/02) - -* The --enable-static-exec configure flag will only statically link libraries - if the static version of that library is present. If only the shared version - of a library exists (i.e., most system libraries on Solaris, AIX, and Mac, - for example, only have shared versions), the flag should still result in a - successful compilation, but note that the installed executables will not be - fully static. Thus, the only guarantee on these systems is that the - executable is statically linked with just the HDF5 library. - (MAM - 2009/11/04) - -* Parallel tests failed with 16 processes with data inconsistency at testphdf5 - / dataset_readAll. Parallel tests also failed with 32 and 64 processes with - collective abort of all ranks at t_posix_compliant / allwrite_allread_blocks - with MPI IO. - (CMC - 2009/04/28) - -* On an Intel 64 Linux cluster (RH 4, Linux 2.6.9) with Intel 10.0 compilers, - use -mp -O1 compilation flags to build the libraries. A higher level of - optimization causes failures in several HDF5 library tests. - -* A dataset created or rewritten with a v1.6.3 library or after cannot be read - with the v1.6.2 library or before when the Fletcher32 EDC filter is enabled. - There was a bug in the calculation of the Fletcher32 checksum in the - library before v1.6.3; the checksum value was not consistent between big- - endian and little-endian systems. This bug was fixed in Release 1.6.3. 
- However, after fixing the bug, the checksum value was no longer the same as - before on little-endian system. Library releases after 1.6.4 can still read - datasets created or rewritten with an HDF5 library of v1.6.2 or before. - (SLU - 2005/06/30) - - -%%%%1.8.9%%%% - - -HDF5 version 1.8.9 released on 2012-05-09 -================================================================================ - -INTRODUCTION -============ - -This document describes the differences between HDF5-1.8.8 and -HDF5 1.8.9. It also contains information on the platforms tested and -known problems in HDF5-1.8.9. - -For more details, see the files HISTORY-1_0-1_8_0_rc3.txt -and HISTORY-1_8.txt in the release_docs/ directory of the HDF5 source. - -Links to the HDF5 1.8.9 source code, documentation, and additional materials -can be found on the HDF5 web page at: - - http://www.hdfgroup.org/products/hdf5/ - -The HDF5 1.8.9 release can be obtained from: - - http://www.hdfgroup.org/HDF5/release/obtain5.html - -User documentation for 1.8.9 can be accessed directly at this location: - - http://www.hdfgroup.org/HDF5/doc/ - -New features in the HDF5-1.8.x release series, including brief general -descriptions of some new and modified APIs, are described in the "What's New -in 1.8.0?" 
document: - - http://www.hdfgroup.org/HDF5/doc/ADGuide/WhatsNew180.html - -All new and modified APIs are listed in detail in the "HDF5 Software Changes -from Release to Release" document, in the section "Release 1.8.9 (current -release) versus Release 1.8.8": - - http://www.hdfgroup.org/HDF5/doc/ADGuide/Changes.html - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS -======== - -- New Features -- Support for New Platforms, Languages, and Compilers -- Bug Fixes since HDF5-1.8.8 -- Platforms Tested -- Supported Configuration Features Summary -- Known Problems - - -New Features -============ - - Configuration - ------------- - - None - - Library - ------- - - Added new feature to merge committed datatypes when copying objects, - using new H5O_COPY_MERGE_COMMITTED_DTYPE_FLAG, modified by new API - routines: H5Padd_merge_committed_dtype_path(), - H5Pfree_merge_committed_dtype_paths(), H5Pset_mcdt_search_cb() and - H5Pget_mcdt_search_cb(). (QAK - 2012/03/30) - - Added new feature which allows working with files in memory in the - same ways files are worked with on disk. New API routines include - H5Pset_file_image, H5Pget_file_image, H5Pset_file_image_callbacks, - H5Pget_file_image_callbacks, H5Fget_file_image, and - H5LTopen_file_image. (QAK - 2012/04/17) - - Parallel Library - ---------------- - - Corrected memory allocation error in MPI datatype construction code. - (QAK - 2012/04/23) - - Add two new routines to set/get the atomicity parameter in the - MPI library to perform atomic operations. Some file systems (for - example PVFS2) do not support atomic updates, so those routines - would not be supported. (MSC - 2012/03/27 - HDFFV-7961) - - Tools - ----- - - h5repack: Added ability to set the metadata block size of the output - file, with the '-M'/'--metadata_block_size' command line parameter. 
- (QAK - 2012/03/30) - - h5stat: Added ability to display a summary of the file space usage for a - file, with the '-S'/'--summary' command line parameter. (QAK - 2012/03/28) - - h5dump: Added capability for "-a" option to show attributes containing "/" - by using an escape character. For example, for a dataset "/dset" - containing attribute "speed(m/h)", use "h5dump -a "/dset/speed(\/h)" - to show the content of the attribute. (PC - 2012/03/12 - HDFFV-7523) - - h5dump: Added ability to apply command options across multiple files using a - wildcard in the filename. Unix example; "h5dump -H -d Dataset1 tarr*.h5". - Cross platform example; "h5dump -H -d Dataset1 tarray1.h5 tarray2.h5 tarray3.h5". - (ADB - 2012/03/12 - HDFFV-7876). - - h5dump: Added new option --no-compact-subset. This option will not - interpret the '[' character as starting the compact form of - subsetting. This is useful when the "h5dump error: unable to - open dataset "datset_name"" message is output because a dataset - name contains a '[' character. (ADB - 2012/03/05 - HDFFV-7689). - - h5repack: Improved performance for big chunked datasets (size > 128MB) - when used with the layout (-l) or compression (-f) options. - Before this change, repacking datasets with chunks with a large first - dimension would take extremely long. For example, repacking a dataset - with chunk dimensions of 1024x5x1 might take many hours to process - while changing a dataset with chunk dimensions set to 1x5x1024 - might take under an hour. After this change, processing the dataset - with chunk dimensions of 1024x5x1 takes about 15 minutes, and processing - a dataset with chunk dimensions of 1x5x1024 takes about 14 minutes. - (JKM - 2012/03/01 - HDFFV-7862) - - High-Level APIs - --------------- - - New API: H5LTpath_valid (Fortran: h5ltpath_valid_f) checks - if a path is correct, determines if a link resolves to a valid - object, and checks that the link does not dangle. 
(MSB - 2012/03/15) - - Fortran API - ----------- - - - Added for the C API the Fortran wrapper: - h5ocopy_f (MSB - 2012/03/22) - - C++ API - ------- - - None - - -Support for New Platforms, Languages, and Compilers -=================================================== - - None - -Bug Fixes since HDF5-1.8.8 -========================== - - Configuration - ------------- - - Fixed Makefile issue in which "-Wl," was not properly specified - prior to -rpath when building parallel Fortran libraries with - an Intel compiler. (MAM - 2012/03/26) - - Makefiles generated by other packages using h5cc as the compiler - no longer error when 'make' is invoked more than once in order - to 'rebuild' after changes to source. (MAM - 2012/03/26) - - Added code to display the version information of XL Fortran and C++ - in the summary of configure. (AKC - 2012/02/28 - HDFFV-7793) - - Updated all CMakeLists.txt files to indicate the minimum CMake version is - the current standard of 2.8.6 (ADB - 2011/12/05 - HDFFV-7854) - - Library - ------- - - Windows and STDIO correctness changes have been propagated from the SEC2 - and old Windows drivers to the STDIO VFD. (DER - 2012/03/30 - HDFFV-7917) - - Fixed an error that would occur when copying an object with attribute - creation order tracked and indexed. (NAF - 2012/03/28 - HDFFV-7762) - - Fixed a bug in H5Ocopy(): When copying an opened object, call the - object's flush class action to ensure that cached data is flushed so - that H5Ocopy will get the correct data. (VC - 2012/03/27 - HDFFV-7853) - - The istore test will now skip the sparse 50x50x50 test when the VFD does - not support sparse files on that platform. The most important platforms - on which this will be skipped are Windows (NTFS sparse files are not - supported) and Mac OS-X (HFS sparse files are not supported). This - fixes CTest timeout issues on Windows. 
(DER - 2012/03/27 - HDFFV-7769) - - Windows and POSIX correctness changes have been propagated from the SEC2 - VFD to the Core VFD. This mainly affects file operations on the - driver's backing store and fixes a problem on Windows where large files - could not be read. (DER - 2012/03/27 - HDFFV-7916 - HDFFV-7603) - - When an application tries to write or read many small data chunks and - runs out of memory, the library had a segmentation fault. The fix is to - return the error stack with proper information. - (SLU - 2012/03/23 - HDFFV-7785) - - H5Pset_data_transform had a segmentation fault in some cases like x*-100. - It works correctly now and handles other cases like 100-x or 2/x. - (SLU - 2012/03/15 - HDFFV-7922) - - Fixed rare corruption bugs that could occur when using the new object - header format. (NAF - 2012/03/15 - HDFFV-7879) - - Fixed an error that occurred when creating a contiguous dataset with a - zero-sized dataspace and space allocation time set to 'early'. - (QAK - 2012/03/12) - - Changed Windows thread creation to use _beginthread() instead of - CreateThread(). Threads created by the latter can be killed in - low-memory situations. (DER - 2012/02/10 - HDFFV-7780) - - Creating a dataset in a read-only file caused a segmentation fault when - the file is closed. It's fixed. The attempt to create a dataset will - fail with an error indicating the file is read-only. - (SLU - 2012/01/25 - HDFFV-7756) - - Fixed a segmentation fault that could occur when shrinking a dataset - with chunks larger than 1 MB. (NAF - 2011/11/30 - HDFFV-7833) - - Fixed a bug that could cause H5Oget_info to return the wrong address - after copying a committed (named) datatype. (NAF - 2011/11/14) - - The library allowed the conversion of strings between ASCII and UTF8 - We have corrected it to report an error under this situation. 
- (SLU - 2011/11/8 - HDFFV-7582) - - Fixed a segmentation fault when the library tried to shrink the size - of a compound datatype through H5Tset_size immediately after the - datatype was created. (SLU - 2011/11/4 - HDFFV-7618) - - Parallel Library - ---------------- - - None - - Tools - ----- - - h5unjam: Fixed a segmentation fault that occurred when h5unjam was used - with the -V (show version) option. (JKM - 2012/04/19 - HDFFV-8001) - - h5repack: Fixed a failure that occurred when repacking the chunk size - of a specified chunked dataset with unlimited max dims. - (JKM - 2012/04/11 - HDFFV-7993) - - h5diff: Fixed a failure when comparing groups. Before the fix, if an - object in a group was compared with an object in another group where - both had the same name but the object type was different, then h5diff - would fail. After the fix, h5diff detects such cases as non-comparable - and displays appropriate error messages. - (JKM - 2012/03/28 - HDFFV-7644) - - h5diff: If unique objects exist only in one file and if h5diff is set to - exclude the unique objects with the --exclude-path option, then h5diff - might miss excluding some objects. This was fixed to correctly exclude - objects. (JKM - 2012/03/20 - HDFFV-7837) - - h5diff: When two symbolic dangling links are compared with the - --follow-symlinks option, the result should be the same. This worked when - comparing two files, but didn't work when comparing two objects. - h5diff now works when comparing two objects. - (JKM - 2012/03/09 - HDFFV-7835) - - h5dump: Added the tools library error stack to properly catch error - information generated within the library. (ADB - 2012/03/12 - HDFFV-7958) - - h5dump: Changed the process where an open link used to fail. Now dangling - links no longer throw error messages. (ADB - 2012/03/12 - HDFFV-7839) - - h5dump: Refactored code to remove duplicated functions. Split XML - functions from DDL functions. Corrected indentation and formatting - errors. 
Also fixed subsetting counting overflow (HDFFV-5874). Verified - all tools call tools_init() in main. The USER_BLOCK data now correctly - displays within the SUPER_BLOCK info. NOTE: WHITESPACE IN THE OUTPUT - HAS CHANGED. (ADB - 2012/02/17 - HDFFV-7560) - - h5diff: Fixed to prevent from displaying error stack message when - comparing two dangling symbolic links with the follow-symlinks option. - (JKM - 2012/01/13 - HDFFV-7836) - - h5repack: Fixed a memory leak that occurred with the handling of - variable length strings in attributes. - (JKM - 2012/01/10 - HDFFV-7840) - - h5ls: Fixed a segmentation fault that occurred when accessing region - reference data in an attribute. (JKM - 2012/01/06 - HDFFV-7838) - - F90 API - ------- - - None - - C++ API - ------ - - None - - High-Level APIs: - ------ - - None - - Fortran High-Level APIs: - ------ - - h5ltget_attribute_string_f: The h5ltget_attribute_string_f used to return - the C NULL character in the returned character buffer. The returned - character buffer now does not return the C NULL character; the buffer - is blank-padded if needed. (MSB - 2012/03/23) - - -Platforms Tested -================ -The following platforms and compilers have been tested for this release. 
- - AIX 5.3 xlc 10.1.0.5 - (NASA G-ADA) xlC 10.1.0.5 - xlf90 12.1.0.6 - - FreeBSD 8.2-STABLE i386 gcc 4.2.1 [FreeBSD] 20070719 - (loyalty) g++ 4.2.1 [FreeBSD] 20070719 - gcc 4.6.1 20110422 - g++ 4.6.1 20110422 - gfortran 4.6.1 20110422 - - FreeBSD 8.2-STABLE amd64 gcc 4.2.1 [FreeBSD] 20070719 - (freedom) g++ 4.2.1 [FreeBSD] 20070719 - gcc 4.6.1 20110422 - g++ 4.6.1 20110422 - gfortran 4.6.1 20110422 - - Linux 2.6.18-194.3.1.el5PAE GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP i686 i686 i386 compilers for 32-bit applications; - (jam) Version 4.1.2 20080704 (Red Hat 4.1.2-52) - Version 4.5.2 - PGI C, Fortran, C++ Compilers for 32-bit - applications; - Version 11.8-0 - Version 11.9-0 - Intel(R) C, C++, Fortran Compiler for 32-bit - applications; - Version 12.0 - Version 12.1 - MPICH mpich2-1.3.1 compiled with - gcc 4.1.2 and gfortran 4.1.2 - - Linux 2.6.18-308.1.1.el5 GNU C (gcc), Fortran (gfortran), C++ (g++) - #1 SMP x86_64 GNU/Linux compilers for 32-bit applications; - (koala) Version 4.1.2 20080704 (Red Hat 4.1.2-52) - Version 4.5.2 - PGI C, Fortran, C++ for 64-bit target on - x86-64; - Version 11.9-0 (64-bit) - Version 11.8-0 (32-bit) - Intel(R) C, C++, Fortran Compilers for - applications running on Intel(R) 64; - Version 12.0 - Version 12.1 - MPICH mpich2-1.3.1 compiled with - gcc 4.1.2 and gfortran 4.1.2 - - Linux 2.6.32-220.7.1.el6.ppc64 gcc (GCC) 4.4.6 20110731 (Red Hat 4.4.6-3) - #1 SMP ppc64 GNU/Linux g++ (GCC) 4.4.6 20110731 - (ostrich) GNU Fortran (GCC) 4.4.6 20110731 (Red Hat 4.4.6-3) - - Linux 2.6.18-108chaos Intel C, C++, Fortran Compilers Version 11.1 - #1 SMP x86_64 GNU/Linux - (LLNL Aztec) - - IBM Blue Gene/P XL C for Blue Gene/P, bgxlc V9.0 - (LLNL uDawn) XL C++ for Blue Gene/P, bgxlC V9.0 - XL Fortran for Blue Gene/P, bgxlf0 V11.1 - - SunOS 5.10 32- and 64-bit Sun C 5.9 Sun OS_sparc Patch 124867-16 - (linew) Sun Fortran 95 8.3 Sun OS_sparc Patch 127000-13 - Sun C++ 5.9 Sun OS_sparc Patch 124863-26 - Sun C 5.11 SunOS_sparc - Sun Fortran 95 
8.5 SunOS_sparc - Sun C++ 5.11 SunOS_sparc - - SGI Altix UV Intel(R) C, Fortran Compilers - SGI ProPack 7 Linux Version 11.1 20100806 - 2.6.32.24-0.2.1.2230.2.PTF- SGI MPT 2.02 - default #1 SMP - (NCSA ember) - - Dell NVIDIA Cluster Intel(R) C, Fortran Compilers - Red Hat Enterprise Linux 6 Version 12.0.4 20110427 - 2.6.32-131.4.1.el6.x86_64 mvapich2 1.7rc1-intel-12.0.4 - (NCSA forge) - - Windows XP Visual Studio 2008 w/ Intel Fortran 10.1 (project files) - Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - Visual Studio 2010 w/ Intel Fortran 12 (cmake) - - Windows XP x64 Visual Studio 2008 w/ Intel Fortran 10.1 (project files) - Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - Visual Studio 2010 w/ Intel Fortran 12 (cmake) - - Windows 7 Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - Visual Studio 2010 w/ Intel Fortran 12 (cmake) - Cygwin(1.7.9 native gcc(4.5.3) compiler and gfortran) - - Windows 7 x64 Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - Visual Studio 2010 w/ Intel Fortran 12 (cmake) - Cygwin(1.7.9 native gcc(4.5.3) compiler and gfortran) - - Mac OS X Snow Leopard 10.6.8 i686-apple-darwin10-gcc-4.2.1 (GCC) 4.2.1 (gcc) - Darwin Kernel Version 10.8.0 i686-apple-darwin10-g++-4.2.1 (GCC) 4.2.1 (g++) - Intel 64-bit (Apple Inc. build 5666) (dot 3) - (fred) GNU Fortran (GCC) 4.6.1 (gfortran) - Intel C (icc), Fortran (ifort), C++ (icpc) - 12.1.0.038 Build 20110811 - - Mac OS X Snow Leopard 10.6.8 i686-apple-darwin10-gcc-4.2.1 (GCC) 4.2.1 (gcc) - Darwin Kernel Version 10.8.0 i686-apple-darwin10-g++-4.2.1 (GCC) 4.2.1 (g++) - Intel 32-bit (Apple Inc. build 5666) (dot 3) - (tejeda) GNU Fortran (GCC) 4.6.1 (gfortran) - Intel C (icc), Fortran (ifort), C++ (icpc) - 12.1.0.038 Build 20110811 - - Mac OS X Lion 10.7.3 GCC 4.2.1 gcc - 32- and 64-bit GNU Fortran (GCC) 4.6.1 gfortran - (duck) GCC 4.2.1. 
g++ - - Debian6.0.3 2.6.32-5-686 #1 SMP i686 GNU/Linux - gcc (Debian 4.4.5-8) 4.4.5 - GNU Fortran (Debian 4.4.5-8) 4.4.5 - - Debian6.0.3 2.6.32-5-amd64 #1 SMP x86_64 GNU/Linux - gcc (Debian 4.4.5-8) 4.4.5 - GNU Fortran (Debian 4.4.5-8) 4.4.5 - - Fedora16 3.2.9-2.fc16.i6866 #1 SMP i686 i686 i386 GNU/Linux - gcc (GCC) 4.6.2 20111027 (Red Hat 4.6.2-1) - GNU Fortran (GCC) 4.6.2 20111027 (Red Hat 4.6.2-1) - - Fedora16 3.2.9-2.fc16.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc (GCC) 4.6.2 20111027 (Red Hat 4.6.2-1) - GNU Fortran (GCC) 4.6.2 20111027 (Red Hat 4.6.2-1) - - SUSE 12.1 3.1.9-1.4-desktop #1 SMP PREEMPT i686 i686 i386 GNU/Linux - gcc (SUSE Linux) 4.6.2 - GNU Fortran (SUSE Linux) 4.6.2 - - SUSE 12.1 3.1.9-1.4-desktop #1 SMP PREEMPT x86_64 x86_64 x86_64 GNU/Linux - gcc (SUSE Linux) 4.6.2 - GNU Fortran (SUSE Linux) 4.6.2 - - Ubuntu 11.10 3.0.0-16-generic #29-Ubuntu SMP i686 GNU/Linux - gcc (Ubuntu/Linaro 4.6.1-9ubuntu3) 4.6.1 - GNU Fortran (Ubuntu/Linaro 4.6.4-9ubuntu3) 4.6.1 - - Ubuntu 11.10 3.0.0-16-generic #29-Ubuntu SMP x86_64 GNU/Linux - gcc (Ubuntu/Linaro 4.6.1-9ubuntu3) 4.6.1 - GNU Fortran (Ubuntu/Linaro 4.6.1-9ubuntu3) 4.6.1 - - Cray Linux Environment (CLE) PrgEnv-pgi 2.2.74 - hopper.nersc.gov pgcc 11.9-0 64-bit target on x86-64 Linux -tp k8e - pgf90 11.9-0 64-bit target on x86-64 Linux -tp k8e - pgCC 11.9-0 64-bit target on x86-64 Linux -tp k8e - - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90 F90 C++ zlib SZIP - parallel parallel -Solaris2.10 32-bit n y n y y y -Solaris2.10 64-bit n y n y y y -Windows XP n y(4) n y y y -Windows XP x64 n y(4) n y y y -Windows Vista n y(4) n y y y -Windows Vista x64 n y(4) n y y y -Mac OS X Snow Leopard 10.6.8 32-bit n 
y n y y n -Mac OS X Snow Leopard 10.6.8 64-bit n y n y y y -Mac OS X Lion 10.7.3 32-bit n y n y y n -Mac OS X Lion 10.7.3 64-bit n y n y y y -AIX 5.3 32- and 64-bit y y y y y y -FreeBSD 8.2-STABLE 32&64 bit n x n x y y -CentOS 5.5 Linux 2.6.18-194 i686 GNU (1)W y y(2) y y y y -CentOS 5.5 Linux 2.6.18-194 i686 Intel W n y n y y y -CentOS 5.5 Linux 2.6.18-194 i686 PGI W n y n y y y -CentOS 5.5 Linux 2.6.18 x86_64 GNU (1) W y y(3) y y y y -CentOS 5.5 Linux 2.6.18 x86_64 Intel W n y n y y y -CentOS 5.5 Linux 2.6.18 x86_64 PGI W n y n y y y -Linux 2.6.32-220.7.1.el6.ppc64 n y n y y y -SGI ProPack 7 Linux 2.6.32.24 y y y y y y -Red Hat Enterprise Linux 6 y y y y y y -CLE hopper.nersc.gov y y(3) y y y n - - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -Solaris2.10 32-bit y y y y -Solaris2.10 64-bit n n n n -Windows XP y y(4) y n -Windows XP x64 y y(4) y n -Windows Vista y y(4) y y -Windows Vista x64 y y(4) y y -Mac OS X Snow Leopard 10.6.8 32-bit y n y n -Mac OS X Snow Leopard 10.6.8 64-bit y n y n -Mac OS X Lion 10.7.3 32-bit y n y y -Mac OS X Lion 10.7.3 64-bit y n y y -AIX 5.3 32- and 64-bit n n n y -FreeBSD 8.2-STABLE 32&64 bit y x x y -CentOS 5.5 Linux 2.6.18-194 i686 GNU (1)W y y(2) y y -CentOS 5.5 Linux 2.6.18-194 i686 Intel W y y y n -CentOS 5.5 Linux 2.6.18-194 i686 PGI W y y y n -CentOS 5.5 Linux 2.6.18 x86_64 GNU (1) W y y y y -CentOS 5.5 Linux 2.6.18 x86_64 Intel W y y y n -CentOS 5.5 Linux 2.6.18 x86_64 PGI W y y y n -Linux 2.6.32-220.7.1.el6.ppc64 y y y n -SGI ProPack 7 Linux 2.6.32.24 y y y n -Red Hat Enterprise Linux 6 y y y n -CLE hopper.nersc.gov n n n n - - (1) Fortran compiled with gfortran. - (2) With PGI and Absoft compilers. - (3) With PGI compiler for Fortran. - (4) Using Visual Studio 2008 w/ Intel Fortran 10.1 (Cygwin shared libraries are not supported) - (5) C and C++ shared libraries will not be built when Fortran is enabled. - Compiler versions for each platform are listed in the preceding - "Platforms Tested" table. 
- - -Known Problems -============== -* The h5repacktst test fails on AIX 32-bit because the test uses more - memory than the default amount. The failure message typically looks like: - - "time: 0551-010 The process was stopped abnormally. Try again." - - This is an issue with the test only and does not represent a problem with - the library. To allow the test to pass, request more memory when testing - via appropriate command such as: - - $ env LDR_CNRTL=MAXDATA=0x20000000@DSA make check - - (AKC - 2012/05/09 - HDFFV-8016) - -* The file_image test will fail in the "initial file image and callbacks in - the core VFD" sub-test if the source directory is read-only as the test - fails to create its test files in the build directory. This will be - resolved in a future release. - (AKC - 2012/05/05 - HDFFV-8009) - -* The dt_arith test reports several errors involving "long double" on - Mac OS X 10.7 Lion when any level of optimization is enabled. The test does - not fail in debug mode. This will be addressed in a future release. - (SLU - 2012/05/08) - -* The following h5dump test case fails in BG/P machines (and potentially other - machines that use a command script to launch executables): - - h5dump --no-compact-subset -d "AHFINDERDIRECT::ah_centroid_t[0] it=0 tl=0" - tno-subset.h5 - - This is due to the embedded spaces in the dataset name being interpreted - by the command script launcher as meta-characters, thus passing three - arguments to h5dump's -d flag. The command passes if run by hand, just - not via the test script. - (AKC - 2012/05/03) - -* The ph5diff (parallel h5diff) tool can intermittently hang in parallel mode - when comparing two HDF5 files that contain objects with the same names but - with different object types. - (JKM - 2012/04/27) - -* On hopper, the build failed when RUNSERIAL and RUNPARALLEL are set - to aprun -np X, because the H5lib_settings.c file was not generated - properly. 
Not setting those environment variables works, because - configure was able to automatically detect that it's a Cray system - and used the proper launch commands when necessary. - (MSC - 2012/04/18) - -* The data conversion test dt_arith.c fails in "long double" to integer - conversion on Ubuntu 11.10 (3.0.0.13 kernel) with GCC 4.6.1 if the library - is built with optimization -O3 or -O2. The older GCC (4.5) or newer kernel - (3.2.2 on Fedora) doesn't have the problem. Users should lower the - optimization level (-O1 or -O0) by defining CFLAGS in the command line of - "configure" like: - - CFLAGS=-O1 ./configure - - This will overwrite the library's default optimization level. - (SLU - 2012/02/07 - HDFFV-7829) - -* The STDIO VFD does not work on some architectures, possibly due to 32/64 - bit or large file issues. The basic STDIO VFD test is known to fail on - 64-bit SunOS 5.10 on SPARC when built with -m64 and 32-bit OS X/Darwin - 10.7.0. The STDIO VFD test has been disabled while we investigate and - a fix should appear in a future release. - (DER - 2011/10/14) - -* h5diff can report inconsistent results when comparing datasets of enum type - that contain invalid values. This is due to how enum types are handled in - the library and will be addressed in a future release. - (DER - 2011/10/14 - HDFFV-7527) - -* The links test can fail under the stdio VFD due to some issues with external - links. This will be investigated and fixed in a future release. - (DER - 2011/10/14 - HDFFV-7768) - -* After the shared library support was fixed for some bugs, it was discovered - that "make prefix=XXX install" no longer works for shared libraries. It - still works correctly for static libraries. Therefore, if you want to - install the HDF5 shared libraries in a location such as /usr/local/hdf5, - you need to specify the location via the --prefix option during configure - time. E.g, ./configure --prefix=/usr/local/hdf5 ... 
- (AKC - 2011/05/07 - HDFFV-7583) - -* The parallel test, t_shapesame, in testpar/, may run for a long time and may - be terminated by the alarm signal. If that happens, one can increase the - alarm seconds (default is 1200 seconds = 20 minutes) by setting the - environment variable, $HDF5_ALARM_SECONDS, to a larger value such as 3600 - (60 minutes). Note that the t_shapesame test may fail in some systems - (see the "While working on the 1.8.6 release..." problem below). If - it does, it will waste more time if $HDF5_ALARM_SECONDS is set - to a larger value. - (AKC - 2011/05/07) - -* The C++ and FORTRAN bindings are not currently working on FreeBSD. - (QAK - 2011/04/26) - -* Shared Fortran libraries are not quite working on AIX. While they are - generated when --enable-shared is specified, the fortran and hl/fortran - tests fail. We are looking into the issue. HL and C++ shared libraries - should now be working as intended, however. - (MAM - 2011/04/20) - -* The --with-mpe configure option does not work with Mpich2. - (AKC - 2011/03/10) - -* While working on the 1.8.6 release of HDF5, a bug was discovered that can - occur when reading from a dataset in parallel shortly after it has been - written to collectively. The issue was exposed by a new test in the parallel - HDF5 test suite, but had existed before that. We believe the problem lies with - certain MPI implementations and/or file systems. - - We have provided a pure MPI test program, as well as a standalone HDF5 - program, that can be used to determine if this is an issue on your system. - They should be run across multiple nodes with a varying number of processes. - These programs can be found at: - http://www.hdfgroup.org/ftp/HDF5/examples/known_problems/ - (NAF - 2011/01/19) - -* The library's test dt_arith.c showed a compiler's rounding problem on - Cygwin when converting from unsigned long long to long double. The - library's own conversion works fine. 
We defined a macro for Cygwin to - skip this test until we can solve the problem. - (SLU - 2010/05/05 - HDFFV-1264) - -* All the VFL drivers aren't backward compatible. In H5FDpublic.h, the - structure H5FD_class_t changed in 1.8. There is new parameter added to - get_eoa and set_eoa callback functions. A new callback function - get_type_map was added in. The public function H5FDrealloc was taken - out in 1.8. The problem only happens when users define their own driver - for 1.6 and try to plug in 1.8 library. Because there's only one user - complaining about it, we (Elena, Quincey, and I) decided to leave it as - it is (see bug report #1279). Quincey will make a plan for 1.10. - (SLU - 2010/02/02) - -* MinGW has a missing libstdc++.dll.a library file and will not successfully link - C++ applications/tests. Do not use the enable-cxx configure option. Read all of - the INSTALL_MINGW.txt file for all restrictions. - (ADB - 2009/11/11) - -* The --enable-static-exec configure flag will only statically link libraries - if the static version of that library is present. If only the shared version - of a library exists (i.e., most system libraries on Solaris, AIX, and Mac, - for example, only have shared versions), the flag should still result in a - successful compilation, but note that the installed executables will not be - fully static. Thus, the only guarantee on these systems is that the - executable is statically linked with just the HDF5 library. - (MAM - 2009/11/04) - -* The PathScale MPI implementation, accessing a Panasas file system, would - cause H5Fcreate() with H5F_ACC_EXCL to fail even when the file does not - exist. This is due to the MPI_File_open() call failing if the mode has - the MPI_MODE_EXCL bit set. - (AKC - 2009/08/11 - HDFFV-988) - -* Parallel tests failed with 16 processes with data inconsistency at testphdf5 - / dataset_readAll. 
Parallel tests also failed with 32 and 64 processes with - collective abort of all ranks at t_posix_compliant / allwrite_allread_blocks - with MPI IO. - (CMC - 2009/04/28) - -* For Red Storm, a Cray XT3 system, the tools/h5ls/testh5ls.sh and - tools/h5copy/testh5copy.sh will fail some of its sub-tests. These sub-tests - are expected to fail and should exit with a non-zero code but the yod - command does not propagate the exit code of the executables. Yod always - returns 0 if it can launch the executable. The test suite shell expects - a non-zero for this particular test, therefore it concludes the test has - failed when it receives 0 from yod. Skip all the "failing" test for now - by changing them as following. - - ======== Original tools/h5ls/testh5ls.sh ========= - TOOLTEST tgroup-1.ls 1 -w80 -r -g tgroup.h5 - ======== Change to =============================== - echo SKIP TOOLTEST tgroup-1.ls 1 -w80 -r -g tgroup.h5 - ================================================== - - ======== Original tools/h5copy/testh5copy.sh ========= - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d grp_rename - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d /grp_rename/grp_dsets - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_dsets -d /E/F/grp_dsets - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_nested -d /G/H/grp_nested - H5LSTEST $FILEOUT - ======== Change to =============================== - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d grp_rename - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d /grp_rename/grp_dsets - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_dsets -d /E/F/grp_dsets - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_nested -d /G/H/grp_nested - echo SKIP H5LSTEST $FILEOUT - ================================================== - (AKC - 2008/11/10) - -* For Red Storm, a Cray XT3 system, the yod command sometimes gives the - message, "yod allocation delayed for node 
recovery". This interferes with - test suites that do not expect to see this message. See the section of "Red - Storm" in file INSTALL_parallel for a way to deal with this problem. - (AKC - 2008/05/28) - -* On an Intel 64 Linux cluster (RH 4, Linux 2.6.9) with Intel 10.0 compilers, - use -mp -O1 compilation flags to build the libraries. A higher level of - optimization causes failures in several HDF5 library tests. - -* On mpich 1.2.5 and 1.2.6, if more than two processes contribute no IO and - the application asks to do collective IO, we have found that when using 4 - processors, a simple collective write will sometimes be hung. This can be - verified with t_mpi test under testpar. - -* A dataset created or rewritten with a v1.6.3 library or after cannot be read - with the v1.6.2 library or before when the Fletcher32 EDC filter is enabled. - There was a bug in the calculation of the Fletcher32 checksum in the - library before v1.6.3; the checksum value was not consistent between big- - endian and little-endian systems. This bug was fixed in Release 1.6.3. - However, after fixing the bug, the checksum value was no longer the same as - before on little-endian system. Library releases after 1.6.4 can still read - datasets created or rewritten with an HDF5 library of v1.6.2 or before. - (SLU - 2005/06/30) - -* On IBM AIX systems, parallel HDF5 mode will fail some tests with error - messages like "INFO: 0031-XXX ...". This is from the command `poe'. - Set the environment variable MP_INFOLEVEL to 0 to minimize the messages - and run the tests again. - - The tests may fail with messages like "The socket name is already in use", - but HDF5 does not use sockets. This failure is due to problems with the - poe command trying to set up the debug socket. To resolve this problem, - check to see whether there are many old /tmp/s.pedb.* files staying around. - These are sockets used by the poe command and left behind due to failed - commands. 
First, ask your system administrator to clean them out. - Lastly, request IBM to provide a means to run poe without the debug socket. - (AKC - 2004/12/08) - - -%%%%1.8.8%%%% - - -HDF5 version 1.8.8 released on 2011-11-15 -================================================================================ - -INTRODUCTION -============ - -This document describes the differences between HDF5-1.8.7 and -HDF5 1.8.8, and contains information on the platforms tested and -known problems in HDF5-1.8.8. -For more details, see the files HISTORY-1_0-1_8_0_rc3.txt -and HISTORY-1_8.txt in the release_docs/ directory of the HDF5 source. - -Links to the HDF5 1.8.8 source code, documentation, and additional materials -can be found on the HDF5 web page at: - - http://www.hdfgroup.org/products/hdf5/ - -The HDF5 1.8.8 release can be obtained from: - - http://www.hdfgroup.org/HDF5/release/obtain5.html - -User documentation for 1.8.8 can be accessed directly at this location: - - http://www.hdfgroup.org/HDF5/doc/ - -New features in the HDF5-1.8.x release series, including brief general -descriptions of some new and modified APIs, are described in the "What's New -in 1.8.0?" document: - - http://www.hdfgroup.org/HDF5/doc/ADGuide/WhatsNew180.html - -All new and modified APIs are listed in detail in the "HDF5 Software Changes -from Release to Release" document, in the section "Release 1.8.8 (current -release) versus Release 1.8.7": - - http://www.hdfgroup.org/HDF5/doc/ADGuide/Changes.html - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS -======== - -- New Features -- Support for New Platforms, Languages, and Compilers -- Bug Fixes since HDF5-1.8.7 -- Platforms Tested -- Supported Configuration Features Summary -- Known Problems - - -New Features -============ - - Configuration - ------------- - - Added the --enable-fortran2003 flag to enable Fortran2003 support - in the HDF5 Fortran library. 
The flag should be used along with the - --enable-fortran flag and takes effect only when the Fortran compiler - is Fortran2003 compliant. (EIP - 2011/11/14) - - Added checks for clock_gettime and mach/mach_time.h to both configure and - CMake. This will support the move from gettimeofday to clock_gettime's - monotonic timer in the profiling code in a future release. - (DER - 2011/10/12) - - Library - ------- - - The Windows VFD code has been removed with the exception of the functions - which set it (H5Pset_fapl_windows, for example). Setting the Windows - VFD now really sets the SEC2 VFD. The WINDOWS_MAX_BUF and - WINDOWS_USE_STDIO configuration options and #defines have also been - removed. NOTE: Since the Windows VFD was a clone of the SEC2 VFD, this - change should be transparent to users. - (DER - 2011/10/12 - HDFFV-7740, HDFFV-7744) - - H5Tcreate now supports the string type (fixed-length and variable- - length). (SLU - 2011/05/20) - - Parallel Library - ---------------- - - Added new H5Pget_mpio_actual_chunk_opt_mode and - H5Pget_mpio_actual_io_mode API routines for querying whether/how - a collective I/O operation completed. (QAK - 2011/10/12) - - Tools - ----- - - None - - High-Level APIs - --------------- - - Added the following Fortran wrappers for the Dimension Scale APIs: - h5dsset_scale_f - h5dsattach_scale_f - h5dsdetach_scale_f - h5dsis_attached_f - h5dsis_scale_f - h5dsset_label_f - h5dsget_label_f - h5dsget_scale_name_f - h5dsget_num_scales_f - (EIP for SB - 2011/10/13 - HDFFV-3797) - - Fortran API - ----------- - - The HDF5 Fortran library was enhanced to support the Fortran 2003 standard.
- The following features are available when the HDF5 library is configured - using the --enable-fortran and --enable-fortran2003 configure flags AND - if the Fortran compiler is Fortran 2003 compliant: - - - Subroutines overloaded with the C_PTR derived type: - h5pget_f - h5pget_fill_value_f - h5pinsert_f - h5pregister_f - h5pset_f - h5pset_fill_value_f - h5rcreate_f - h5rderefrence_f - h5rget_name_f - h5rget_obj_type_f - - Subroutines overloaded with the C_PTR derived type - and simplified signatures: - h5aread_f - h5awrite_f - h5dread_f - h5dwrite_f - - New subroutines - h5dvlen_reclaim_f - h5literate_by_name_f - h5literate_f - h5ovisit_f - h5tconvert_f - h5pset_nbit_f - h5pset_scaleoffset_f - - Subroutines with additional optional parameters: - h5pcreate_class_f - (EIP - 2011/10/14) - - C++ API - ------- - - None - - -Support for New Platforms, Languages, and Compilers -=================================================== - - None - -Bug Fixes since HDF5-1.8.7 -========================== - - Configuration - ------------- - - Changed the size of H5_SIZEOF_OFF_T to 4 bytes (was 8) in the VMS - h5pubconf.h based on the output of a test program. (DER - 2011/10/12) - - The Windows and VMS versions of H5pubconf.h were brought into sync with - the linux/posix version. (DER - 2011/10/12) - - Fixed a bug in the bin/trace Perl script where API functions - that take a variable number of arguments were not processed for - trace statement fixup. (DER - 2011/08/25) - - The --enable-h5dump-packed-bits configure option has been removed. - The h5dump code that this option conditionally enabled is now always - compiled into h5dump. Please refer to the h5dump reference manual for - usage of the packed bits feature. (MAM - 2011/06/23 - HDFFV-7592) - - Configure now uses the same flags and symbols in its tests that are - used to build the library. 
(DER - 2011/05/24) - - Library - ------- - - Corrected the error when copying attributes between files which are using - different versions of the file format. (QAK - 2011/10/20 - HDFFV-7718) - - Corrected the error when loading local heaps from the file, which could - cause the size of the local heap's data block to increase dramatically. - (QAK - 2011/10/14 - HDFFV-7767) - - An application does not need to do H5O_move_msgs_forward() when writing - attributes. Tests were checked into the performance suite. - (VC - 2011/10/13 - HDFFV-7640) - - Fixed a bug that occurred when using H5Ocopy on a committed datatype - containing an attribute using that committed datatype. - (NAF - 2011/10/13 - HDFFV-5854) - - Added generic VFD I/O types to the SEC2 and log VFDs to ensure correct - I/O sizes (and remove compiler warnings) between Windows and true POSIX - systems. (DER - 2011/10/12) - - Corrected some Windows behavior in the SEC2 and log VFDs. This mainly - involved datatype correctness fixes, Windows API call error checks, - and adding the volume serial number to the VFD cmp functions. - (DER - 2011/10/12) - - Converted post-checks for the appropriate POSIX I/O sizes to pre-checks - in order to avoid platform-specific or undefined behavior. - (DER - 2011/10/12) - - #ifdef _WIN32 instances have been changed to #ifdef H5_HAVE_WIN32_API. - H5_HAVE_VISUAL_STUDIO checks have been added where necessary. This is in - CMake only as configure never sets _WIN32. (ADB - 2011/09/12) - - CLANG compiler with the options -fcatch-undefined-behavior and -ftrapv - discovered 3 problems in tests and tools' library: - 1. In dsets.c, left shifting an unsigned int for 32 bits or more - caused undefined behavior. - 2. In dt_arith.c, the INIT_INTEGER macro definition has an overflow - when the value is a negative minimal and is being subtracted from one. - 3. In tools/lib/h5tools_str.c, right shifting an int value for 32 bits - or more caused undefined behavior. 
- All the problems have been corrected. (SLU - 2011/09/02 - HDFFV-7674) - - H5Epush2() now has the correct trace functionality (this is related to the - bin/trace Perl script bug noted in the configure section). - (DER - 2011/08/25) - - Corrected mismatched function name typo of h5pget_dxpl_mpio_c and - h5pfill_value_defined_c. (AKC - 2011/08/22 - HDFFV-7641) - - Corrected an internal error in the library where objects that use committed - (named) datatypes and were accessed from two different file IDs could confuse - the two and cause erroneous failures. (QAK - 2011/07/18 - HDFFV-7638) - - In v1.6 of the library, there was an EOA for the whole MULTI file saved in the - super block. We took it out in v1.8 of the library because it's meaningless - for the MULTI file. v1.8 of the library saves the EOA for the metadata file - instead, but this caused a backward compatibility problem. - A v1.8 library couldn't open the file created with the v1.6 library. We - fixed the problem by checking the EOA value to detect the file - created with v1.6 library. (SLU - 2011/06/22) - - When a dataset had filters and reading data failed, the error message - didn't say which filter wasn't registered. It's fixed now. (SLU - 2011/06/03) - - Parallel Library - ---------------- - - The Special Collective IO (IO when some processes do not contribute to the - IO) and the Complex Derived Datatype MPI functionalities are no longer - conditionally enabled in the library by configure. They are always - enabled in order to take advantage of performance boosts from these - behaviors. Older MPI implementations that do not allow for these - functionalities can no longer be used by HDF5. - (MAM - 2011/07/08 - HDFFV-7639). - - Tools - ----- - - h5diff: fixed segfault over non-comparable attribute with different - dimension or rank, along with '-c' option to display details.
- (JKM - 2011/10/24 - HDFFV-7770) - - Fixed h5diff to display all the comparable objects and attributes - regardless of detecting non-comparables. (JKM - 2011/09/16 - HDFFV-7693) - - Fixed h5repack to update the values of references(object and region) of - attributes in h5repack for 1) references, 2) arrays of references, - 3) variable-length references, and 4) compound references. - (PC - 2011/09/14 - HDFFV-5932) - - h5diff: fixed a segfault over a dataset with container types - array and variable-length (vlen) along with multiple nested compound types. - Example: compound->array->compound, compound->vlen->compound. - (JKM - 2011/09/01 - HDFFV-7712) - - h5repack: added macro to handle a failure in H5Dread/write when memory - allocation failed inside the library. (PC - 2011/08/19) - - Fixed h5jam to not to allow the specifying of an HDF5 formatted file as - an input file for the -u (user block file) option. The original HDF5 file - would not be accessible if this behavior was allowed. - (JKM - 2011/08/19 - HDFFV-5941) - - Revised the command help pages of h5jam and h5unjam. The descriptions - were not up to date and some were missing. - (JKM - 2011/08/15 - HDFFV-7515) - - Fixed h5dump to correct the schema location: - - (ADB - 2011/08/10) - - h5repack: h5repack failed to copy a dataset if the layout is changed - from chunked with unlimited dimensions to contiguous. - (PC - 2011/07/15 - HDFFV-7649) - - Fixed h5diff: the "--delta" option considers two NaN of the same type - are different. This is wrong based on the h5diff description in the - Reference Manual. (PC - 2011/07/15 - HDFFV-7656) - - Fixed h5diff to display an instructive error message and exit with - an instructive error message when mutually exclusive options - (-d, -p and --use-system-epsilon) are used together. - (JKM - 2011/07/07 - HDFFV-7600) - - Fixed h5dump so that it displays the first line of each element in correct - position for multiple dimension array types. 
Before this fix, - the first line of each element in an array was - displayed after the last line of previous element without - moving to the next line (+indentation). - (JKM - 2011/06/15 - HDFFV-5878) - - Fixed h5dump so that it will display the correct value for - H5T_STD_I8LE datasets on the Blue-gene system (ppc64, linux, Big-Endian, - clustering). (AKC & JKM - 2011/05/12 - HDFFV-7594) - - Fixed h5diff to compare a file to itself correctly. Previously h5diff - reported either the files were different or not compatible in certain - cases even when comparing a file to itself. This fix also improves - performance when comparing the same target objects through verifying - the object and file addresses before comparing the details - in the objects. Examples of details are datasets and attributes. - (XCAO & JKM - 2011/05/06 - HDFFV-5928) - - F90 API - ------- - - Modified the h5open_f and h5close_f subroutines to not to call H5open - and H5close correspondingly. While the H5open call just adds overhead, - the H5close call called by a Fortran application shuts down the HDF5 - library. This makes the library inaccessible to the application. - (EIP & SB - 2011/10/13 - HDFFV-915) - - Fixed h5tget_tag_f where the length of the C string was used to - repack the C string into the Fortran string. This lead to memory - corruption in the calling program. (SB - 2011/07/26) - - Added defined constants: - H5T_ORDER_MIXED_F (HDFFV-2767) - H5Z_SO_FLOAT_DSCALE_F - H5Z_SO_FLOAT_ESCALE_F - H5Z_SO_INT_F - H5Z_SO_INT_MINBITS_DEFAULT_F - H5O_TYPE_UNKNOWN_F - H5O_TYPE_GROUP_F - H5O_TYPE_DATASET_F - H5O_TYPE_NAMED_DATATYPE_F - H5O_TYPE_NTYPES_F - - C++ API - ------ - - None - - High-Level APIs: - ------ - - Fixed the H5LTdtype_to_text function. It had some memory problems when - dealing with some complicated data types. (SLU - 2011/10/19 - HDFFV-7701) - - Fixed H5DSset_label seg faulting when retrieving the length of a - dimension label that was not set. 
(SB - 2011/08/07 - HDFFV-7673) - - Fixed a dimension scale bug where if you create a dimscale, attach two - datasets to it, and then unattach them, you get an error if they are - unattached in order, but no error if you unattach them in reverse order. - (SB - 2011/06/07 - HDFFV-7605) - - Fortran High-Level APIs: - ------ - - None - - -Platforms Tested -================ -The following platforms and compilers have been tested for this release. - - AIX 5.3 xlc 10.1.0.5 - (NASA G-ADA) xlC 10.1.0.5 - xlf90 12.1.0.6 - - FreeBSD 8.2-STABLE i386 gcc 4.2.1 [FreeBSD] 20070719 - (loyalty) g++ 4.2.1 [FreeBSD] 20070719 - gcc 4.6.1 20110422 - g++ 4.6.1 20110422 - gfortran 4.6.1 20110422 - - FreeBSD 8.2-STABLE amd64 gcc 4.2.1 [FreeBSD] 20070719 - (freedom) g++ 4.2.1 [FreeBSD] 20070719 - gcc 4.6.1 20110422 - g++ 4.6.1 20110422 - gfortran 4.6.1 20110422 - - IBM Blue Gene/P bgxlc 9.0.0.9 - (LLNL uDawn) bgxlf90 11.1.0.7 - bgxlC 9.0.0.9 - - Linux 2.6.16.60-0.54.5-smp Intel(R) C, C++, Fortran Compilers - x86_64 Version 11.1 20090630 - (INL Icestorm) - - Linux 2.6.18-194.el5 x86_64 Intel(R) C, C++, Fortran Compilers - (INL Fission) Version 12.0.2 20110112 - - Linux 2.6.18-108chaos x86_64 Intel(R) C, C++, Fortran Compilers - (LLNL Aztec) Version 11.1 20090630 - - Linux 2.6.18-194.3.1.el5PAE gcc (GCC) 4.1.2 and 4.4.2 - #1 SMP i686 i686 i386 GNU Fortran (GCC) 4.1.2 20080704 - (jam) (Red Hat 4.1.2-48) and 4.4.2 - PGI C, Fortran, C++ 10.4-0 32-bit - PGI C, Fortran, C++ 10.6-0 32-bit - Intel(R) C Compiler for 32-bit - applications, Version 11.1 - Intel(R) C++ Compiler for 32-bit - applications, Version 11.1 - Intel(R) Fortran Compiler for 32-bit - applications, Version 11.1 - MPICH mpich2-1.3.1 compiled with - gcc 4.1.2 and gfortran 4.1.2 - - Linux 2.6.18-238.12.1.el5 gcc 4.1.2 and 4.4.2 - #1 SMP x86_64 GNU/Linux GNU Fortran (GCC) 4.1.2 20080704 - (koala) (Red Hat 4.1.2-46) and 4.4.2 - tested for both 32- and 64-bit binaries - Intel(R) C, C++, Fortran Compilers for - applications running on 
Intel(R) 64, - Version 11.1. - PGI C, Fortran, C++ Version 9.0-4 - for 64-bit target on x86-64 - MPICH mpich2-1.3.1 compiled with - gcc 4.1.2 and gfortran 4.1.2 - - SGI Altix UV Intel(R) C, Fortran Compilers - SGI ProPack 7 Linux Version 11.1 20100806 - 2.6.32.24-0.2.1.2230.2.PTF- SGI MPT 2.02 - default #1 SMP - (NCSA ember) - - Dell NVIDIA Cluster Intel(R) C, Fortran Compilers - Red Hat Enterprise Linux 6 Version 12.0.4 20110427 - 2.6.32-131.4.1.el6.x86_64 mvapich2 1.7rc1-intel-12.0.4 - (NCSA forge) - - SunOS 5.10 32- and 64-bit Sun C 5.11 SunOS_sparc 2010/08/13 - Sun Fortran 95 8.5 SunOS_sparc 2010/08/13 - Sun C++ 5.11 SunOS_sparc 2010/08/13 - - Windows XP Visual Studio 2008 w/ Intel Fortran 10.1 (project files) - Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - Visual Studio 2010 (cmake) - Cygwin(1.7.9 native gcc(4.5.3) compiler and gfortran) - - Windows XP x64 Visual Studio 2008 w/ Intel Fortran 10.1 (project files) - Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - Visual Studio 2010 (cmake) - Cygwin(1.7.9 native gcc(4.5.3) compiler and gfortran) - - Windows Vista Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - - Windows Vista x64 Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - - Windows 7 Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - - Windows 7 x64 Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - - Mac OS X 10.8.0 (Intel 64-bit) i686-apple-darwin10-gcc-4.2.1 (GCC) 4.2.1 (Apple Inc. build 5666) (dot 3) - Darwin Kernel Version 10.8.0 GNU Fortran (GCC) 4.6.1 - Intel C, C++ and Fortran compilers 12.1.0 - - Mac OS X 10.8.0 (Intel 32-bit) i686-apple-darwin10-gcc-4.2.1 (GCC) 4.2.1 (Apple Inc. 
build 5666) (dot 3) - Darwin Kernel Version 10.8.0 GNU Fortran (GCC) version 4.6.1 - Intel C, C++ and Fortran compilers 12.1.0 - - Fedora 12 2.6.32.16-150.fc12.ppc64 #1 SMP ppc64 GNU/Linux - gcc (GCC) 4.4.4 20100630 (Red Hat 4.4.4-10) - GNU Fortran (GCC) 4.4.4 20100630 (Red Hat 4.4.4-10) - - Debian6.0.3 2.6.32-5-686 #1 SMP i686 GNU/Linux - gcc (Debian 4.4.5-8) 4.4.5 - GNU Fortran (Debian 4.4.5-8) 4.4.5 - - Debian6.0.3 2.6.32-5-amd64 #1 SMP x86_64 GNU/Linux - gcc (Debian 4.4.5-8) 4.4.5 - GNU Fortran (Debian 4.4.5-8) 4.4.5 - - Fedora15 2.6.40.6-0.fc15.i686.PAE #1 SMP i686 i686 i386 GNU/Linux - gcc (GCC) 4.6.1 20110908 (Red Hat 4.6.1-9) - GNU Fortran (GCC) 4.6.1 20110908 (Red Hat 4.6.1-9) - - Fedora15 2.6.40.6-0.fc15.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc (GCC) 4.6.1 20110908 (Red Hat 4.6.1-9) - GNU Fortran (GCC) 4.6.1 20110908 (Red Hat 4.6.1-9) - - SUSE 11.4 2.6.37.6-0.7-desktop #1 SMP PREEMPT i686 i686 i386 GNU/Linux - gcc (SUSE Linux) 4.5.1 20101208 - GNU Fortran (SUSE Linux) 4.5.1 20101208 - - SUSE 11.4 2.6.37.6-0.7-desktop #1 SMP PREEMPT x86_64 x86_64 x86_64 GNU/Linux - gcc (SUSE Linux) 4.5.1 20101208 - GNU Fortran (SUSE Linux) 4.5.1 20101208 - - Ubuntu 11.10 3.0.0-12-generic #20-Ubuntu SMP i686 GNU/Linux - gcc (Ubuntu/Linaro 4.6.1-9ubuntu3) 4.6.1 - GNU Fortran (Ubuntu/Linaro 4.6.4-9ubuntu3) 4.6.1 - - Ubuntu 11.10 3.0.0-12-generic #20-Ubuntu SMP x86_64 GNU/Linux - gcc (Ubuntu/Linaro 4.6.1-9ubuntu3) 4.6.1 - GNU Fortran (Ubuntu/Linaro 4.6.1-9ubuntu3) 4.6.1 - - OpenVMS Alpha 8.3 HP C V7.3-009 - HP Fortran V8.2-104679-48H9K - HP C++ V7.3-009 - - Cray Linux Environment (CLE) PrgEnv-pgi 2.2.74 - hopper.nersc.gov pgcc 11.7-0 64-bit target on x86-64 Linux -tp k8e - franklin.nersc.gov pgf90 11.7-0 64-bit target on x86-64 Linux -tp k8e - pgCC 11.7-0 64-bit target on x86-64 Linux -tp k8e - -Tested Configuration Features Summary -===================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = 
Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90 F90 C++ zlib SZIP - parallel parallel -Solaris2.10 32-bit n y n y y y -Solaris2.10 64-bit n y n y y y -Windows XP n y(4) n y y y -Windows XP x64 n y(4) n y y y -Windows Vista n y(4) n y y y -Windows Vista x64 n y(4) n y y y -OpenVMS Alpha n y n y y n -Mac OS X 10.8 Intel 32-bit n y n y y y -Mac OS X 10.8 Intel 64-bit n y n y y y -AIX 5.3 32- and 64-bit n y n y y y -FreeBSD 8.2-STABLE 32&64 bit n x n x y y -CentOS 5.5 Linux 2.6.18-194 i686 GNU (1)W y y(2) y y y y -CentOS 5.5 Linux 2.6.18-194 i686 Intel W n y n y y n -CentOS 5.5 Linux 2.6.18-194 i686 PGI W n y n y y n -CentOS 5.5 Linux 2.6.16 x86_64 GNU (1) W y y(3) y y y y -CentOS 5.5 Linux 2.6.16 x86_64 Intel W n y n y y n -CentOS 5.5 Linux 2.6.16 x86_64 PGI W n y n y y y -Fedora 12 Linux 2.6.32.16-150.fc12.ppc64 n y n y y y -SGI ProPack 7 Linux 2.6.32.24 y y y y y y -Red Hat Enterprise Linux 6 y y y y y y -CLE hopper.nersc.gov y y(3) y y y n -CLE franklin.nersc.gov y y(3) y y y n - - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -Solaris2.10 32-bit y y y y -Solaris2.10 64-bit y y y y -Windows XP y y(4) y n -Windows XP x64 y y(4) y n -Windows Vista y y(4) y y -Windows Vista x64 y y(4) y y -OpenVMS Alpha n n n n -Mac OS X 10.8 Intel 32-bit y(5) n y n -Mac OS X 10.8 Intel 64-bit y(5) n y n -AIX 5.3 32- and 64-bit n n n y -FreeBSD 8.2-STABLE 32&64 bit y x x y -CentOS 5.5 Linux 2.6.18-128 i686 GNU (1)W y y(2) y y -CentOS 5.5 Linux 2.6.18-128 i686 Intel W y y y n -CentOS 5.5 Linux 2.6.18-128 i686 PGI W y y y n -CentOS 5.5 Linux 2.6.16 x86_64 GNU (1) W y y y y -CentOS 5.5 Linux 2.6.16 x86_64 Intel W y y y n -CentOS 5.5 Linux 2.6.16 x86_64 PGI W y y y n -Fedora 12 Linux 2.6.32.16-150.fc12.ppc64 y y y y -SGI ProPack 7 Linux 2.6.32.24 y y y n -Red Hat Enterprise Linux 6 y y y n -CLE hopper.nersc.gov n n n n -CLE 
franklin.nersc.gov n n n n - - (1) Fortran compiled with gfortran. - (2) With PGI and Absoft compilers. - (3) With PGI compiler for Fortran. - (4) Using Visual Studio 2008 w/ Intel Fortran 10.1 (Cygwin shared libraries are not supported) - (5) C and C++ shared libraries will not be built when Fortran is enabled. - Compiler versions for each platform are listed in the preceding - "Platforms Tested" table. - - -Known Problems -============== - -* The STDIO VFD does not work on some architectures, possibly due to 32/64 - bit or large file issues. The basic STDIO VFD test is known to fail on - 64-bit SunOS 5.10 on SPARC when built with -m64 and 32-bit OS X/Darwin - 10.7.0. The STDIO VFD test has been disabled while we investigate and - a fix should appear in a future release, possibly 1.8.9. - (DER - 2011/10/14) - -* h5diff can report inconsistent results when comparing datasets of enum type - that contain invalid values. This is due to how enum types are handled in - the library and will be addressed in the next release. - (DER - 2011/10/14 - HDFFV-7527) - -* The links test can fail under the stdio VFD due to some issues with external - links. This will be investigated and fixed in a future release. - (DER - 2011/10/14 - HDFFV-7768) - -* After the shared library support was fixed for some bugs, it was discovered - that "make prefix=XXX install" no longer works for shared libraries. It - still works correctly for static libraries. Therefore, if you want to - install the HDF5 shared libraries in a location such as /usr/local/hdf5, - you need to specify the location via the --prefix option during configure - time. E.g, ./configure --prefix=/usr/local/hdf5 ... - (AKC - 2011/05/07 - HDFFV-7583) - -* The parallel test, t_shapesame, in testpar/, may run for a long time and may - be terminated by the alarm signal. 
If that happens, one can increase the - alarm seconds (default is 1200 seconds = 20 minutes) by setting the - environment variable, $HDF5_ALARM_SECONDS, to a larger value such as 3600 - (60 minutes). Note that the t_shapesame test may fail in some systems - (see the "While working on the 1.8.6 release..." problem below). If - it does, it will waste more time if $HDF5_ALARM_SECONDS is set - to a larger value. (AKC - 2011/05/07) - -* The C++ and FORTRAN bindings are not currently working on FreeBSD. - (QAK - 2011/04/26) - -* Shared Fortran libraries are not quite working on AIX. While they are - generated when --enable-shared is specified, the fortran and hl/fortran - tests fail. We are looking into the issue. HL and C++ shared libraries - should now be working as intended, however. (MAM - 2011/04/20) - -* The --with-mpe configure option does not work with Mpich2. (AKC - 2011/03/10) - -* While working on the 1.8.6 release of HDF5, a bug was discovered that can - occur when reading from a dataset in parallel shortly after it has been - written to collectively. The issue was exposed by a new test in the parallel - HDF5 test suite, but had existed before that. We believe the problem lies with - certain MPI implementations and/or file systems. - - We have provided a pure MPI test program, as well as a standalone HDF5 - program, that can be used to determine if this is an issue on your system. - They should be run across multiple nodes with a varying number of processes. - These programs can be found at: - http://www.hdfgroup.org/ftp/HDF5/examples/known_problems/ - (NAF - 2011/01/19) - -* The library's test dt_arith.c showed a compiler's rounding problem on - Cygwin when converting from unsigned long long to long double. The - library's own conversion works fine. We defined a macro for Cygwin to - skip this test until we can solve the problem. - (SLU - 2010/05/05 - HDFFV-1264) - -* All the VFL drivers aren't backward compatible. 
In H5FDpublic.h, the - structure H5FD_class_t changed in 1.8. There is new parameter added to - get_eoa and set_eoa callback functions. A new callback function - get_type_map was added in. The public function H5FDrealloc was taken - out in 1.8. The problem only happens when users define their own driver - for 1.6 and try to plug in 1.8 library. Because there's only one user - complaining about it, we (Elena, Quincey, and I) decided to leave it as - it is (see bug report #1279). Quincey will make a plan for 1.10. - (SLU - 2010/02/02) - -* MinGW has a missing libstdc++.dll.a library file and will not successfully link - C++ applications/tests. Do not use the enable-cxx configure option. Read all of - the INSTALL_MINGW.txt file for all restrictions. (ADB - 2009/11/11) - -* The --enable-static-exec configure flag will only statically link libraries - if the static version of that library is present. If only the shared version - of a library exists (i.e., most system libraries on Solaris, AIX, and Mac, - for example, only have shared versions), the flag should still result in a - successful compilation, but note that the installed executables will not be - fully static. Thus, the only guarantee on these systems is that the - executable is statically linked with just the HDF5 library. - (MAM - 2009/11/04) - -* The PathScale MPI implementation, accessing a Panasas file system, would - cause H5Fcreate() with H5F_ACC_EXCL to fail even when the file does not - exist. This is due to the MPI_File_open() call failing if the mode has - the MPI_MODE_EXCL bit set. (AKC - 2009/08/11 - HDFFV-988) - -* Parallel tests failed with 16 processes with data inconsistency at testphdf5 - / dataset_readAll. Parallel tests also failed with 32 and 64 processes with - collective abort of all ranks at t_posix_compliant / allwrite_allread_blocks - with MPI IO. 
(CMC - 2009/04/28) - -* For Red Storm, a Cray XT3 system, the tools/h5ls/testh5ls.sh and - tools/h5copy/testh5copy.sh will fail some of its sub-tests. These sub-tests - are expected to fail and should exit with a non-zero code but the yod - command does not propagate the exit code of the executables. Yod always - returns 0 if it can launch the executable. The test suite shell expects - a non-zero for this particular test, therefore it concludes the test has - failed when it receives 0 from yod. Skip all the "failing" test for now - by changing them as following. - - ======== Original tools/h5ls/testh5ls.sh ========= - TOOLTEST tgroup-1.ls 1 -w80 -r -g tgroup.h5 - ======== Change to =============================== - echo SKIP TOOLTEST tgroup-1.ls 1 -w80 -r -g tgroup.h5 - ================================================== - - ======== Original tools/h5copy/testh5copy.sh ========= - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d grp_rename - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d /grp_rename/grp_dsets - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_dsets -d /E/F/grp_dsets - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_nested -d /G/H/grp_nested - H5LSTEST $FILEOUT - ======== Change to =============================== - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d grp_rename - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d /grp_rename/grp_dsets - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_dsets -d /E/F/grp_dsets - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_nested -d /G/H/grp_nested - echo SKIP H5LSTEST $FILEOUT - ================================================== - (AKC - 2008/11/10) - -* For Red Storm, a Cray XT3 system, the yod command sometimes gives the - message, "yod allocation delayed for node recovery". This interferes with - test suites that do not expect to see this message. 
See the section of "Red - Storm" in file INSTALL_parallel for a way to deal with this problem. - (AKC - 2008/05/28) - -* On an Intel 64 Linux cluster (RH 4, Linux 2.6.9) with Intel 10.0 compilers, - use -mp -O1 compilation flags to build the libraries. A higher level of - optimization causes failures in several HDF5 library tests. - -* On mpich 1.2.5 and 1.2.6, if more than two processes contribute no IO and - the application asks to do collective IO, we have found that when using 4 - processors, a simple collective write will sometimes be hung. This can be - verified with t_mpi test under testpar. - -* A dataset created or rewritten with a v1.6.3 library or after cannot be read - with the v1.6.2 library or before when the Fletcher32 EDC filter is enabled. - There was a bug in the calculation of the Fletcher32 checksum in the - library before v1.6.3; the checksum value was not consistent between big- - endian and little-endian systems. This bug was fixed in Release 1.6.3. - However, after fixing the bug, the checksum value was no longer the same as - before on little-endian system. Library releases after 1.6.4 can still read - datasets created or rewritten with an HDF5 library of v1.6.2 or before. - (SLU - 2005/06/30) - -* On IBM AIX systems, parallel HDF5 mode will fail some tests with error - messages like "INFO: 0031-XXX ...". This is from the command `poe'. - Set the environment variable MP_INFOLEVEL to 0 to minimize the messages - and run the tests again. - - The tests may fail with messages like "The socket name is already in use", - but HDF5 does not use sockets. This failure is due to problems with the - poe command trying to set up the debug socket. To resolve this problem, - check to see whether there are many old /tmp/s.pedb.* files staying around. - These are sockets used by the poe command and left behind due to failed - commands. First, ask your system administrator to clean them out. 
- Lastly, request IBM to provide a means to run poe without the debug socket. - (AKC - 2004/12/08) - - -%%%%1.8.7%%%% - - -HDF5 version 1.8.7 released on Tue May 10 09:24:44 CDT 2011 -================================================================================ - -INTRODUCTION -============ - -This document describes the differences between HDF5-1.8.6 and -HDF5 1.8.7, and contains information on the platforms tested and -known problems in HDF5-1.8.7. -For more details, see the files HISTORY-1_0-1_8_0_rc3.txt -and HISTORY-1_8.txt in the release_docs/ directory of the HDF5 source. - -Links to the HDF5 1.8.7 source code, documentation, and additional materials -can be found on the HDF5 web page at: - - http://www.hdfgroup.org/products/hdf5/ - -The HDF5 1.8.7 release can be obtained from: - - http://www.hdfgroup.org/HDF5/release/obtain5.html - -User documentation for 1.8.7 can be accessed directly at this location: - - http://www.hdfgroup.org/HDF5/doc/ - -New features in the HDF5-1.8.x release series, including brief general -descriptions of some new and modified APIs, are described in the "What's New -in 1.8.0?" document: - - http://www.hdfgroup.org/HDF5/doc/ADGuide/WhatsNew180.html - -All new and modified APIs are listed in detail in the "HDF5 Software Changes -from Release to Release" document, in the section "Release 1.8.7 (current -release) versus Release 1.8.6": - - http://www.hdfgroup.org/HDF5/doc/ADGuide/Changes.html - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS -======== - -- New Features -- Support for New Platforms, Languages, and Compilers -- Bug Fixes since HDF5-1.8.6 -- Platforms Tested -- Supported Configuration Features Summary -- Known Problems - - -New Features -============ - - Configuration - ------------- - - Configure now generates Makefiles that build in "silent make mode" - by default in which compile and link lines are significantly - simplified for clarity. 
To override this and view actual compile and - link lines during building, the --disable-silent-rules flag can be used - at configure time, or the 'make' command can be followed by V=1, to - indicate a "verbose" make. (MAM - 2011/4/14). - - Added mpicc and mpif90 as the default C and Fortran compilers for Linux - systems when --enable-parallel is specified but no $CC or $FC is defined. - (AKC - 2011/2/7) - - Added a new configure option, "--enable-unsupported", which can - be used to stop configure from preventing the use of unsupported - configure option combinations, such as c++ in parallel or Fortran - with threadsafe. Use at your own risk, as it may result in a - library that won't compile or run as expected! - (MAM - 2010/11/17 - Bug 2061) - - Library - ------- - - The library allows the dimension size of a dataspace to be zero. In - the past, the library would allow this only if the maximal dimension - size was unlimited. Now there is no such restriction, but no data - can be written to this kind of dataset. (SLU - 2011/4/20) - - We added two new macros, H5_VERSION_GE and H5_VERSION_LE, to let users - compare certain version numbers with the library being used. (SLU - - 2011/4/20) - - Added ability to cache files opened through external links. Added new - public functions H5Pset_elink_file_cache_size(), - H5Pget_elink_file_cache_size(), and H5Fclear_elink_file_cache(). - (NAF - 2011/02/17) - - Finished implementing all options for 'log' VFD. (QAK - 2011/1/25) - - Removed all old code for Metrowerks compilers, bracketed by - __MWERKS__). Metrowerks compiler is long gone. (AKC - 2010/11/17) - - Parallel Library - ---------------- - - None - - Tools - ----- - - h5diff: Added new "verbose with levels" option, '-vN, --verbose=N'. - The old '-v, --verbose' option is deprecated but remains available; - it is exactly equivalent to '-v0, --verbose=0'. 
- The new levels 1 ('-v1' or '--verbose=1') and 2 ('-v2' or - '--verbose=2') can be specified to view more information regarding - attributes differences. Bug #2121 (JKM 2011/3/23) - - h5dump: Added new option --enable-error-stack. This option will - display error stack information in the output stream. This is - useful when the "h5dump: Unable to print data" message is output. - (ADB - 2011/03/03) - - High-Level APIs - --------------- - - Fortran LT make datasets routines (H5LTmake_dataset_f, - h5ltmake_dataset_int_f, h5ltmake_dataset_float_f, h5ltmake_dataset_double_f) - and LT read datasets routines (h5ltread_dataset_f,h5ltread_dataset_int_f, - h5ltread_dataset_float_f, 5ltread_dataset_double_f) can now handle - 4-dimensional to 7-dimensional rank datasets. HDFFV-1217 (MSB-2011/4/24/2011) - - F90 API - ------- - - None - - C++ API - ------- - - None - - -Support for New Platforms, Languages, and Compilers -=================================================== - - Intel V11.1 uses now -O3 optimization in production mode (EIP - 2010/10/08) - - - -Bug Fixes since HDF5-1.8.6 -========================== - - Configuration - ------------- - - Shared C++ and HL libraries on AIX should now be working correctly. - Note that Fortran shared libraries are still not working on AIX. - (See the Known Problems section, below). (MAM - 2011/4/20) - - Removed config/ibm-aix6.x. All IBM-AIX settings are in one file, - ibm-aix. (AKC - 2011/4/14) - - Shared C libraries are no longer disabled on Mac when Fortran - is enabled. Shared Fortran libraries are still not supported on Mac, - so configure will disable them by default, but this is overridable - with the new --enable-unsupported configure option. The configure - summary has been updated to reflect the fact that the shared-ness of - the C++/Fortran wrapper libraries may not align with the C library. - (MAM - 2011/04/11 - HDFFV-4353). 
- - Library - ------- - - Changed assertion failure when decoding a compound datatype with no - fields into a normal error failure. Also prohibit using this sort - of datatype for creating an attribute (as is already the case for - datasets and committed (named) datatypes). (QAK - 2011/04/15, Jira - issue #HDFFV-2766) - - Tell the VFL flush call that the file will be closing, allowing - the VFDs to avoid sync'ing the file (particularly valuable in parallel). - (QAK - 2011/03/09) - - The datatype handler created with H5Tencode/decode used to have the - reference count 0 (zero); it now has the reference count 1 (one). - (SLU - 2011/2/18) - - Fixed the definition of H5_HAVE_GETTIMEOFDAY on Windows so that - HDgettimeofday() is defined and works properly. Bug HDFFV-5931 - (DER - 2011/04/14) - - Added basic VFD tests for the Windows, STDIO and log VFD tests. - (DER - 2011/04/11) - - Parallel Library - ---------------- - - None - - Tools - ----- - - Updated h5dump test case script to prevent entire test failure when - source directory is read-only. Bug #HDFFV-4342 (JKM 2011/4/12) - - Fixed h5dump displaying incorrect values for H5T_STD_I8BE type data in - attribute on Big-Endian machine. H5T_STD_I8BE is unsigned 8bit type, - so h5dump is supposed to display -2 instead of 254. It worked correctly - on Little-Endian system , but not on Big-Endian system. Bug #HDFFV-4358 - (JKM 04/08/2011) - - Updated some HDF5 tools to standardize the option name as - '--enable-error-stack' for printing HDF5 error stack messages. h5ls and - h5dump have been updated. For h5ls, this replaces "-e/--errors" option, - which is deprecated. For h5dump, this is a new option. Bug #2182 - (JKM 2011/3/30) - - Fixed the h5diff --use-system-epsilon option. The formula used in the - calculation was changed from ( |a - b| / b ) to ( |a - b| ). - This was done to improve performance. Bug #2184 (JKM 2011/3/24) - - Fixed output for H5T_REFERENCE in h5dump. 
According to the BNF document - the output of a H5T_REFERENCE should be followed by the type; - ::= H5T_REFERENCE { } - ::= H5T_STD_REF_OBJECT | H5T_STD_REF_DSETREG - Previously this was only displayed if the -R option was used. - Bug #1725 (ADB 2011/3/28) - - Fixed two h5diff issues. 1) h5diff compared attributes correctly only - when two objects had the same number of attributes and the attribute - names were identical. 2) h5diff did not display useful information about - attribute differences. Bug #2121 (JKM 2011/3/17) - - Fixed a memory leak in h5diff that occurred when accessing symbolic links - with the --follow-symlink option. Bug #2214 (JKM 2011/3/18) - - Fixed a memory leak in h5diff that occurred when accessing variable length - string data. Bug #2216 (JKM 2011/3/18) - - Fixed and improved the help page for h5ls -a, --address option. - Bug #1904 (JKM 2011/3/11) - - Fixed h5copy to enable copying an object into the same HDF5 file. - Previously h5copy displayed an error message when the target file - was the same as the source file. (XCAO 2011/3/8) - - Fixed an h5dump problem that caused the tool to skip some data elements - in large datasets with a large array datatype on Windows. This issue - arose only on Windows due to the different return behavior of the - _vsnprintf() function. Bug #2161 (JKM 2011/3/3) - - Fixed h5dump which was skipping some array indices in large datasets - with a relatively large array datatype. The interval of skipped indices - varied according to the size of the array. Bug #2092 (JKM 2011/2/15) - - Fixed h5diff which was segfaulting when comparing compound datasets - with a combination of fixed-length string datatypes and variable-length - string datatypes in certain orders. Bug #2089 (JKM 2010/12/28) - - Improved h5diff performance. 1) Now use HDmemcmp() before comparing two - elements. 2) Replace expensive H5Tequals() calls. 3) Retrieve datatype - information at dataset level, not at each element level for compound - datasets. 
HDFFV-7516 (JKM 2011/4/18) - - Fixed h5ls to display nested compound types with curly brackets - when -S (--simple) option is used with -l (--label), so it shows - which members (in curly brackets) belong to which nested compound type, - making the output clearer. Bug #1979 (JKM 2010/11/09) - - Fixed h5diff to handle variable-length strings in a compound dataset - and variable-length string arrays in a compound dataset correctly. - Garbage values were previously displayed when h5diff compared multiple - variable-length strings in a compound type dataset. - Bug #1989 (JKM 2010/10/28) - - Fixed h5copy to fail gracefully when copying an object to a non- - existing group without the -p option. Bug #2040 (JKM 2010/10/18) - - F90 API - ------ - - None - - C++ API - ------ - - None - - High-Level APIs: - ------ - - None - - Fortran High-Level APIs: - ------ - - h5tbmake_table_f: Fixed error in passing an array of characters with different - length field names. - - h5tget_field_info_f: Fixed error with packing the C strings into a Fortran - array of strings. Added optional argument called 'maxlen_out' which returns - the maximum string character length in a field name element. - Bug HDFFV-1255 (MSB- 4/17/2011) - - - - -Platforms Tested -================ -The following platforms and compilers have been tested for this release. - - AIX 6.1 xlc 11.1.0.3 - (NCSA BP) xlC 11.1.0.3 - xlf90 13.1.0.3 - mpcc_r 11.1.0.3 - mpxlf90_r 13.1.0.3 - - FreeBSD 8.2-STABLE i386 gcc 4.2.1 [FreeBSD] 20070719 - (loyalty) g++ 4.2.1 [FreeBSD] 20070719 - gcc 4.6.1 20110422 - g++ 4.6.1 20110422 - gfortran 4.6.1 20110422 - - FreeBSD 8.2-STABLE amd64 gcc 4.2.1 [FreeBSD] 20070719 - (freedom) g++ 4.2.1 [FreeBSD] 20070719 - gcc 4.6.1 20110422 - g++ 4.6.1 20110422 - gfortran 4.6.1 20110422 - - Linux 2.6.18-194.3.1.el5PAE gcc (GCC) 4.1.2 and 4.4.2 - #1 SMP i686 i686 i386 G95 (GCC 4.0.3 (g95 0.93!) 
Apr 21 2010) - (jam) GNU Fortran (GCC) 4.1.2 20080704 - (Red Hat 4.1.2-48) and 4.4.2 - PGI C, Fortran, C++ 10.4-0 32-bit - PGI C, Fortran, C++ 10.6-0 32-bit - Intel(R) C Compiler for 32-bit - applications, Version 11.1 - Intel(R) C++ Compiler for 32-bit - applications, Version 11.1 - Intel(R) Fortran Compiler for 32-bit - applications, Version 11.1 - Absoft 32-bit Fortran 95 10.0.7 - MPICH mpich2-1.3.1 compiled with - gcc 4.1.2 and gfortran 4.1.2 - - Linux 2.6.18-194.17.1.el5 gcc 4.1.2 and 4.4.2 - #1 SMP x86_64 GNU/Linux G95 (GCC 4.0.3 (g95 0.93!) Apr 21 2010) - (amani) tested for both 32- and 64-bit binaries - GNU Fortran (GCC) 4.1.2 20080704 - (Red Hat 4.1.2-46) and 4.4.2 - Intel(R) C, C++, Fortran Compilers for - applications running on Intel(R) 64, - Version 11.1. - PGI C, Fortran, C++ Version 9.0-4 - for 64-bit target on x86-64 - MPICH mpich2-1.3.1 compiled with - gcc 4.1.2 and gfortran 4.1.2 - - SGI ProPack 7 Linux Intel(R) C++ Version 11.1 20100806 - 2.6.32.24-0.2.1.2230.2.PTF- Intel(R) Fortran Version 11.1 20100806 - default #1 SMP SGI MPT 2.01 - SGI Altix UV - (NCSA ember) - - SunOS 5.10 32- and 64-bit Sun C 5.9 Sun OS_sparc Patch 124867-16 - (linew) Sun Fortran 95 8.3 Sun OS_sparc Patch 127000-13 - Sun C++ 5.9 Sun OS_sparc Patch 124863-26 - Sun C 5.10 SunOS_sparc Patch 141861-07 - Sun Fortran 95 8.4 SunOS_sparc Patch 128231-06 - Sun C++ 5.10 SunOS_sparc 128228-11 - - Intel Xeon Linux 2.6.18- gcc 4.2.4 - 92.1.10.el5_lustre.1.6.6smp- Intel(R) C++ Version 10.1.017 - perfctr #8 SMP Intel(R) Fortran Compiler Version 10.1.017 - (NCSA abe) Open MPI 1.3.2 - MVAPICH2-1.5.1_pgi-10.8 - - Windows XP Visual Studio 2008 w/ Intel Fortran 10.1 (project files) - Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - Visual Studio 2010 (cmake) - Cygwin(1.7.7 native gcc(4.3.4) compiler and gfortran) - - Windows XP x64 Visual Studio 2008 w/ Intel Fortran 10.1 (project files) - Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - Visual Studio 2010 (cmake) - Cygwin(1.7.7 native 
gcc(4.3.4) compiler and gfortran) - - Windows Vista Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - - Windows Vista x64 Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - - Windows 7 Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - - Windows 7 x64 Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - - Mac OS X 10.7.0 (Intel 64-bit) i686-apple-darwin10-gcc-4.2.1 (GCC) 4.2.1 - Darwin Kernel Version 10.7.0 GNU Fortran (GCC) 4.6.0 20101106 (experimental) - Intel C, C++ and Fortran compilers 12.0.1.122 20101110 - - Mac OS X 10.7.0 (Intel 32-bit) i686-apple-darwin10-gcc-4.2.1 (GCC) 4.2.1 (Apple Inc. build 5666) (dot 3) - Darwin Kernel Version 10.7.0 GNU Fortran (GCC) version 4.4.0 20090123 (experimental) - [trunk revision 143587] - - Fedora 12 2.6.32.16-150.fc12.ppc64 #1 SMP ppc64 GNU/Linux - gcc (GCC) 4.4.4 20100630 (Red Hat 4.4.4-10) - GNU Fortran (GCC) 4.4.4 20100630 (Red Hat 4.4.4-10) - - Debian6.01 2.6.32-5-686 #1 SMP i686 GNU/Linux - gcc (Debian 4.4.5-8) 4.4.5 - GNU Fortran (Debian 4.4.5-8) 4.4.5 - - Debian6.01 2.6.32-5-amd64 #1 SMP x86_64 GNU/Linux - gcc (Debian 4.4.5-8) 4.4.5 - GNU Fortran (Debian 4.4.5-8) 4.4.5 - - Fedora14 2.6.35.12-88.fc14.i686.PAE #1 SMP i686 i686 i386 GNU/Linux - gcc (GCC) 4.5.1 20100924 (Red Hat 4.5.1-4) - GNU Fortran (GCC) 4.5.1 20100924 (Red Hat 4.5.1-4) - - Fedora14 2.6.35.12-88.fc14.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc (GCC) 4.5.1 20100924 (Red Hat 4.5.1-4) - GNU Fortran (GCC) 4.5.1 20100924 (Red Hat 4.5.1-4) - - SUSE 11.4 2.6.37.1-1.2-desktop #1 SMP PREEMPT i686 i686 i386 GNU/Linux - gcc (SUSE Linux) 4.5.1 20101208 - GNU Fortran (SUSE Linux) 4.5.1 20101208 - - SUSE 11.4 2.6.37.1-1.2-desktop #1 SMP PREEMPT x86_64 x86_64 x86_64 GNU/Linux - gcc (SUSE Linux) 4.5.1 20101208 - GNU Fortran (SUSE Linux) 4.5.1 20101208 - - Ubuntu 10.10 2.6.35-28-generic #50-Ubuntu SMP i686 GNU/Linux - gcc (Ubuntu/Linaro 4.4.4-14ubuntu5) 4.4.5 - GNU Fortran (Ubuntu/Linaro 4.4.4-14ubuntu5) 4.4.5 - - Ubuntu 10.10 2.6.35-28-generic #50-Ubuntu SMP 
x86_64 GNU/Linux - gcc (Ubuntu/Linaro 4.4.4-14ubuntu5) 4.4.5 - GNU Fortran (Ubuntu/Linaro 4.4.4-14ubuntu5) 4.4.5 - - OpenVMS Alpha 8.3 HP C V7.3-009 - HP Fortran V8.2-104679-48H9K - HP C++ V7.3-009 - -Tested Configuration Features Summary -======================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90 F90 C++ zlib SZIP - parallel parallel -Solaris2.10 32-bit n y n y y y -Solaris2.10 64-bit n y n y y y -Windows XP n y(4) n y y y -Windows XP x64 n y(4) n y y y -Windows Vista n y(4) n y y y -Windows Vista x64 n y(4) n y y y -OpenVMS Alpha n y n y y n -Mac OS X 10.7 Intel 32-bit n y n y y y -Mac OS X 10.7 Intel 64-bit n y n y y y -AIX 6.1 32- and 64-bit y y y y y y -FreeBSD 8.2-STABLE 32&64 bit n x n x y y -CentOS 5.5 Linux 2.6.18-194 i686 GNU (1)W y y(2) y y y y -CentOS 5.5 Linux 2.6.18-194 i686 Intel W n y n y y n -CentOS 5.5 Linux 2.6.18-194 i686 PGI W n y n y y n -CentOS 5.5 Linux 2.6.16 x86_64 GNU (1) W y y(3) y y y y -CentOS 5.5 Linux 2.6.16 x86_64 Intel W n y n y y n -CentOS 5.5 Linux 2.6.16 x86_64 PGI W n y n y y y -RedHat EL4 2.6.18 Xeon Lustre C y y y y y n -Fedora 12 Linux 2.6.32.16-150.fc12.ppc64 n y n y y y -SGI Linux 2.6.32.19 y y y y y y - - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -Solaris2.10 32-bit y y y y -Solaris2.10 64-bit y y y y -Windows XP y y(4) y n -Windows XP x64 y y(4) y n -Windows Vista y y(4) y y -Windows Vista x64 y y(4) y y -OpenVMS Alpha n n n n -Mac OS X 10.7 Intel 32-bit y(5) n y n -Mac OS X 10.7 Intel 64-bit y(5) n y n -AIX 6.1 32- and 64-bit n n n y -FreeBSD 8.2-STABLE 32&64 bit y x x y -CentOS 5.5 Linux 2.6.18-128 i686 GNU (1)W y y(2) y y -CentOS 5.5 Linux 2.6.18-128 i686 Intel W y y y n -CentOS 5.5 Linux 2.6.18-128 i686 PGI W y y y n -CentOS 5.5 Linux 
2.6.16 x86_64 GNU (1) W y y y y -CentOS 5.5 Linux 2.6.16 x86_64 Intel W y y y n -CentOS 5.5 Linux 2.6.16 x86_64 PGI W y y y n -RedHat EL4 2.6.18 Xeon Lustre C y y y n -Fedora 12 Linux 2.6.32.16-150.fc12.ppc64 y y y y -SGI Linux 2.6.32.19 y y y y - - (1) Fortran compiled with gfortran. - (2) With PGI and Absoft compilers. - (3) With PGI compiler for Fortran. - (4) Using Visual Studio 2008 w/ Intel Fortran 10.1 (Cygwin shared libraries are not supported) - (5) C and C++ shared libraries will not be built when Fortran is enabled. - Compiler versions for each platform are listed in the preceding - "Platforms Tested" table. - - -Known Problems -============== -* After the shared library support was fixed for some bugs, it was discovered - that "make prefix=XXX install" no longer works for shared libraries. It - still works correctly for static libraries. Therefore, if you want to - install the HDF5 shared libraries in a location such as /usr/local/hdf5, - you need to specify the location via the --prefix option during configure - time. E.g, ./configure --prefix=/usr/local/hdf5 ... - (AKC - 2011/05/07 HDFFV-7583) - -* The parallel test, t_shapesame, in testpar/, may run for a long time and may - be terminated by the alarm signal. If that happens, one can increase the - alarm seconds (default is 1200 seconds = 20 minutes) by setting the - environment variable, $HDF5_ALARM_SECONDS, to a larger value such as 3600 - (60 minutes). Note that the t_shapesame test may fail in some systems - (see the "While working on the 1.8.6 release..." problem below). If - it does, it will waste more time if $HDF5_ALARM_SECONDS is set - to a larger value. (AKC - 2011/05/07) - -* The C++ and FORTRAN bindings are not currently working on FreeBSD. - (QAK - 2011/04/26) - -* Shared Fortran libraries are not quite working on AIX. While they are - generated when --enable-shared is specified, the fortran and hl/fortran - tests fail. We are looking into the issue. 
HL and C++ shared libraries - should now be working as intended, however. (MAM - 2011/04/20) - -* The --with-mpe configure option does not work with Mpich2. AKC - 2011/03/10 - -* If parallel gmake (e.g., gmake -j 4) is used, the "gmake clean" command - sometimes fails in the perform directory due to the attempt to remove the - executable of h5perf or h5perf_serial by two "parallel" commands. This error - has no consequence on the functionality of the HDF5 library or install. It - is fixed in the next release. AKC - 2011/01/25 - -* While working on the 1.8.6 release of HDF5, a bug was discovered that can - occur when reading from a dataset in parallel shortly after it has been - written to collectively. The issue was exposed by a new test in the parallel - HDF5 test suite, but had existed before that. We believe the problem lies with - certain MPI implementations and/or file systems. - - We have provided a pure MPI test program, as well as a standalone HDF5 - program, that can be used to determine if this is an issue on your system. - They should be run across multiple nodes with a varying number of processes. - These programs can be found at: - http://www.hdfgroup.org/ftp/HDF5/examples/known_problems/ - -* The library's test dt_arith.c showed a compiler's rounding problem on - Cygwin when converting from unsigned long long to long double. The - library's own conversion works fine. We defined a macro for Cygwin to - skip this test until we can solve the problem. Please see bug #1813. - SLU - 2010/5/5 - -* All the VFL drivers aren't backward compatible. In H5FDpublic.h, the - structure H5FD_class_t changed in 1.8. There is new parameter added to - get_eoa and set_eoa callback functions. A new callback function - get_type_map was added in. The public function H5FDrealloc was taken - out in 1.8. The problem only happens when users define their own driver - for 1.6 and try to plug in 1.8 library. 
Because there's only one user - complaining about it, we (Elena, Quincey, and I) decided to leave it as - it is (see bug report #1279). Quincey will make a plan for 1.10. - SLU - 2010/2/2 - -* MinGW has a missing libstdc++.dll.a library file and will not successfully link - C++ applications/tests. Do not use the enable-cxx configure option. Read all of - the INSTALL_MINGW.txt file for all restrictions. ADB - 2009/11/11 - -* The PathScale MPI implementation, accessing a Panasas file system, would - cause H5Fcreate() with H5F_ACC_EXCL to fail even when the file does not - exist. This is due to the MPI_File_open() call failing if the mode has - the MPI_MODE_EXCL bit set. (See bug 1468 for details.) AKC - 2009/8/11 - -* Parallel tests failed with 16 processes with data inconsistency at testphdf5 - / dataset_readAll. Parallel tests also failed with 32 and 64 processes with - collective abort of all ranks at t_posix_compliant / allwrite_allread_blocks - with MPI IO. CMC - 2009/04/28 - -* For Red Storm, a Cray XT3 system, the tools/h5ls/testh5ls.sh and - tools/h5copy/testh5copy.sh will fail some of its sub-tests. These sub-tests - are expected to fail and should exit with a non-zero code but the yod - command does not propagate the exit code of the executables. Yod always - returns 0 if it can launch the executable. The test suite shell expects - a non-zero for this particular test, therefore it concludes the test has - failed when it receives 0 from yod. Skip all the "failing" test for now - by changing them as following. 
- - ======== Original tools/h5ls/testh5ls.sh ========= - TOOLTEST tgroup-1.ls 1 -w80 -r -g tgroup.h5 - ======== Change to =============================== - echo SKIP TOOLTEST tgroup-1.ls 1 -w80 -r -g tgroup.h5 - ================================================== - - ======== Original tools/h5copy/testh5copy.sh ========= - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d grp_rename - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d /grp_rename/grp_dsets - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_dsets -d /E/F/grp_dsets - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_nested -d /G/H/grp_nested - H5LSTEST $FILEOUT - ======== Change to =============================== - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d grp_rename - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d /grp_rename/grp_dsets - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_dsets -d /E/F/grp_dsets - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_nested -d /G/H/grp_nested - echo SKIP H5LSTEST $FILEOUT - ================================================== - AKC - 2008/11/10 - -* For Red Storm, a Cray XT3 system, the yod command sometimes gives the - message, "yod allocation delayed for node recovery". This interferes with - test suites that do not expect to see this message. See the section of "Red - Storm" in file INSTALL_parallel for a way to deal with this problem. - AKC - 2008/05/28 - -* On an Intel 64 Linux cluster (RH 4, Linux 2.6.9) with Intel 10.0 compilers, - use -mp -O1 compilation flags to build the libraries. A higher level of - optimization causes failures in several HDF5 library tests. - -* On mpich 1.2.5 and 1.2.6, if more than two processes contribute no IO and - the application asks to do collective IO, we have found that when using 4 - processors, a simple collective write will sometimes be hung. This can be - verified with t_mpi test under testpar. 
- -* A dataset created or rewritten with a v1.6.3 library or after cannot be read - with the v1.6.2 library or before when the Fletcher32 EDC filter is enabled. - There was a bug in the calculation of the Fletcher32 checksum in the - library before v1.6.3; the checksum value was not consistent between big- - endian and little-endian systems. This bug was fixed in Release 1.6.3. - However, after fixing the bug, the checksum value was no longer the same as - before on little-endian system. Library releases after 1.6.4 can still read - datasets created or rewritten with an HDF5 library of v1.6.2 or before. - SLU - 2005/6/30 - -* On IBM AIX systems, parallel HDF5 mode will fail some tests with error - messages like "INFO: 0031-XXX ...". This is from the command `poe'. - Set the environment variable MP_INFOLEVEL to 0 to minimize the messages - and run the tests again. - - The tests may fail with messages like "The socket name is already in use", - but HDF5 does not use sockets. This failure is due to problems with the - poe command trying to set up the debug socket. To resolve this problem, - check to see whether there are many old /tmp/s.pedb.* files staying around. - These are sockets used by the poe command and left behind due to failed - commands. First, ask your system administrator to clean them out. - Lastly, request IBM to provide a means to run poe without the debug socket. - -* The --enable-static-exec configure flag will only statically link libraries - if the static version of that library is present. If only the shared version - of a library exists (i.e., most system libraries on Solaris, AIX, and Mac, - for example, only have shared versions), the flag should still result in a - successful compilation, but note that the installed executables will not be - fully static. Thus, the only guarantee on these systems is that the - executable is statically linked with just the HDF5 library. 
- -* There is also a configure error on Altix machines that incorrectly reports - when a version of Szip without an encoder is being used. - -* On cobalt, an SGI Altix SMP ia64 system, Intel compiler version 10.1 (which - is the default on that system) does not work properly and results in - failures during make check (in a static build) and make installcheck (during - a shared build). This appears to be a compiler optimization problem. - Reducing optimization by setting CFLAGS to -O1 or below resolves the issue. - Alternatively, using a newer version of the compiler (11.0) also works as - intended. MAM - 2010/06/01 - -* h5diff will not report enum value differences when one or both of the values - is not a valid enumeration value. The source of this bug has been identified - and it will be fixed in 1.8.8. DER - 2011/04/27 - - -%%%%1.8.6%%%% - - -HDF5 version 1.8.6 released on Mon Feb 14 10:26:30 CST 2011 -================================================================================ - -INTRODUCTION -============ - -This document describes the differences between HDF5-1.8.5 and -HDF5 1.8.6, and contains information on the platforms tested and -known problems in HDF5-1.8.6. -For more details, see the files HISTORY-1_0-1_8_0_rc3.txt -and HISTORY-1_8.txt in the release_docs/ directory of the HDF5 source. - -Links to the HDF5 1.8.6 source code, documentation, and additional materials -can be found on the HDF5 web page at: - - http://www.hdfgroup.org/products/hdf5/ - -The HDF5 1.8.6 release can be obtained from: - - http://www.hdfgroup.org/HDF5/release/obtain5.html - -User documentation for 1.8.6 can be accessed directly at this location: - - http://www.hdfgroup.org/HDF5/doc/ - -New features in the HDF5-1.8.x release series, including brief general -descriptions of some new and modified APIs, are described in the "What's New -in 1.8.0?" 
document: - - http://www.hdfgroup.org/HDF5/doc/ADGuide/WhatsNew180.html - -All new and modified APIs are listed in detail in the "HDF5 Software Changes -from Release to Release" document, in the section "Release 1.8.6 (current -release) versus Release 1.8.5": - - http://www.hdfgroup.org/HDF5/doc/ADGuide/Changes.html - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS -======== - -- New Features -- Support for New Platforms, Languages, and Compilers -- Bug Fixes since HDF5-1.8.5 -- Platforms Tested -- Supported Configuration Features Summary -- Known Problems - - -New Features -============ - - Configuration - ------------- - - CMake: Improved CPack packaging, added parallel commands, improved - configuration options (better similarity to configure), added more - tests, better support for use in external cmake projects. - (ADB - 2010/10/07) - - The default configuration setting for official releases is - --enable-production. For unofficial releases, the default configuration - setting has been --disable-production. (AKC - 2010/05/28) - Library - ------- - - Added support for thread safety on Windows using the Windows threads - library. Use the HDF5_ENABLE_THREADSAFE option in CMake on a Windows - platform to enable this functionality. This is supported on Windows - Vista and newer Windows operating systems. (MAM - 2010/09/10) - - H5Tset_order and H5Tget_order now support all datatypes. A new byte - order, H5T_ORDER_MIXED, has been added specifically for a compound - datatype and its derived type. (SLU - 2010/8/23) - - Improved performance of metadata I/O by changing the default algorithm - to perform I/O from all processes (instead of just process 0) when using - parallel I/O drivers. (QAK - 2010/07/19) - - Improved performance of I/O on datasets with the same shape, but - different rank. 
(QAK - 2010/07/19) - - Improved performance of the chunk cache by avoiding unnecessary b-tree - lookups of chunks already in cache. (NAF - 2010/06/15) - - Parallel Library - ---------------- - - None - - Tools - ----- - - h5diff: Added a new flag: --exclude-path. The specified path to an - object will be excluded when comparing two files or two groups. If a - group is specified to be excluded, all member objects of that group - will be excluded. (JKM - 2010/09/16). - - h5ls: Added a new flag: --no-dangling-links. See --help output for - details. (JKM - 2010/06/15) - - h5ls: Added a new flag --follow-symlinks. See --help output for - details. (JKM - 2010/05/25) - - High-Level APIs - --------------- - - None - - F90 API - ------- - - None - - C++ API - ------- - - None - - -Support for New Platforms, Languages, and Compilers -=================================================== - - Sun C and C++ 5.10 and Sun Fortran 95 8.4. - - Mac OS X 10.6.4 with gcc 4.2.1 and gfortran 4.6 - - -Bug Fixes since HDF5-1.8.5 -========================== - - Configuration - ------------- - - The default number of MPI processes for testing purposes has been - changed from 3 to 6. (AKC - 2010/11/11) - - Some tests in tools/h5repack may fail in AIX systems when -q32 mode is - used. The error is caused by not requesting enough memory in default. - Added "env LDR_CNTRL=MAXDATA=0x20000000@DSA" into the $RUNSERIAL and - $RUNPARALLE in the AIX config file so that executables are tested with - more memory. (AKC - 2010/11/11) - - Removed recognition of the parallel compilers of LAM(hcc) and - ChMPIon(cmpicc) since we have no access to these two MPI implementations - and cannot verify their correctness. (AKC - 2010/07/14 - Bug 1921) - - PHDF5 was changed to use "mpiexec" instead of mpirun as the default - MPI applications startup command as defined in the MPI-2 definition, - section 4.1. 
(AKC - 2010/06/11 - Bug 1921) - - Library - ------- - - Fixed a bug that caused big endian machines to generate corrupt files - when using the scale-offset filter with floating point data or fill - values. Note that such datasets will no longer be readable - by any machine after this patch. (NAF - 2010/02/02 - Bug 2131) - - Retrieving a link's name by index in the case where the link is external - and the file that the link refers to doesn't exist will now fail - gracefully rather than cause a segmentation fault. (MAM - 2010/11/17) - - Modified metadata accumulator to better track accumulated dirty metadata - in an effort to reduce unnecessary I/O in certain situations and to - fix some other corner cases which were prone to error. (MAM - 2010/10/15) - - Added a new set of unit tests that are run during 'make check' to verify - the behavior of the metadata accumulator. (MAM - 2010/10/15) - - Modified library to always cache symbol table information. Libraries - from version 1.6.3 and earlier have a bug which causes them to require - this information for some operations. (NAF - 2010/09/21 - Bug 1864) - - Fixed a bug where the library could generate an assertion/core dump when - a file that had been created with H5Pset_libver_bounds(fapl, - H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) but didn't have a superblock - extension was later reopened. (QAK - 2010/09/16 - Bug 1968) - - Fixed a bug that could occur when getting information for a new-style - group that was previously opened through a file handle that was later - closed. (NAF - 2010/09/15) - - Added define check in H5public.h if stdint.h is supported by the C++ - compiler. This define is only available on Windows with VS2010 and using - CMake to build the library. (ADB - 2010/09/13 - Bug 1938) - - When a mandatory filter failed to write data chunks, the dataset - couldn't close (bug 1260). The fix releases all resources and closes - the dataset but returns a failure. 
(SLU - 2010/09/08) - - H5Eset_current_stack now also closes the error stack set as the - default. This is to avoid a potential problem. - (SLU - 2010/09/07 - Bug 1799) - - Corrected situation where 1-D chunked dataset could get created by an - application without calling H5Pset_chunk(). H5Pset_chunk is now - required for creating all chunked datasets. (QAK - 2010/09/02) - - Fixed many memory issues that valgrind exposed. (QAK - 2010/08/24) - - Fixed the bug in the filter's public CAN_APPLY function. The return - value should be htri_t not herr_t. (SLU - 2010/08/05 - Bug 1239) - - Fixed the STDIO VFD to use fseeko64 instead of fseek64 for 64-bit I/O - support. (AKC - 2010/7/30) - - Fixed a bug in the direct I/O driver that could render files with certain - kinds of unaligned data unreadable or corrupt them. (NAF - 2010/07/28) - - valgrind reported an error of copying data to itself when a new attribute - is written. Fixed by taking out the memcpy step in the attribute code. - (SLU - 2010/07/28 - Bug 1956) - - Corrected various issues in the MPI datatype creation code which could - cause resource leaks or incorrect behavior (and may improve the - performance as well). (QAK - 2010/07/19) - - Fixed a bug that could cause file corruption when using non-default sizes - of addresses and/or lengths. This bug could also cause uncorrupted files - with this property to be unreadable. This bug was introduced in 1.8.5. - (NAF - 2010/07/16 - Bug 1951) - - Parallel Library - ---------------- - - None - - Tools - ----- - - Fixed h5diff to compare member objects and groups recursively when - two files or groups are compared. (JKM - 2010/9/16 - Bug 1975) - - Fixed h5repack to be able to convert a dataset to COMPACT layout. - (JKM - 2010/09/15 - Bug 1896) - - Changed h5ls to not interpret special characters in object or attribute - names for output. (JKM - 2010/06/28 - Bug 1784) - - Revised the order of arguments for h5cc, h5fc, h5c++, h5pcc and h5pfc. 
- CPPFLAGS, CFLAGS, LDFLAGS, and LIBS have been duplicated with an H5BLD_ - prefix to put the flags and paths from the hdf5 build in the correct - places and allow the script user to add entries in CPPFLAGS, CFLAGS, - LDFLAGS, and LIBS that will take precedence over those from the hdf5 - build. The user can make these entries persistent by editing - CFLAGSBASE, CPPFLAGSBASE, LDFLAGSBASE, and LIBSBASE near the top of - the script or temporary by setting HDF5_CFLAGS, HDF5_CPPFLAGS, - HDF5_LDFLAGS, or HDF5_LIBS in the environment. The new order of - arguments in these scripts is $CLINKER $H5BLD_CPPFLAGS $CPPFLAGS - $H5BLD_CFLAGS $CFLAGS $LDFLAGS $clibpath $link_objs $LIBS $link_args - $shared_link. (LRK - 2010/10/25 - Bug 1973) - - F90 API - ------ - - None - - C++ API - ------ - - None - - High-Level APIs: - ------ - - None - - Fortran High-Level APIs: - ------ - - None - - -Platforms Tested -================ -The following platforms and compilers have been tested for this release. - - AIX 6.1 xlc 11.1.0.3 - (NCSA BP) xlC 11.1.0.3 - xlf 13.1.0.3 - mpcc_r 11.1.0.3 - mpxlf_r 13.1.0.3 - - FreeBSD 6.3-STABLE i386 gcc 3.4.6 [FreeBSD] 20060305 - (duty) g++ 3.4.6 [FreeBSD] 20060305 - gcc 4.4.5 20100803 - g++ 4.4.5 20100803 - gfortran 4.4.5 20100803 - - FreeBSD 6.3-STABLE amd64 gcc 3.4.6 [FreeBSD] 20060305 - (liberty) g++ 3.4.6 [FreeBSD] 20060305 - gcc 4.4.5 20100803 - g++ 4.4.5 20100803 - gfortran 4.4.5 20100803 - - Linux 2.6.18-194.3.1.el5PAE gcc (GCC) 4.1.2 and 4.4.2 - #1 SMP i686 i686 i386 G95 (GCC 4.0.3 (g95 0.93!) 
Apr 21 2010) - (jam) GNU Fortran (GCC) 4.1.2 20080704 - (Red Hat 4.1.2-48) and 4.4.2 - PGI C, Fortran, C++ 10.4-0 32-bit - PGI C, Fortran, C++ 10.6-0 32-bit - Intel(R) C Compiler for 32-bit - applications, Version 11.1 - Intel(R) C++ Compiler for 32-bit - applications, Version 11.1 - Intel(R) Fortran Compiler for 32-bit - applications, Version 11.1 - Absoft 32-bit Fortran 95 10.0.7 - MPICH mpich2-1.3.1 compiled with - gcc 4.1.2 and gfortran 4.1.2 - - Linux 2.6.18-194.17.1.el5 gcc 4.1.2 and 4.4.2 - #1 SMP x86_64 GNU/Linux G95 (GCC 4.0.3 (g95 0.93!) Apr 21 2010) - (amani) tested for both 32- and 64-bit binaries - GNU Fortran (GCC) 4.1.2 20080704 - (Red Hat 4.1.2-46) and 4.4.2 - Intel(R) C, C++, Fortran Compilers for - applications running on Intel(R) 64, - Version 11.1. - PGI C, Fortran, C++ Version 9.0-4 - for 64-bit target on x86-64 - MPICH mpich2-1.3.1 compiled with - gcc 4.1.2 and gfortran 4.1.2 - - SGI ProPack 7 Linux Intel(R) C++ Version 11.1 20100806 - 2.6.32.19-0.3.1.1982.0.PTF- Intel(R) Fortran Version 11.1 20100806 - default #1 SMP SGI MPT 2.01 - SGI Altix UV - (NCSA ember) - - SunOS 5.10 32- and 64-bit Sun C 5.9 Sun OS_sparc Patch 124867-16 - (linew) Sun Fortran 95 8.3 Sun OS_sparc Patch 127000-13 - Sun C++ 5.9 Sun OS_sparc Patch 124863-62 - Sun C 5.10 SunOS_sparc Patch 141861-07 - Sun Fortran 95 8.4 SunOS_sparc Patch 128231-06 - Sun C++ 5.10 SunOS_sparc 128228-11 - - Intel Xeon Linux 2.6.18- gcc 4.2.4 - 92.1.10.el5_lustre.1.6.6smp- Intel(R) C++ Version 10.1.017 - perfctr #8 SMP Intel(R) Fortran Compiler Version 10.1.017 - (NCSA abe) Open MPI 1.3.2 - MVAPICH2-1.5.1_pgi-10.8 - - Windows XP Visual Studio 2008 w/ Intel Fortran 10.1 (project files) - Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - Visual Studio 2010 (cmake) - Cygwin(1.7.7 native gcc(4.3.4) compiler and gfortran) - - Windows XP x64 Visual Studio 2008 w/ Intel Fortran 10.1 (project files) - Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - Visual Studio 2010 (cmake) - Cygwin(1.7.7 native 
gcc(4.3.4) compiler and gfortran) - - Windows Vista Visual Studio 2008 w/ Intel Fortran 10.1 (project files) - Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - - Windows Vista x64 Visual Studio 2008 w/ Intel Fortran 10.1 (project files) - Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - - Windows 7 Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - - Windows 7 x64 Visual Studio 2008 w/ Intel Fortran 11.1 (cmake) - - Mac OS X 10.6.3 (Intel 64-bit) i686-apple-darwin10-gcc-4.2.1 (GCC) 4.2.1 - Darwin Kernel Version 10.3.1 GNU Fortran (GCC) 4.5.0 20090910 - Intel C, C++ and Fortran compilers 11.1 20100806 - - Mac OS X 10.6.4 (Intel 32-bit) i686-apple-darwin10-gcc-4.2.1 (GCC) 4.2.1 - Darwin Kernel Version 10.4.0 GNU Fortran (GCC) 4.6.0 20101106 - Intel C, C++ and Fortran compilers 12.0.0 20101110 - - Mac OS X 10.6.4 (Intel 64-bit) i686-apple-darwin10-gcc-4.2.1 (GCC) 4.2.1 (Apple Inc. build 5659) - Darwin Kernel Version 10.6.0 GNU Fortran (GCC) 4.5.0 20090910 - Intel C, C++ and Fortran compilers 11.1 20100806 - - Fedora 12 2.6.32.16-150.fc12.ppc64 #1 SMP ppc64 GNU/Linux - gcc (GCC) 4.4.4 20100630 (Red Hat 4.4.4-10) - GNU Fortran (GCC) 4.4.4 20100630 (Red Hat 4.4.4-10) - - Debian5.06 2.6.26-2-686 #1 SMP i686 GNU/Linux - gcc (Debian 4.3.2-1.1) 4.3.2 - GNU Fortran (Debian 4.3.2-1.1) 4.3.2 - - Debian5.06 2.6.26-2-amd64 #1 SMP x86_64 GNU/Linux - gcc (Debian 4.3.2-1.1) 4.3.2 - GNU Fortran (Debian 4.3.2-1.1) 4.3.2 - - Fedora14 2.6.35.6-48.fc14.i686.PAE #1 SMP i686 i686 i386 GNU/Linux - gcc (GCC) 4.5.1 20100924 (Red Hat 4.5.1-4) - GNU Fortran (GCC) 4.5.1 20100924 (Red Hat 4.5.1-4) - - Fedora14 2.6.35.6-48.fc14.x86_64 #1 SMP x86_64 x86_64 x86_64 GNU/Linux - gcc (GCC) 4.5.1 20100924 (Red Hat 4.5.1-4) - GNU Fortran (GCC) 4.5.1 20100924 (Red Hat 4.5.1-4) - - SUSE 11.3 2.6.34.7-0.7-desktop #1 SMP PREEMPT i686 i686 i386 GNU/Linux - gcc (SUSE Linux) 4.5.0 20100604 [gcc-4_5-branch revision 160292] - GNU Fortran (SUSE Linux) 4.5.0 20100604 [gcc-4_5-branch revision 160292] - - SUSE 
11.3 2.6.34.7-0.7-desktop #1 SMP PREEMPT x86_64 x86_64 x86_64 GNU/Linux - gcc (SUSE Linux) 4.5.0 20100604 [gcc-4_5-branch revision 160292] - GNU Fortran (SUSE Linux) 4.5.0 20100604 [gcc-4_5-branch revision 160292] - - Ubuntu 10.10 2.6.35-25-generic #44-Ubuntu SMP i686 GNU/Linux - gcc (Ubuntu/Linaro 4.4.4-14ubuntu5) 4.4.5 - GNU Fortran (Ubuntu/Linaro 4.4.4-14ubuntu5) 4.4.5 - - Ubuntu 10.10 2.6.35-25-generic #44-Ubuntu SMP x86_64 GNU/Linux - gcc (Ubuntu/Linaro 4.4.4-14ubuntu5) 4.4.5 - GNU Fortran (Ubuntu/Linaro 4.4.4-14ubuntu5) 4.4.5 - - OpenVMS Alpha 8.3 HP C V7.3-009 - HP Fortran V8.2-104679-48H9K - HP C++ V7.3-009 - -Tested Configuration Features Summary -======================================== - - In the tables below - y = tested - n = not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90 F90 C++ zlib SZIP - parallel parallel -Solaris2.10 32-bit n y n y y y -Solaris2.10 64-bit n y n y y y -Windows XP n y(4) n y y y -Windows XP x64 n y(4) n y y y -Windows Vista n y(4) n y y y -Windows Vista x64 n y(4) n y y y -OpenVMS Alpha n y n y y n -Mac OS X 10.6 Intel n y n y y y -AIX 6.1 32- and 64-bit y y y y y y -FreeBSD 6.3-STABLE 32&64 bit n y n y y y -CentOS 5.5 Linux 2.6.18-194 i686 GNU (1)W y y(2) y y y y -CentOS 5.5 Linux 2.6.18-194 i686 Intel W n y n y y n -CentOS 5.5 Linux 2.6.18-194 i686 PGI W n y n y y n -CentOS 5.5 Linux 2.6.16 x86_64 GNU (1) W y y(3) y y y y -CentOS 5.5 Linux 2.6.16 x86_64 Intel W n y n y y n -CentOS 5.5 Linux 2.6.16 x86_64 PGI W n y n y y y -RedHat EL4 2.6.18 Xeon Lustre C y y y y y n -Fedora 12 Linux 2.6.32.16-150.fc12.ppc64 n y n y y y -SGI Linux 2.6.32.19 y y y y y y - - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -Solaris2.10 32-bit y y y y -Solaris2.10 64-bit y y y y -Windows XP y y(4) y n -Windows XP x64 y y(4) y n -Windows Vista y y(4) y 
y -Windows Vista x64 y y(4) y y -OpenVMS Alpha n n n n -Mac OS X 10.6 y(5) n y n -AIX 6.1 32- and 64-bit n n n y -FreeBSD 6.3-STABLE 32&64 bit y n y y -CentOS 5.5 Linux 2.6.18-128 i686 GNU (1)W y y(2) y y -CentOS 5.5 Linux 2.6.18-128 i686 Intel W y y y n -CentOS 5.5 Linux 2.6.18-128 i686 PGI W y y y n -CentOS 5.5 Linux 2.6.16 x86_64 GNU (1) W y y y y -CentOS 5.5 Linux 2.6.16 x86_64 Intel W y y y n -CentOS 5.5 Linux 2.6.16 x86_64 PGI W y y y n -RedHat EL4 2.6.18 Xeon Lustre C y y y n -Fedora 12 Linux 2.6.32.16-150.fc12.ppc64 y y y y -SGI Linux 2.6.32.19 y y y y - - (1) Fortran compiled with gfortran. - (2) With PGI and Absoft compilers. - (3) With PGI compiler for Fortran. - (4) Using Visual Studio 2008 w/ Intel Fortran 10.1 (Cygwin shared libraries are not supported) - (5) C and C++ shared libraries will not be built when Fortran is enabled. - Compiler versions for each platform are listed in the preceding - "Platforms Tested" table. - - -Known Problems -============== -* examples/run-all-ex.sh does not work on Cygwin. (NAF - 2011/02/11) - -* Parallel test, t_shapesame in testpar, is rather unstable as it continues to - have occasional errors in AIX and quite often in NCSA Abe. It is being built - but it is not run automatically in the "make check" command. One would have to - run it by hand to see if it works in a particular machine. AKC - 2011/01/28 - -* Although OpenVMS Alpha is supported, there are several problems with the C - test suite - getname.c, lheap.c, mtime.c, and stab.c. The test - suite for h5diff also fails. These failures are from the tests, not the - library. We have fixed these failures. But it's too late to put the fixes - into this release. If you install the 1.8.6 library, it should still work - despite these test failures. If you want the working copy without any - test failure, you can request it from us. 
SLU - 2011/01/26 - -* If parallel gmake (e.g., gmake -j 4) is used, the "gmake clean" command - sometimes fails in the perform directory due to the attempt to remove the - executable of h5perf or h5perf_serial by two "parallel" commands. This error - has no consequence on the functionality of the HDF5 library or install. It - is fixed in the next release. AKC - 2011/01/25 - -* While working on the 1.8.6 release of HDF5, a bug was discovered that can - occur when reading from a dataset in parallel shortly after it has been - written to collectively. The issue was exposed by a new test in the parallel - HDF5 test suite, but had existed before that. We believe the problem lies with - certain MPI implementations and/or filesystems. - - We have provided a pure MPI test program, as well as a standalone HDF5 - program, that can be used to determine if this is an issue on your system. - They should be run across multiple nodes with a varying number of processes. - These programs can be found at: - http://www.hdfgroup.org/ftp/HDF5/examples/known_problems/ - -* The h5diff tool can display garbage values when variable-length strings in - a compound type dataset are compared. This also occurs with variable-length - string arrays in a compound type dataset. See bug #1989. This will be fixed - in the next release. JKM - 2010/11/05 - -* The AIX --enable-shared setting does not quite work. It can produce a shared - library, but there cannot be more than one shared library that is - interlinked. This means that the high level APIs will not work which is not - very useful. We hope to have a solution in the next release. - (AKC - 2010/10/15) - -* H5Eset_auto can cause a seg fault for a library API call if the application - compiles with -DH5_USE_16_API (see bug 1707). It will be fixed in the - next release. SLU - 2010/10/5 - -* The library's test dt_arith.c showed a compiler's rounding problem on - Cygwin when converting an unsigned long long to a long double. 
The - library's own conversion works fine. We defined a macro for Cygwin to - skip this test until we can solve the problem. Please see bug #1813. - SLU - 2010/5/5 - -* All the VFL drivers aren't backwardly compatible. In H5FDpublic.h, the - structure H5FD_class_t changed in 1.8. A new parameter was added to the - get_eoa and set_eoa callback functions, and a new callback function - get_type_map was added. The public function H5FDrealloc was taken out in - 1.8. The problem only happens when users define their own driver for 1.6 - and try to plug in a 1.8 library. This will be fixed in 1.10. SLU - 2010/2/2 - -* MinGW has a missing libstdc++.dll.a library file and will not successfully link - C++ applications/tests. Do not use the enable-cxx configure option. Read all of - the INSTALL_MINGW.txt file for all restrictions. ADB - 2009/11/11 - -* The PathScale MPI implementation, accessing a Panasas file system, would - cause H5Fcreate() with H5F_ACC_EXCL to fail even when the file does not - exist. This is due to the MPI_File_open() call failing if the amode has - the MPI_MODE_EXCL bit set. (See bug 1468 for details.) AKC - 2009/8/11 - -* Parallel tests failed with 16 processes with data inconsistency at testphdf5 - / dataset_readAll. Parallel tests also failed with 32 and 64 processes with - collective abort of all ranks at t_posix_compliant / allwrite_allread_blocks - with MPI IO. CMC - 2009/04/28 - -* For Red Storm, a Cray XT3 system, the tools/h5ls/testh5ls.sh and - tools/h5copy/testh5copy.sh will fail some of their sub-tests. These - sub-tests are expected to fail and should exit with a non-zero code but - the yod command does not propagate the exit code of the executables. Yod - always returns 0 if it can launch the executable. The test suite shell - expects a non-zero for this particular test. Therefore, it concludes the - test has failed when it receives 0 from yod. To skip all the "failing" - tests for now, change them as shown below. 
- - ======== Original tools/h5ls/testh5ls.sh ========= - TOOLTEST tgroup-1.ls 1 -w80 -r -g tgroup.h5 - ======== Change to =============================== - echo SKIP TOOLTEST tgroup-1.ls 1 -w80 -r -g tgroup.h5 - ================================================== - - ======== Original tools/h5copy/testh5copy.sh ========= - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d grp_rename - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d /grp_rename/grp_dsets - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_dsets -d /E/F/grp_dsets - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_nested -d /G/H/grp_nested - H5LSTEST $FILEOUT - ======== Change to =============================== - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d grp_rename - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d /grp_rename/grp_dsets - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_dsets -d /E/F/grp_dsets - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_nested -d /G/H/grp_nested - echo SKIP H5LSTEST $FILEOUT - ================================================== - AKC - 2008/11/10 - -* For Red Storm, a Cray XT3 system, the yod command sometimes gives the - message "yod allocation delayed for node recovery." This interferes - with test suites that do not expect to see this message. See the "Red Storm" - section in file INSTALL_parallel for a way to deal with this problem. - AKC - 2008/05/28 - -* On an Intel 64 Linux cluster (RH 4, Linux 2.6.9) with Intel 10.0 compilers, - use the -mp -O1 compilation flags to build the libraries. A higher level - of optimization causes failures in several HDF5 library tests. - -* On mpich 1.2.5 and 1.2.6 on a system using four processors, if more than - two processes contribute no I/O and the application asks to do collective - I/O, we have found that a simple collective write will sometimes hang. This - can be verified with the t_mpi test under testpar. 
- -* A dataset created or rewritten with a v1.6.3 or later library cannot be - read with the v1.6.2 or earlier library when the Fletcher32 EDC filter - is enabled. There was a bug in the calculation of the Fletcher32 checksum - in the library before v1.6.3; the checksum value was not consistent - between big-endian and little-endian systems. This bug was fixed in - Release 1.6.3. However, after fixing the bug, the checksum value was no - longer the same as before on little-endian system. Library releases after - 1.6.4 can still read datasets created or rewritten with an HDF5 library of - v1.6.2 or earlier. SLU - 2005/6/30 - -* On IBM AIX systems, parallel HDF5 mode will fail some tests with error - messages like "INFO: 0031-XXX ...". This is from the command `poe'. To - work around this, set the environment variable MP_INFOLEVEL to 0 to - minimize the messages and run the tests again. The tests may fail with - messages like "The socket name is already in use", but HDF5 does not use - sockets. This failure is due to problems with the poe command trying to - set up the debug socket. To resolve this problem, check to see whether - there are any old /tmp/s.pedb.* files around. These are sockets used by - the poe command and left behind if the command failed at some point. To - resolve this, ask your system administrator to remove the - old/tmp/s.pedb.* files, and then ask IBM to provide a means to run poe - without the debug socket. - -* The --enable-static-exec configure flag will only statically link - libraries if the static version of that library is present. If only the - shared version of a library exists (i.e., most system libraries on - Solaris, AIX, and Mac, for example, only have shared versions), the flag - should still result in a successful compilation, but note that the - installed executables will not be fully static. Thus, the only guarantee - on these systems is that the executable is statically linked with just - the HDF5 library. 
- -* On an SGI Altix SMP ia64 system, the Intel compiler version 10.1 (which - is the default on that system) does not work properly and results in - failures during the make check (in a static build) and the make - installcheck (in a shared build). This appears to be a compiler - optimization problem. Reducing the optimization by setting CFLAGS to - -O1 or below resolves the issue. Using a newer version of the compiler - (11.0) avoids the issue. MAM - 2010/06/01 - -* On solaris systems, when running the examples with the scripts installed in - .../share/hdf5_examples, two of the c tests, h5_extlink and h5_elink_unix2win - may fail or generate HDF5 errors because the script commands in c/run-c-ex.sh - fail to create test directories red, blue, and u2w. Moving the '!' in lines - 67, 70, 73 of run-c-ex.sh will fix the problem. For example the script command - "if ! test -d red; then" will work on solaris if changed to - "if test ! -d red; then". - - -%%%%1.8.5%%%% - - -HDF5 version 1.8.5 released on Fri Jun 4 13:27:31 CDT 2010 -================================================================================ - -INTRODUCTION -============ - -This document describes the differences between HDF5-1.8.4 and HDF5 1.8.5, and -contains information on the platforms tested and known problems in HDF5-1.8.5. -For more details, see the files HISTORY-1_0-1_8_0_rc3.txt and HISTORY-1_8.txt -in the release_docs/ directory of the HDF5 source. - -Links to the HDF5 1.8.5 source code, documentation, and additional materials -can be found on the HDF5 web page at: - - http://www.hdfgroup.org/products/hdf5/ - -The HDF5 1.8.5 release can be obtained from: - - http://www.hdfgroup.org/HDF5/release/obtain5.html - -User documentation for 1.8.5 can be accessed directly at this location: - - http://www.hdfgroup.org/HDF5/doc/ - -New features in the HDF5-1.8.x release series, including brief general -descriptions of some new and modified APIs, are described in the "What's New -in 1.8.0?" 
document: - - http://www.hdfgroup.org/HDF5/doc/ADGuide/WhatsNew180.html - -All new and modified APIs are listed in detail in the "HDF5 Software Changes -from Release to Release" document, in the section "Release 1.8.5 (current -release) versus Release 1.8.4": - - http://www.hdfgroup.org/HDF5/doc/ADGuide/Changes.html - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS -======== - -- New Features -- Support for New Platforms, Languages, and Compilers -- Bug Fixes since HDF5-1.8.4 -- Platforms Tested -- Supported Configuration Features Summary -- Known Problems - - -New Features -============ - - Configuration - ------------- - - CMake Early Access: This release adds support for building HDF5 using - the CMake system. Initial work has targeted Windows, but other platforms - can be used. See the CMake.TXT file for more information. Version - 2.8.1 of CMake is required. - - Configure now adds appropriate defines for supporting large (64-bit) - files on all systems, where supported, by default, instead of only Linux. - This large file support is controllable with the --enable-largefile - configure option. The Linux-specific --enable-linux-lfs option has been - deprecated in favor of this new option. Please note that specifying - --disable-large does NOT attempt to "turn off" largefile support if it - is natively supported by the compiler, but rather just disables - configure from actively trying to add any additional compiler flags. - (MAM - 2010/05/05 - Bug # 1772/1434) - - Fixed an signal handling mask error in H5detect that might result in - SIGBUS or SIGSEGV failures in some platforms such as Linux on Sparc. - (AKC - 2010/4/28 - Bug # 1764) - - Fixed various "strict aliasing" problems, allowing higher levels - of compiler optimization (in particular, allowing '-O3' to work - with recent versions of GCC). (QAK - 2010/04/26) - - Upgraded versions of autotools used to generate configuration suite. 
- We now use Automake 1.11.1, Autoconf 2.65, and Libtool 2.2.6b. - (MAM - 2010/04/15) - - Added the xlc-* and mpcc_r-* BASENAME patterns to be recognized as IBM - compilers so that the IBM compiler options can be added properly. This - allows non-system-default compiler command names (e.g. xlc-m.n.k.l) be - recognized. (AKC - 2009/11/26) - - Library - ------- - - Performance is substantially improved when extending a dataset with early - allocation. (NAF - 2010/03/24 - Bug # 1637) - - Added support for filtering densely stored groups. Many of the API - functions related to filters have been extended to support dense groups - as well as datasets. Pipeline messages can now be stored in a group's - object header. (NAF/QAK - 2009/11/3) - - Parallel Library - ---------------- - - None - - Tools - ----- - - h5dump: Added the new packed bits feature which prints packed bits stored - in an integer dataset. (AKC/ADB - 2010/5/7) - - h5diff: Fixed incorrect behavior (hang) in parallel mode when specifying - invalid options (ex: -v and -q). (JKM - 2010/02/17) - - h5diff: Added new flag --no-dangling-links (see --help for details). - (JKM - 2010/02/10) - - h5diff: Added new flag --follow-symlinks (see --help for details). - (JKM - 2010/01/25) - - h5diff: Added a fix to correct the display of garbage values when - displaying big-endian data on a little-endian machine. (JKM - 2009/11/20) - - High-Level APIs - --------------- - - None - - F90 API - ------- - - None - - C++ API - ------- - - New member functions - + Overloaded CommonFG::getObjnameByIdx to take char* for name. - + Overloaded CommonFG::getObjTypeByIdx to return type name as a char*. - (BMR - 2010/05/10) - + Added DataSet::getInMemDataSize() to simplify getting the dataset's - data size in memory. (BMR - 2009/07/26) - - -Support for New Platforms, Languages, and Compilers -=================================================== - - AIX 6.1 has been added. 
(AKC - 2010/1/4) - - -Bug Fixes since HDF5-1.8.4 -========================== - - Configuration - ------------- - - Fixed various "strict aliasing" problems, allowing higher levels - of compiler optimization (in particular, allowing '-O3' to work - with recent versions of GCC). (QAK - 2010/04/26) - - Library - ------- - - Fixed a file corruption bug that could happen when shrinking a compressed - dataset. (NAF - 2010/05/20) - - Fixed some memory leaks in VL datatype conversion when strings are - used as fill values. (MAM - 2010/05/12 - Bug # 1826) - - Fixed an H5Rcreate failure when passing in a -1 for the dataspace - identifier. (ADB - 2010/4/28) - - Fixed a bug when copying objects with NULL references with the - H5O_COPY_EXPAND_REFERENCE_FLAG flag set. (NAF - 2010/04/08 - Bug # 1815) - - Added a mechanism to the H5I interface to save returned object identifier - structures for immediate re-use if needed. This addresses a potential - performance issue by delaying the case when the next identifier to be - registered has grown so large that it wraps around and needs to be - checked to see whether it is available for distribution. - (MAM - 2010/03/15 - Bug # 1730) - - Files can now be concurrently opened more than once using the core file - driver, as long as the backing store is used. (NAF - 2010/03/09) - - Added support for H5O_COPY_EXPAND_EXT_LINK_FLAG to H5Ocopy. External - links will now be expanded if this flag is set. - (NAF - 2010/03/05 - Bug # 1733) - - Fixed a bug where the library, when traversing an external link, would - reopen the source file if nothing else worked. (NAF - 2010/03/05) - - Fixed a bug where fractal heap identifiers for attributes and shared - object header messages could be incorrectly encoded in the file for - files created on big-endian platforms. - Please see http://www.hdfgroup.org/HDF5/release/known_problems if you - suspect you have a file with this problem. 
- (QAK - 2010/02/23 - Bug # 1755) - - Fixed an intermittent bug in the b-tree code which could be triggered - by expanding and shrinking chunked datasets in certain ways. - (NAF - 2010/02/16) - - H5Tdetect_class said a VL string is a string type. But when it's in - a compound type, it said it's a VL type. THis has been fixed to be - consistent; it now always returns a string type. - (SLU - 2009/12/10 - Bug # 1584) - - Allow "child" files from external links to be correctly located when - relative to a "parent" file that is opened through a symbolic link. - (QAK - 2009/12/01) - - Parallel Library - ---------------- - - Parallel mode in AIX will fail some of the testcheck_version.sh tests - where it treats "exit(134) the same as if process 0 had received an abort - signal. Fixed. (AKC - 2009/11/3) - - Tools - ----- - - Fixed h5ls to return exit code 1 (error) when a non-existent file is - specified. (JKM - 2010/04/27 - Bug # 1793) - - Fixed h5copy failure when copying a dangling link that is specified - directly. (JKM - 2010/04/22 - Bug # 1817) - - Fixed an h5repack failure that lost attributes from a dataset of - reference type. (JKM - 2010/3/25 - Bug # 1726) - - Fixed h5repack error that set NULL for object reference values for - datasets, groups, or named datatypes. (JKM - 2010/03/19 - Bug # 1814) - - F90 API - ------ - - None - - C++ API - ------ - - The constructor PropList::PropList(id) was fixed to act properly - according to the nature of 'id'. When 'id' is a property class - identifier, a new property list will be created. When 'id' is a - property list identifier, a copy of the property list will be made. - (BMR - 2010/5/9) - - The parameters 'size' and 'bufsize' in CommonFG::getLinkval and - CommonFG::getComment, respectively, now have default values for the - user's convenience. (BMR - 2009/10/23) - - NULL pointer accessing was fixed. 
(BMR - 2009/10/05 - Bug # 1061) - - Read/write methods of DataSet and Attribute classes were fixed - to handle string correctly. (BMR - 2009/07/26) - - High-Level APIs: - ------ - - Fixed a bug in H5DSattach_scale, H5DSis_attached, and H5DSdetach_scale - caused by using the H5Tget_native_type function to determine the native - type for reading the REFERENCE_LIST attribute. This bug was exposed - on Mac PPC. (EIP - 2010/05/22 - Bug # 1851) - - Fixed a bug in the H5DSdetach_scale function when 0 bytes were - allocated after the last reference to a dimension scale was removed - from the list of references in a VL element of the DIMENSION_LIST - attribute. Modified the function to comply with the specification: - the DIMENSION_LIST attribute is now deleted when no dimension scales - are left attached. (EIP - 2010/05/14 - Bug # 1822) - - Fortran High-Level APIs: - ------ - - None - - -Platforms Tested -================ -The following platforms and compilers have been tested for this release. - - AIX 5.3 xlc 7.0.0.9, 8.0.0.20, 9.0.0.4 - (LLNL Up) xlC 7.0.0.9, 8.0.0.20, 9.0.0.4 - xlf 9.1.0.9, 10.1.0.9, 11.1.0.7 - mpcc_r 7.0.0.9 - mpxlf_r 09.01.0000.0008 - - AIX 6.1 xlc 10.1.0.6 - (NCSA BP) xlC 10.1.0.6 - xlf 12.1.0.7 - - Cray XT3 (2.1.56) cc (pgcc) 10.0-0 - (SNL red storm) ftn (pgf90) 10.0-0 - CC (pgCC) 10.0-0 - - FreeBSD 6.3-STABLE i386 gcc 3.4.6 [FreeBSD] 20060305 - (duty) g++ 3.4.6 [FreeBSD] 20060305 - gcc 4.3.4 20090419 - g++ 4.3.4 20090419 - gfortran 4.3.4 20090419 - - FreeBSD 6.3-STABLE amd64 gcc 3.4.6 [FreeBSD] 20060305 - (liberty) g++ 3.4.6 [FreeBSD] 20060305 - gcc 4.4.1 20090421 - g++ 4.4.1 20090421 - gfortran 4.4.1 20090421 - - Linux 2.6.18-128.1.6.el5xen gcc (GCC) 4.1.2 20080704 and 4.4.2 - #1 SMP i686 i686 i386 GNU Fortran (GCC) 4.1.2 20080704 and 4.4.2 - (jam) g++ (GCC) 4.1.2 20080704 and 4.4.2 - G95 (GCC 4.0.3 (g95 0.93!) 
Apr 21 2010) - Absoft 32-bit Fortran 95 10.0.7 - PGI C, Fortran, C++ 10.4-0 32-bit - Intel(R) C, C++, Fortran Compilers for 32-bit - applications, Version 11.1 Build 20090827 - MPICH mpich2-1.0.8 compiled with - gcc 4.1.2 and GNU Fortran (GCC) 4.1.2 - - Linux 2.6.18-164.el5 #1 SMP gcc 4.1.2 20080704 and gcc 4.4.2 - x86_64 GNU/Linux GNU Fortran (GCC) 4.1.2 20080704 and 4.4.2 - (amani) g++ (GCC) 4.1.2 20080704 and 4.4.2 - G95 (GCC 4.0.3 (g95 0.93!) Apr 21 2010) - Intel(R) C, C++, Fortran Compilers for - applications running on Intel(R) 64, - Version 11.1 Build 20090827. - PGI C, Fortran, C++ Version 10.4-0 - for 32 & 64-bit target on x86-64 - MPICH mpich2-1.0.8 compiled with - gcc 4.1.2 and GNU Fortran (GCC) 4.1.2 - - Linux 2.6.16.54-0.2.5 #1 Intel(R) C++ Version 11.0.074 - SGI Altix SMP ia64 Intel(R) Fortran Itanium(R) Version 11.0.074 - (cobalt) SGI MPI 1.38 - - SunOS 5.10 32- and 64-bit Sun C 5.9 SunOS_sparc Patch 124867-14 - (linew) Sun Fortran 95 8.3 SunOS_sparc - Patch 127000-13 - Sun C++ 5.9 SunOS_sparc Patch 124863-23 - - Intel Xeon Linux 2.6.18- Intel(R) C++ Version 10.0.026 - 92.1.10.el5_lustre.1.6.6smp- Intel(R) Fortran Compiler Version 10.0.026 - perfctr #7 SMP Open MPI 1.2.2 - (abe) MVAPICH2-0.9.8p28p2patched-intel-ofed-1.2 - compiled with icc v10.0.026 and ifort 10.0.026 - - Linux 2.6.18-76chaos #1 SMP Intel(R) C, C++, Fortran Compilers for - SMP x86_64 GNU/Linux applications running on Intel(R) 64, - (SNL Glory) Versions 11.1. 
- - Windows XP Visual Studio 2008 w/ Intel Fortran 10.1 - Cygwin(1.7.5 native gcc(4.3.4) compiler and - gfortran) - - Windows XP x64 Visual Studio 2008 w/ Intel Fortran 10.1 - - Windows Vista Visual Studio 2008 w/ Intel Fortran 10.1 - - Windows Vista x64 Visual Studio 2008 w/ Intel Fortran 10.1 - - MAC OS 10.6.3 (Intel) i686-apple-darwin10-gcc-4.2.1 (GCC) 4.2.1 - (pahra) GNU Fortran (GCC) 4.5.0 20090910 - i686-apple-darwin10-g++-4.2.1 (GCC) 4.2.1 - Intel C, C++ and Fortran compilers 11.1 - - MAC OS 10.5.8 (Intel) i686-apple-darwin9-gcc-4.0.1 (GCC) 4.0.1 - (tejeda) - - MAC OS 10.5 (PPC) powerpc-apple-darwin9-gcc-4.0.1 (GCC) 4.0.1 - (juniper-w) - - OpenVMS Alpha V8.3 HP C V7.3-009 - HP C++ V7.3-009 - HP Fortran V8.0-1-104669-48GBT - -Supported Configuration Features Summary -======================================== - - In the tables below - y = tested and supported - n = not supported or not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90 F90 C++ zlib SZIP - parallel parallel -Solaris2.10 32-bit n y n y y y -Solaris2.10 64-bit n y n y y y -Windows XP n y(4) n(4) y y y -Windows XP x64 n y(4) n(4) y y y -Windows Vista n y(4) n(4) y y y -Windows Vista x64 n y(4) n(4) y y y -Mac OS X 10.5 PPC n n n n y n -Mac OS X 10.5 Intel n y n y y y -Mac OS X 10.6 Intel n y n y y y -AIX 5.3 32- and 64-bit n y n y y n -AIX 6.1 32- and 64-bit n y n y y n -FreeBSD 6.3-STABLE 32&64 bit n y n y y y -RedHat EL4 2.6.9-42 i686 GNU (1) W y y y y y y -RedHat EL5 2.6.18-128 i686 GNU (1)W y y(2) y y y y -RedHat EL5 2.6.18-128 i686 Intel W n y n y y n -RedHat EL5 2.6.18-128 i686 PGI W n y n y y n -SuSe Linux 2.6.16 x86_64 GNU (1) W y y(3) y y y y -SuSe Linux 2.6.16 x86_64 Intel W n y n y y n -SuSe Linux 2.6.16 x86_64 PGI W n y n y y y -SuSe Linux 2.6.16 SGI Altix ia64 C y y y y y y -RedHat EL4 2.6.18 Xeon Lustre C y y y y 
y n -Cray XT3 2.1.56 y y y y y n -OpenVMS Alpha V8.3 n y n y y n - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -Solaris2.10 32-bit y y y y -Solaris2.10 64-bit y y y y -Windows XP y y(4) y n -Windows XP x64 y y(4) y n -Windows Vista y y(4) y n -Windows Vista x64 y y(4) y n -Mac OS X 10.5 PPC y n n n -Mac OS X 10.5 (Intel) y(5) n y n -Mac OS X 10.6 (Intel) y(5) n y n -AIX 5.3 32- and 64-bit n n n n -AIX 6.1 32- and 64-bit n n n n -FreeBSD 6.3-STABLE 32&64 bit y n y y -RedHat EL4 2.6.9-42 i686 GNU (1) W y y y y -RedHat EL5 2.6.18-128 i686 GNU (1)W y y(2) y y -RedHat EL5 2.6.18-128 i686 Intel W y y y n -RedHat EL5 2.6.18-128 i686 PGI W y y y n -SuSe Linux 2.6.16 x86_64 GNU (1) W y y y y -SuSe Linux 2.6.16 x86_64 Intel W y y y n -SuSe Linux 2.6.16 x86_64 PGI W y y y n -SuSe Linux 2.6.16 SGI Altix ia64 C y n -RedHat EL4 2.6.18 Xeon Lustre C y y y n -Cray XT3 2.1.56 n n n n -OpenVMS Alpha V8.3 n n n n - - (1) Fortran compiled with g95. - (2) With PGI and Absoft compilers. - (3) With PGI compiler for Fortran. - (4) Using Visual Studio 2008. (Cygwin shared libraries are not - supported.) - (5) Shared C and C++ are disabled when Fortran is configured in. - Compiler versions for each platform are listed in the preceding - "Platforms Tested" table. - - -Known Problems -============== -* The library's test dt_arith.c exposed a compiler's rounding problem on - Cygwin when converting from unsigned long long to long double. The - library's own conversion works correctly. A macro is defined for Cygwin - to skip this test until we can solve the problem. (Please see bug #1813.) - SLU - 2010/5/5 - -* All the VFL drivers aren't backward compatible. In H5FDpublic.h, the - structure H5FD_class_t changed in 1.8. There is a new parameter added to - get_eoa and set_eoa callback functions. A new callback function - get_type_map was added. The public function H5FDrealloc was taken - out in 1.8. 
The problem only happens when users define their own driver - for 1.6 and try to plug it into a 1.8 library. This affects a very small - number of users. (See bug report #1279.) SLU - 2010/2/2 - -* MinGW has a missing libstdc++.dll.a library file and will not successfully - link C++ applications/tests. Do not use the enable-cxx configure option. - Read all of the INSTALL_MINGW.txt file for all restrictions. - ADB - 2009/11/11 - -* Some tests in tools/h5repack may fail in AIX systems when -q32 mode is used. - The error is due to insufficient memory requested. Request a large amount - of runtime memory by setting the following environment variable for more - memory. - LDR_CNTRL=MAXDATA=0x20000000@DSA - AKC - 2009/10/31 - -* The PathScale MPI implementation, accessing a Panasas file system, would - cause H5Fcreate() with H5F_ACC_EXCL to fail even when the file is not - existing. This is due to the MPI_File_open() call failing if the amode has - the MPI_MODE_EXCL bit set. (See bug 1468 for details.) AKC - 2009/8/11 - -* Parallel tests failed with 16 processes with data inconsistency at testphdf5 - / dataset_readAll. Parallel tests also failed with 32 and 64 processes with - collective abort of all ranks at t_posix_compliant / allwrite_allread_blocks - with MPI IO. CMC - 2009/04/28 - -* For gcc v4.3 and v4.4, with production mode, if -O3 is used, H5Tinit.c - would fail to compile. Actually bad H5Tinit.c is produced. If -O (same - as -O1) is used, H5Tinit.c compiled okay but test/dt_arith would fail. - When -O0 (no optimizatio) is used, H5Tinit.c compilete okay and all - tests passed. Therefore, -O0 is imposed for v4.3 and v4.4 of gcc. - AKC - 2009/04/20 - -* For Red Storm, a Cray XT3 system, the tools/h5ls/testh5ls.sh and - tools/h5copy/testh5copy.sh will fail some of its sub-tests. These sub-tests - are expected to fail and should exit with a non-zero code but the yod - command does not propagate the exit code of the executables. 
Yod always - returns 0 if it can launch the executable. The test suite shell expects - a non-zero for this particular test, therefore it concludes the test has - failed when it receives 0 from yod. Skip all the "failing" test for now - by changing them as following. - - ======== Original tools/h5ls/testh5ls.sh ========= - TOOLTEST tgroup-1.ls 1 -w80 -r -g tgroup.h5 - ======== Change to =============================== - echo SKIP TOOLTEST tgroup-1.ls 1 -w80 -r -g tgroup.h5 - ================================================== - - ======== Original tools/h5copy/testh5copy.sh ========= - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d grp_rename - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d /grp_rename/grp_dsets - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_dsets -d /E/F/grp_dsets - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_nested -d /G/H/grp_nested - H5LSTEST $FILEOUT - ======== Change to =============================== - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d grp_rename - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d /grp_rename/grp_dsets - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_dsets -d /E/F/grp_dsets - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_nested -d /G/H/grp_nested - echo SKIP H5LSTEST $FILEOUT - ================================================== - AKC - 2008/11/10 - -* For Red Storm, a Cray XT3 system, the yod command sometimes gives the - message, "yod allocation delayed for node recovery". This interferes with - test suites that do not expect seeing this message. See the section of "Red - Storm" in file INSTALL_parallel for a way to deal with this problem. - AKC - 2008/05/28 - -* On Intel 64 Linux cluster (RH 4, Linux 2.6.9) with Intel 10.0 compilers, - use -mp -O1 compilation flags to build the libraries. A higher level of - optimization causes failures in several HDF5 library tests. 
- -* On mpich 1.2.5 and 1.2.6, if more than two processes contribute no IO and - the application asks to do collective IO, we have found that when using 4 - processors, a simple collective write will sometimes be hung. This can be - verified with t_mpi test under testpar. - -* A dataset created or rewritten with a v1.6.3 library or after cannot be read - with the v1.6.2 library or before when the Fletcher32 EDC filter is enabled. - There was a bug in the calculation of the Fletcher32 checksum in the - library before v1.6.3; the checksum value was not consistent between big- - endian and little-endian systems. This bug was fixed in Release 1.6.3. - However, after fixing the bug, the checksum value was no longer the same as - before on little-endian system. Library releases after 1.6.4 can still read - datasets created or rewritten with an HDF5 library of v1.6.2 or before. - SLU - 2005/6/30 - -* On IBM AIX systems, parallel HDF5 mode will fail some tests with error - messages like "INFO: 0031-XXX ...". This is from the command `poe'. - Set the environment variable MP_INFOLEVEL to 0 to minimize the messages - and run the tests again. - - The tests may fail with messages like "The socket name is already in use," - but HDF5 does not use sockets. This failure is due to problems with the - poe command trying to set up the debug socket. To resolve this problem, - check to see whether there are many old /tmp/s.pedb.* files staying around. - These are sockets used by the poe command and left behind due to failed - commands. First, ask your system administrator to clean them out. - Lastly, request IBM to provide a means to run poe without the debug socket. - -* The --enable-static-exec configure flag will only statically link libraries - if the static version of that library is present. 
If only the shared version - of a library exists (i.e., most system libraries on Solaris, AIX, and Mac, - for example, only have shared versions), the flag should still result in a - successful compilation, but note that the installed executables will not be - fully static. Thus, the only guarantee on these systems is that the - executable is statically linked with just the HDF5 library. - -* There is also a configure error on Altix machines that incorrectly reports - when a version of Szip without an encoder is being used. - -* On FREE-BSD systems when shared libraries are disabled, make install fails - in install-examples with the error '"Makefile", line 635: Need an operator'. - When this error occurs removing or commenting out the line "export - LD_LIBRARY_PATH=$(LL_PATH)" (line 635 in examples/Makefile) will allow make - install to finish installing examples. The problem will be fixed in the - next release. LRK - 2010/05/26 - -* On cobalt, an SGI Altix SMP ia64 system, Intel compiler version 10.1 (which - is the default on that system) does not work properly and results in - failures during make check (in a static build) and make installcheck (during - a shared build). This appears to be a compiler optimization problem. - Reducing optimization by setting CFLAGS to -O1 or below resolves the issue. - Alternatively, using a newer version of the compiler (11.0) also works as - intended. MAM - 2010/06/01 - - -%%%%1.8.4%%%% - - -HDF5 version 1.8.4 released on Tue Nov 10 15:33:14 CST 2009 -================================================================================ - -INTRODUCTION -============ - -This document describes the differences between HDF5-1.8.3 and -HDF5 1.8.4, and contains information on the platforms tested and -known problems in HDF5-1.8.4 -For more details, see the files HISTORY-1_0-1_8_0_rc3.txt -and HISTORY-1_8.txt in the release_docs/ directory of the HDF5 source. 
- -Links to the HDF5 1.8.4 source code, documentation, and additional materials -can be found on the HDF5 web page at: - - http://www.hdfgroup.org/products/hdf5/ - -The HDF5 1.8.4 release can be obtained from: - - http://www.hdfgroup.org/HDF5/release/obtain5.html - -User documentation for 1.8.4 can be accessed directly at this location: - - http://www.hdfgroup.org/HDF5/doc/ - -New features in the HDF5-1.8.x release series, including brief general -descriptions of some new and modified APIs, are described in the "What's New -in 1.8.0?" document: - - http://www.hdfgroup.org/HDF5/doc/ADGuide/WhatsNew180.html - -All new and modified APIs are listed in detail in the "HDF5 Software Changes -from Release to Release" document, in the section "Release 1.8.4 (current -release) versus Release 1.8.3": - - http://www.hdfgroup.org/HDF5/doc/ADGuide/Changes.html - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS -======== - -- New Features -- Support for New Platforms, Languages, and Compilers -- Bug Fixes since HDF5-1.8.3 -- Platforms Tested -- Supported Configuration Features Summary -- Known Problems - - -New Features -============ - - Configuration - ------------- - - Configuration suite now uses Automake 1.11 and Autoconf 2.64. - MAM 2009/08/31. - - Changed default Gnu fortran compiler from g95 to gfortran since - gfortran is more likely installed with gcc now. -AKC 2009/07/19- - - Library - ------- - - The embedded library information is displayed by H5check_version() if a - version mismatch is detected. Also changed H5check_version() to - suppress the warning message totally if $HDF5_DISABLE_VERSION_CHECK is 2 - or higher. (Old behavior treated 3 or higher the same as 1, that is - print a warning and allows the program to continue.) (AKC - 2009/9/28) - - If a user does not care for the extra library information inserted - in the executables, he may turn it off by --disable-embedded-libinfo - during configure.
(AKC - 2009/9/15) - - Parallel Library - ---------------- - - None - - Tools - ----- - - h5diff: h5diff treats two INFINITY values differently. Fixed by checking - (value==expect) before call ABS(...) at h5diff_array.c. This will make - that (INF==INF) is true (INF is treated as a number instead of NaN) - (PC -- 2009/07/28) - - h5diff: add option "--use-system-epsilon" to print difference if - (|a-b| > EPSILON). - Change default to use strict equality (PC -- 2009/09/12) - - High-Level APIs - --------------- - - None - - F90 API - ------- - - Added H5Oopen_by_addr_f MSB - 9/14/09 - - C++ API - ------- - - None - - -Support for New Platforms, Languages, and Compilers -=================================================== - - PathScale compilers are recognized and can build the HDF5 library - properly. AKC - 2009/7/28 - - - -Bug Fixes since HDF5-1.8.3 -========================== - - Configuration - ------------- - - Removed the following config files, as we no longer support them: - config/dec-osf*, config/hpux11.00, config/irix5.x, - config/powerpc-ibm-aix4.x config/rs6000-ibm-aix5.x config/unicos* - MAM - 2009/10/08 - - Modified configure and make process to properly preserve user's CFLAGS - (and company) environment variables. Build will now properly use - automake's AM_CFLAGS for any compiler flags set by the configure - process. Configure will no longer modify CFLAGS directly, nor will - setting CFLAGS during make completely replace what configure has set up. - MAM - 2009/10/08 - - Support for TFLOPS, config/intel-osf1, is removed since the TFLOPS - machine has long retired. AKC - 2009/10/06. - - Added $(EXEEXT) extension to H5detect when it's executed in the - src/Makefile to generate H5Tinit.c so it works correctly on platforms - that require the full extension when running executables.
- MAM - 2009/10/01 - BZ #1613 - - Configure will now set FC and CXX to "no" when fortran and c++ - are not being compiled, respectively, so configure will not run - some of the compiler tests for these languages when they are not - being used. MAM - 2009/10/01 - - The --enable-static-exec flag will now properly place the -static flag - on the link line of all installed executables. This will force the - executable to link with static libraries over shared libraries, provided - the static libraries are available. MAM - 2009/08/31 - BZ #1583 - - The PathScale compiler (v3.2) was mistaken as gcc v4.2.0 but it fails to - recognize some gcc options. Fixed. (see bug 1301). AKC - 2009/7/28 - - - Library - ------- - - Fixed a bug where writing and deleting many global heap objects (i.e. - variable length data) would render the file unreadable. Previously - created files exhibiting this problem should now be readable. - NAF - 2009/10/27 - 1483 - - Fixed error in library's internal caching mechanisms which could cause - an assertion failure (and attendant core dump) when encountering an - unusually formatted file. (QAK - 2009/10/13) - - Fixed incorrect return value for H5Pget_preserve. AKC - 2009/10/08 - 1628 - - Fixed an assertion failure that occurred when H5Ocopy was called on a - dataset using a vlen inside a compound. NAF - 2009/10/02 - 1597 - - Fixed incorrect return value for H5Pget_filter_by_id1/2 in H5Ppublic.h. - NAF - 2009/09/25 - 1620 - - Fixed a bug where properties weren't being compared with the registered - compare callback. NAF - 2009/09/25 - 1555 - - Corrected problem where library would re-write the superblock in a file - opened for R/W access, even when no changes were made to the file. - (QAK - 2009/08/20, Bz#1473) - - Fixed a bug where H5Pget_filter_by_id would succeed when called for a - filter that wasn't present. NAF - 2009/06/25 - 1250 - - Fixed an issue with committed compound datatypes containing a vlen. 
Also - fixed memory leaks involving committed datatypes. NAF - 2009/06/10 - 1593 - - Parallel Library - ---------------- - - None - - Tools - ----- - - h5dump/h5ls display buffer resize fixed in tools library. - ADB - 2009/7/21 - 1520 - - perf_serial test added to Windows projects and check batch file. - ADB - 2009/06/11 -1504 - - - F90 API - ------ - - Fixed bug in h5lget_info_by_idx_f by adding missing arguments, - consequently changing the API. New API is: - - SUBROUTINE h5lget_info_by_idx_f(loc_id, group_name, index_field, order, n, & - link_type, f_corder_valid, corder, cset, address, val_size, hdferr, lapl_id) - - MSB - 2009/9/17 - 1652 - - - Corrected the values for the H5L_flags FORTRAN constants: - H5L_LINK_ERROR_F, H5L_LINK_HARD_F, H5L_LINK_SOFT_F, H5L_LINK_EXTERNAL_F - MSB - 2009-09-17 - 1653 - - - Added FORTRAN equivalent of C constant H5T_ORDER_NONE: H5T_ORDER_NONE_F - MSB - 2009-9-24 - 1471 - - C++ API - ------ - - None - - High-Level APIs: - ------ - - Fixed a bug where the H5TB API would forget the order of fields when added - out of offset order. NAF - 2009/10/27 - 1582 - - H5DSis_attached failed to account for different platform types. Added a - get native type call. ADB - 2009/9/29 - 1562 - - Fortran High-Level APIs: - ------ - - Lite: the h5ltread_dataset_string_f and h5ltget_attribute_string_f functions - had memory problems with the g95 fortran compiler. (PVN - 5/13/2009) 1522 - - - -Platforms Tested -================ -The following platforms and compilers have been tested for this release.
- - AIX 5.3 xlc 7.0.0.8 - (LLNL Up) xlf 09.01.0000.0008 - xlC 7.0.0.8 - mpcc_r 7.0.0.8 - mpxlf_r 09.01.0000.0008 - - Cray XT3 (2.0.41) cc (pgcc) 7.1-4 - (SNL red storm) ftn (pgf90) 7.1-4 - CC (pgCC) 7.1-4 - - FreeBSD 6.3-STABLE i386 gcc 3.4.6 [FreeBSD] 20060305 - (duty) g++ 3.4.6 [FreeBSD] 20060305 - gcc 4.3.5 20091004 - g++ 4.3.5 20091004 - gfortran 4.3.5 20091004 - - FreeBSD 6.3-STABLE amd64 gcc 3.4.6 [FreeBSD] 20060305 - (liberty) g++ 3.4.6 [FreeBSD] 20060305 - gcc 4.4.2 20091006 - g++ 4.4.2 20091006 - gfortran 4.4.2 20091006 - - Linux 2.6.18-164.el5 gcc (GCC) 4.1.2 20080704 - #1 SMP i686 i686 i386 G95 (GCC 4.0.3 (g95 0.92!) Jun 24 2009) - (jam) GNU Fortran (GCC) 4.1.2 20080704 - (Red Hat 4.1.2-46) - PGI C, Fortran, C++ 8.0-5 32-bit - PGI C, Fortran, C++ 8.0-1 32-bit - Intel(R) C Compiler for 32-bit - applications, Versions 11.0, 11.1 - Intel(R) C++ Compiler for 32-bit - applications, Version 11.0, 11.1 - Intel(R) Fortran Compiler for 32-bit - applications, Version 11.0, 11.1 - Absoft 32-bit Fortran 95 10.0.7 - MPICH mpich2-1.0.8 compiled with - gcc (GCC) 4.1.2 and G95 - (GCC 4.0.3 (g95 0.92!) - - Linux 2.6.18-164.el5 #1 SMP gcc 4.1.2 20080704 - x86_64 GNU/Linux G95 (GCC 4.0.3 (g95 0.92!) Jun 24 2009) - (amani) tested for both 32- and 64-bit binaries - Intel(R) C, C++, Fortran Compilers for - applications running on Intel(R) 64, - Versions 11.1. - PGI C, Fortran, C++ Version 9.0-4 - for 64-bit target on x86-64 - gcc 4.1.2 and G95 (GCC 4.0.3 (g95 0.92!) - MPICH mpich2-1.0.8 compiled with - gcc 4.1.2 and G95 (GCC 4.0.3 (g95 0.92!) 
- GNU Fortran (GCC) 4.1.2 20080704 - (Red Hat 4.1.2-46) - - - Linux 2.6.16.60-0.42.5 #1 Intel(R) C++ Version 10.1.017 - SGI Altix SMP ia64 Intel(R) Fortran Itanium(R) Version 10.1.017 - (cobalt) SGI MPI 1.38 - - SunOS 5.10 32- and 64-bit Sun C 5.9 SunOS_sparc Patch 124867-11 2009/04/30 - (linew) Sun Fortran 95 8.3 SunOS_sparc - Patch 127000-11 2009/10/06 - Sun C++ 5.9 SunOS_sparc - Patch 124863-16 2009/09/15 - - Intel Xeon Linux 2.6.18- Intel(R) C++ Version 10.0.026 - 92.1.10.el5_lustre.1.6.6smp- Intel(R) Fortran Compiler Version 10.0.026 - perfctr #6 SMP Open MPI 1.2.2 - (abe) MVAPICH2-0.9.8p28p2patched-intel-ofed-1.2 - compiled with icc v10.0.026 and ifort 10.0.026 - - IA-64 Linux 2.4.21-309.tg1 gcc (GCC) 3.2.2 - #1 SMP ia64 Intel(R) C++ Version 8.1.037 - (NCSA tg-login) Intel(R) Fortran Compiler Version 8.1.033 - mpich-gm-1.2.7p1..16-intel-8.1.037-r1 - - Linux 2.6.9-55.0.9.EL_lustre Intel(R) C, C++, Fortran Compilers for - .1.4.11.1smp #1 SMP applications running on Intel(R) 64, - SMP x86_64 GNU/Linux Versions 10.1. - (SNL Thunderbird) - - Linux 2.6.18-76chaos #1 SMP Intel(R) C, C++, Fortran Compilers for - SMP x86_64 GNU/Linux applications running on Intel(R) 64, - (SNL Glory) Versions 10.1. - - Windows XP Visual Studio 2005 w/ Intel Fortran 9.1 - Cygwin(native gcc compiler and g95) - - Windows XP x64 Visual Studio 2005 w/ Intel Fortran 9.1 - - Windows Vista Visual Studio 2005 w/ Intel Fortran 9.1 - - Windows Vista x64 Visual Studio 2005 w/ Intel Fortran 9.1 - - MAC OS 10.5.6 (Intel) i686-apple-darwin9-gcc-4.0.1 (GCC) 4.0.1 - GNU Fortran (GCC) 4.3.0 20070810 - G95 (GCC 4.0.3 (g95 0.91!) 
Apr 24 2008) - Intel C, C++ and Fortran compilers 10.1 - - -Supported Configuration Features Summary -======================================== - - In the tables below - y = tested and supported - n = not supported or not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90 F90 C++ zlib SZIP - parallel parallel -Solaris2.10 32-bit n y n y y y -Solaris2.10 64-bit n y n y y y -Windows XP n y(4) n(4) y y y -Windows XP x64 n y(4) n(4) y y y -Windows Vista n n n y y y -Mac OS X 10.5 Intel n y n y y y -AIX 5.3 32- and 64-bit n y n y y n -FreeBSD 6.3-STABLE 32&64 bit n y n y y y -RedHat EL5 2.6.18-164 i686 GNU (1)W y y(2) y y y y -RedHat EL5 2.6.18-164 i686 Intel W n y n y y n -RedHat EL5 2.6.18-164 i686 PGI W n y n y y n -RedHat EL5 2.6.18-164 x86_64 GNU(1)W y y(3) y y y y -RedHat EL5 2.6.18-164 x86_64 IntelW n y n y y n -RedHat EL5 2.6.18-164 x86_64 PGI W n y n y y y -SuSe Linux 2.6.16 SGI Altix ia64 C y y y y y y -RedHat EL4 2.6.18 Xeon Lustre C y y y y y n -SuSe Linux 2.4.21 ia64 Intel C y y y y y n -Cray XT3 2.0.62 y y y y y n - - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -Solaris2.10 32-bit y y y y -Solaris2.10 64-bit y y y y -Windows XP y y(4) y y -Windows XP x64 y y(4) y y -Windows Vista y n n y -Mac OS X 10.5 y n y n -AIX 5.3 32- and 64-bit n n n n -FreeBSD 6.3-STABLE 32&64 bit y y y y -RedHat EL5 2.6.18-164 i686 GNU (1)W y y(2) y y -RedHat EL5 2.6.18-164 i686 Intel W y y y n -RedHat EL5 2.6.18-164 i686 PGI W y y y n -RedHat EL5 2.6.18-164 x86_64 GNU(1)W y y y y -RedHat EL5 2.6.18-164 x86_64 IntelW y y y n -RedHat EL5 2.6.18-164 x86_64 PGI W y y y n -SuSe Linux 2.6.16 SGI Altix ia64 C y n -RedHat EL4 2.6.18 Xeon Lustre C y y y n -SuSe Linux 2.4.21 ia64 Intel C y y y n -Cray XT3 2.0.62 n n n n - - (1) Fortran compiled with g95. - (2) With PGI and Absoft compilers. 
- (3) With PGI compiler for Fortran. - (4) Using Visual Studio 2005 or Cygwin - Compiler versions for each platform are listed in the preceding - "Platforms Tested" table. - - -Known Problems -============== -* Parallel mode in AIX will fail some of the testcheck_version.sh tests where - it treats "exit(134)" the same as if process 0 had received an abort signal. - This is fixed and will be available in the next release. AKC - 2009/11/3 - -* Some tests in tools/h5repack may fail in AIX systems when -q32 mode is used. - The error is due to insufficient memory requested. Request a large amount - of runtime memory by setting the following environment variable for more - memory. - LDR_CNTRL=MAXDATA=0x20000000@DSA - AKC - 2009/10/31 - -* The PathScale MPI implementation, accessing a Panasas file system, would - cause H5Fcreate() with H5F_ACC_EXCL to fail even when the file is not - existing. This is due to the MPI_File_open() call failing if the amode has - the MPI_MODE_EXCL bit set. (See bug 1468 for details.) AKC - 2009/8/11 - -* Parallel tests failed with 16 processes with data inconsistency at testphdf5 - / dataset_readAll. Parallel tests also failed with 32 and 64 processes with - collective abort of all ranks at t_posix_compliant / allwrite_allread_blocks - with MPI IO. CMC - 2009/04/28 - -* There is a known issue in which HDF5 will change the timestamp on a file - simply by opening it with read/write permissions, even if the file is not - modified in any way. This is due to the way in which HDF5 manages the file - superblock. A fix is currently underway and should be included in the 1.8.4 - release of HDF5. MAM - 2009/04/28 - -* For gcc v4.3 and v4.4, with production mode, if -O3 is used, H5Tinit.c - would fail to compile. Actually bad H5Tinit.c is produced. If -O (same - as -O1) is used, H5Tinit.c compiled okay but test/dt_arith would fail. - When -O0 (no optimization) is used, H5Tinit.c compiled okay and all - tests passed.
Therefore, -O0 is imposed for v4.3 and v4.4 of gcc. - AKC - 2009/04/20 - -* For Red Storm, a Cray XT3 system, the tools/h5ls/testh5ls.sh and - tools/h5copy/testh5copy.sh will fail some of its sub-tests. These sub-tests - are expected to fail and should exit with a non-zero code but the yod - command does not propagate the exit code of the executables. Yod always - returns 0 if it can launch the executable. The test suite shell expects - a non-zero for this particular test, therefore it concludes the test has - failed when it receives 0 from yod. Skip all the "failing" test for now - by changing them as following. - - ======== Original tools/h5ls/testh5ls.sh ========= - TOOLTEST tgroup-1.ls 1 -w80 -r -g tgroup.h5 - ======== Change to =============================== - echo SKIP TOOLTEST tgroup-1.ls 1 -w80 -r -g tgroup.h5 - ================================================== - - ======== Original tools/h5copy/testh5copy.sh ========= - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d grp_rename - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d /grp_rename/grp_dsets - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_dsets -d /E/F/grp_dsets - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_nested -d /G/H/grp_nested - H5LSTEST $FILEOUT - ======== Change to =============================== - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d grp_rename - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d /grp_rename/grp_dsets - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_dsets -d /E/F/grp_dsets - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_nested -d /G/H/grp_nested - echo SKIP H5LSTEST $FILEOUT - ================================================== - AKC - 2008/11/10 - -* For Red Storm, a Cray XT3 system, the yod command sometimes gives the - message, "yod allocation delayed for node recovery". This interferes with - test suites that do not expect seeing this message. 
See the section of "Red - Storm" in file INSTALL_parallel for a way to deal with this problem. - AKC - 2008/05/28 - -* On Intel 64 Linux cluster (RH 4, Linux 2.6.9) with Intel 10.0 compilers, - use -mp -O1 compilation flags to build the libraries. A higher level of - optimization causes failures in several HDF5 library tests. - -* On mpich 1.2.5 and 1.2.6, if more than two processes contribute no IO and - the application asks to do collective IO, we have found that when using 4 - processors, a simple collective write will sometimes be hung. This can be - verified with t_mpi test under testpar. - -* A dataset created or rewritten with a v1.6.3 library or after cannot be read - with the v1.6.2 library or before when the Fletcher32 EDC filter is enabled. - There was a bug in the calculation of the Fletcher32 checksum in the - library before v1.6.3; the checksum value was not consistent between big- - endian and little-endian systems. This bug was fixed in Release 1.6.3. - However, after fixing the bug, the checksum value was no longer the same as - before on little-endian system. Library releases after 1.6.4 can still read - datasets created or rewritten with an HDF5 library of v1.6.2 or before. - SLU - 2005/6/30 - -* On IBM AIX systems, parallel HDF5 mode will fail some tests with error - messages like "INFO: 0031-XXX ...". This is from the command `poe'. - Set the environment variable MP_INFOLEVEL to 0 to minimize the messages - and run the tests again. - - The tests may fail with messages like "The socket name is already in use", - but HDF5 does not use sockets. This failure is due to problems with the - poe command trying to set up the debug socket. To resolve this problem, - check to see whether there are many old /tmp/s.pedb.* files staying around. - These are sockets used by the poe command and left behind due to failed - commands. First, ask your system administrator to clean them out. 
- Lastly, request IBM to provide a means to run poe without the debug socket. - -* The --enable-static-exec configure flag will only statically link libraries - if the static version of that library is present. If only the shared version - of a library exists (i.e., most system libraries on Solaris, AIX, and Mac, - for example, only have shared versions), the flag should still result in a - successful compilation, but note that the installed executables will not be - fully static. Thus, the only guarantee on these systems is that the - executable is statically linked with just the HDF5 library. - -* There is also a configure error on Altix machines that incorrectly reports - when a version of Szip without an encoder is being used. - -%%%%1.8.3%%%% - - -HDF5 version 1.8.3 released on Mon May 4 09:21:00 CDT 2009 -================================================================================ - -INTRODUCTION -============ - -This document describes the differences between HDF5-1.8.2 and -HDF5 1.8.3, and contains information on the platforms tested and -known problems in HDF5-1.8.3. -For more details, see the files HISTORY-1_0-1_8_0_rc3.txt -and HISTORY-1_8.txt in the release_docs/ directory of the HDF5 source. - -Links to the HDF5 1.8.3 source code, documentation, and additional materials -can be found on the HDF5 web page at: - - http://www.hdfgroup.org/products/hdf5/ - -The HDF5 1.8.3 release can be obtained from: - - http://www.hdfgroup.org/HDF5/release/obtain5.html - -User documentation for 1.8.3 can be accessed directly at this location: - - http://www.hdfgroup.org/HDF5/doc/ - -New features in the HDF5-1.8.x release series, including brief general -descriptions of some new and modified APIs, are described in the "What's New -in 1.8.0?" 
document: - - http://www.hdfgroup.org/HDF5/doc/ADGuide/WhatsNew180.html - -All new and modified APIs are listed in detail in the "HDF5 Software Changes -from Release to Release" document, in the section "Release 1.8.3 (current -release) versus Release 1.8.2": - - http://www.hdfgroup.org/HDF5/doc/ADGuide/Changes.html - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS -======== - -- New Features -- Support for New Platforms, Languages, and Compilers -- Bug Fixes since HDF5-1.8.2 -- Platforms Tested -- Supported Configuration Features Summary -- Known Problems - - -New Features -============ - - Configuration - ------------- - - Added libtool version numbers to generated c++, fortran, and - hl libraries. MAM 2009/04/19. - - Regenerated Makefile.ins using Automake 1.10.2. MAM 2009/04/19. - - Added a Make target of check-all-install to test the correctness of - installing via the prefix= or $DESTDIR options. AKC - 2009/04/14 - - Library - ------- - - Embed the content of libhdf5.settings into the hdf5 executables - so that an "orphaned" executables can display (via the Unix - strings command, for example) the library settings used to build - the executables. This is a prototype implementation. Improvement will - be added in next release. AKC - 2009/04/20 - - Separated "factory" free list class from block free lists. These free - lists are dynamically created and manage blocks of a fixed size. - H5set_free_list_limits() will use the same settings specified for block - free lists for factory free lists. NAF - 2009/04/08 - - Added support for dense attributes to H5Ocopy. XCao/NAF - 2009/01/29 - - Added H5Pset_elink_cb and H5Pget_elink_cb functions to support a - user-defined callback function for external link traversal. 
- NAF - 2009/01/08 - - Added H5Pset_elink_acc_flags and H5Pget_elink_acc_flags functions to - allow the user to specify the file access flags used to open the target - file of an external link. NAF - 2009/01/08 - - Added H5Pset_chunk_cache() and H5Pget_chunk_cache() functions to allow - individual rdcc configuration for each dataset. Added - H5Dget_access_plist() function to retrieve a dataset access property - list from a dataset. NAF - 2008/11/12 - - Added H5Iis_valid() function to check if an id is valid without - producing an error message. NAF - 2008/11/5 - - Added code to maintain a min_clean_fraction in the metadata cache when - in serial mode. MAM - 2009/01/9 - - Parallel Library - ---------------- - - Modified parallel tests to run with arbitrary number of processes. The - modified tests are testphdf5 (parallel dataset access), t_chunk_alloc - (chunk allocation), and t_posix_compliant (posix compliance). The rest of - the parallel tests already use in the code the number of processes - available in the communicator. (CMC - 2009/04/28) - - Tools - ----- - - h5diff new flag, -c, --compare, list objects that are not comparable. - PVN - 2009/4/2 - 1368 - - h5diff new flag, -N, --nan, avoids NaNs detection. PVN - 2009/4/2 - - h5dump correctly specifies XML dtd / schema urls ADB - 2009/4/3 - 1519 - - h5repack now handles group creation order. PVN - 2009/4/2 - 1402 - - h5repack: When user doesn't specify a chunk size, h5repack now - defines a default chunk size as the same size of the size of the - hyperslab used to read the chunks. The size of the hyperslabs are - defined as the size of each dimension or a predefined constant, - whatever is smaller. This assures that the chunk read fits in the - chunk cache. PVN - 2008/11/21 - - High-Level APIs - --------------- - - Table: In version 3.0 of Table, the writing of the "NROWS" attribute - (used to store number of records) was deprecated. 
PVN - 2008/11/24 - - F90 API - ------- - - Added for the C APIs the Fortran wrappers: - h5dget_access_plist_f - h5iis_valid_f - h5pset_chunk_cache_f - h5pget_chunk_cache_f - MSB - 2009/04/17 - - C++ API - ------- - - None - - -Support for New Platforms, Languages, and Compilers -=================================================== - - -Bug Fixes since HDF5-1.8.2 -========================== - - Configuration - ------------- - - The --includedir=DIR configuration option now works as intended, and - can be used to specify the location to install C header files. The - default location remains unchanged, residing at ${prefix}/include. - MAM - 2009/03/10 - BZ #1381 - - Configure no longer removes the '-g' flag from CFLAGS when in production - mode if it has been explicitly set in the CFLAGS environment variable - prior to configuration. MAM - 2009/03/09 - BZ #1401 - - Library - ------- - - Added versioning to H5Z_class_t struct to allow compatibility with 1.6 - API. NAF - 2009/04/20 - 1533 - - Fixed a problem with using data transforms with non-native types in the - file. NAF - 2009/04/20 - 1548 - - Added direct.h include file to windows section of H5private.h - to fix _getcwd() warning. ADB - 2009/04/14 - 1536 - - Fixed a bug that prevented external links from working after calling - H5close(). NAF - 2009/04/10 - 1539 - - Modified library to write cached symbol table information to the - superblock, to allow library versions 1.3.0 to 1.6.3 to read files created - by this version. NAF - 2009/04/08 - 1423 - - Changed skip lists to use a deterministic algorithm. The library should - now never call rand() or srand(). NAF - 2009/04/08 - 503 - - Fixed a bug where H5Lcopy and H5Lmove wouldn't create intermediate groups - when that property was set. NAF - 2009/04/07 - 1526 - - Fixed a bug that caused files with a user block to grow by the size of the - user block every time they were opened. 
NAF - 2009/03/26 - 1499 - - Fixed a rare problem that could occur with files using the old (pre 1.4) - array datatype. NAF - 2009/03/23 - - Modified library to be able to open files with corrupt root group symbol - table messages, and correct these errors if they are found. Such files - can only be successfully opened with write access. NAF - 2009/03/23 - 1189 - - Removed the long_long #define and replaced all instances with - "long long". This caused problems with third party products. All - currently supported compilers support the type. ADB - 2009/03/05 - - Fixed various bugs that could prevent the fill value from being written - in certain rare cases. NAF - 2009/02/26 - 1469 - - Fixed a bug that prevented more than one dataset chunk from being cached - at a time. NAF - 2009/02/12 - 1015 - - Fixed an assertion failure caused by opening an attribute multiple times - through multiple file handles. NAF - 2009/02/12 - 1420 - - Fixed a problem that could prevent the user from adding attributes (or any - object header message) in some circumstances. NAF - 2009/02/12 - 1427 - - Fixed a bug that could cause problems when an attribute was added to a - committed datatype using the committed datatype's datatype. - NAF - 2009/02/12 - - Fixed a bug that could cause problems when copying an object with a shared - message in its own object header. NAF - 2009/01/29 - - Changed H5Tset_order to properly reject H5T_ORDER_NONE for most datatypes. - NAF - 2009/01/27 - 1443 - - Fixed a bug where H5Tpack wouldn't remove trailing space from an otherwise - packed compound type. NAF - 2009/01/14 - - Fixed up some old v2 btree assertions that get run in debug mode that - were previously failing on compilation, and removed some of the - more heavily outdated and non-rewritable ones. MAM - 2008/12/15 - - Fixed a bug that could cause problems when "automatically" unmounting - multiple files. NAF - 2008/11/17 - - H5Dset_extent: when shrinking dimensions, some chunks were not deleted. 
- PVN - 2009/01/8 - - Parallel Library - ---------------- - - None - - Tools - ----- - - Fixed many problems that could occur when using h5repack with named - datatypes. NAF - 2009/4/20 - 1516/1466 - - h5dump, h5diff, h5repack were not reading (by hyperslabs) datasets - that have a datatype datum size greater than H5TOOLS_BUFSIZE, a constant - defined as 1024Kb, such as array types with large dimensions. - PVN - 2009/4/1 - 1501 - - h5import: By selecting a compression type, a big endian byte order - was being selected. PVN - 2009/3/11 - 1462 - - zip_perf.c had missing argument on one of the open() calls. Fixed. - AKC - 2008/12/9 - - F90 API - ------ - - None - - C++ API - ------ - - None - - High-Level APIs: - ------ - - Dimension scales: The scale index return value in H5DSiterate_scales - was not always incremented. PVN - 2009/4/8 - 1538 - - Fortran High-Level APIs: - ------ - - Lite: The h5ltget_dataset_info_f function (gets information about - a dataset) was not correctly returning the dimension array - PVN - 2009/3/23 - - -Platforms Tested -================ -The following platforms and compilers have been tested for this release. - - AIX 5.3 xlc 7.0.0.8 - (LLNL Up) xlf 09.01.0000.0008 - xlC 7.0.0.8 - mpcc_r 7.0.0.8 - mpxlf_r 09.01.0000.0008 - - Cray XT3 (2.0.41) cc (pgcc) 7.1-4 - (SNL red storm) ftn (pgf90) 7.1-4 - CC (pgCC) 7.1-4 - - FreeBSD 6.3-STABLE i386 gcc 3.4.6 [FreeBSD] 20060305 - (duty) g++ 3.4.6 [FreeBSD] 20060305 - gcc 4.3.4 20090419 - g++ 4.3.4 20090419 - gfortran 4.3.4 20090419 - - FreeBSD 6.3-STABLE amd64 gcc 3.4.6 [FreeBSD] 20060305 - (liberty) g++ 3.4.6 [FreeBSD] 20060305 - gcc 4.4.1 20090421 - g++ 4.4.1 20090421 - gfortran 4.4.1 20090421 - - IRIX64 6.5 (64 & n32) MIPSpro cc 7.4.4m - F90 MIPSpro 7.4.4m - C++ MIPSpro cc 7.4.4m - - Linux 2.6.18-128.1.6.el5xen gcc (GCC) 4.1.2 - #1 SMP i686 i686 i386 G95 (GCC 4.0.3 (g95 0.92!) 
Feb 4 2009) - (jam) PGI C, Fortran, C++ 7.2-1 32-bit - PGI C, Fortran, C++ 8.0-1 32-bit - Intel(R) C Compiler for 32-bit - applications, Versions 10.1, 11.0 - Intel(R) C++ Compiler for 32-bit - applications, Version 10.1, 11.0 - Intel(R) Fortran Compiler for 32-bit - applications, Version 10.1, 11.0 - Absoft 32-bit Fortran 95 10.0.7 - MPICH mpich2-1.0.8 compiled with - gcc 4.1.2 and G95 (GCC 4.0.3 (g95 0.92!) - - Linux 2.6.9-42.0.10.ELsmp #1 gcc (GCC) 3.4.6 - SMP i686 i686 i386 G95 (GCC 4.0.3 (g95 0.92!) Feb 4 2009) - (kagiso) MPICH mpich2-1.0.8 compiled with - gcc 3.4.6 and G95 (GCC 4.0.3 (g95 0.92!) - - Linux 2.6.16.60-0.37-smp #1 gcc 4.1.2 - SMP x86_64 GNU/Linux G95 (GCC 4.0.3 (g95 0.92!) Feb 4 2009) - (smirom) Intel(R) C, C++, Fortran Compilers for - applications running on Intel(R) 64, - Versions 10.1, 11.0. - PGI C, Fortran, C++ Version 7.2-1, 8.0-1 - for 64-bit target on x86-64 - gcc 4.1.2 and G95 (GCC 4.0.3 (g95 0.92!) - MPICH mpich2-1.0.8 compiled with - gcc 4.1.2 and G95 (GCC 4.0.3 (g95 0.92!) 
- tested for both 32- and 64-bit binaries - - Linux 2.6.16.54-0.2.5 #1 Intel(R) C++ Version 10.1.017 - SGI Altix SMP ia64 Intel(R) Fortran Itanium(R) Version 10.1.017 - (cobalt) SGI MPI 1.38 - - SunOS 5.10 32- and 64-bit Sun WorkShop 6 update 2 C 5.9 Patch 124867-09 - (linew) Sun WorkShop 6 update 2 Fortran 95 8.3 - Patch 127000-07 - Sun WorkShop 6 update 2 C++ 5.8 - Patch 124863-11 - - Intel Xeon Linux 2.6.18- gcc 3.4.6 20060404 - 92.1.10.el5_lustre.1.6.6smp- Intel(R) C++ Version 10.0.026 - perfctr #2 SMP Intel(R) Fortran Compiler Version 10.0.026 - (abe) Open MPI 1.2.2 - MVAPICH2-0.9.8p28p2patched-intel-ofed-1.2 - compiled with icc v10.0.026 and ifort 10.0.026 - - IA-64 Linux 2.4.21-309.tg1 gcc (GCC) 3.2.2 - #1 SMP ia64 Intel(R) C++ Version 8.1.037 - (NCSA tg-login) Intel(R) Fortran Compiler Version 8.1.033 - mpich-gm-1.2.7p1..16-intel-8.1.037-r1 - - Linux 2.6.9-55.0.9.EL_lustre Intel(R) C, C++, Fortran Compilers for - .1.4.11.1smp #1 SMP applications running on Intel(R) 64, - SMP x86_64 GNU/Linux Versions 9.1. - (SNL Spirit) - - Linux 2.6.9-55.0.9.EL_lustre Intel(R) C, C++, Fortran Compilers for - .1.4.11.1smp #1 SMP applications running on Intel(R) 64, - SMP x86_64 GNU/Linux Versions 10.1. - (SNL Thunderbird) - - Linux 2.6.18-63chaos #1 SMP Intel(R) C, C++, Fortran Compilers for - SMP x86_64 GNU/Linux applications running on Intel(R) 64, - (SNL Glory) Versions 10.1. - - Linux 2.6.18-63chaos #1 SMP Intel(R) C, C++, Fortran Compilers for - SMP x86_64 GNU/Linux applications running on Intel(R) 64, - (LLNL Zeus) Versions 9.1. - gcc/gfortran/g++ (GCC) 4.1.2. - - Windows XP Visual Studio .NET - Visual Studio 2005 w/ Intel Fortran 9.1 - Cygwin(native gcc compiler and g95) - - Windows XP x64 Visual Studio 2005 w/ Intel Fortran 9.1 - - Windows Vista Visual Studio 2005 - - MAC OS 10.5.6 (Intel) i686-apple-darwin9-gcc-4.0.1 (GCC) 4.0.1 - GNU Fortran (GCC) 4.3.0 20070810 - G95 (GCC 4.0.3 (g95 0.91!) 
Apr 24 2008) - Intel C, C++ and Fortran compilers 10.1 - - -Supported Configuration Features Summary -======================================== - - In the tables below - y = tested and supported - n = not supported or not tested in this release - C = Cluster - W = Workstation - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90 F90 C++ zlib SZIP - parallel parallel -Solaris2.10 32-bit n y n y y y -Solaris2.10 64-bit n y n y y y -IRIX64_6.5 32-bit n n n n y y -IRIX64_6.5 64-bit n y y y y y -Windows XP n y(4) n(4) y y y -Windows XP x64 n y(4) n(4) y y y -Windows Vista n n n y y y -Mac OS X 10.5 Intel n y n y y y -AIX 5.3 32- and 64-bit n y n y y n -FreeBSD 6.3-STABLE 32&64 bit n y n y y y -RedHat EL4 2.6.9-42 i686 GNU (1) W y y y y y y -RedHat EL5 2.6.18-128 i686 GNU (1)W y y(2) y y y y -RedHat EL5 2.6.18-128 i686 Intel W n y n y y n -RedHat EL5 2.6.18-128 i686 PGI W n y n y y n -SuSe Linux 2.6.16 x86_64 GNU (1) W y y(3) y y y y -SuSe Linux 2.6.16 x86_64 Intel W n y n y y n -SuSe Linux 2.6.16 x86_64 PGI W n y n y y y -SuSe Linux 2.6.16 SGI Altix ia64 C y y y y y y -RedHat EL4 2.6.18 Xeon Lustre C y y y y y n -SuSe Linux 2.4.21 ia64 Intel C y y y y y n -Cray XT3 2.0.41 y y y y y n - - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -Solaris2.10 32-bit y y y y -Solaris2.10 64-bit y y y y -IRIX64_6.5 32-bit y dna y y -IRIX64_6.5 64-bit y y n y -Windows XP y y(4) y y -Windows XP x64 y y(4) y y -Windows Vista y n n y -Mac OS X 10.5 y n y n -AIX 5.3 32- and 64-bit n n n n -FreeBSD 6.3-STABLE 32&64 bit y n y y -RedHat EL4 2.6.9-42 i686 GNU (1) W y y y y -RedHat EL5 2.6.18-128 i686 GNU (1)W y y(2) y y -RedHat EL5 2.6.18-128 i686 Intel W y y y n -RedHat EL5 2.6.18-128 i686 PGI W y y y n -SuSe Linux 2.6.16 x86_64 GNU (1) W y y y y -SuSe Linux 2.6.16 x86_64 Intel W y y y n -SuSe Linux 2.6.16 x86_64 PGI W y y y n -SuSe Linux 2.6.16 SGI Altix 
ia64 C y n -RedHat EL4 2.6.18 Xeon Lustre C y y y n -SuSe Linux 2.4.21 ia64 Intel C y y y n -Cray XT3 2.0.41 n n n n - - (1) Fortran compiled with g95. - (2) With PGI and Absoft compilers. - (3) With PGI compiler for Fortran. - (4) Using Visual Studio 2005 or Cygwin - Compiler versions for each platform are listed in the preceding - "Platforms Tested" table. - - -Known Problems -============== -* Parallel tests failed with 16 processes with data inconsistency at testphdf5 - / dataset_readAll. Parallel tests also failed with 32 and 64 processes with - collective abort of all ranks at t_posix_compliant / allwrite_allread_blocks - with MPI IO. CMC - 2009/04/28 - -* There is a known issue in which HDF5 will change the timestamp on a file - simply by opening it with read/write permissions, even if the file is not - modified in any way. This is due to the way in which HDF5 manages the file - superblock. A fix is currently underway and should be included in the 1.8.4 - release of HDF5. MAM - 2009/04/28 - -* For gcc v4.3 and v4.4, with production mode, if -O3 is used, H5Tinit.c - would fail to compile. Actually bad H5Tinit.c is produced. If -O (same - as -O1) is used, H5Tinit.c compiled okay but test/dt_arith would fail. - When -O0 (no optimizatio) is used, H5Tinit.c compilete okay and all - tests passed. Therefore, -O0 is imposed for v4.3 and v4.4 of gcc. - AKC - 2009/04/20 - -* For Red Storm, a Cray XT3 system, the tools/h5ls/testh5ls.sh and - tools/h5copy/testh5copy.sh will fail some of its sub-tests. These sub-tests - are expected to fail and should exit with a non-zero code but the yod - command does not propagate the exit code of the executables. Yod always - returns 0 if it can launch the executable. The test suite shell expects - a non-zero for this particular test, therefore it concludes the test has - failed when it receives 0 from yod. Skip all the "failing" test for now - by changing them as following. 
- - ======== Original tools/h5ls/testh5ls.sh ========= - TOOLTEST tgroup-1.ls 1 -w80 -r -g tgroup.h5 - ======== Change to =============================== - echo SKIP TOOLTEST tgroup-1.ls 1 -w80 -r -g tgroup.h5 - ================================================== - - ======== Original tools/h5copy/testh5copy.sh ========= - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d grp_rename - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d /grp_rename/grp_dsets - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_dsets -d /E/F/grp_dsets - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_nested -d /G/H/grp_nested - H5LSTEST $FILEOUT - ======== Change to =============================== - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d grp_rename - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d /grp_rename/grp_dsets - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_dsets -d /E/F/grp_dsets - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_nested -d /G/H/grp_nested - echo SKIP H5LSTEST $FILEOUT - ================================================== - AKC - 2008/11/10 - -* For Red Storm, a Cray XT3 system, the yod command sometimes gives the - message, "yod allocation delayed for node recovery". This interferes with - test suites that do not expect seeing this message. See the section of "Red - Storm" in file INSTALL_parallel for a way to deal with this problem. - AKC - 2008/05/28 - -* We have discovered two problems when running collective IO parallel HDF5 - tests with chunking storage on the ChaMPIon MPI compiler on tungsten, a - Linux cluster at NCSA. - - Under some complex selection cases: - 1) MPI_Get_element returns the wrong value. - 2) MPI_Type_struct also generates the wrong derived datatype and corrupt - data may be generated. - These issues arise only when turning on collective IO with chunking storage - with some complex selections. 
We have not found these problems on other - MPI-IO compilers. If you encounter these problems, you may use independent - IO instead. - - To avoid this behavior, change the following line in your code - H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE); - to - H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_INDEPENDENT); - KY - 2007/08/24 - -* On Intel 64 Linux cluster (RH 4, Linux 2.6.9) with Intel 10.0 compilers, - use -mp -O1 compilation flags to build the libraries. A higher level of - optimization causes failures in several HDF5 library tests. - -* For LLNL, uP: both serial and parallel tests pass. - Zeus: Serial tests pass but parallel tests fail with a known problem in MPI. - ubgl: Serial tests pass but parallel tests fail. - -* On mpich 1.2.5 and 1.2.6, if more than two processes contribute no IO and - the application asks to do collective IO, we have found that when using 4 - processors, a simple collective write will sometimes be hung. This can be - verified with t_mpi test under testpar. - -* On IRIX6.5, when the C compiler version is greater than 7.4, complicated - MPI derived datatype code will work. However, the user should increase - the value of the MPI_TYPE_MAX environment variable to some appropriate value - to use collective irregular selection code. For example, the current - parallel HDF5 test needs to raise MPI_TYPE_MAX to 200,000 to pass the test. - -* A dataset created or rewritten with a v1.6.3 library or after cannot be read - with the v1.6.2 library or before when the Fletcher32 EDC filter is enabled. - There was a bug in the calculation of the Fletcher32 checksum in the - library before v1.6.3; the checksum value was not consistent between big- - endian and little-endian systems. This bug was fixed in Release 1.6.3. - However, after fixing the bug, the checksum value was no longer the same as - before on little-endian system. Library releases after 1.6.4 can still read - datasets created or rewritten with an HDF5 library of v1.6.2 or before. 
- SLU - 2005/6/30 - -* On IBM AIX systems, parallel HDF5 mode will fail some tests with error - messages like "INFO: 0031-XXX ...". This is from the command `poe'. - Set the environment variable MP_INFOLEVEL to 0 to minimize the messages - and run the tests again. - - The tests may fail with messages like "The socket name is already in use", - but HDF5 does not use sockets. This failure is due to problems with the - poe command trying to set up the debug socket. To resolve this problem, - check to see whether there are many old /tmp/s.pedb.* files staying around. - These are sockets used by the poe command and left behind due to failed - commands. First, ask your system administrator to clean them out. - Lastly, request IBM to provide a means to run poe without the debug socket. - -* The --enable-static-exec configure flag fails to compile for Solaris - platforms. This is due to the fact that not all of the system libraries on - Solaris are available in a static format. - - The --enable-static-exec configure flag also fails to correctly compile - on IBM SP2 platforms for serial mode. The parallel mode works fine with - this option. - - It is suggested that you do not use this option on these platforms - during configuration. - -* There is also a configure error on Altix machines that incorrectly reports - when a version of Szip without an encoder is being used. - -* Information about building with PGI and Intel compilers is available in - the INSTALL file sections 4.7 and 4.8. - - -%%%%1.8.2%%%% - - -HDF5 version 1.8.2 released on Mon Nov 10 15:43:09 CST 2008 -================================================================================ - -INTRODUCTION -============ - -This document describes the differences between HDF5-1.8.1 and HDF5 1.8.2, -and contains information on the platforms tested and known problems in -HDF5-1.8.2. For more details, see the files HISTORY-1_0-1_8_0_rc3.txt -and HISTORY-1_8.txt in the release_docs/ directory of the HDF5 source. 
- -Links to the HDF5 1.8.2 source code, documentation, and additional materials -can be found on the HDF5 web page at: - - http://www.hdfgroup.org/products/hdf5/ - -The HDF5 1.8.2 release can be obtained from: - - http://www.hdfgroup.org/HDF5/release/obtain5.html - -User documentation for 1.8.2 can be accessed directly at this location: - - http://www.hdfgroup.org/HDF5/doc/ - -New features in the HDF5-1.8.x release series, including brief general -descriptions of some new and modified APIs, are described in the "What's New -in 1.8.0?" document: - - http://www.hdfgroup.org/HDF5/doc/ADGuide/WhatsNew180.html - -All new and modified APIs are listed in detail in the "HDF5 Software Changes -from Release to Release" document, in the section "Release 1.8.2 (current -release) versus Release 1.8.1": - - http://www.hdfgroup.org/HDF5/doc/ADGuide/Changes.html - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS -======== - -- New Features -- Support for new platforms and languages -- Bug Fixes since HDF5-1.8.1 -- Platforms Tested -- Supported Configuration Features Summary -- Known Problems - - -New Features -============ - - Configuration - ------------- - - Upgraded libtool to version 2.2.6a. (MAM - 2008/10/15). - - Library - ------- - - Added two new public routines: H5Pget_elink_fapl() and - H5Pset_elink_fapl(). (see bug #1247) (VC - 2008/10/13) - - Improved free space tracking in file to be faster. (QAK - 2008/10/06) - - Added 'mounted' field to H5G_info_t struct. (QAK - 2008/07/15) - - Parallel Library - ---------------- - - None - - Tools - ----- - - h5repack: added new options -u and -b to add a userblock to an HDF5 - file during the repack. (PVN - 2008/08/26) - - h5repack: added options -t and -a to call H5Pset_alignment while - creating a repacked file. (PVN - 2008/08/29) - - h5ls: added capability to traverse through external links when the -r - (recursive) flag is given. 
(NAF - 2008/09/16) - - h5ls: added -E option to enable traversal of external links. - h5ls will not traverse external links without this flag being set. - (NAF - 2008/10/06) - - h5dump: when -b flag is used without a keyword after it, binary - output defaults to NATIVE. MEMORY keyword was deprecated - and replaced by NATIVE keyword. (PVN - 2008/10/30) - - h5diff: returns 1 when file graphs differ by any object. - Error return code was changed to 2 from -1. (PVN - 2008/10/30) - - h5import: TEXTFPE (scientific format) was deprecated. Use TEXTFP - instead (PVN - 2008/10/30) - - - - F90 API - ------ - - Added optional parameter 'mounted' to H5Gget_info_f, - H5Gget_info_by_idx_f, H5Gget_info_by_name_f (MSB - 2008/09/24) - - Added H5Tget_native_type_f (MSB - 2008/09/30) - - - C++ API - ------ - - These member functions were added as wrapper for H5Rdereference to - replace the incorrect IdComponent::dereference(). - void H5Object::dereference(H5Object& obj, void* ref, - H5R_type_t ref_type=H5R_OBJECT) - void H5Object::dereference(H5File& h5file, void* ref, - H5R_type_t ref_type=H5R_OBJECT) - void H5Object::dereference(Attribute& obj, void* ref, - H5R_type_t ref_type=H5R_OBJECT) - - In addition, these constructors were added to create the associated - objects by way of dereference: - DataSet(H5Object& obj, void* ref, H5R_type_t ref_type=H5R_OBJECT) - DataSet(H5File& file, void* ref, H5R_type_t ref_type=H5R_OBJECT) - DataSet(Attribute& attr, void* ref, H5R_type_t ref_type=H5R_OBJECT) - Group(H5Object& obj, void* ref, H5R_type_t ref_type=H5R_OBJECT) - Group(H5File& obj, void* ref, H5R_type_t ref_type=H5R_OBJECT) - Group(Attribute& attr, void* ref, H5R_type_t ref_type=H5R_OBJECT) - DataType(H5Object& obj, void* ref, H5R_type_t ref_type=H5R_OBJECT) - DataType(H5File& file, void* ref, H5R_type_t ref_type=H5R_OBJECT) - DataType(Attribute& attr, void* ref, H5R_type_t ref_type=H5R_OBJECT) - (BMR - 2008/10/29) - - -Support for New Platforms, Languages, and Compilers 
-=================================================== - - Intel 10.1 is supported on Mac OS X 10.5.4. - Note: - When Fortran is enabled, configure automatically - disables the build of shared libraries (i.e., only - static C and C++ HDF5 libraries will be built - along with the static HDF5 Fortran library). - Intel 10.1 C and C++ compilers require - "-no-multibyte-chars" compilation flag due to the known - bug in the compilers. - (EIP - 2008/10/30) - - -Bug Fixes since HDF5-1.8.1 -========================== - - Configuration - ------------- - - Fixed error with 'make check install' failing due to h5dump - needing other tools built first. (MAM - 2008/10/15). - - When using shared szip, it is no longer necessary to specify - the path to the shared szip libraries in LD_LIBRARY_PATH. - (MAM - 2008/10/15). - - The file libhdf5_fortran.settings is not installed since its content - is included in libhdf5.settings now. (AKC - 2008/10/21) - - "make DESTDIR=xxx install" failed to install some tools and files - (e.g., h5cc and fortran modules). Fixed. (AKC - 2008/10/8). - - Library - ------- - - H5Ovisit and H5Ovisit_by_name will now properly terminate when the - callback function returns a positive value on the starting object. - (NAF - 2008/11/03) - - Fixed an error where a null message could be created that was larger - than could be written to the file. (NAF - 2008/10/23) - - Corrected error with family/split/multi VFD not updating driver info - when "latest" version of the file format used. (QAK - 2008/10/14) - - Corrected alignment+threshold errors to work correctly when metadata - aggregation is enabled. (QAK - 2008/10/06) - - Changed H5Fget_obj_count and H5Fget_obj_ids to ignore objects - registered by the library for internal library use. - (NAF - 2008/10/06) - - Fixed potential memory leak during compound conversion. - (NAF - 2008/10/06) - - Changed the return value of H5Fget_obj_count from INT to SSIZE_T. 
- Also changed the return value of H5Fget_obj_ids from HERR_T to - SSIZE_T and the type of the parameter MAX_OBJS from INT to SIZE_T. - (SLU - 2008/09/26) - - Fixed an issue that could cause data to be improperly overwritten - during compound type conversion. (NAF - 2008/09/19) - - Fixed pointer alignment violations that could occur during vlen - conversion. (NAF - 2008/09/16) - - Fixed problem where library could cause a segmentation fault when - an invalid location ID was given to H5Giterate(). (QAK - 2008/08/19) - - Fixed improper shutdown when objects have reference count > 1. The - library now tracks reference count due to the application separately - from that due to internal library routines. (NAF - 2008/08/19) - - Fixed assertion failure caused by incorrect array datatype version. - (NAF - 2008/08/08) - - Fixed an issue where mount point traversal would fail when using - multiple handles for the child. (NAF - 2008/08/07) - - Fixed an issue where mount points were inaccessible when using - multiple file handles for the parent. The mount table is now in - the shared file structure (the parent pointer is still in the - top structure). (NAF - 2008/08/07) - - Fixed assertion failure caused by incorrect array datatype version. - (NAF - 2008/08/04) - - Fixed issue where a group could have a file mounted on it twice. - (QAK - 2008/07/15) - - When an attribute was opened twice and data was written with - one of the handles, the file didn't have the data. It happened - because each handle had its own object structure, and the empty - one overwrote the data with fill value. This is fixed by making - some attribute information like the data be shared in the - attribute structure. (SLU - 2008/07/07) - - Fixed a Windows-specific issue in the ohdr test which was causing - users in some timezones to get false errors. This a deficiency in - the Windows mktime() function, and has been handled properly. 
- (SJW - 2008/06/19) - - Parallel Library - ---------------- - - None - - Tools - ----- - - h5dump now checks for uniqueness of committed datatypes. - (NAF - 2008/10/15) - - Fixed unnecessary indentation of committed datatypes in h5dump. - (NAF - 2008/10/15) - - Fixed bugs in h5stat: segmemtation fault when printing groups and - print warning message when traversal of objects is unsuccessful. - (see bug #1253) (VC- 2008/10/13) - - Fixed bug in h5ls that prevented relative group listings (like - "h5ls foo.h5/bar") from working correctly (QAK - 2008/06/03) - - h5dump: when doing binary output (-b), the stdout printing of - attributes was done incorrectly. Removed printing of attributes - when doing binary output. (PVN - 2008/06/05) - - - F90 API - ------ - - h5sselect_elements_f: Added additional operators H5S_SELECT_APPEND - and H5S_SELECT_PREPEND (MSB - 2008/09/30) - - h5sget_select_elem_pointlist: Fixed list of returned points by - rearranging the point list correctly by accounting for C - conventions. (MSB - 2008/09/30) - - h5sget_select_hyper_blocklist_f: Fixed error in transposed dimension - of arrays.(MSB - 2008/9/30) - - h5sget_select_bounds_f: Swapped array bounds to account for C and - Fortran reversed array notation (MSB - 2008/9/30) - - Changed to initializing string to a blank character instead of a - null type in tH5P.f90 to fix compiling error using AIX 5.3.0 - (MSB - 2008/7/29) - - Fixed missing commas in H5test_kind.f90 detected by NAG compiler - (MSB - 2008/7/29) - - Fixed passing and array to a scalar in tH5A_1_8.f90 detected by - NAG compiler (MSB - 2008/7/29) - - Added the ability of the test programs to use the status of - HDF5_NOCLEANUP to determine if the *.h5 files should be removed - or not after the tests are completed (MSB - 2008/10/1) - - In nh5tget_offset_c: (MSB 9/12/2008) - If offset was equal to 0 it returned the error code of -1, - this was changed to return an error code of -1 when the offset - value is < 0. 
- - Uses intrinsic Fortran function SIZEOF if available when detecting - type of INTEGERs and REALs in H5test_kind.f90 (MSB - 2008/9/3) - - Put the DOUBLE PRECISION interfaces in a separate module and - added a USE statement for the module. The interfaces are - included/excluded depending on the state of FORTRAN_DEFAULT_REAL - is DBLE_F which detects if the default REAL is DOUBLE PRECISION. - This allows the library to be compiled with -r8 Fortran flag - without the user needing to edit the source code. - (MSB - 200/8/27) - - Enable building shared library for fortran by adding the flag -fPIC - to the compile flags for versions of Intel Fortran compiler >=9 - (MSB - 2008/8/26) - - C++ API - ------ - - Fixed a design bug which allowed an Attribute object to create/modify - attributes (bugzilla #1068). The API class hierarchy was revised - to address the problem. Classes AbstractDS and Attribute are moved - out of H5Object. Class Attribute now multiply inherits from - IdComponent and AbstractDs and class DataSet from H5Object and - AbstractDs. In addition, the data member IdComponent::id was - moved into subclasses: Attribute, DataSet, DataSpace, DataType, - H5File, Group, and PropList. (BMR - 2008/05/20) - - IdComponent::dereference was incorrect and replaced as described - in "New Features" section. - (BMR - 2008/10/29) - - -Platforms Tested -================ -The following platforms and compilers have been tested for this release. 
- - AIX 5.3 xlc 7.0.0.8 - xlf 09.01.0000.0008 - xlC 7.0.0.8 - mpcc_r 7.0.0.8 - mpxlf_r 09.01.0000.0008 - - Cray XT3 (2.0.41) cc (pgcc) 7.1-4 - (red storm) ftn (pgf90) 7.1-4 - CC (pgCC) 7.1-4 - - FreeBSD 6.3-STABLE i386 gcc 3.4.6 [FreeBSD] 20060305 - (duty) g++ 3.4.6 [FreeBSD] 20060305 - gcc 4.2.5 20080702 - g++ 4.2.5 20080702 - gfortran 4.2.5 20080702 - - FreeBSD 6.3-STABLE amd64 gcc 3.4.6 [FreeBSD] 20060305 - (liberty) g++ 3.4.6 [FreeBSD] 20060305 - gcc 4.2.5 20080702 - g++ 4.2.5 20080702 - gfortran 4.2.5 20080702 - - IRIX64 6.5 (64 & n32) MIPSpro cc 7.4.4m - F90 MIPSpro 7.4.4m - C++ MIPSpro cc 7.4.4m - - Linux 2.6.9-42.0.10.ELsmp #1 gcc (GCC) 3.4.6 - SMP i686 i386 G95 (GCC 4.0.3 (g95 0.92!) April 18 2007) - (kagiso) PGI C, Fortran, C++ 7.2-1 32-bit - Intel(R) C Compiler for 32-bit - applications, Version 10.1 - Intel(R) C++ Compiler for 32-bit - applications, Version 10.1 - Intel(R) Fortran Compiler for 32-bit - applications, Version 10.1 - Absoft 32-bit Fortran 95 10.0.4 - MPICH mpich-1.2.7 compiled with - gcc 3.4.6 and G95 (GCC 4.0.3 (g95 0.92!) - MPICH mpich2-1.0.6p1 compiled with - gcc 3.4.6 and G95 (GCC 4.0.3 (g95 0.92!) - - Linux 2.6.16.46-0.14-smp #1 Intel(R) C++ for Intel(R) EM64T - SMP x86_64 GNU/Linux Ver. 10.1.013 - (smirom) Intel(R) Fortran Intel(R) EM64T - Ver. 10.1.013 - PGI C, Fortran, C++ Version 7.2-1 - for 64-bit target on x86-64 - MPICH mpich-1.2.7 compiled with - gcc 4.1.2 and G95 (GCC 4.0.3 (g95 0.92!) - MPICH mpich2-1.0.7 compiled with - gcc 4.1.2 and G95 (GCC 4.0.3 (g95 0.92!) 
- tested for both 32- and 64-bit binaries - - Linux 2.6.16.54-0.2.5 #1 Intel(R) C++ Version 10.1.017 - Altix SMP ia64 Intel(R) Fortran Itanium(R) Version 10.1.017 - (cobalt) SGI MPI 1.16 - - SunOS 5.10 32- and 64-bit Sun WorkShop 6 update 2 C 5.8 - (linew) Sun WorkShop 6 update 2 Fortran 95 8.2 - Sun WorkShop 6 update 2 C++ 5.8 - Patch 121019-06 - - Xeon Linux 2.6.9-42.0.10.EL_lustre-1.4.10.1smp - (abe) Intel(R) C++ Version 10.0.026 - Intel(R) Fortran Compiler Version 10.0.026 - Open MPI 1.2.2 - MVAPICH2-0.9.8p28p2patched-intel-ofed-1.2 - compiled with icc v10.0.026 and - ifort 10.0.026 - - IA-64 Linux 2.4.21-309.tg1 #1 SMP - ia64 gcc (GCC) 3.2.2 - (NCSA tg-login) Intel(R) C++ Version 8.1.037 - Intel(R) Fortran Compiler Version 8.1.033 - mpich-gm-1.2.7p1..16-intel-8.1.037-r1 - - Intel 64 Linux 2.6.9-42.0.10.EL_lustre-1.4.10.1smp - (abe) gcc 3.4.6 20060404 - Intel(R) C++ Version 10.0 - Intel (R) Fortran Compiler Version 10.0 - mvapich2-0.9.8p2patched-intel-ofed-1.2 - - Windows XP Visual Studio .NET - Visual Studio 2005 w/ Intel Fortran 9.1 - Cygwin(native gcc compiler and g95) - - Windows XP x64 Visual Studio 2005 w/ Intel Fortran 9.1 - - Windows Vista Visual Studio 2005 - - MAC OS 10.5.4 (Intel) i686-apple-darwin9-gcc-4.0.1 (GCC) 4.0.1 - GNU Fortran (GCC) 4.3.0 20070810 - G95 (GCC 4.0.3 (g95 0.91!) 
Apr 24 2008) - Intel C, C++ and Fortran compilers 10.1 - - -Supported Configuration Features Summary -======================================== - - In the tables below - y = tested and supported - n = not supported or not tested in this release - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90 F90 C++ zlib SZIP - parallel parallel -Solaris2.10 32-bit n y n y y y -Solaris2.10 64-bit n y n y y y -IRIX64_6.5 32-bit n n n n y y -IRIX64_6.5 64-bit n y y y y y -Windows XP n y(15) n(15) y y y -Windows XP x64 n y(15) n(15) y y y -Windows Vista n n n y y y -Mac OS X 10.5 Intel n y n y y y -AIX 5.3 32- and 64-bit n y n y y n -FreeBSD 6.3-STABLE -32&64 bit n y n y y y -RedHat EL4 (3) W y(1) y(10) y(1) y y y -RedHat EL4 Intel (3) W n y n y y n -RedHat EL4 PGI (3) W n y n y y n -SuSe x86_64 gcc(3,12) W y(2) y(11) y(2) y y y -SuSe x86_64 Int(3,12) W n y(13) n y y n -SuSe x86_64 PGI(3,12) W n y(8) n y y y -Linux 2.6 SuSE ia64 C - Intel (3,7) y y y y y n -Linux 2.6 SGI Altix - ia64 Intel (3) y y y y y y -Linux 2.6 RHEL C - Lustre Intel (5) y(4) y y(4) y y n -Cray XT3 2.0.41 y y y y y n - - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -Solaris2.10 32-bit y y y y -Solaris2.10 64-bit y y y y -IRIX64_6.5 32-bit y dna y y -IRIX64_6.5 64-bit y y n y -Windows XP y y(15) y y -Windows XP x64 y y(15) y y -Windows Vista y n n y -Mac OS X 10.5 y n y n -AIX 5.3 32- and 64-bit n n n n -FreeBSD 6.2 32&64 bit y n y y -RedHat EL4 (3) W y y(10) y y -RedHat EL4 Intel (3) W y y y n -RedHat EL4 PGI (3) W y y y n -SuSe x86_64 GNU(3,12) W y y y y -SuSe x86_64 Int(3,12) W y y y n -SuSe x86_64 PGI(3,12) W y y y n -Linux 2.4 SuSE C - ia64 C Intel (7) y y y n -Linux 2.4 SGI Altix C - ia64 Intel y n -Linux 2.6 RHEL C - Lustre Intel (5) y y y n -Cray XT3 2.0.41 n n n n - - Notes: (1) Using mpich2 1.0.6. - (2) Using mpich2 1.0.7. 
- (3) Linux 2.6 with GNU, Intel, and PGI compilers, as indicated. - W or C indicates workstation or cluster, respectively. - (4) Using mvapich2 0.9.8. - (5) Linux 2.6.9-42.0.10. Xeon cluster with ELsmp_perfctr_lustre - and Intel compilers - (6) Linux 2.4.21-32.0.1. Xeon cluster with ELsmp_perfctr_lustre - and Intel compilers - (7) Linux 2.4.21, SuSE_292.till. Ia64 cluster with Intel compilers - (8) pgf90 - (9) With Compaq Visual Fortran 6.6c compiler. - (10) With PGI and Absoft compilers. - (11) PGI and Intel compilers for both C and Fortran - (12) AMD Opteron x86_64 - (13) ifort - (14) Yes with C and Fortran, but not with C++ - (15) Using Visual Studio 2005 or Cygwin - (16) Not tested for this release. - Compiler versions for each platform are listed in the preceding - "Platforms Tested" table. - - -Known Problems -============== -* For Red Storm, a Cray XT3 system, the tools/h5ls/testh5ls.sh and - tools/h5copy/testh5copy.sh will fail some of its sub-tests. These sub-tests - are expected to fail and should exit with a non-zero code but the yod - command does not propagate the exit code of the executables. Yod always - returns 0 if it can launch the executable. The test suite shell expects - a non-zero for this particular test, therefore it concludes the test has - failed when it receives 0 from yod. Skip all the "failing" test for now - by changing them as following. 
- - ======== Original tools/h5ls/testh5ls.sh ========= - TOOLTEST tgroup-1.ls 1 -w80 -r -g tgroup.h5 - ======== Change to =============================== - echo SKIP TOOLTEST tgroup-1.ls 1 -w80 -r -g tgroup.h5 - ================================================== - - ======== Original tools/h5copy/testh5copy.sh ========= - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d grp_rename - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d /grp_rename/grp_dsets - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_dsets -d /E/F/grp_dsets - TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_nested -d /G/H/grp_nested - H5LSTEST $FILEOUT - ======== Change to =============================== - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d grp_rename - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -v -s grp_dsets -d /grp_rename/grp_dsets - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_dsets -d /E/F/grp_dsets - echo SKIP TOOLTEST_FAIL -i $TESTFILE -o $FILEOUT -vp -s /grp_nested -d /G/H/grp_nested - echo SKIP H5LSTEST $FILEOUT - ================================================== - AKC - 2008/11/10 - -* For Red Storm, a Cray XT3 system, the yod command sometimes gives the - message, "yod allocation delayed for node recovery". This interferes with - test suites that do not expect seeing this message. See the section of "Red - Storm" in file INSTALL_parallel for a way to deal with this problem. - AKC - 2008/05/28 - -* We have discovered two problems when running collective IO parallel HDF5 - tests with chunking storage on the ChaMPIon MPI compiler on tungsten, a - Linux cluster at NCSA. - - Under some complex selection cases: - 1) MPI_Get_element returns the wrong value. - 2) MPI_Type_struct also generates the wrong derived datatype and corrupt - data may be generated. - These issues arise only when turning on collective IO with chunking storage - with some complex selections. 
We have not found these problems on other - MPI-IO compilers. If you encounter these problems, you may use independent - IO instead. - - To avoid this behavior, change the following line in your code - H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE); - to - H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_INDEPENDENT); - - KY - 2007/08/24 - -* On Intel 64 Linux cluster (RH 4, Linux 2.6.9) with Intel 10.0 compilers, - use -mp -O1 compilation flags to build the libraries. A higher level of - optimization causes failures in several HDF5 library tests. - -* For LLNL, uP: both serial and parallel tests pass. - Zeus: Serial tests pass but parallel tests fail with a known problem in MPI. - ubgl: Serial tests pass but parallel tests fail. - -* On mpich 1.2.5 and 1.2.6, if more than two processes contribute no IO and - the application asks to do collective IO, we have found that when using 4 - processors, a simple collective write will sometimes be hung. This can be - verified with t_mpi test under testpar. - -* On IRIX6.5, when the C compiler version is greater than 7.4, complicated - MPI derived datatype code will work. However, the user should increase - the value of the MPI_TYPE_MAX environment variable to some appropriate value - to use collective irregular selection code. For example, the current - parallel HDF5 test needs to raise MPI_TYPE_MAX to 200,000 to pass the test. - -* A dataset created or rewritten with a v1.6.3 library or after cannot be read - with the v1.6.2 library or before when the Fletcher32 EDC filter is enabled. - There was a bug in the calculation of the Fletcher32 checksum in the - library before v1.6.3; the checksum value was not consistent between big- - endian and little-endian systems. This bug was fixed in Release 1.6.3. - However, after fixing the bug, the checksum value was no longer the same as - before on little-endian system. Library releases after 1.6.4 can still read - datasets created or rewritten with an HDF5 library of v1.6.2 or before. 
- SLU - 2005/6/30 - -* On IBM AIX systems, parallel HDF5 mode will fail some tests with error - messages like "INFO: 0031-XXX ...". This is from the command `poe'. - Set the environment variable MP_INFOLEVEL to 0 to minimize the messages - and run the tests again. - - The tests may fail with messages like "The socket name is already in use", - but HDF5 does not use sockets. This failure is due to problems with the - poe command trying to set up the debug socket. To resolve this problem, - check to see whether there are many old /tmp/s.pedb.* files staying around. - These are sockets used by the poe command and left behind due to failed - commands. First, ask your system administrator to clean them out. - Lastly, request IBM to provide a means to run poe without the debug socket. - -* The --enable-static-exec configure flag fails to compile for Solaris - platforms. This is due to the fact that not all of the system libraries on - Solaris are available in a static format. - - The --enable-static-exec configure flag also fails to correctly compile - on IBM SP2 platforms for serial mode. The parallel mode works fine with - this option. - - It is suggested that you do not use this option on these platforms - during configuration. - -* There is also a configure error on Altix machines that incorrectly reports - when a version of Szip without an encoder is being used. - -* Information about building with PGI and Intel compilers is available in - the INSTALL file sections 4.7 and 4.8. - - - - -%%%%1.8.1%%%% - - -HDF5 version 1.8.1 released on Thu May 29 15:28:55 CDT 2008 -================================================================================ - -INTRODUCTION -============ - -This document describes the differences between the HDF5-1.8.1 release -and HDF5 1.8.0, and contains information on the platforms tested and known -problems in HDF5-1.8.1. 
For more details, see the files -HISTORY-1_0-1_8_0_rc3.txt and HISTORY-1_8.txt in the release_docs/ directory -of the HDF5 source. - -Links to the HDF5 1.8.1 source code, documentation, and additional materials -can be found on the HDF5 web page at: - - http://www.hdfgroup.org/products/hdf5/ - -The HDF5 1.8.1 release can be obtained from: - - http://www.hdfgroup.org/HDF5/release/obtain5.html - -User documentation for 1.8.1 can be accessed directly at this location: - - http://www.hdfgroup.org/HDF5/doc/ - -New features in the HDF5-1.8.x release series, including brief general -descriptions of some new and modified APIs, are described in the "What's New -in 1.8.0?" document: - - http://www.hdfgroup.org/HDF5/doc/ADGuide/WhatsNew180.html - -All new and modified APIs are listed in detail in the "HDF5 Software Changes -from Release to Release" document, in the section "Release 1.8.1 (current -release) versus Release 1.8.0": - - http://www.hdfgroup.org/HDF5/doc/ADGuide/Changes.html - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS -======== - -- New Features -- Support for new platforms and languages -- Bug Fixes since HDF5-1.8.0 -- Platforms Tested -- Supported Configuration Features Summary -- Known Problems - - -New Features -============ - - Configuration - ------------- - - The lib/libhdf5.settings file contains much more configure - information. (AKC - 2008/05/18) - - - The new configure option "--disable-sharedlib-rpath" disables - embedding the '-Wl,-rpath' information into executables when - shared libraries are produced, and instead solely relies on the - information in LD_LIBRARY_PATH. 
(MAM - 2008/05/15) - - - Configuration suite now uses Autoconf 2.61, Automake 1.10.1, and - Libtool 2.2.2 (MAM - 2008/05/01) - - Source code distribution - ======================== - - Library - ------- - - None - - Parallel Library - ---------------- - - None - - Tools - ----- - - h5repack: Reinstated the -i and -o command line flags to specify - input and output files. h5repack now understands both the old - syntax (with -i and -o) and the new syntax introduced in Release - 1.8.0. (PVN - 2008/05/23) - - h5dump: Added support for external links, displaying the object that - an external link points to. (PVN - 2008/05/12) - - h5dump: Added an option, -m, to allow user-defined formatting in the - output of floating point numbers. (PVN - 2008/05/06) - - h5dump, in output of the -p option: Added effective data compression - ratio to the dataset storage layout output when a compression filter - has been applied to a dataset. (PVN - 2008/05/01) - - F90 API - ------ - - New H5A, H5G, H5L, H5O, and H5P APIs to enable 1.8 features were - added. See "Release 1.8.1 (current release) versus Release 1.8.0" in - the document "HDF5 Software Changes from Release to Release" - (http://hdfgroup.org/HDF5/doc/ADGuide/Changes.html) for the - complete list of the new APIs. - - C++ API - ------ - - None - - -Support for New Platforms, Languages, and Compilers -=================================================== - - Both serial and parallel HDF5 are supported for the Red Storm machine - which is a Cray XT3 system. - - - The Fortran library will work correctly if compiled with the -i8 - flag. This has been tested with the g95, PGI and Intel Fortran - compilers. - - -Bug Fixes since HDF5-1.8.0 -========================== - - Configuration - ------------- - - None - - Source code distribution - ======================== - - Library - ------- - - Chunking: Chunks greater than 4GB are disallowed. 
- (QAK - 2008/05/16) - - Fixed the problem with searching for a target file when following - an external link. The search pattern will depend on whether the - target file's pathname is an absolute or a relative path. - Please see the H5Lcreate_external description in the "HDF5 - Reference Manual" (http://hdfgroup.org/HDF5/doc/RM/RM_H5L.html). - (VC - 2008/04/08) - - Fixed possible file corruption bug when encoding datatype - descriptions for compound datatypes whose size was between - 256 and 511 bytes and the file was opened with the "use the - latest format" property enabled (with H5Pset_libver_bounds). - (QAK - 2008/03/13) - - Fixed bug in H5Aget_num_attrs() routine to correctly handle an - invalid location identifier. (QAK - 2008/03/11) - - Parallel Library - ---------------- - - None - - Tools - ----- - - Fixed bug in h5diff that prevented datasets and attributes with - variable-length string elements from comparing correctly. - (QAK - 2008/02/28) - - Fixed bug in h5dump that caused binary output to be made only for - the first dataset, when several datasets were requested. - (PVN - 2008/04/07) - - F90 API - ------ - - The h5tset(get)_fields subroutines were missing the parameter to - specify a sign position; fixed. (EIP - 2008/05/23) - - Many APIs were fixed to work with the 8-byte integers in Fortran vs. - 4-byte integers in C. This change is transparent to user applications. - - C++ API - ------ - - The class hierarchy was revised to address the problem reported - in bugzilla #1068, Attribute should not be derived from base - class H5Object. Classes AbstractDS was moved out of H5Object. - Class Attribute now multiply inherits from IdComponent and - AbstractDs and class DataSet from H5Object and AbstractDs. - In addition, data member IdComponent::id was moved into subclasses: - Attribute, DataSet, DataSpace, DataType, H5File, Group, and PropList. 
- (BMR - 2008/05/20) - - IdComponent::dereference was incorrect; it was changed from: - void IdComponent::dereference(IdComponent& obj, void* ref) - to: - void H5Object::dereference(H5File& h5file, void* ref) - void H5Object::dereference(H5Object& obj, void* ref) - (BMR - 2008/05/20) - - Revised Attribute::write and Attribute::read wrappers to handle - memory allocation/deallocation properly. (bugzilla 1045) - (BMR - 2008/05/20) - - -Platforms Tested -================ -The following platforms and compilers have been tested for this release. - - Cray XT3 (2.0.41) cc (pgcc) 7.1-4 - (red storm) ftn (pgf90) 7.1-4 - CC (pgCC) 7.1-4 - mpicc 1.0.2 - mpif90 1.0.2 - - FreeBSD 6.2-STABLE i386 gcc 3.4.6 [FreeBSD] 20060305 - (duty) g++ 3.4.6 [FreeBSD] 20060305 - gcc 4.2.1 20080123 - g++ 4.2.1 20080123 - gfortran 4.2.1 20070620 - - FreeBSD 6.2-STABLE amd64 gcc 3.4.6 [FreeBSD] 20060305 - (liberty) g++ 3.4.6 [FreeBSD] 20060305 - gcc 4.2.1 20080123 - g++ 4.2.1 20080123 - gfortran 4.2.1 20080123 - - IRIX64 6.5 (64 & n32) MIPSpro cc 7.4.4m - F90 MIPSpro 7.4.4m - C++ MIPSpro cc 7.4.4m - - Linux 2.6.9 (RHEL4) Intel 10.0 compilers - (abe.ncsa.uiuc.edu) - - Linux 2.4.21-47 gcc 3.2.3 20030502 - (osage) - - Linux 2.6.9-42.0.10 gcc,g++ 3.4.6 20060404, G95 (GCC 4.0.3) - (kagiso) PGI 7.1-6 (pgcc, pgf90, pgCC) - Intel 9.1 (icc, ifort, icpc) - - Linux 2.6.16.27 x86_64 AMD gcc 4.1.0 (SuSE Linux), g++ 4.1.0, - (smirom) g95 (GCC 4.0.3) - PGI 7.1-6 (pgcc, pgf90, pgCC) - Intel 9.1 (icc, ifort, icpc) - - Linux 2.6.5-7.252.1-rtgfx #1 Intel(R) C++ Version 9.0 - SMP ia64 Intel(R) Fortran Itanium(R) Version 9.0 - (cobalt) SGI MPI - - SunOS 5.8 32,46 Sun WorkShop 6 update 2 C 5.3 - (Solaris 2.8) Sun WorkShop 6 update 2 Fortran 95 6.2 - Sun WorkShop 6 update 2 C++ 5.3 - - SunOS 5.10 cc: Sun C 5.8 - (linew) f90: Sun Fortran 95 8.2 - CC: Sun C++ 5.8 - - Xeon Linux 2.4.21-32.0.1.ELsmp-perfctr-lustre - (tungsten) gcc 3.2.2 20030222 - Intel(R) C++ Version 9.0 - Intel(R) Fortran Compiler Version 9.0 - - IA-64 
Linux 2.4.21.SuSE_309.tg1 ia64 - (NCSA tg-login) gcc 3.2.2 - Intel(R) C++ Version 8.1 - Intel(R) Fortran Compiler Version 8.1 - mpich-gm-1.2.6..14b-intel-r2 - - Intel 64 Linux 2.6.9-42.0.10.EL_lustre-1.4.10.1smp - (abe) gcc 3.4.6 20060404 - Intel(R) C++ Version 10.0 - Intel (R) Fortran Compiler Version 10.0 - mvapich2-0.9.8p2patched-intel-ofed-1.2 - - Windows XP Visual Studio .NET - Visual Studio 2005 w/ Intel Fortran 9.1 - Cygwin(native gcc compiler and g95) - MinGW(native gcc compiler and g95) - - Windows XP x64 Visual Studio 2005 w/ Intel Fortran 9.1 - - Windows Vista Visual Studio 2005 - - MAC OS 10.5.2 (Intel) i686-apple-darwin9-gcc-4.0.1 (GCC) 4.0.1 - GNU Fortran (GCC) 4.3.0 20070810 - G95 (GCC 4.0.3 (g95 0.91!) Apr 24 2008) - - -Supported Configuration Features Summary -======================================== - - In the tables below - y = tested and supported - n = not supported or not tested in this release - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90 F90 C++ zlib SZIP - parallel parallel -SunOS5.10 64-bit n y n y y y -SunOS5.10 32-bit n y n y y y -IRIX64_6.5 64-bit n y y y y y -IRIX64_6.5 32-bit n n n n y y -Windows XP n y(15) n(15) y y y -Windows XP x64 n y(15) n(15) y y y -Windows Vista n n n y y y -Mac OS X 10.5 Intel n y n y y y -FreeBSD 4.11 n n n y y y -RedHat EL3 W (3) y(1) y(10) y(1) y y y -RedHat EL3 W Intel (3) n y n y y n -RedHat EL3 W PGI (3) n y n y y n -SuSe x86_64 gcc (3,12) y(2) y(11) y(2) y y y -SuSe x86_64 Int (3,12) n y(13) n y y n -SuSe x86_64 PGI (3,12) n y(8) n y y y -Linux 2.4 Xeon C - Lustre Intel (3,6) n y n y y n -Linux 2.6 SuSE ia64 C - Intel (3,7) y y y y y n -Linux 2.6 SGI Altix - ia64 Intel (3) y y y y y y -Linux 2.6 RHEL C - Lustre Intel (5) y(4) y y(4) y y n -Cray XT3 2.0.41 y y y y y n - - -Platform Shared Shared Shared Thread- - C libs F90 libs C++ libs safe -Solaris2.10 64-bit y y y y -Solaris2.10 
32-bit y y y y -IRIX64_6.5 64-bit y y n y -IRIX64_6.5 32-bit y dna y y -Windows XP y y(15) y y -Windows XP x64 y y(15) y y -Windows Vista y n n y -Mac OS X 10.3 y n -FreeBSD 4.11 y n y y -RedHat EL3 W (3) y y(10) y y -RedHat EL3 W Intel (3) y y y n -RedHat EL3 W PGI (3) y y y n -SuSe x86_64 W GNU (3,12) y y y y -SuSe x86_64 W Int (3,12) y y y n -SuSe x86_64 W PGI (3,12) y y y n -Linux 2.4 Xeon C - Lustre Intel (6) y y y n -Linux 2.4 SuSE - ia64 C Intel (7) y y y n -Linux 2.4 SGI Altix - ia64 Intel y n -Linux 2.6 RHEL C - Lustre Intel (5) y y y n -Cray XT3 2.0.41 n n n n n - - Notes: (1) Using mpich2 1.0.6. - (2) Using mpich2 1.0.7. - (3) Linux 2.6 with GNU, Intel, and PGI compilers, as indicated. - W or C indicates workstation or cluster, respectively. - (4) Using mvapich2 0.9.8. - (5) Linux 2.6.9-42.0.10. Xeon cluster with ELsmp_perfctr_lustre - and Intel compilers - (6) Linux 2.4.21-32.0.1. Xeon cluster with ELsmp_perfctr_lustre - and Intel compilers - (7) Linux 2.4.21, SuSE_292.till. Ia64 cluster with Intel compilers - (8) pgf90 - (9) With Compaq Visual Fortran 6.6c compiler. - (10) With PGI and Absoft compilers. - (11) PGI and Intel compilers for both C and Fortran - (12) AMD Opteron x86_64 - (13) ifort - (14) Yes with C and Fortran, but not with C++ - (15) Using Visual Studio 2005 or Cygwin - (16) Not tested for this release. - Compiler versions for each platform are listed in the preceding - "Platforms Tested" table. - - -Known Problems -============== -* For Red Storm, a Cray XT3 system, the yod command sometimes gives the - message, "yod allocation delayed for node recovery". This interferes with - test suites that do not expect seeing this message. See the section of "Red - Storm" in file INSTALL_parallel for a way to deal with this problem. - AKC - 2008/05/28 - -* For Red Storm, a Cray XT3 system, the tools/h5ls/testh5ls.sh will fail on - the test "Testing h5ls -w80 -r -g tgroup.h5" fails. 
This test is - expected to fail and exit with a non-zero code but the yod command does - not propagate the exit code of the executables. Yod always returns 0 if it - can launch the executable. The test suite shell expects a non-zero for - this particular test, therefore it concludes the test has failed when it - receives 0 from yod. To bypass this problem for now, change the following - lines in the tools/h5ls/testh5ls.sh. - ======== Original ========= - # The following combination of arguments is expected to return an error message - # and return value 1 - TOOLTEST tgroup-1.ls 1 -w80 -r -g tgroup.h5 - ======== Skip the test ========= - echo SKIP TOOLTEST tgroup-1.ls 1 -w80 -r -g tgroup.h5 - ======== end of bypass ======== - AKC - 2008/05/28 - -* We have discovered two problems when running collective IO parallel HDF5 - tests with chunking storage on the ChaMPIon MPI compiler on tungsten, a - Linux cluster at NCSA. - - Under some complex selection cases: - 1) MPI_Get_element returns the wrong value. - 2) MPI_Type_struct also generates the wrong derived datatype and corrupt - data may be generated. - These issues arise only when turning on collective IO with chunking storage - with some complex selections. We have not found these problems on other - MPI-IO compilers. If you encounter these problems, you may use independent - IO instead. - - To avoid this behavior, change the following line in your code - H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE); - to - H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_INDEPENDENT); - - KY - 2007/08/24 - -* For SNL, spirit/liberty/thunderbird: The serial tests pass but parallel - tests failed with MPI-IO file locking message. AKC - 2007/6/25 - -* On Intel 64 Linux cluster (RH 4, Linux 2.6.9) with Intel 10.0 compilers, - use -mp -O1 compilation flags to build the libraries. A higher level of - optimization causes failures in several HDF5 library tests. - -* For LLNL, uP: both serial and parallel tests pass. 
- Zeus: Serial tests pass but parallel tests fail with a known problem in MPI. - ubgl: Serial tests pass but parallel tests fail. - -* Configuring with --enable-debug=all produces compiler errors on most - platforms: Users who want to run HDF5 in debug mode should use - --enable-debug rather than --enable-debug=all to enable debugging - information on most modules. - -* On Mac OS 10.4, test/dt_arith.c has some errors in conversion from long - double to (unsigned) long long and from (unsigned) long long to long double. - -* On Altix SGI with Intel 9.0, testmeta.c would not compile with -O3 - optimization flag. - -* On VAX, the Scaleoffset filter is not supported. The Scaleoffset filter - supports only the IEEE standard for floating-point data; it cannot be applied - to HDF5 data generated on VAX. - -* On Cray X1, a lone colon on the command line of h5dump --xml (as in - the testh5dumpxml.sh script) is misinterpereted by the operating system - and causes an error. - -* On mpich 1.2.5 and 1.2.6, if more than two processes contribute no IO and - the application asks to do collective IO, we have found that when using 4 - processors, a simple collective write will sometimes be hung. This can be - verified with t_mpi test under testpar. - -* On IRIX6.5, when the C compiler version is greater than 7.4, complicated - MPI derived datatype code will work. However, the user should increase - the value of the MPI_TYPE_MAX environment variable to some appropriate value - to use collective irregular selection code. For example, the current - parallel HDF5 test needs to raise MPI_TYPE_MAX to 200,000 to pass the test. - -* A dataset created or rewritten with a v1.6.3 library or after cannot be read - with the v1.6.2 library or before when the Fletcher32 EDC filter is enabled. - There was a bug in the calculation of the Fletcher32 checksum in the - library before v1.6.3; the checksum value was not consistent between big- - endian and little-endian systems. 
This bug was fixed in Release 1.6.3. - However, after fixing the bug, the checksum value was no longer the same as - before on little-endian system. Library releases after 1.6.4 can still read - datasets created or rewritten with an HDF5 library of v1.6.2 or before. - SLU - 2005/6/30 - -* For version 6 (6.02 and 6.04) of the Portland Group compiler on the AMD - Opteron processor, there is a bug in the compiler for optimization(-O2). - The library failed in several tests, all related to the MULTI driver. - The problem has been reported to the vendor. - -* On IBM AIX systems, parallel HDF5 mode will fail some tests with error - messages like "INFO: 0031-XXX ...". This is from the command `poe'. - Set the environment variable MP_INFOLEVEL to 0 to minimize the messages - and run the tests again. - - The tests may fail with messages like "The socket name is already in use", - but HDF5 does not use sockets. This failure is due to problems with the - poe command trying to set up the debug socket. To resolve this problem, - check to see whether there are many old /tmp/s.pedb.* files staying around. - These are sockets used by the poe command and left behind due to failed - commands. First, ask your system administrator to clean them out. - Lastly, request IBM to provide a means to run poe without the debug socket. - -* The --enable-static-exec configure flag fails to compile for Solaris - platforms. This is due to the fact that not all of the system libraries on - Solaris are available in a static format. - - The --enable-static-exec configure flag also fails to correctly compile - on IBM SP2 platforms for serial mode. The parallel mode works fine with - this option. - - It is suggested that you do not use this option on these platforms - during configuration. - -* With the gcc 2.95.2 compiler, HDF5 uses the `-ansi' flag during - compilation. 
The ANSI version of the compiler complains about not being - able to handle the `long long' datatype with the warning: - - warning: ANSI C does not support `long long' - - This warning is innocuous and can be safely ignored. - -* The ./dsets tests fail on the TFLOPS machine if the test program, - dsets.c, is compiled with the -O option. The HDF5 library still works - correctly with the -O option. The test program works fine if it is - compiled with -O1 or -O0. Only -O (same as -O2) causes the test - program to fail. - -* Not all platforms behave correctly with Szip's shared libraries. Szip is - disabled in these cases, and a message is relayed at configure time. Static - libraries should be working on all systems that support Szip and should be - used when shared libraries are unavailable. - - There is also a configure error on Altix machines that incorrectly reports - when a version of Szip without an encoder is being used. - -* On some platforms that use Intel and Absoft compilers to build the HDF5 - Fortran library, compilation may fail for fortranlib_test.f90, fflush1.f90 - and fflush2.f90 complaining about the exit subroutine. Comment out the line - IF (total_error .ne. 0) CALL exit (total_error). - -* Information about building with PGI and Intel compilers is available in - the INSTALL file sections 4.7 and 4.8. - -* On at least one system, SDSC DataStar, the scheduler (in this case - LoadLeveler) sends job status updates to standard error when you run - any executable that was compiled with the parallel compilers. - - This causes problems when running "make check" on parallel builds, as - many of the tool tests function by saving the output from test runs, - and comparing it to an exemplar. - - The best solution is to reconfigure the target system so it no longer - inserts the extra text. However, this may not be practical. - - In such cases, one solution is to "setenv HDF5_Make_Ignore yes" prior to - the configure and build. 
This will cause "make check" to continue after - detecting errors in the tool tests. However, in the case of SDSC DataStar, - it also leaves you with some 150 "failed" tests to examine by hand. - - A second solution is to write a script to run serial tests and filter - out the text added by the scheduler. A sample script used on SDSC - DataStar is given below, but you will probably have to customize it - for your installation. - - Observe that the basic idea is to insert the script as the first item - on the command line which executes the the test. The script then - executes the test and filters out the offending text before passing - it on. - - #!/bin/csh - - set STDOUT_FILE=~/bin/serial_filter.stdout - set STDERR_FILE=~/bin/serial_filter.stderr - - rm -f $STDOUT_FILE $STDERR_FILE - - ($* > $STDOUT_FILE) >& $STDERR_FILE - - set RETURN_VALUE=$status - - cat $STDOUT_FILE - - tail +3 $STDERR_FILE - - exit $RETURN_VALUE - - You get the HDF5 make files and test scripts to execute your filter script - by setting the environment variable "RUNSERIAL" to the full path of the - script prior to running configure for parallel builds. Remember to - "unsetenv RUNSERIAL" before running configure for a serial build. - - Note that the RUNSERIAL environment variable exists so that we can - prefix serial runs as necessary on the target system. On DataStar, - no prefix is necessary. However on an MPICH system, the prefix might - have to be set to something like "/usr/local/mpi/bin/mpirun -np 1" to - get the serial tests to run at all. - - In such cases, you will have to include the regular prefix in your - filter script. - -* H5Ocopy() does not copy reg_ref attributes correctly when shared-message - is turn on. The value of the reference in the destination attriubte is - wrong. This H5Ocopy problem will affect the h5copy tool. - -* In the C++ API, it appears that there are bugs in Attribute::write/read - and DataSet::write/read for fixed- and variable-len strings. 
The problems - are being worked on and a patch will be provided when the fixes are - available. - - -%%%%1.8.0%%%% - - -HDF5 version 1.8.0 released on Tue Feb 12 20:41:19 CST 2008 -================================================================================ - -INTRODUCTION -============ - -This document describes the differences between the HDF5-1.6.x release series -and HDF5 1.8.0, and contains information on the platforms tested and known -problems in HDF5-1.8.0. For more details, see the HISTORY-1_0-1_8_0_rc3.txt -file in the -release_docs/ directory of the HDF5 source. - -Links to the HDF5 1.8.0 source code, documentation, and additional materials -can be found on the HDF5 web page at: - - http://www.hdfgroup.org/products/hdf5/ - -The HDF5 1.8.0 release can be obtained from: - - http://www.hdfgroup.org/HDF5/release/obtain5.html - -User documentation for 1.8.0 can be accessed directly at this location: - - http://www.hdfgroup.org/HDF5/doc/ - -New features in 1.8.0, including brief general descriptions of some new -and modified APIs, are described in the "What's New in 1.8.0?" document: - - http://www.hdfgroup.org/HDF5/doc/ADGuide/WhatsNew180.html - -All new and modified APIs are listed in detail in the "HDF5 Software Changes -from Release to Release" document, in the section "Release 1.8.0 (current -release) versus Release 1.6.x": - - http://www.hdfgroup.org/HDF5/doc/ADGuide/Changes.html - -If you have any questions or comments, please send them to the HDF Help Desk: - - help@hdfgroup.org - - -CONTENTS -======== - -- New Features -- Removed Feature -- Support for new platforms and languages -- Bug Fixes since HDF5-1.6.0 -- Platforms Tested -- Supported Configuration Features Summary -- Known Problems - - -New Features -============ - - HDF5 Release 1.8.0 is a major release with many changes and new features. 
- - New format and interface features discussed in the "What's New in - HDF5 1.8.0" document include the following: - - Enhanced group object management - Enhanced attribute management and more efficient meta data handling - Expanded datatype features - Creation order tracking and indexing - Improved meta data caching and cache control - UTF-8 encoding - New I/O filters: n-bit and scale+offset compression - New link (H5L) and object (H5O) interfaces and features - External and user-defined links - New high-level APIs: - HDF5 Packet Table (H5PT) and HDF5 Dimension Scale (H5DS) - C++ and Fortran interfaces for older high-level APIs: - H5Lite (H5LT), H5Image (H5IM), and H5Table (H5TB) - New and improved tools - And more... - - http://hdfgroup.org/HDF5/doc/ADGuide/WhatsNew180.html - - - New APIs associated with these features, other interface changes - (e.g., ENUM and struct definitions), and new library configuration flags - are listed in the "Release 1.8.0 (current release) versus Release 1.6.x" - section of "HDF5 Software Changes from Release to Release." - - http://hdfgroup.org/HDF5/doc/ADGuide/Changes.html - -Compatibility -------------- - Many HDF5 users and user communities have existing applications that - they may wish to port to Release 1.8.0. Alternatively, some users may - wish to take advantage of Release 1.8.0's improved performance without - having to port such applications. To facilitate managing application - compatibility and porting applications from release to release, the HDF - Team has implemented the following features: - Individually-configurable macros that selectively map common - interface names to the old and new interfaces - Library configuration options to configure the macro mappings - - Two related documents accompany this release: - "API Compatibility Macros in HDF5" discusses the specifics of the - new individually-configurable macros and library configuration - options. 
- http://hdfgroup.org/HDF5/doc/RM/APICompatMacros.html - - "New Features in HDF5 Release 1.8.0 and Backward/Forward Format - Compatibility Issues" discusses each new feature with regard to - its impact on format compatibility. - http://hdfgroup.org/HDF5/doc/ADGuide/CompatFormat180.html - -Referenced documents --------------------- - http://hdfgroup.org/HDF5/doc/ADGuide/WhatsNew180.html - "What's New in HDF5 1.8.0" - - http://hdfgroup.org/HDF5/doc/ADGuide/Changes.html - The "Release 1.8.0 (current release) versus Release 1.6.x " - section in "HDF5 Software Changes from Release to Release" - - http://hdfgroup.org/HDF5/doc/RM/APICompatMacros.html - "API Compatibility Macros in HDF5" - - http://hdfgroup.org/HDF5/doc/ADGuide/CompatFormat180.html - "New Features in HDF5 Release 1.8.0 and Backward/Forward Format - Compatibility Issues" - - -Removed Feature -=============== -The stream virtual file driver (H5FD_STREAM) have been removed in this -release. This affects the functions H5Pset_fapl_stream and H5Pget_fapl_stream -and the constant H5FD_STREAM. - -This virtual file driver will be available at -http://hdf5-addons.origo.ethz.ch/. Note that at the time of this release, -the transition is still in progress; the necessary integration tools may -not be available when HDF5 Release 1.8.0 first comes out. - - -Support for New Platforms, Languages, and Compilers -=================================================== - - Support for Open VMS 7.3 was added. - - -Bug Fixes since HDF5-1.6.0 -========================== - This release contains numerous bug fixes. For details, see the - "Changes from 1.6.0 to 1.8.0-rc3" section of the HISTORY.txt file for - this release. - - -Platforms Tested -================ -The following platforms and compilers have been tested for for this release. 
- - AIX 5.2 (32/64 bit) xlc 8.0.0.11 - xlC 8.0 - xlf 10.01.0000.0 - mpcc_r 6.0.0.8 - mpxlf_r 8.1.1.7 - - FreeBSD 6.2-STABLE i386 gcc 3.4.6 [FreeBSD] 20060305 - (duty) g++ 3.4.6 [FreeBSD] 20060305 - gcc 4.2.1 20080123 - g++ 4.2.1 20080123 - gfortran 4.2.1 20070620 - - FreeBSD 6.2-STABLE amd64 gcc 3.4.6 [FreeBSD] 20060305 - (liberty) g++ 3.4.6 [FreeBSD] 20060305 - gcc 4.2.1 20080123 - g++ 4.2.1 20080123 - gfortran 4.2.1 20080123 - - IRIX64 6.5 (64 & n32) MIPSpro cc 7.4.4m - F90 MIPSpro 7.4.4m - C++ MIPSpro cc 7.4.4m - - Linux 2.6.9 (RHEL4) Intel 10.0 compilers - (abe.ncsa.uiuc.edu) - - Linux 2.4.21-47 gcc 3.2.3 20030502 - (osage) - - Linux 2.6.9-42.0.10 gcc 3.4.6 20060404 - (kagiso) PGI 7.0-7 (pgcc, pgf90, pgCC) - Intel 9.1 (icc, ifort, icpc) - - Linux 2.6.16.27 x86_64 AMD gcc 4.1.0 (SuSE Linux), g++ 4.1.0, - (smirom) g95 (GCC 4.0.3) - PGI 6.2-5 (pgcc, pgf90, pgCC) - Intel 9.1 (icc, iort, icpc) - - Linux 2.6.5-7.252.1-rtgfx #1 Intel(R) C++ Version 9.0 - SMP ia64 Intel(R) Fortran Itanium(R) Version 9.0 - (cobalt) SGI MPI - - SunOS 5.8 32,46 Sun WorkShop 6 update 2 C 5.3 - (Solaris 2.8) Sun WorkShop 6 update 2 Fortran 95 6.2 - Sun WorkShop 6 update 2 C++ 5.3 - - SunOS 5.10 cc: Sun C 5.8 - (linew) f90: Sun Fortran 95 8.2 - CC: Sun C++ 5.8 - - Xeon Linux 2.4.21-32.0.1.ELsmp-perfctr-lustre - (tungsten) gcc 3.2.2 20030222 - Intel(R) C++ Version 9.0 - Intel(R) Fortran Compiler Version 9.0 - - IA-64 Linux 2.4.21.SuSE_292.til1 ia64 - (NCSA tg-login) gcc 3.2.2 - Intel(R) C++ Version 8.1 - Intel(R) Fortran Compiler Version 8.1 - mpich-gm-1.2.5..10-intel-r2 - - Windows XP Visual Studio .NET - Visual Studio 2005 w/ Intel Fortran 9.1 - Cygwin(native gcc compiler and g95) - MinGW(native gcc compiler and g95) - - Windows XP x64 Visual Studio 2005 w/ Intel Fortran 9.1 - - Windows Vista Visual Studio 2005 - - MAC OS 10.4 (Intel) gcc i686-apple-darwin8-gcc-4.0.1 (GCC) 4.0.1 - G95 (GCC 4.0.3 (g95 0.91!) 
Nov 21 2006) - - Alpha Open VMS 7.3 Compaq C V6.5-001-48BCD - HP Fortran V7.6-3276 - Compaq C++ V6.5-004 - - -Supported Configuration Features Summary -======================================== - - In the tables below - y = tested and supported - n = not supported or not tested in this release - x = not working in this release - dna = does not apply - ( ) = footnote appears below second table - = testing incomplete on this feature or platform - -Platform C F90 F90 C++ zlib SZIP - parallel parallel -SunOS5.8 64-bit n y n y y y -SunOS5.8 32-bit n y n y y y -SunOS5.10 64-bit y(1) y n y y y -SunOS5.10 32-bit y(1) y n y y y -IRIX64_6.5 64-bit n y y y y y -IRIX64_6.5 32-bit n n n n y y -AIX-5.2 32-bit y y y y y y -AIX-5.2 64-bit y y y y y y -Windows XP n y(15) n(15) y y y -Windows XP x64 n y(15) n(15) y y y -Windows Vista n n n y y y -Mac OS X 10.4 PowerPC n n -Mac OS X 10.4 Intel n y n y y y -FreeBSD 4.11 n n n y y y -RedHat EL3 W (3) y(1a) y(10) y(1a) y y y -RedHat EL3 W Intel (3) n y n y y n -RedHat EL3 W PGI (3) n y n y y n -SuSe x86_64 gcc (3,12) y(1a) y(11) n y y y -SuSe x86_64 Int (3,12) n y(13) n y y n -SuSe x86_64 PGI (3,12) n y(8) n y y y -Linux 2.4 Xeon C - Lustre Intel (3,6) n y n y y n -Linux 2.6 SuSE ia64 C - Intel (3,7) y y y y y n -Linux 2.6 SGI Altix - ia64 Intel (3) y y y y y y -Alpha OpenVMS 7.3.2 n y n y n n - - - -Platform Shared Shared Shared static- Thread- - C libs F90 libs C++ libs exec safe -Solaris2.8 64-bit y y y x y -Solaris2.8 32-bit y y y x y -Solaris2.10 64-bit y x y -Solaris2.10 32-bit y x y -IRIX64_6.5 64-bit y y n y y -IRIX64_6.5 32-bit y dna y y y -AIX-5.2 & 5.3 32-bit n n n y n -AIX-5.2 & 5.3 64-bit n n n y n -Windows XP y y(15) y y y -Windows XP x64 y y(15) y y y -Windows Vista y n n y y -Mac OS X 10.3 y y n -FreeBSD 4.11 y n y y y -RedHat EL3 W (3) y y(10) y y y -RedHat EL3 W Intel (3) y y y y n -RedHat EL3 W PGI (3) y y y y n -SuSe x86_64 W GNU (3,12) y y y y y -SuSe x86_64 W Int (3,12) y y y y(14) n -SuSe x86_64 W PGI (3,12) y y y 
y(14) n -Linux 2.4 Xeon C - Lustre Intel (6) y y y y n -Linux 2.4 SuSE - ia64 C Intel (7) y y y y n -Linux 2.4 SGI Altix - ia64 Intel y y n -Alpha OpenVMS 7.3.2 n n n y n - - Notes: (1) Using mpich 1.2.6. - (1a) Using mpich2 1.0.6. - (2) Using mpt and mpich 1.2.6. - (3) Linux 2.6 with GNU, Intel, and PGI compilers, as indicated. - W or C indicates workstation or cluster, respectively. - - (6) Linux 2.4.21-32.0.1. Xeon cluster with ELsmp_perfctr_lustre - and Intel compilers - (7) Linux 2.4.21, SuSE_292.till. Ia64 cluster with Intel -compilers - (8) pgf90 - (9) With Compaq Visual Fortran 6.6c compiler. - (10) With PGI and Absoft compilers. - (11) PGI and Intel compilers for both C and Fortran - (12) AMD Opteron x86_64 - (13) ifort - (14) Yes with C and Fortran, but not with C++ - (15) Using Visual Studio 2005 or Cygwin - (16) Not tested for this release. - Compiler versions for each platform are listed in the preceding - "Platforms Tested" table. - - -Known Problems -============== -* We have discovered two problems when running collective IO parallel HDF5 - tests with chunking storage on the ChaMPIon MPI compiler on tungsten, a - Linux cluster at NCSA. - - Under some complex selection cases: - 1) MPI_Get_element returns the wrong value. - 2) MPI_Type_struct also generates the wrong derived datatype and corrupt - data may be generated. - These issues arise only when turning on collective IO with chunking storage - with some complex selections. We have not found these problems on other - MPI-IO compilers. If you encounter these problems, you may use independent - IO instead. - - To avoid this behavior, change the following line in your code - H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE); - - to - H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_INDEPENDENT); - - KY - 2007/08/24 - -* For SNL, spirit/liberty/thunderbird: The serial tests pass but parallel - tests failed with MPI-IO file locking message. 
AKC - 2007/6/25 - -* On Intel 64 Linux cluster (RH 4, Linux 2.6.9) with Intel 10.0 compilers, - use -mp -O1 compilation flags to build the libraries. A higher level of - optimization causes failures in several HDF5 library tests. - -* For SNL, Red Storm: Only parallel HDF5 is supported. The serial tests pass - when run against the parallel library; the parallel tests also pass, but - with lots of non-fatal error messages. - -* For LLNL, uP: both serial and parallel tests pass. - Zeus: Serial tests pass but parallel tests fail with a known problem in MPI. - ubgl: Serial tests pass but parallel tests fail. - -* On SUN 5.10 C++, testing fails in the "Testing Shared Datatypes with - Attributes" test. - -* Configuring with --enable-debug=all produces compiler errors on most - platforms: Users who want to run HDF5 in debug mode should use - --enable-debug rather than --enable-debug=all to enable debugging - information on most modules. - -* On Mac OS 10.4, test/dt_arith.c has some errors in conversion from long - double to (unsigned) long long and from (unsigned) long long to long double. - -* On Altix SGI with Intel 9.0, testmeta.c would not compile with -O3 - optimization flag. - -* On VAX, the Scaleoffset filter is not supported. The filter cannot be - applied to HDF5 data generated on VAX. The Scaleoffset filter only supports - the IEEE standard for floating-point data. - -* On Cray X1, a lone colon on the command line of h5dump --xml (as in - the testh5dumpxml.sh script) is misinterpereted by the operating system - and causes an error. - -* On mpich 1.2.5 and 1.2.6, if more than two processes contribute no IO and - the application asks to do collective IO, we have found that when using 4 - processors, a simple collective write will sometimes be hung. This can be - verified with t_mpi test under testpar. - -* On IRIX6.5, when the C compiler version is greater than 7.4, complicated - MPI derived datatype code will work. 
However, the user should increase - the value of the MPI_TYPE_MAX environment variable to some appropriate value - to use collective irregular selection code. For example, the current - parallel HDF5 test needs to raise MPI_TYPE_MAX to 200,000 to pass the test. - -* A dataset created or rewritten with a v1.6.3 library or after cannot be read - with the v1.6.2 library or before when the Fletcher32 EDC filter is enabled. - There was a bug in the calculating code of the Fletcher32 checksum in the - library before v1.6.3; the checksum value was not consistent between big- - endian and little-endian systems. This bug was fixed in Release 1.6.3. - However, after fixing the bug, the checksum value was no longer the same as - before on little-endian system. Library releases after 1.6.4 can still read - datasets created or rewritten with an HDF5 library of v1.6.2 or before. - SLU - 2005/6/30 - -* For version 6 (6.02 and 6.04) of the Portland Group compiler on the AMD - Opteron processor, there is a bug in the compiler for optimization(-O2). - The library failed in several tests, all related to the MULTI driver. - The problem has been reported to the vendor. - -* On IBM AIX systems, parallel HDF5 mode will fail some tests with error - messages like "INFO: 0031-XXX ...". This is from the command `poe'. - Set the environment variable MP_INFOLEVEL to 0 to minimize the messages - and run the tests again. - - The tests may fail with messages like "The socket name is already in use", - but HDF5 does not use sockets. This failure is due to problems with the - poe command trying to set up the debug socket. To resolve this problem, - check to see whether there are many old /tmp/s.pedb.* files staying around. - These are sockets used by the poe command and left behind due to failed - commands. First, ask your system administrator to clean them out. - Lastly, request IBM to provide a means to run poe without the debug socket. 
- -* The --enable-static-exec configure flag fails to compile for Solaris - platforms. This is due to the fact that not all of the system libraries on - Solaris are available in a static format. - - The --enable-static-exec configure flag also fails to correctly compile - on IBM SP2 platform for the serial mode. The parallel mode works fine with - this option. - - It is suggested that you do not use this option on these platforms - during configuration. - -* With the gcc 2.95.2 compiler, HDF5 uses the `-ansi' flag during - compilation. The ANSI version of the compiler complains about not being - able to handle the `long long' datatype with the warning: - - warning: ANSI C does not support `long long' - - This warning is innocuous and can be safely ignored. - -* The ./dsets tests fail on the TFLOPS machine if the test program, - dsets.c, is compiled with the -O option. The HDF5 library still works - correctly with the -O option. The test program works fine if it is - compiled with -O1 or -O0. Only -O (same as -O2) causes the test - program to fail. - -* Not all platforms behave correctly with Szip's shared libraries. Szip is - disabled in these cases, and a message is relayed at configure time. Static - libraries should be working on all systems that support Szip and should be - used when shared libraries are unavailable. - - There is also a configure error on Altix machines that incorrectly reports - when a version of Szip without an encoder is being used. - -* On some platforms that use Intel and Absoft compilers to build the HDF5 - Fortran library, compilation may fail for fortranlib_test.f90, fflush1.f90 - and fflush2.f90 complaining about the exit subroutine. Comment out the line - IF (total_error .ne. 0) CALL exit (total_error). - -* Information about building with PGI and Intel compilers is available in - the INSTALL file sections 4.7 and 4.8. 
- -* On at least one system, SDSC DataStar, the scheduler (in this case - LoadLeveler) sends job status updates to standard error when you run - any executable that was compiled with the parallel compilers. - - This causes problems when running "make check" on parallel builds, as - many of the tool tests function by saving the output from test runs, - and comparing it to an exemplar. - - The best solution is to reconfigure the target system so it no longer - inserts the extra text. However, this may not be practical. - - In such cases, one solution is to "setenv HDF5_Make_Ignore yes" prior to - the configure and build. This will cause "make check" to continue after - detecting errors in the tool tests. However, in the case of SDSC DataStar, - it also leaves you with some 150 "failed" tests to examine by hand. - - A second solution is to write a script to run serial tests and filter - out the text added by the scheduler. A sample script used on SDSC - DataStar is given below, but you will probably have to customize it - for your installation. - - Observe that the basic idea is to insert the script as the first item - on the command line which executes the the test. The script then - executes the test and filters out the offending text before passing - it on. - - #!/bin/csh - - set STDOUT_FILE=~/bin/serial_filter.stdout - set STDERR_FILE=~/bin/serial_filter.stderr - - rm -f $STDOUT_FILE $STDERR_FILE - - ($* > $STDOUT_FILE) >& $STDERR_FILE - - set RETURN_VALUE=$status - - cat $STDOUT_FILE - - tail +3 $STDERR_FILE - - exit $RETURN_VALUE - - You get the HDF5 make files and test scripts to execute your filter script - by setting the environment variable "RUNSERIAL" to the full path of the - script prior to running configure for parallel builds. Remember to - "unsetenv RUNSERIAL" before running configure for a serial build. - - Note that the RUNSERIAL environment variable exists so that we can - can prefix serial runs as necessary on the target system. 
On DataStar, - no prefix is necessary. However on an MPICH system, the prefix might - have to be set to something like "/usr/local/mpi/bin/mpirun -np 1" to - get the serial tests to run at all. - - In such cases, you will have to include the regular prefix in your - filter script. - -* H5Ocopy() does not copy reg_ref attributes correctly when shared-message - is turn on. The value of the reference in the destination attriubte is - wrong. This H5Ocopy problem will affect the h5copy tool. - diff --git a/release_docs/NEWSLETTER.txt b/release_docs/NEWSLETTER.txt new file mode 100644 index 00000000000..f03f710d717 --- /dev/null +++ b/release_docs/NEWSLETTER.txt @@ -0,0 +1,25 @@ +INTRODUCTION +============ + +This purpose of this document is to contain entries that can be used to quickly +produce a release newsletter. When something is added to the library that is +"newsletter worthy" (i.e., new feature, CVE fix, etc.) a summary note should +be added here. + +The format should look like this: + +* SUMMARY OF NEWSLETTER-WORTHY THING + + Here is where you describe the summary. Summarize the feature, fix, or + change in general language. Remember, RELEASE.txt is for communicating + technical specifics. Text entered here is more like advertising. + + (GitHub #123, #125) + +The GitHub #s could be relevant issues or PRs. They will probably not appear +in the final newsletter, but are so that the person writing the newsletter +has easy access to context if they have questions. + +Every entry in RELEASE.txt does NOT require an entry here. The newsletter is +for communicating major changes that are of interest to anyone. Minor bugfixes, +memory leak fixes, etc. do not require entries. 
diff --git a/release_docs/README.md b/release_docs/README.md new file mode 100644 index 00000000000..1532f1a25bf --- /dev/null +++ b/release_docs/README.md @@ -0,0 +1,102 @@ +# The `release_docs` directory + +## Intro + +This directory contains instructions for building and using the library as +well as the HDF5 history files. + +## HISTORY files + +The `HISTORY` files contain the history of this branch of HDF5. They fall into +three categories. + +### HISTORY-\[VERSION 1\]-\[VERSION 2\].txt + +These files are created when we release a new major version and include all +the changes that were made to the `develop` branch while creating a major release. + +### HISTORY-\[VERSION\].txt + +This file contains the changes that were made to a maintenance branch since +it split off from `develop`. It will also be found in the `develop` branch +when experimental releases have been created. + +### RELEASE.txt + +This is the changelog for the current version of the library. + +For a MAJOR release (or in `develop`) this files lists all the changes since the +last major version. For a MINOR release (or in a maintenance branch), this file +lists all the changes since the last release in the maintenance branch. + +Examples: + +* The file for HDF5 1.14.0 includes all the changes since HDF5 1.12.0 +* The file for HDF5 1.10.9 includes all the changes since HDF5 1.10.8 +* The file in `develop` includes all the changes since the last major release +* The file in `hdf5_1_14` includes all the changes since the last minor HDF5 1.14 release + +Note that we make no effort to bring maintenance branch `HISTORY` files back to +develop. If you want to compare, say, 1.10.4 with 1.12.3, you'd have to get +the history files from those releases and compare them by hand. 
+ +## Creating new releases + +### MAJOR release + +* If there were experimental releases, merge the experimental `HISTORY` file + and the current `RELEASE.txt` by category to create a separate, unified + file that ignores the experimental releases. Don't check this in yet or + clobber any existing `HISTORY`/`RELEASE` files, but put it someplace handy for + use in later steps. + +* Create the new maintenance branch + +In develop: +* Create the new `HISTORY-\[VERSION 1\]-\[VERSION 2\].txt` file + * If there is an experimental `HISTORY` file, add `RELEASE.txt` to the beginning of it and use that + * Otherwise, start with `RELEASE.txt` + * Add the introduction boilerplate like in the other `HISTORY` files (TOC, etc.) +* Delete any experimental `HISTORY` file +* Clear out `RELEASE.txt` + +Note that we're KEEPING any experimental release history information in the +`HISTORY-\[VERSION 1\]-\[VERSION 2\].txt` file, so do NOT use the merged file in +the above steps! + +In the new maintenance branch: +* Create the new `HISTORY-\[VERSION\].txt` file + * If there is an experimental `HISTORY` file use the combined file you created earlier + * Otherwise, start with `RELEASE.txt` + * Add the introduction boilerplate like in the other `HISTORY` files (TOC, etc.) +* Delete any experimental `HISTORY` file +* Clear out `RELEASE.txt` + +* Create the new release branch + +In the new release branch: +* If there were experimental releases, use the combined file you created earlier as `RELEASE.txt` +* Otherwise the `RELEASE.txt` will be used as-is + +### MINOR release + +* Create the release branch + +In the maintenance branch: +* Add the contents of `RELEASE.txt` to the beginnnig of `HISTORY-\[VERSION\].txt` +* Clear out `RELEASE.txt` + +### EXPERIMENTAL release + +* Add the contents of `RELEASE.txt` to the beginnnig of `HISTORY-\[VERSION\].txt` +* Clear out `RELEASE.txt` + +## INSTALL files + +These files include instructions for building and installing HDF5 on various +platforms. 
+ +## USING files + +These files document how to build HDF5 applications with an installed HDF5 +library. From e0c21e2f8feb0fe5b95bd0c38524ded125ecb17d Mon Sep 17 00:00:00 2001 From: jhendersonHDF Date: Wed, 29 Mar 2023 09:36:34 -0500 Subject: [PATCH 033/108] Fix a memory corruption issue in H5S__point_project_simple (#2626) (#2630) --- release_docs/RELEASE.txt | 14 ++++++++++++++ src/H5Spoint.c | 2 +- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 05160fa52c0..40cf0b26683 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -205,6 +205,20 @@ Bug Fixes since HDF5-1.12.1 release =================================== Library ------- + - Fixed a memory corruption issue that can occur when reading + from a dataset using a hyperslab selection in the file + dataspace and a point selection in the memory dataspace + + When reading from a dataset using a hyperslab selection in + the dataset's file dataspace and a point selection in the + dataset's memory dataspace where the file dataspace's "rank" + is greater than the memory dataspace's "rank", memory corruption + could occur due to an incorrect number of selection points + being copied when projecting the point selection onto the + hyperslab selection's dataspace. 
+ + (JTH - 2023/03/23) + - Seg fault on file close h5debug fails at file close with core dump on a file that has an diff --git a/src/H5Spoint.c b/src/H5Spoint.c index 3e85affe2c8..22feaf83b40 100644 --- a/src/H5Spoint.c +++ b/src/H5Spoint.c @@ -2316,7 +2316,7 @@ H5S__point_project_simple(const H5S_t *base_space, H5S_t *new_space, hsize_t *of /* Copy over the point's coordinates */ HDmemset(new_node->pnt, 0, sizeof(hsize_t) * rank_diff); H5MM_memcpy(&new_node->pnt[rank_diff], base_node->pnt, - (new_space->extent.rank * sizeof(hsize_t))); + (base_space->extent.rank * sizeof(hsize_t))); /* Keep the order the same when copying */ if (NULL == prev_node) From f8993b81deea0537344474606d71234526d1c215 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Wed, 29 Mar 2023 09:37:20 -0500 Subject: [PATCH 034/108] Revert the removal of HDF5GroupInfo class and deprecate. (#2637) --- java/src/Makefile.am | 1 + java/src/hdf/hdf5lib/CMakeLists.txt | 1 + java/src/hdf/hdf5lib/HDF5GroupInfo.java | 188 ++++++++++++++++++++++++ release_docs/RELEASE.txt | 8 + 4 files changed, 198 insertions(+) create mode 100644 java/src/hdf/hdf5lib/HDF5GroupInfo.java diff --git a/java/src/Makefile.am b/java/src/Makefile.am index 8d9182db5a5..c9b0d7c71f3 100644 --- a/java/src/Makefile.am +++ b/java/src/Makefile.am @@ -108,6 +108,7 @@ hdf5_java_JAVA = \ ${pkgpath}/structs/H5AC_cache_config_t.java \ ${pkgpath}/H5.java \ ${pkgpath}/HDF5Constants.java \ + ${pkgpath}/HDF5GroupInfo.java \ ${pkgpath}/HDFArray.java \ ${pkgpath}/HDFNativeData.java diff --git a/java/src/hdf/hdf5lib/CMakeLists.txt b/java/src/hdf/hdf5lib/CMakeLists.txt index 16745fa22c5..1afc8b0e533 100644 --- a/java/src/hdf/hdf5lib/CMakeLists.txt +++ b/java/src/hdf/hdf5lib/CMakeLists.txt @@ -101,6 +101,7 @@ set (HDF5_JAVADOC_HDF_HDF5_STRUCTS_SOURCES set (HDF5_JAVA_HDF_HDF5_SOURCES HDFArray.java HDF5Constants.java + HDF5GroupInfo.java HDFNativeData.java H5.java ) diff --git 
a/java/src/hdf/hdf5lib/HDF5GroupInfo.java b/java/src/hdf/hdf5lib/HDF5GroupInfo.java new file mode 100644 index 00000000000..50c7db0e1a8 --- /dev/null +++ b/java/src/hdf/hdf5lib/HDF5GroupInfo.java @@ -0,0 +1,188 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the COPYING file, which can be found at the root of the source code * + * distribution tree, or in https://www.hdfgroup.org/licenses. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +package hdf.hdf5lib; + +/** + *

    + * This class is a container for the information reported about an HDF5 Object + * from the H5Gget_obj_info() method. + *

    + * The fileno and objno fields contain four values which uniquely identify an + * object among those HDF5 files which are open: if all four values are the same + * between two objects, then the two objects are the same (provided both files + * are still open). The nlink field is the number of hard links to the object or + * zero when information is being returned about a symbolic link (symbolic links + * do not have hard links but all other objects always have at least one). The + * type field contains the type of the object, one of H5G_GROUP, H5G_DATASET, or + * H5G_LINK. The mtime field contains the modification time. If information is + * being returned about a symbolic link then linklen will be the length of the + * link value (the name of the pointed-to object with the null terminator); + * otherwise linklen will be zero. Other fields may be added to this structure + * in the future. + * + * @deprecated Not for public use. It is not used by the library. + * This class assumes that an object can contain four values which uniquely identify an + * object among those HDF5 files which are open. This is no longer valid in future + * HDF5 releases. + */ + +@Deprecated +public class HDF5GroupInfo { + long[] fileno; + long[] objno; + int nlink; + int type; + long mtime; + int linklen; + + /** + * Container for the information reported about an HDF5 Object + * from the H5Gget_obj_info() method + */ + public HDF5GroupInfo() + { + fileno = new long[2]; + objno = new long[2]; + nlink = -1; + type = -1; + mtime = 0; + linklen = 0; + } + + /** + * Sets the HDF5 group information. Used by the JHI5. 
+ * + * @param fn + * File id number + * @param on + * Object id number + * @param nl + * Number of links + * @param t + * Type of the object + * @param mt + * Modification time + * @param len + * Length of link + **/ + public void setGroupInfo(long[] fn, long[] on, int nl, int t, long mt, int len) + { + fileno = fn; + objno = on; + nlink = nl; + type = t; + mtime = mt; + linklen = len; + } + + /** Resets all the group information to defaults. */ + public void reset() + { + fileno[0] = 0; + fileno[1] = 0; + objno[0] = 0; + objno[1] = 0; + nlink = -1; + type = -1; + mtime = 0; + linklen = 0; + } + + /** + * fileno accessors + * @return the file number if successful + */ + public long[] getFileno() { return fileno; } + + /** + * accessors + * @return the object number if successful + */ + public long[] getObjno() { return objno; } + + /** + * accessors + * @return type of group if successful + */ + public int getType() { return type; } + + /** + * accessors + * @return the number of links in the group if successful + */ + public int getNlink() { return nlink; } + + /** + * accessors + * @return the modified time value if successful + */ + public long getMtime() { return mtime; } + + /** + * accessors + * @return a length of link name if successful + */ + public int getLinklen() { return linklen; } + + /** + * The fileno and objno fields contain four values which uniquely identify + * an object among those HDF5 files. + */ + @Override + public boolean equals(Object obj) + { + if (!(obj instanceof HDF5GroupInfo)) { + return false; + } + + HDF5GroupInfo target = (HDF5GroupInfo)obj; + if ((fileno[0] == target.fileno[0]) && (fileno[1] == target.fileno[1]) && + (objno[0] == target.objno[0]) && (objno[1] == target.objno[1])) { + return true; + } + else { + return false; + } + } + + /** + * Returns the object id. + * + * @return the object id + */ + public long getOID() { return objno[0]; } + + /** + * /** Converts this object to a String representation. 
+ * + * @return a string representation of this object + */ + @Override + public String toString() + { + String fileStr = "fileno=null"; + String objStr = "objno=null"; + + if (fileno != null) { + fileStr = "fileno[0]=" + fileno[0] + ",fileno[1]=" + fileno[1]; + } + + if (objno != null) { + objStr = "objno[0]=" + objno[0] + ",objno[1]=" + objno[1]; + } + + return getClass().getName() + "[" + fileStr + "," + objStr + ",type=" + type + ",nlink=" + nlink + + ",mtime=" + mtime + ",linklen=" + linklen + "]"; + } +} diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 40cf0b26683..23cdc36385c 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -121,6 +121,14 @@ New Features Java Library: ------------- + - HDF5GroupInfo class has been deprecated. + + This class assumes that an object can contain four values which uniquely identify an + object among those HDF5 files which are open. This is no longer valid in future + HDF5 releases. + + (ADB - 2023/03/27) + - Added version of H5Rget_name to return the name as a Java string. 
Other functions that get_name process the get_size then get the name From ab1af79798985b57401596677f7db8eb186f55a1 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Wed, 29 Mar 2023 09:37:52 -0500 Subject: [PATCH 035/108] Update cross compile checks and files #2497 (#2565) --- config/cmake/ConfigureChecks.cmake | 179 +++++++++++++++-------------- config/toolchain/aarch64.cmake | 1 + release_docs/INSTALL_CMake.txt | 81 +++++++++++++ 3 files changed, 174 insertions(+), 87 deletions(-) diff --git a/config/cmake/ConfigureChecks.cmake b/config/cmake/ConfigureChecks.cmake index 472f144d4e0..27eb93f8ff7 100644 --- a/config/cmake/ConfigureChecks.cmake +++ b/config/cmake/ConfigureChecks.cmake @@ -210,83 +210,85 @@ if (HDF5_ENABLE_MIRROR_VFD) endif() #----------------------------------------------------------------------------- -# Check if C has __float128 extension +# Check if C has __float128 extension (used for Fortran only) #----------------------------------------------------------------------------- -HDF_CHECK_TYPE_SIZE(__float128 _SIZEOF___FLOAT128) -if (${_SIZEOF___FLOAT128}) - set (${HDF_PREFIX}_HAVE_FLOAT128 1) - set (${HDF_PREFIX}_SIZEOF___FLOAT128 ${_SIZEOF___FLOAT128}) -else () - set (${HDF_PREFIX}_HAVE_FLOAT128 0) - set (${HDF_PREFIX}_SIZEOF___FLOAT128 0) -endif () +if (HDF5_BUILD_FORTRAN) + HDF_CHECK_TYPE_SIZE(__float128 _SIZEOF___FLOAT128) + if (${_SIZEOF___FLOAT128}) + set (${HDF_PREFIX}_HAVE_FLOAT128 1) + set (${HDF_PREFIX}_SIZEOF___FLOAT128 ${_SIZEOF___FLOAT128}) + else () + set (${HDF_PREFIX}_HAVE_FLOAT128 0) + set (${HDF_PREFIX}_SIZEOF___FLOAT128 0) + endif () -HDF_CHECK_TYPE_SIZE(_Quad _SIZEOF__QUAD) -if (NOT ${_SIZEOF__QUAD}) - set (${HDF_PREFIX}_SIZEOF__QUAD 0) -else () - set (${HDF_PREFIX}_SIZEOF__QUAD ${_SIZEOF__QUAD}) -endif () + HDF_CHECK_TYPE_SIZE(_Quad _SIZEOF__QUAD) + if (NOT ${_SIZEOF__QUAD}) + set (${HDF_PREFIX}_SIZEOF__QUAD 0) + else () + set (${HDF_PREFIX}_SIZEOF__QUAD ${_SIZEOF__QUAD}) + endif () 
-#----------------------------------------------------------------------------- -# The provided CMake C macros don't provide a general compile/run function -# so this one is used. -#----------------------------------------------------------------------------- -set (RUN_OUTPUT_PATH_DEFAULT ${CMAKE_BINARY_DIR}) -macro (C_RUN FUNCTION_NAME SOURCE_CODE RETURN_VAR RETURN_OUTPUT_VAR) - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "Detecting C ${FUNCTION_NAME}") - endif () - file (WRITE - ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/testCCompiler1.c - ${SOURCE_CODE} - ) - TRY_RUN (RUN_RESULT_VAR COMPILE_RESULT_VAR - ${CMAKE_BINARY_DIR} - ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/testCCompiler1.c - COMPILE_DEFINITIONS "-D_SIZEOF___FLOAT128=${H5_SIZEOF___FLOAT128};-D_HAVE_QUADMATH_H=${H5_HAVE_QUADMATH_H}" - COMPILE_OUTPUT_VARIABLE COMPILEOUT - RUN_OUTPUT_VARIABLE OUTPUT_VAR - ) + if (NOT CMAKE_CROSSCOMPILING) + #----------------------------------------------------------------------------- + # The provided CMake C macros don't provide a general compile/run function + # so this one is used. 
+ #----------------------------------------------------------------------------- + set (RUN_OUTPUT_PATH_DEFAULT ${CMAKE_BINARY_DIR}) + macro (C_RUN FUNCTION_NAME SOURCE_CODE RETURN_VAR RETURN_OUTPUT_VAR) + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "Detecting C ${FUNCTION_NAME}") + endif () + file (WRITE + ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/testCCompiler1.c + ${SOURCE_CODE} + ) + TRY_RUN (RUN_RESULT_VAR COMPILE_RESULT_VAR + ${CMAKE_BINARY_DIR} + ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/testCCompiler1.c + COMPILE_DEFINITIONS "-D_SIZEOF___FLOAT128=${H5_SIZEOF___FLOAT128};-D_HAVE_QUADMATH_H=${H5_HAVE_QUADMATH_H}" + COMPILE_OUTPUT_VARIABLE COMPILEOUT + RUN_OUTPUT_VARIABLE OUTPUT_VAR + ) - set (${RETURN_OUTPUT_VAR} ${OUTPUT_VAR}) + set (${RETURN_OUTPUT_VAR} ${OUTPUT_VAR}) - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * ") - message (VERBOSE "Test COMPILE_RESULT_VAR ${COMPILE_RESULT_VAR} ") - message (VERBOSE "Test COMPILE_OUTPUT ${COMPILEOUT} ") - message (VERBOSE "* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * ") - message (VERBOSE "Test RUN_RESULT_VAR ${RUN_RESULT_VAR} ") - message (VERBOSE "* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * ") - endif () + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * ") + message (VERBOSE "Test COMPILE_RESULT_VAR ${COMPILE_RESULT_VAR} ") + message (VERBOSE "Test COMPILE_OUTPUT ${COMPILEOUT} ") + message (VERBOSE "* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * ") + message (VERBOSE "Test RUN_RESULT_VAR ${RUN_RESULT_VAR} ") + message (VERBOSE "* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * ") + endif () - if (COMPILE_RESULT_VAR) - if (RUN_RESULT_VAR EQUAL "0") - set (${RETURN_VAR} 1 CACHE INTERNAL "Have C function ${FUNCTION_NAME}") - if (CMAKE_VERSION 
VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "Testing C ${FUNCTION_NAME} - OK") + if (COMPILE_RESULT_VAR) + if (RUN_RESULT_VAR EQUAL "0") + set (${RETURN_VAR} 1 CACHE INTERNAL "Have C function ${FUNCTION_NAME}") + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "Testing C ${FUNCTION_NAME} - OK") + endif () + file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeOutput.log + "Determining if the C ${FUNCTION_NAME} exists passed with the following output:\n" + "${OUTPUT_VAR}\n\n" + ) + else () + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "Testing C ${FUNCTION_NAME} - Fail") + endif () + set (${RETURN_VAR} 0 CACHE INTERNAL "Have C function ${FUNCTION_NAME}") + file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log + "Determining if the C ${FUNCTION_NAME} exists failed with the following output:\n" + "${OUTPUT_VAR}\n\n") endif () - file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeOutput.log - "Determining if the C ${FUNCTION_NAME} exists passed with the following output:\n" - "${OUTPUT_VAR}\n\n" - ) else () - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "Testing C ${FUNCTION_NAME} - Fail") - endif () - set (${RETURN_VAR} 0 CACHE INTERNAL "Have C function ${FUNCTION_NAME}") - file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log - "Determining if the C ${FUNCTION_NAME} exists failed with the following output:\n" - "${OUTPUT_VAR}\n\n") - endif () - else () message (FATAL_ERROR "Compilation of C ${FUNCTION_NAME} - Failed") - endif () -endmacro () + endif () + endmacro () -set (PROG_SRC - " + set (PROG_SRC + " #include \n\ #include \n\ #define CHECK_FLOAT128 _SIZEOF___FLOAT128\n\ @@ -307,31 +309,34 @@ set (PROG_SRC #else\n\ #define C_LDBL_DIG LDBL_DIG\n\ #endif\n\nint main() {\nprintf(\"\\%d\\\;\\%d\\\;\", C_LDBL_DIG, C_FLT128_DIG)\\\;\n\nreturn 0\\\;\n}\n - " -) + " + ) -C_RUN ("maximum decimal precision for C" ${PROG_SRC} PROG_RES 
PROG_OUTPUT4) -message (STATUS "Testing maximum decimal precision for C - ${PROG_OUTPUT4}") + C_RUN ("maximum decimal precision for C" ${PROG_SRC} PROG_RES PROG_OUTPUT4) + message (STATUS "Testing maximum decimal precision for C - ${PROG_OUTPUT4}") -# dnl The output from the above program will be: -# dnl -- long double decimal precision -- __float128 decimal precision + # dnl The output from the above program will be: + # dnl -- long double decimal precision -- __float128 decimal precision -list (GET PROG_OUTPUT4 0 H5_LDBL_DIG) -list (GET PROG_OUTPUT4 1 H5_FLT128_DIG) + list (GET PROG_OUTPUT4 0 H5_LDBL_DIG) + list (GET PROG_OUTPUT4 1 H5_FLT128_DIG) + endif () -if (${HDF_PREFIX}_SIZEOF___FLOAT128 EQUAL "0" OR FLT128_DIG EQUAL "0") - set (${HDF_PREFIX}_HAVE_FLOAT128 0) - set (${HDF_PREFIX}_SIZEOF___FLOAT128 0) - set (_PAC_C_MAX_REAL_PRECISION ${H5_LDBL_DIG}) -else () - set (_PAC_C_MAX_REAL_PRECISION ${H5_FLT128_DIG}) -endif () -if (NOT ${_PAC_C_MAX_REAL_PRECISION}) - set (${HDF_PREFIX}_PAC_C_MAX_REAL_PRECISION 0) -else () - set (${HDF_PREFIX}_PAC_C_MAX_REAL_PRECISION ${_PAC_C_MAX_REAL_PRECISION}) -endif () -message (STATUS "maximum decimal precision for C var - ${${HDF_PREFIX}_PAC_C_MAX_REAL_PRECISION}") + if (${HDF_PREFIX}_SIZEOF___FLOAT128 EQUAL "0" OR FLT128_DIG EQUAL "0") + set (${HDF_PREFIX}_HAVE_FLOAT128 0) + set (${HDF_PREFIX}_SIZEOF___FLOAT128 0) + set (_PAC_C_MAX_REAL_PRECISION ${H5_LDBL_DIG}) + else () + set (_PAC_C_MAX_REAL_PRECISION ${H5_FLT128_DIG}) + endif () + if (NOT ${_PAC_C_MAX_REAL_PRECISION}) + set (${HDF_PREFIX}_PAC_C_MAX_REAL_PRECISION 0) + else () + set (${HDF_PREFIX}_PAC_C_MAX_REAL_PRECISION ${_PAC_C_MAX_REAL_PRECISION}) + endif () + message (STATUS "maximum decimal precision for C var - ${${HDF_PREFIX}_PAC_C_MAX_REAL_PRECISION}") + +endif() #----------------------------------------------------------------------------- # Macro to determine the various conversion capabilities diff --git a/config/toolchain/aarch64.cmake 
b/config/toolchain/aarch64.cmake index 03f4e5e9f58..69968336f78 100644 --- a/config/toolchain/aarch64.cmake +++ b/config/toolchain/aarch64.cmake @@ -11,6 +11,7 @@ set (CMAKE_FIND_ROOT_PATH /usr/${TOOLCHAIN_PREFIX}) set (CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) set (CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) set (CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) +set (CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY) set (CMAKE_CROSSCOMPILING_EMULATOR qemu-aarch64) include_directories(/usr/${TOOLCHAIN_PREFIX}/include) diff --git a/release_docs/INSTALL_CMake.txt b/release_docs/INSTALL_CMake.txt index 9113af7213d..66bd732d28e 100644 --- a/release_docs/INSTALL_CMake.txt +++ b/release_docs/INSTALL_CMake.txt @@ -13,6 +13,7 @@ Section V: Options for building HDF5 Libraries with CMake command line Section VI: CMake option defaults for HDF5 Section VII: User Defined Options for HDF5 Libraries with CMake Section VIII: User Defined Compile Flags for HDF5 Libraries with CMake +Section IX: Considerations for cross-compiling ************************************************************************ @@ -939,6 +940,86 @@ The HDF5_ENABLE_COVERAGE option will add "-g -O0 -fprofile-arcs -ftest-coverage" to CMAKE_C_FLAGS. +======================================================================== +IX: Considerations for cross-compiling +======================================================================== + +Cross-compiling has several consequences for CMake: + CMake cannot automatically detect the target platform. + CMake cannot find libraries and headers in the default system directories. + Executables built during cross compiling cannot be executed. + +Cross-compiling support means that CMake separates information about the +build platform and target platform and gives the user mechanisms to solve +cross-compiling issues without additional requirements such as running +virtual machines, etc. 
+ +CMake uses a toolchain of utilities to compile, link libraries and create +archives, and other tasks to drive the build. The toolchain utilities +available are determined by the languages enabled. + +CMake stores info about the current toolchain in the following variables: + CMAKE_C_COMPILER, + CMAKE_CXX_COMPILER. +They contain paths to the C and C++ compilers respectively. This is usually +enough on desktop platforms. In the case of embedded systems, a custom +linker and assembler setting may be needed. In more complex projects +you may need to additionally specify binaries to other parts of the toolchain +(size, ranlib, objcopy…). All these tools should be set in the corresponding +variables: + CMAKE_AR, + CMAKE_ASM_COMPILER, + CMAKE_LINKER, + CMAKE_OBJCOPY, + CMAKE_RANLIB + +As for the host and target operating systems, CMake stores their names in the +following variables: + CMAKE_HOST_SYSTEM_NAME – name of the platform, on which CMake is running (host platform). + On major operating systems this is set to the Linux, Windows or + Darwin (MacOS) value. + CMAKE_SYSTEM_NAME – name of the platform, for which we are building (target platform). + By default, this value is the same as CMAKE_HOST_SYSTEM_NAME, which + means that we are building for the local platform (no cross-compilation). + +Put the toolchain variables into a separate file (e.g. .cmake) +and set CMAKE_TOOLCHAIN_FILE variable to the path of that file. +If cmake is invoked with the command line parameter: + --toolchain path/to/file +or + -DCMAKE_TOOLCHAIN_FILE=path/to/file +the file will be loaded early to set values for the compilers. The +CMAKE_CROSSCOMPILING variable is set to true when CMake is cross-compiling. + +Structure of the toolchain file +------------------------------- +In fact, the toolchain file doesn’t have any structure. You can put anything you +want there. 
But the best practice is to define at least these settings: +path to the toolchain binaries (C compiler, C++ compiler, linker, etc.) +name of the target platform (and optionally target processor architecture) +required compilation and linking flags on that particular platform +toolchain sysroot settings + +It is recommended that you set the CMAKE_FIND_ROOT_PATH variable to a path where +you have an exact copy of the root filesystem you have on your target device (with +libraries and binaries pre-compiled for the target processor). + +References: + https://cmake.org/cmake/help/latest/manual/cmake-toolchains.7.html + https://gitlab.com/embeddedlinux/libs/platform + https://discourse.cmake.org/t/cross-compile-for-aarch64-on-ubuntu/2161/10 + https://stackoverflow.com/questions/54539682/how-to-set-up-cmake-to-cross-compile-with-clang-for-arm-embedded-on-windows?rq=1 + https://developer.android.com/ndk/guides/cmake + +Predefine H5Tinit.c file +------------------------------- +The one file that needs to be pre-generated is the H5Tinit.c file. The variables +indicated in the error log (see above) are the variables that need to match the target system. 
+ +The HDF5 CMake variables; + HDF5_USE_PREGEN: set this to true + HDF5_USE_PREGEN_DIR: set this path to the preset H5Tinit.c file + ======================================================================== For further assistance, send email to help@hdfgroup.org ======================================================================== From abdc160a97c78b01580308fe43204a202d3a6951 Mon Sep 17 00:00:00 2001 From: Larry Knox Date: Wed, 29 Mar 2023 13:15:11 -0500 Subject: [PATCH 036/108] Minor cherry-pick merges to 1.12 (#2581) --- .github/CODEOWNERS | 2 +- .github/workflows/clang-format-check.yml | 2 +- .github/workflows/clang-format-fix.yml | 15 ++++++++++++--- doxygen/examples/tables/propertyLists.dox | 6 +----- release_docs/RELEASE.txt | 13 +++++++++++++ src/H5Oattr.c | 7 +++---- src/H5Ppublic.h | 11 +++++++---- testpar/testpar.h | 7 +++++-- tools/test/h5dump/CMakeTests.cmake | 5 +++++ tools/test/h5dump/testh5dump.sh.in | 5 +++++ tools/testfiles/tCVE-2021-37501_attr_decode.h5 | Bin 0 -> 48544 bytes 11 files changed, 53 insertions(+), 20 deletions(-) create mode 100644 tools/testfiles/tCVE-2021-37501_attr_decode.h5 diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 8d736843989..506c668b94d 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -2,7 +2,7 @@ # Each line is a file pattern followed by one or more owners. # These owners will be the default owners for everything in the repo. -* @lrknox @derobins @byrnHDF @fortnern @jhendersonHDF @vchoi-hdfgroup @bmribler @raylu-hdf @mattjala @brtnfld +* @lrknox @derobins @byrnHDF @fortnern @jhendersonHDF @qkoziol @vchoi-hdfgroup @bmribler @glennsong09 @mattjala @brtnfld # Order is important. The last matching pattern has the most precedence. 
# So if a pull request only touches javascript files, only these owners diff --git a/.github/workflows/clang-format-check.yml b/.github/workflows/clang-format-check.yml index e8251f2ce7a..70809a1156a 100644 --- a/.github/workflows/clang-format-check.yml +++ b/.github/workflows/clang-format-check.yml @@ -8,7 +8,7 @@ jobs: if: "!contains(github.event.head_commit.message, 'skip-ci')" steps: - uses: actions/checkout@v3 - - name: Run clang-format style check for C and Java programs. + - name: Run clang-format style check for C and Java code uses: DoozyX/clang-format-lint-action@v0.13 with: source: '.' diff --git a/.github/workflows/clang-format-fix.yml b/.github/workflows/clang-format-fix.yml index c1110cf2b98..feaa3d0014e 100644 --- a/.github/workflows/clang-format-fix.yml +++ b/.github/workflows/clang-format-fix.yml @@ -1,15 +1,24 @@ -name: clang-format Check +# NOTE: This action requires write permissions to be set in your GitHub +# repo/fork for it to be able to commit changes. +# +# This is currently enabled via: +# +# settings > Actions > General > Workflow permissions +# +# which you will need to set to "Read and write permissions" +# +name: clang-format Commit Changes on: workflow_dispatch: push: jobs: formatting-check: - name: Formatting Check + name: Commit Format Changes runs-on: ubuntu-latest if: "!contains(github.event.head_commit.message, 'skip-ci')" steps: - uses: actions/checkout@v3 - - name: Run clang-format style check for C and Java programs. + - name: Fix C and Java formatting issues detected by clang-format uses: DoozyX/clang-format-lint-action@v0.13 with: source: '.' diff --git a/doxygen/examples/tables/propertyLists.dox b/doxygen/examples/tables/propertyLists.dox index 375fd509702..e77c2fd4f6e 100644 --- a/doxygen/examples/tables/propertyLists.dox +++ b/doxygen/examples/tables/propertyLists.dox @@ -372,10 +372,6 @@ regarding the driver. or retrieves information regarding driver. 
-#H5Pset_fapl_onion/#H5Pget_fapl_onion -Modifies/queries the file driver properties of the onion driver. - - #H5Pset_fapl_sec2 Sets driver for unbuffered permanent files or retrieves information regarding driver. @@ -952,4 +948,4 @@ encoding for object names. //! [acpl_table] * */ - \ No newline at end of file + diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 23cdc36385c..97f137d5266 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -226,6 +226,19 @@ Bug Fixes since HDF5-1.12.1 release hyperslab selection's dataspace. (JTH - 2023/03/23) + + - Fix CVE-2021-37501 / GHSA-rfgw-5vq3-wrjf + + Check for overflow when calculating on-disk attribute data size. + + A bogus hdf5 file may contain dataspace messages with sizes + which lead to the on-disk data sizes to exceed what is addressable. + When calculating the size, make sure, the multiplication does not + overflow. + The test case was crafted in a way that the overflow caused the + size to be 0. + + (EFE - 2023/02/11 GH-2458) - Seg fault on file close diff --git a/src/H5Oattr.c b/src/H5Oattr.c index ac643eafac0..cb06f25a725 100644 --- a/src/H5Oattr.c +++ b/src/H5Oattr.c @@ -221,10 +221,6 @@ H5O__attr_decode(H5F_t *f, H5O_t *open_oh, unsigned H5_ATTR_UNUSED mesg_flags, u else p += attr->shared->ds_size; - /* Get the datatype's size */ - if (0 == (dt_size = H5T_get_size(attr->shared->dt))) - HGOTO_ERROR(H5E_ATTR, H5E_CANTGET, NULL, "unable to get datatype size") - /* Get the datatype & dataspace sizes */ if (0 == (dt_size = H5T_get_size(attr->shared->dt))) HGOTO_ERROR(H5E_ATTR, H5E_CANTGET, NULL, "unable to get datatype size") @@ -234,6 +230,9 @@ H5O__attr_decode(H5F_t *f, H5O_t *open_oh, unsigned H5_ATTR_UNUSED mesg_flags, u /* Compute the size of the data */ H5_CHECKED_ASSIGN(attr->shared->data_size, size_t, ds_size * (hsize_t)dt_size, hsize_t); + /* Check if multiplication has overflown */ + if ((attr->shared->data_size / dt_size) != ds_size) + HGOTO_ERROR(H5E_RESOURCE, 
H5E_OVERFLOW, NULL, "data size exceeds addressable range") /* Go get the data */ if (attr->shared->data_size) { diff --git a/src/H5Ppublic.h b/src/H5Ppublic.h index 04b8f0eb423..33a1df9c540 100644 --- a/src/H5Ppublic.h +++ b/src/H5Ppublic.h @@ -2321,10 +2321,13 @@ H5_DLL herr_t H5Pset_deflate(hid_t plist_id, unsigned level); * (#H5Z_FILTER_DEFLATE) and the Fletcher32 error detection filter * (#H5Z_FILTER_FLETCHER32). * - * The array \p c_values contains \p cd_nelmts integers which are - * auxiliary data for the filter. The integer values will be - * stored in the dataset object header as part of the filter - * information. + * The array \p cd_values contains \p cd_nelmts unsigned integers + * which are auxiliary data for the filter. The values are typically + * used as parameters to control the filter. In a filter's + * \p set_local method (called from \p H5Dcreate), the values are + * interpreted and possibly modified before they are used to control + * the filter. These, possibly modified values, are then stored in + * the dataset object header as auxiliary data for the filter. * * The \p flags argument is a bit vector with the following * fields specifying certain general properties of the filter: diff --git a/testpar/testpar.h b/testpar/testpar.h index 6c380a989c4..58bcab469e9 100644 --- a/testpar/testpar.h +++ b/testpar/testpar.h @@ -30,8 +30,11 @@ * mesg is not an empty string. */ #define MESG(mesg) \ - if (VERBOSE_MED && *mesg != '\0') \ - HDprintf("%s\n", mesg) + do { \ + if (VERBOSE_MED && *mesg != '\0') { \ + HDprintf("%s\n", mesg); \ + } \ + } while (0) /* * VRFY: Verify if the condition val is true. 
diff --git a/tools/test/h5dump/CMakeTests.cmake b/tools/test/h5dump/CMakeTests.cmake index 2505e847bc6..be1a414fce7 100644 --- a/tools/test/h5dump/CMakeTests.cmake +++ b/tools/test/h5dump/CMakeTests.cmake @@ -339,6 +339,7 @@ ${HDF5_TOOLS_DIR}/testfiles/tCVE_2018_11206_fill_old.h5 ${HDF5_TOOLS_DIR}/testfiles/tCVE_2018_11206_fill_new.h5 ${HDF5_TOOLS_DIR}/testfiles/zerodim.h5 + ${HDF5_TOOLS_DIR}/testfiles/tCVE-2021-37501_attr_decode.h5 #STD_REF_OBJ files ${HDF5_TOOLS_DIR}/testfiles/trefer_attr.h5 ${HDF5_TOOLS_DIR}/testfiles/trefer_compat.h5 @@ -1187,6 +1188,10 @@ ADD_H5_TEST (tCVE_2018_11206_fill_old 1 tCVE_2018_11206_fill_old.h5) ADD_H5_TEST (tCVE_2018_11206_fill_new 1 tCVE_2018_11206_fill_new.h5) + # test to verify fix for CVE-2021-37501: multiplication overflow in H5O__attr_decode() + # https://github.com/ST4RF4LL/Something_Found/blob/main/HDF5_v1.13.0_h5dump_heap_overflow.assets/poc + ADD_H5_TEST (tCVE-2021-37501_attr_decode 1 tCVE-2021-37501_attr_decode.h5) + ############################################################################## ### P L U G I N T E S T S ############################################################################## diff --git a/tools/test/h5dump/testh5dump.sh.in b/tools/test/h5dump/testh5dump.sh.in index 5d7ff8828a1..ae5cc564ec7 100644 --- a/tools/test/h5dump/testh5dump.sh.in +++ b/tools/test/h5dump/testh5dump.sh.in @@ -183,6 +183,7 @@ $SRC_H5DUMP_TESTFILES/tvms.h5 $SRC_H5DUMP_TESTFILES/err_attr_dspace.h5 $SRC_H5DUMP_TESTFILES/tCVE_2018_11206_fill_old.h5 $SRC_H5DUMP_TESTFILES/tCVE_2018_11206_fill_new.h5 +$SRC_H5DUMP_TESTFILES/tCVE-2021-37501_attr_decode.h5 " LIST_OTHER_TEST_FILES=" @@ -1485,6 +1486,10 @@ TOOLTEST err_attr_dspace.ddl err_attr_dspace.h5 TOOLTEST_FAIL tCVE_2018_11206_fill_old.h5 TOOLTEST_FAIL tCVE_2018_11206_fill_new.h5 +# test to verify fix for CVE-2021-37501: multiplication overflow in H5O__attr_decode() +# https://github.com/ST4RF4LL/Something_Found/blob/main/HDF5_v1.13.0_h5dump_heap_overflow.assets/poc +TOOLTEST_FAIL 
tCVE-2021-37501_attr_decode.h5 + # Clean up temporary files/directories CLEAN_TESTFILES_AND_TESTDIR diff --git a/tools/testfiles/tCVE-2021-37501_attr_decode.h5 b/tools/testfiles/tCVE-2021-37501_attr_decode.h5 new file mode 100644 index 0000000000000000000000000000000000000000..331b05b59362a661b81364da3d7357312ad1e57c GIT binary patch literal 48544 zcmeFZ30#g{w=mwMNrRFmMIwjkR(7IpC9Bqu$U3BOE#tqJo+G zznW*Wrgh@CchUbnx>e4d1V_5)gg~ilzWzF|3^G&)+7p?GiTW&Jn&3olKK~Yz3mACf_VQ%T(ib~rLY!v4{sm$`M%yBYrGb2Smoos*wbx|r}xUAJ(3f#?&`0!h>D=c zix#>qcVD?!L_^TMpA!G`@6SE$OpA#8R16EcI{mXuPQg!YyKU@CW=kNVav!ps{w|BkLsW(J^>y5u<;qMXw2ZVZKRu?yf@+Z%hmww!KstXu)*`0e zMNEHmeJ6!_q_Y+Q^535SnEY64b-s9{>wQsy)c+j2@cS(6ce{2cK@Xiy`1j&Q1HU=&n*+Z&@S6j_Iq;hUzd7)m1HU=&n*+Z&@S6j_Iq-i42c}JN7$YjM zXb9~u?Ohvup&g*6%Rc*YMrU`0){L4i`;3q-O`{WE(Yq7sbOI6mKiMXK7U>&xBJ8^C z62@J0*26jz!upnWAIkNhK{I-PXFb~|&w|NpCS;8#1R(4N=aB`-og{MFMS zwE6$EHvV&u&|dhfe9DO|?eY`I3#9*V?VLaTBfSJo3hkOgyPpvLxu3%XPKK54D=dxw zQn<)-p|7P-yahG&_+Jh{=zr{TZU}(gA9SJ;of0XuGYX+lPP@eR-|Uco4=*kl+s}8D zbo%}=UJ(%;5jANMwQXI!=_VNcPrR6z*dJB@m+2qj^<#Bz=c|6~LH$W?)zPzXUpkdj*l5L$T~p!?dq;6W zI(^8=X_;8}?KytaUB&hDL*};=$bPn*>w6=F%NcwejKT_Vu1hAj!t^ebuQ%niTGR08 zz$skXoMt*cCFP)w{y?m>OfSz8ndM~*sr9BYc6U=1(Wma?I>*))!{x9%?rO!>3bVEt-5+HId%uW zw0_7L1n=giceHSQy{2%Aw>7QwyhqU((BtnLo|t>^6QTwUg&?IV)sAX_P&PjXXrv#*ctX-*{@{aD+TMQ$l37ab#C@4y=5h zf>)QHr~V6)EVmjD1f3ZT(-3utoq}~PWX@XO`%yQ-EV#K zpZ-bypRC|leWTMg3iYn>9ee7tq>&sj`w`duhdUu~hWqIPw=oo(RtT?`yuQODcEQYq#R#4sd z6g<#07T8Kd@yqENJYA*9de^=sdKZ>4=ZKAv+;buKEYK0=alNp=*m=B>myMfGyo900 zpP*EeGK^3u0EunMcp`ofoVxi5tcJdz_lm!R4*v*qoEJ;ApNnuMpWJEMr4KOS$5Zk& zScS#!Q6#(O^kQ4(Ge~o64b5`u%XMt9MWz03tUl`o@6G!r`Z;?tds1M;X52z7J5@~z zZ1*5$igTv!1KGD3$;EfbbzKE0cwGeh zjJvaOp}CN>IFpu3y(CBZwQ%Qp19-U%VSYCHsO@Hl*Q}~>O>P#Ztu$wmKN$DCuL>7o z){3e#mDzxzZ2qXL{bBFHK=9f90CkTqVB^m}r9TGSu%Z6DQOfT<x z`_1=oFS!mkOngDidfdjFZAw_vp}=}82BDbFV|W!fmfdnKg!r-us*~7*E#yBW)>U%EL4a(u~t3A z)BPv1jgj*3L1!xSIEOgPWf>=<+k5G6?*J=-+g%CtSa|lq$x~V7Q)C4QLe3E1S~%A 
z2`f}g*zD&!L4NUcc8V8@sgh>J{g!9KePhCvkADo63UBfC?d_bo%~5(&{VLuc0^Dd> z4K7}W!JzUU>`DIzu*k**B@f*sIhXmwLgWlIl_sFX#-6a)@Bu%uuNfSje2VfMfL$3P z!$K!5;0{H$6X)$QppkJ1%YyXjRU1qGa+k;OTxJYe_M!l`)Ya4IixS*Yul02M!)aXq zALSS^azA(MY7yiHzegi45i}X`7U%h%z+FYN=&VW!Y#eCE7PQZT1BO1}JZTDq5Ap%= z8P8!|qY^hNpTgHUyUFAY^^my8ftyxT35ygSfmgC2Hn_In;I~sCWn(fHcC&-T;W4mC zL>(swOOOi#RN1UI(=p%IiG0C>67)Q7#=VhQ&GCo^YggL=*|!&!G17HVmJ< zAHNwdWFPMK;ygacaD7s*fYh$uY|%n3?vT+*tO%>XtWrBrxV4vd-=xKL>{rBRnj@U4%r z=bJse?K&#>tm89oNM_)ZKLXB)j%1l(VHo##1dHaYa&Ff`!04qjbd0xQ)rA&Z_rzp) zD7zgG`pa?-^&)J&)d>i$KMcFiUPp@&irnx`0i4Mm8??*l!%?!*)0?dA5NvQTkyzwmnt6Jo_ zYG!~dFO1<%&NL^(52#{eV@&S zBL9%S>lX#PCQjs5|FGwzE5q4)CI`GDrrgmnAmiANI5-a4A=JvA75H%PDt zVhqMSplxb>QJOy~*2#u{-cYbP-G)rA;XR}XuQ z$1&@hzC43V$WM}TC-sJ2@NHiLmW)|~j~<$EN-@=Vr*$u0R0r1dNYP4m`8r$|x{)=$ zD1@AWL0DuR3Cim^QWv7e`EHWurf#x_F@?eWlO2+r{@lyd<=j=8UgeCpckhM?a%-W( zA`H)+kmSfTIc`y}-i(cU2NQ}WaboSm&{p9q`qj>4ZvKvN=d?DlY>C7=m0+^u#~}zT z$;bAr4EnOU0G^46a9>l+xPv!V0Kf5vhXhH?QNj_L0mp!Ev+IRDWw95|pCmt3XE^^mIqeUAa0ef&r= z_JlNN9NV8|$GW2FZHHphSF+r7xhCF}y~ZqWz7vG(>CJ`B&}DO-?n34CI~D`?rt{6! 
z;>k?uYjl-L6tPyU<(<;5g`3j)Oe&K{+xmo}?a(6F4 zo4)a_zXX6{Z+$Mp+*r_`AuMEvGS|l|3bx;RZqaST8o0NA81r^@1kGpffm`woueaRd zt@Sm6r&kX_^hz0S!eUX*Xn;KHo-!LYT3Ai$k-KR88A2bLu+=R~S!}?3)DlaDJ9$xfe!wAYxS-FarOSiFoK_UQ9uH?XJVLPS z1(iFGlE~MROxoR@hAmpoJ)Ha$&n&o)J4`=exzlGzcz>6OT=VCg3_fFpvmET7-j6e- za@?DX*ICeegyIm3QaVgLa_uYDKoBMUK<`eilaDPlTfj zAHWMsL*TvAV3Vpn@x~5&)=}7>D$P|O>%+6ihfQHHW9(JfZ8(`FwmgK#{wCa`gzdys z)_~2u+78!zH{sm?CC*5+8T6f}v03eRaL}q&qGfK63yx}{g>(##v+U1q`l@ql`@ZHM ztCfQW+g_acn~iWcU0OT1+YWA`#sj49f?c2gSxJ zw0k)PDiaLYj<8IunY@ie49Ktc-Pw*!&*{94 zZ}F@&!#xjkU^SV|o_kHD!5PCj?=#nMNTdYmrZpQLzBU7kQMGvaP#bAkSkf*rpn1K)bIg^vKz6o1k z8unkOkLF_c$c?RTsH&leE&-v?bHf~PyA?pXY1hyy^%Pd^x&=>fOGgQ*Y%sKH;YGS3 z?6Mln4SJS`(?aec(do~f32mc0)9r|3Q4$+cXo#O25LWCBc>p_gw|kms+T>^Cc-xKFKS2W{)}bugHvJ zk}Q9>HLG8r&T?J6vB6i|YPWA4F@N@zj?vO%CpH}<;U#r&)gM5$?L17YGQyd@itOST z73^pA1!o>r!S1WHn9RKmAe%7{-#i-)_vCugGmXh~%re0Eyh~7EJ`xumP{-q+hH__8 zm$DJ56MimxiClJV3sE$;{1?;l8ykNkpS{c>SR zr77L@+aU}MTo`nr1)riNIO$eAzE$G96i)JL%B zze6$Y?tOTWGl*|^;}CR6-=@mzx3gF4lVM`yDf%j?5PInE=3E-L!_)kFa_^-jSo>Xp zGdgbYFgq8%hwGrt>B+d)FBY!TZcO`HCQYIOD-IP z#k`SR)9b?^uKFGdhLsbg@3&|kR}NcNuVCkQNZ?^n$~z*m%uJ>u8hJ@8QTd28sJd0) z`>FAGU0#CdXZ2!Q4;8tV;cEo@t$x@y9q{1~S2#c99G=R4L&}t9!M4SRS*zg-SaSav z*_N)x4u=+jLdIgyiK67$_j5E}z7Ync?}y^02=$qMuk_S{8Zs=Sp8>!Rn(1Y`qJggyqe|=|^O_ zljhd&QL-6Q#AUfSaSk1#f5Q zUjpwc)}miS47PpLWHM3G+?vE&`0V=`qF{Qm*l=|sH23*HD~gY!zj6gGX*TB4CX^P7 zQw1=1HI_LkOyU|mB-o(I*)V;01TiTHfcv*YvHI=+&a1f!S~tvNs<%RUM(x!QZD+xh zwTE(w9eTX8(~MZTRwGi|8BkA4f;peJ;@iO8fFCue;xkZf zxmF3 z&x#;=f&@2AJ_9c%cEc(yY1-c=1DX$2!!6k#M0)KuSY*|V)*+=3c>OYz&A7`OSnXB( zvB()#uPDKBjYjN_Twk0V^pu_oZbp%7*LkDIgoBPp7`YUggw@+mK$Y@Hl-$$8H(OLs z!+ip9+S6KaG5LzENsD;(UXC#4vp39FQ|4ZdGs4#zav(RfzlxdVON0xw%gyqNu#v5fuRRMbnPX` zsfxzT6%Q%l@##N`QxN(=gz%sDcNBKTDH#97RQR|4j{n&>g}>*IweE5_Z|~~;KZ|c!h@5!-9*SS+6 zJN~QXSST02o(ugyD;IzNKFx;Nog)|OH6b5J?K-~T)DMrwk#gz>k+bD~1{*M9zc#L-_qd&B-5bM(I(Z>0an2t_-OZ;DLn 
zYEXE8-LLt7eto@aZYL`nT>s3{zg=H1{WJQ%J^lCMjqGN1HodeKmP|UWRtfA7Q8eyW@>?y5_A=kNrB2h5h&q|H^@18BAFEA@#hQZOh=%MN%SUTPZme=gVt!B303{V>-~O)%b*jzCNa&)CM7K?hOHGJF?M-KB z*wfSKqL2h%JVof!{0T6p=sHc2xd-Q(T!Ho6#(TayvDj>RCPuXOqLO!Av0XVCBg)gz z)>+leL%$r$_3L2JvwGq_s0JVL!eP|iCaRM33JXMJ(a^{mh*uR@sf3G}10Q?tg_xIf;Ky1AOm(tCnOV6!(*^o4J!>+AUk^dCQ#Y~h_+=W< z;Y92%OEBNWWPDN!yiYQ((M4?|9MbHd`6*jTR@povzq$i`c0`l2Xa!ky!@#fk8Q8Y_ zL&>v8q<6JH7L3`1y)5!EF~|<5cqriw_w%l`-OB0Dt2XQ`omx74%Dof=+R7 z@bEo_^+~c=6SNz2^3%v2FL`vT9D`9A_bvBJ@4Na8*EHgSqCMf08JsAe047DZ=C#K|a_cr&__+kF8c*<--;)EoglW*^>;W;?<8XRN3Es<8hUh{q+;AWm3=^|a zL0ShtXhq}PF@ku8q=iIbP$}%wiRST{0v9}18r@7L!Z^+Q__f&=l6Q(QH*F`7pRylk zWY$8*os;Oqdk$N-Uxh6Oil8uQBXyBF3Ulg|a9vMTv@!Yys@Ah1Z*?ud^-?_P?iPvX zJsn_nV1?jZ^&lAPH5FRJ??Kd)yYOJkal(y~PO z;y%GiA2m36N1IJuIfjKF83SUuEoA1rb}~NlBx*^o0uoX{3b(K27o2?y_AXm7D0?0= z=pn-v_qu`(IWsvIsr%q_c``1ad>&(a*x^>6EqLR=cl_FdkWuvbHxqf1=!vujg{jPpm4k%Ne;BZgHMj)9*NPM#zYzB z5+cUkk4h!u7mKm?ONwaR(xE){Ys2sWw+|feDWj^RJ>`8i!F0(YqBO#es?4&(sp6q% zH)00Nw5%asr#C=?pB`0xQAkT>eIsS2Vq{*$R&EXdk7m{Yn@i-+Q6Xn(_F)LGvOXS`_QO=9OT{=xuEQ=foA z{o63)at_oEv_dlt4aS!#B8EEMSytV2Zj-|?oO_f{dxX`%Ch^6%FgP4!&TYeqB~~md z$N@GF8i5CQ>2o=IL)j_Th|Zt7v(#gadQ&Fz`I_I=lRHhYJ%+s{H_yXbIkM8hCf(li71 z3?Bm_*FK=cv!QUdUmq~4dB&e3y#S7H$bi$AZDHcu{g^1RkZAPWjwwU0py#bnn67f3 zPNNOvcJEwX=%^zkY=RR!_;eVj=p+!Ohk^LOJ&Pz!x&$ANhEo4}Cwy3WmHsH&3?o}U z@#oDsPx;ZOFs7=SCTK`7^>j_}pOwhl`)LK7RZ}1hA4OpL**U!2n>=(EaRCG2DzyaYESSpyi z`p1Ld6M928CduiH&&IH{TI#Pf3hxG;rjG>oKWv(I77M$}b3>YLU_jDSypVSoRr7~I z)vH*(?Uh~BW7|A>b%qIxzP%XteVxzMqG`m5=q4jYW~e+_%B=5K> zCG(29p8@-nO5*Y|noRc}j;0sWQSnVCX0h-1J)k#tx94uCGn2$o-o|kIUJ%R{#Dg@c zRMU*=cHT+W!9$;s8>?p*HoEQLoyDzaIA#*Gu9oB-vVH{%51Bx# z({ZY>!2=z4Y0$jd{n)r78>iUu;7a;ldi&&H+ygrxZq*s;nHWoZZE?fC5+VZM^J;SJ zUJy2j-sO#POu**r$tZHp5FeDeL(`WJa4b=oZm-#a%Q^;FUQ{S2J{n~>$YTi%i<4lb zCN1QZiazsH(k{B`zm0_NItkaU?qS;WHjuB9r^j^;kk3t7ML6kDiDl9|5iV%dJQO!70=2?adU1m}z6%ILULOtiNV&Tp?(!oBnN`EK z2UAJ6uV+wAZyMgHk0!G20etY;1Lm`O;e+N}&>MJ|?wOR!7th^93jNe@l-pG3P+h@S 
zaU6+<3KLLsuL+kOAH+XgaFj$Fhu{}k4r1M9@PLU06;q!LSs|6M8pGYvrC8<9bgGe~cZKy*-t2X>-I2$&~y+resfL>94 zMEiIQcScjBNm=R>Az7{Q)C2 zQt|1h1t9k^9@e)O&^0$sQ=56R+@N|nRvzGqg|8&><3L52o;r@5?%Tv4SlJDN=Fh|q z>1DXdrwO8zZedY?CRz_Yjm3-Zkd4w7G;c^bJ(O|+w71;BFYYPcXF*!FH>Qu-j|UUV$k@?G!QdqagU8k5hOeSyz`DwjWa8Q#0J6RzhP;=G;{@cCn1_QrNQq)17@x$9Y&@3IHKaq-YQ zwVk%rlt9pW6^64I#D_{UW3kQHw0;Kk+^)SEyi^%U()Uk~&xo?X5AAcoFyvM_b znkCRvl*6Ns>L6m^L!OO!Tb6&4uyw$B!?hTfkC$wRjJ18y|-X zch=zJp&_{H#ZJ(3w}HuyJK#;}EtKyY4%5jD@N)^oXM@+#BTwSN;Pgi_rY!<5`Bjs` zj2STIA;Q3kP4wu;@nEkq1Pue4kvLUIvpm|;k@1fBnlzcvhB_uz_<_*Vi*x^+0kA6ut8Vw+= zERLQye-PWwYH&p%a`-joJF0HWCxv#wu<3dNzi6)>_PMzh%LOq=HF3J|JXi-N6^P>H z`!1Msb3N?Hl4HXT*5C_;4M4q*p=EJ4_9_{JKc*O4j%q)NuWwol`jAvCqBjtn#bse= zT^4nGu#F#;VhuJ%-Vl`02woH9iF5M}RFob>+*fr#6WYQ1CEdAWA`fV^eo-+taqMj% za2xH#;hK>j8xbMl|bnINaa&fP|P&<2_z+4bF-UC25Oec-x;o;gyf8Kwm+upoiiRn41*@ ziI1nl`W8=kSRg_}3zK*?IVU0I$rEI1*>p-z6}I2zESi1RzMo4kLDBq8}Gq(ix@X5XgIUQhbSp`h4hJEA$7*nU;X{&^?Szev;VeJffsRl6fgSv8e7Q!Cv^{UemM?wqY>!0v zYII4)@fIB`1wP&`b$w@b&G7i8hx|FMB)xyJtH}IP2h# zi_gJeUIx#={2-iFd`w$nv#@u-Vis7ki0obGPvpLovpcP`aLJ%2r0&*0{1EjVYh8Vc zpT3JA(Q_4HprbWLng_#+oMc#gs}eKcJjIC)Z=f-66O^@fgDGq%7O!i;CT$T;)X)Ti zul#^}kp{SGO9Q@(ybZ+?A0XRbjWzT!#Vb**7@Tv1T#nH}mDC52v{4GXKYvXM4UVFb z+HN}W;~{7>9|sc^L--Y2jzF%2HET_@VVj=XL%8F8tclx)?`Fzz8PV;?hMYj@J_=0y z-6=tw?kHT-><^~9w4hO81YO*51tkW@kWq?S478`fi&lLsI`|n^U*ceOr4~J47*A-4 zJ~zC-DzAC@F7S60oIS`)#>?{e>AXZ!+_X&x{hVg8Z{`C@n71WM4jRj?ESKdrEKFl- zGIY4`VNTRfq6$QJK1bcw1k6xTPOe)PC~2(&YI!P6dtz+|@zH_e5x z5-I^o>rR0FY;$t|hB#L_#sxkXt-}4j`s_im8to3vR{_}yG@w($4 zc4y>p*qHMK<0h0sK+!#zr5{3lM|uiuE25mZ@)I&NAsu#@-6uhOc_!`klpnV)8{+G4 zBX87tzCp}%?6vGNtvlMt&)aYiw8y-_=3Wx4M=ik_y+-5LY9lzXe>F@{?1>)AmAso5 zeQ}^{Hg%o4A2-D&fzpvON(|GAbK^(T0V(p_%qTwa)y{yZMjMPfKAiOPI!=sVsS3_$ z^kY2Ta%yK9iTz@ocx7oR=&o~;ZA+{MFa60ZGGi?}`oa%*FSc-YpN!e@>`6={We09) zc7i1N51=vb9?GBkPCrch1}mP#qvqf|+*09;Uo3uL#2ZWKr9p7p$eDuJy*hf=Z5xJr zWb<qGB7)54)cpF!_(?_v2|7v@;}tkRa;g{p9&5meBfTxZaD2!2K7OE@KnZH+_tv|SdZXMWZ9M#{VMPeol 
z3VDiGW_rVkgz+d_f19qgW_Wksa>$K2hA%YYp?h8cTYpW1+ji{=)Qw1mgx7+&^0)vC ziTx%dRihuQww;8F65Bv;<}&zETaEM6^5~6;+2FKEl1nRAfc+zO^QUdIgp+m^yvkB3 z`rwW-JT9{YU4u8&OAr_D5cC#K8W_Xsjx?AjI-B=aDwODMO$WmOBb={SLM+3q_&bi~ z!=-DY+))>IA~oeLB-d1s+0jdI&8vA9dWOr0ugDoZw@eHAm4}i;^Kz)!t|k~fDui|O z*a_a@<5~9dDeP2kFXkMwg(H>UIPKo+aKkNuEw$w?Ry7@hey0rS!EYY0VsbY9;ra>Q zmD!;cdf}_?WneTb3|j0OVCA@X)LeHT=zLlY-Cv~9hUdLWT2TT@EqM$sAC8gK*`fI0 z_%+%v`U86GjfMLzBhc^B9P(x4Fm9Ta67Pog4rm@w3^VF37Y$bJ%lh;=3E@Zju(sM6 zWcZMqL_}MQ)zo(5KE`IjqI-Msb?J3j-yjO({2M__bvH^z3f6yOTBIg;4P+)h8S5w#ahEeSYvA7Y>B?_U0feIOE!{c?>um8x8}_Q}D%&mw0m4 zU`$vSi2FB8qlWA4;mXHVkZU`NBxzE5qSsq&l`5ca>K>#I9DuF&+UUDwfmCmP0bXcz zC#w^;@NO#}g;9dJt9+q@s{5YjU6}n5v@;D4)egd!^UE1SqzZ* z`n;fgfoyEGp}`Bo@qvXMoU2U0^`~;_oJEfKbu6D5#}2T3eJcX}225ezMm@M5>FHoL zVJtUoZy;XUp~J+BD+Ky!9$7Q|BtE^d0o%UZ!@F}-U{!4)Uq$mP^2%F@(zg}hU-g5Q zZ;uAY*A4XT$QsN%6vcl!*b#b|=`G{X_UsXu_i`)m+#qo_In{`6-d;<~Z>)p|QZjV5f(Dqm>jGTShK%ny znCDS|-__*toBs@)_C*3$c?$01tXKf;OquoK{K*=Xd%W&ub!5hs)3E;X5Dc#7L7y*b zxM{ID==$}81KW)XuU9;zA>ZQYl*dwJ-@=!elU$7cS_N37Je>4WdP*t`O{hz394J=l zFqO&4;TrU}k}URX;s+^(RS`!*b4x)V2iw4hZceIemZ z7Ur6Jpqx%r@mSa0v~RZ~)PAP{3r)zx=OU+R%%umQXA;c6zu$oDim4^H7S&^b$8{Pk zu-o{be@W}_Ohbp-{ zPwftRO_S+Z-8aPkb{oDtmcp~Do`hL*YAhezxs4VU;yf7%WtLfHgl5rUSgw1JhSW1u zyO4@4lC~sP@f?0$#OF;EY?E#6H3ethv~j{758j0pr!Zii87vNR=EV%03AqbC(C3ARW-fkPvt zV8xFw=wls?{;$^K8Zif$IAo8R{Pe~2z|=NK8=He^Cmq1`(Q8x;8DU5w_hSc0& z06Vkn@$K00V8!D&XV+}V*!Gw_%f7(N9cTixD|*8k>nw09ECj<czFBx2U=WlB`r5MW13O~8Jl{Ow=+QlmK|;4WxrR# zy(#h-sV$HJ>pn0(e<+useTazLq`AEQK>YF)k+_D-O;ERhP%8R1z&%OzvV=L=Exjni6=Sd62L9 zu^#U#*}?MN$yD6)AxWMfj+MztpdBBESbi1{)aTJbJOl1n){z9aQaV7Y8tf_( zp`mFF9FfU~_vtUOmqIOFfAb`L7kU^htTV8EM>_PLcMpRCwt<+~L68se$Fg(1saZd1 z?!c+@G&??%>fXKwA0n#pqf9y0^7i7YC09VTbRkZ=)QU6a*wT?N_OJueBe>dEm7ugL z9gjp-5)ZfWZ2sWI?1KON+LlPht@I;#2Nsjs842j8 zQB3QL5E{;%N34o~T7jLkHewepp5_d8R;yuXavzkPgtS1p4@=hl4n28+5c(~Grq~rw zrPq|+aY@0#k@M*u=SBFaA%}cg>5P{1hOtLx!5DXaD{D{}<aK*QC zv}og1>@B7M#&?vc$^du%p6~}ScHJP*(B4Y}J9x0-%neM}4I*0h1?Y7n5+WxRf|Ouy 
zJ*(duXwcF?vA2{Yy_4l8zHFo|74vCa!bLE!^~2stZ73&ikEZXIldRM-G>}x|Ru7hB zKHs*$gR%}fa@1~Sl$eQcro}O_frmNYkHPG?$SyMbt0+lz7o0H{*tK?rzruu#2O-lz z0WHcL!Fyi}-};gQy|E(|lhsauSf)8#^<4qeHwKZ7p(FTO`N5cM=#PhEZi0tk|6=3U zhNJUu5xxEzT+L-!99>gN=h`L1vL!#zYIOm5c)l;U*G8QUw`#-$wX>i+B^+(?8sW3F z4n)3^WT)%wvERFStXFTs^|37=>g@;N+lge<+kFr`hZmBB?_=PsmM%n0PsbIDuk#90 zqi}kqJ}wiJ!0jgrU{psvh?>RXpcjkqyTL_jY!Co%Ri&U??mohq+`@_353t~K2YD0v z+H!^4F1YSA29?z+Fv9Qx1ip}E+qS<0gOA(c#g{j9)t&vkmsz5mhQ}P@_0}I<6bU}5 zzC%g|>>##d1Nh5ony?}-9tO;C#M8U;>8iAqsD9ZWjdzH$;MzLSk1c`*#ZzEBsT`uK z%1HXtJy=$F1nl0a(dqN9VcioMHYRQ(n5p_ukI_*ucg+bHbS|BGI>plwL*zN5smH*q zh2veU(qJm73)!pHy;$10L8R=C0b4Wko_R)K0Fc&&c=>t>JQ8<;xQS1h6gvVV3~M9jCDhfmjjp|vOk9)N=mCjTIMHIsGFz3{bj`=$TbGBUW*xwi`4Po& zI%{!Sc>wJ;pb^Tnbg}2zb`-B^=l6NA5(;mxXr%x)=2~20>k;A1rOzO`KwkFjA`!lsl?X zsvlRJp&E!S1Fys72^Zm1gCCx5K0zSHi(KOCz!EYWn_Pq)l?*A^z>7So&XwD2WQ z?QJn`N%X~yy`oU9cR4IzJ5c{oA;?)iAR6Bn0(U46m*?eTfn+lse6yead2<-9tB($dVqO*^9k+Ay@z_#9iH=r~Qzolmje4^^?$>dw8E%5VS zZHk4_uGY|AcN`9nibS=ssVF;r5NE%Q5VzQBa9h|Pdnvu62M#Nch{PD;l<3c^KG6s3 z3@l*6#3RM)7tX@l(NgR{fC^VL`zzKey@Ge?Eo9TxKunJx19M$RFw>QiFjdxr7Ef#^ zJ7t&hAMc8V+hHo~aKDXk!uk;`>m$l!Ko+D&6yw12uSs5W0+E+`Lqm68qut&a;AEvx z)Cu&*BQc(6IxG)HE>pwHZvw!3^9%ydTfj#43^-T~U_Fb(S)%GI;%Jsb&J9&$6`v&p z-_)5*Lvk!zjM;ccFB?VUEV(TU?!$10X_zN#kAo-j@od})P%)9m#>DnH+VYvaXj= zDyJ3aS??pUeXgU{Cm9Ut`;zB%{RB+ybC}#+AHj?J5<(ux6;p?7efpaJ7*vys_^@G# zU_FvdkMqSrRsJzn?)VB@1#40l*9dZZP63e+tihete$a;f`XJ??jZYh=;IYN-VDMoz zte5aWhlZXw>GEmPTcZq$WFpDU6>sUjZzU*xYd4vhdKB7u{`@0fPl8onf2fY14c>*j z`P#ED;M&~XsOuAlL49&DS|%J^4;MhdtUNH@QVxl0^stB8BIa9Zf-KpVwIAM8tl!p1 zD`x0$`C&Qe5mAn9QqQ5EyEsdakE6TyzC=Z>^%$bZW8$J!c74Ddx|>!S5`&ZJ&!yS|O;JkW8K#ouoUBAd(C!x5E%1@IP9=;p6rW3sirOxY0-q zMm9LIwf5q?B}F>SB1DgsZd2tnmQUgm^A_Q8Jwuu%HisUI?#_%oPNQ>#Cpl@p3?myo ziY>35r56rNL5r$=FvD3L8zQ6eO;t3%xJi#{XI&+YwX-4jxINnIzoT3O$t{_$)qNyx%$4M|{a z9zu?-x`$eVJ8I&imf-7WN%Ta$BBzpE2{%>E*>%C4KHo+?1vME0xerFtnY|o=2<$YE zXLry>?R{iIM-1flpNJn8$YAIOZ;*eu3|;S!#FX0O7Pnk?BB 
z6;xESCk_t-=$`+tz4r==Dtfyq@nhzLlM)cZMr0TdJzGbSVmA|fV0QT)!-`~ELxrl!8Csj2zqVy^n4FV3#+KE2Og z&u^`DE(k@IoWg&*O|e^pkmWU{Ah|^!AD3@Hjf(qxMMqccH#?7hgUUGjvnW2fm%#aE zA7bAD1?nO228nJmzNniHek+5)W1c_Ij2LRIz6RfRB-5IWo&4c*n?bbg0n9M+qSCL< z!^L8Kh@2wEcd|_<)l%Py-c~D+-tJBk$q+A7JP)S#J|IVPV`%(GdGgP=lccm<;APyO zP~GBhLi4|L)1IqQRW?i@6spgL0v`>cZ(k07bDp3;@ozZyhmf;n+%(@P#2~-zro8j)4+2fLpGZ_(lZC1LxG1A6lr}S zTX#H$MfZ~66i!BA^yU1vFUWA90kd-A2mYSpFV0V%$_7}Ok^aNWajTgPT{HO^ zVGnhn?WvEbAvhZfuP?%B$zQ;Br3v^xG$1!EH^6|74Aa27je?il*@4dwuqi@_L@s#; zQYW{-%ij}N!!arNC-{rj_9}zylg)4~Lj>g{O<8tA6#a5qkl9iD1ns0%n7Cs-^m0rx zD)udA*6l8UAM3-RvgH%~lYNOk6|^E6&mN<$$ubPx(?UMps)K)%CzG3*4)8`R4!5aF z!no7oZ0EWzPKUZf(pS6y+3am(woN=<(9%GMt6Yu@mk$*YGKW1Iwu72TD}mTOGjidC z3>%Dbct80YJ>ne#)kD+qeMLR)vc%aEIpo_`fc$CnP~Sf4mOY53_1s=5dS^Fcxn7J zyn9|B0?9YrEm?vi94XUy-vw7B*1_5N85lL!4b7&V$GhvNFh>e@lWo#-aPRCmD3QEM zJ_N6$e`=HP&>aq)(5@h&8W}b7o7~$NUeWuG9`giSf7!WmN zf}ERhw|G2`s_C)Hz{_x$X;sS z#m$~qNU*_1$+Yp|WY(%a47Sl%kg~6t@20Vy&8{5678!Yl`K!ph`>D@&zIYyew<@yv z({iboRW1KX*gR5yAQW4B1tEG)IT-!C1l$%6{Qo|B@7UStCtevSeDl4v3mLE3w_ z!-}9GVyW)JS2>pi^Cf&p@&Ykja{U-}Fc)Do6)MrD=Pj1S{^HIhZ=m+62UZ2N!S!jU zk$0^NKFE(}?=OjlC$CEIQSnXuvAf#jLD4O0QWF8Ud7Y%faxnzO$}v)Rbg{~67dC!U zBI9x@`GMphR9IG*B4d2WA zAc!nY1?NZE7;&Z)Tz`F{U0)5MNzRt8@6N&{j;ZjocNM+0@Gg!V)xdgA8|XOYiFt*; zcqe!f)OJHS%6MNVPtH}+NNXjWdT9dlJfQ$5q@STRtO?FE`GEGHCD?I)#27#GN)qXo z1skUbnf@(K!K{ECxC`FH9a4zu2a(^Dj}miULD;gh{22cQq;QcV z-kk2m&UF{SweC^$Z=NDEuX!pvZg7Y^2#$xznSp4S;9A{Rx0a{vI~&&jnT3Wo6Yx-8 z5#7?ILDO?QVBV!1+~;>0WuNHds`gW`{;52i8%xL7E!h;>2YHKjc;V^g383IILhq^8 zV|u74Q`Y*$boO~4lI3H{Mkl9G+sJru-uDq(tp*_IZ$1QAXkq(F0e<9;0+8AN3%!@$ zAm&fnP`5Z4N@tb8mTYd`l#vR#Lt!}kTL!$gnuZ>e){-JV3aHnjmYmp{jE)r}yi>4ewMy@HO`M(Wml8Ip@lXqcBj zfBv+5=-yS2gG_yw*h@StFJ`c_$NbT*_~L zlkg5y4iLcHYf{*B&XSF;+-{oC!$ZHo`Bc$q1+zVE5Wn55aOO&M1H;kt+Uslym=f^e9Q3~Hfb2rCQUz&WnzqL3xL z12YRpYLsNnd?|PcMn{_AFu#*`B4i!AWT_cWh#5rF`WSS%SxiE1he1}X0Q=%i08M+- zM^euHg>gZ5$u(trvZ_u1>PD_$&2=9<{W>2R>oh2(e<0Z-4nnL>X`J-l=D2k}&k;IXDkp4NB7{p(|{KC7Yi4tZYU7=4b 
zour|$dlMdeZ~+YbpTODWugH?BGpKm3iX?<20V7^P!+Oootg#l3CF{e&T_zdwj~i;-tbdOJb% z%6=TQDtywL!DpQz!75xihr#;RZ`yl55sEGfgMiOjR5ee5S&^K^b^8-pzh4v$mj5A+;_IP*QxwkO&ijvF zMS}47K=9!5RQqdgk^~F_`4Cy$7p0C=bRn{t1b+ z^iefGptGJVf7Ofm5#{*vdlQ3uD6#;6tVX>ime}vi)S)PY-J8 zq)mWbl4r0%KZ(|=7_lxE*!sNXXK&%Zn!Rx2uRXczAPTR(%wV)P+kr~+ zELKj)0?#z+v1W7o=)x73cunIX$nf_J=+k0O89if30$==((U7~QLBnX*f!@3 z5l_F1#cK|eJRJ#;TgUBth1MVxevdpJcL9tFcH-dsP1|nn~VK_c0scD zKQf^ImoD-&z{*NXe5@mf+HG5biMT_or+bkHI@ciZqc)7Hmt#Rk4aBZ|j~(msd8@7( zW6+``I(c3Kt+{=V?@!M1UtF<;6`=^3Kjy=cu2_7?7sG~iO03#xmZWW;41GfHP^ zfz9qym}mJ09@kFCYEOGmD^}t)CK;iB_Xvnd)Y8?_c67VAG3YkiV*87XYX5D>zi@OW zuIkr8<(NmX#Pk&GIN$+M=I79{W)yq4j91wfSvI6=1s#l?fPV&G!O0a-^#147lxYv7 zQxAWDs{G}2)cPt8+)sg$$@aK*?jfWuoR0TAkL-33WIfkOGg-6$LXG!0R&()EeD26a zX*G*7y2nQ#&h-?1|7j*BP7J}G%0?0;ITf!xoq~2(exOyQH;UNh;Tyk8yjd$+;rp;9 z^v!Yu5-dY~NhdvAMCgKPFL+8`6IdtnFlb$>fOneD5K(VI=ExaUw2m&IS7ocH7lfhZ zF@OV`E#R>4S!`MN-Spwz8XlMkvJ>rWiAT*J+`p%v)au^C>3AJ=#)`;tCrRj(R7Qu- zRb;8+Cp0u}#*i7c;HqVcdTm*F%j+Bz%Zagz?w^H#buGARmpJ^mu?v4aXu-v0`b1Pg z4eQ%YsLT#Q_MMF%t{k|?|I#6cQ~KVL_umX*-rp=t3J3PnBRur672J}QjBOz5YO&BN4_$Q82p4qlY+rz$!T=m`U|(@ttMJi1eso+Xma+bIZS&L%zG@8iB&NL(7fV0 zPA==9&9@pbiRTX8IVBjrSci4sAEN#T8z4351h$^IhR>ScLBt&qs&n`NOs~HMvU7$| z%vhJUZa1W=3BJUWW7!{$>m%7h&G<=n7q#^cCdXVXz(VvPDG-!~dFqknDPMgI6Qv4Ss`itlPb1_;W0d_$L)YPT76%VDFo5Zz)FI zO$(@8(hTg4U5Ra*y~$P27`U6p`2_3jd9$C_;Y`lUAho`gsBjw8&qZ8rphFW`H|$D? 
zI>J{%@|jDoK_#PNB2F|}drdB(mX_;#EKbM_yXeY`Ihob7J&+r__;&Q>W#c2OYh zeH~6@UWTH1ksQptG>N&RyN3Oo(*b)r{y~6h2>ZGB5^O5hMO%*y-qeA$yjuALZV#0} zv&?D~o?`{}?+j?cKq_YP4Qb4g-?(^o8vL-Bgn_wI%%1nYq%QRid8wfVon__t%vqWB zFZ~XCF6~DNu{zE-mj{;D-{2npI(Ysp2bD4;*^l0xP$n^rS?1me^->F9$aDs3HO62_ zbqH!o8ZdX}VTj3Uj%b&wI@5R`^lcLIw zYnZb~g{^yH08b2~KsZ5+b?L1H7uCI(?Hz&UJEq}QQcG17%V63A9%hJs1TtqDx_K4g zk#EvCeib!cQ2Yiq^_R?bso zG?p}BCA*9~S*ygP*%C5+UJg!L-%L{mHsXitx;XuyJ5dzlG(g4qFt}5Jn+4S2<@?RV zBXkkGd^>_8hQIk$<~v|{tQsyaQDm-cxlNNfFK?{bA2_{DoONxR3xA%<}bjM%`#+bM8TYcGr)=X@KkQ`m%X3#^;4g8nt%$R?P(;oZ4wSkneU=Jt(u z=uxLg1tcAzKJ^tGwmJl>TM~)pQAvnd9tZN?4v?7qfc|`EfO4&}Y?XH4#)YghE#2C&ao;Q9V5mu6Q1A1ajE+;>KhLmr#P{h zJMTlzyK7MRTAa1O14KBZl9s-0rV2HyAz=;+lxK+g(oRqsd4diGdE=YmBh=|*HXeD; zhyK++>3r0Iq3X-v?!Oc1wW%OG9?_{I9~;!Hi00&Y;`_x0le*kciJJ>WcrGU^TJP{m zHyOgOl<)Y_@D>!-pQ2e3ip&9#hgjKF0#0u7px^HVUb_r%Y}H3x!e#w?bl;~}c(E9J zT@}QRy@AcAM~UY09+(z21!wJF0h_O!g6L2O(m&-YNQqXno3ankQGu`Iv+H=6p*IH& z0t&&xA_i`L--xOuhWM%X2vPpN2`{OZo3<)?R;fiI| zlh}uKFX6B)$1-J?@p)?%*y>qk%=Jm zMYs7AHpH=Ct%HCU6aqV^7UJ5v6;L&CKUrselO)Y5gIQNvu`zZV>IlxFVP^vAf+OeY zzJz!vYj?wuq$g;+aywe)8>7+M_ZWHb9ldfj8TZUS$9L}OA)fvO&n9xL#@u3HB)Tx? 
zyf2PP{h%2|@i_i!2FkYvQKileylW-Z)q*q$g-;|A!`-g1yrLYm4m}|W*H6P<-S_-k z=^gmc7Kq0}1$zHiHkb!JvJpm-nf6HQLTSS|lNsCvSkofa>JOqU*1=^4bd7YW#-upe}OE~a+7 zZ=&;~gSh8yHxyi(Mn7A>gp0wmuwdP58u_FG-WGS_-9p3{-m$!--JT>nyc%p@IKfZ# zLb!8w0~`sS%)~p};`;ctd^(bXMNbr=c4ispiSq|*pH-x2U^?Wkwd1v)yGm0VOR+mr z0!3}I@Op(L3@qwLhYj4^ZFMVp?$Kw+%qngVNhM=;3-OR&C-#WTvWah}(tzW)aYDWb z`lf1QqQ@!tG;SBJsJA9+-%Uw}y*f>kS%87R7vr<^0jyh+4gY@3Kq2lqp!QZX^>qEr z8@6nuU;asRS;05aJ3s~cUliiPf-*dCFc#F>b|SBT5LC!TSn)^&`>$RH^R@I}I#Zy$a2;tZ0tKtVmNhmY?swBM%l0_%MlT`Q zOC#0KrJ+A5iAlcBGymp@Z@%55Q4{wp)VJ3_X)8@4H1Zm#yEO2;qi~ItI4WyI@`VHr z;ig?g*aQb@-hT#Xffh zyf$?)G40w8@O&zI1PxLNTEj7$qRg8cR;2H-4oE-mf*Ggp(YX#AvHW8yjA}lkGTY+u zi_i@`Y3_@!TPBjAl=0A?(idgm=fpc` zT-ifH^lQqvaa7tph~$ihLrJRvuH;xtS#4>i+PDiZY6>%p z@8?y&jE#c`b~k?OUyKXB7^AYC0EuYN;peohp!}fA=ob?S|4!xOu0xzRTkRjLIw``q zMmyt@tnYA;b82oPn(V^9S&VJX16ak=;dHn$va~?~T^kuxc)A$#rsm*p|EcIz_!QOD zyYPOQH#X~aK(%rdbk0zMsN4id5aibWON5jNCgSicA+XemC2MXM(M5^j=xC+|;S-W@ zLAEr;n+mZ{oK)~>2bWi|^9-5&fw1`x8gccPL{^jaAv2 zgJI^?v|@%589gt+9BI0UTKh#<*cOTb|8Ag{jXdnMp2?Vb&1Z|K64N!$0J_`Caqp(* zc>bXn*hSlr2KE9tn@FO#pC}52-olRW$#60EH~Bf?G4Zx@M&83m^dc2zb}vw5qFXBH zU`YW_-9HUpmQG_&ZApQ3@00j97A?f(mqb{}M?$>J72D}zQ3a-{mV$`2q5~4cS8r z&(dqVKjDUb<3KxYAN47}N$xIiM0mJ|NCgUzH=#|`vONwG?blQ#e_;XQ7c z#?2vG*5P!?EBv^GDBL3@NCm52lViT}^h@gy?)e@~wjN`vmnCM>1qG>Cf6NGrz2kB3 z=@8V}JwklX`QY=qIn2IhIrd1?Zt!u9XD_M(31O?;c#CGUPxZUMt@ErPX_~0rSl6~JNA-Cn?;zCAuFOb z?hb_HHqrIfoz!fA56{xI*%e!2Oxwmk=gk=7dZrwz$Uuc8&tuOHR2$7e8k&w@e_aIZ zFGBu;lNi?!4qc}hLOhG;Qa4F#j#gk_dzC})0|sx@X+U(tC%UXa6O?9#qMgr5$eqwg zWP29D>9rThEstUt9}o!rS+eX)L5d4LP6xH`+u)j^BzubHVmQZyzB8XeVeM(Wv9%p% zsHEb!jmJQAEDYj)|G?8L^ck?4&C2?iFWNiW$Lo{ufkDL;=yGN|D3$u7=OY`AVLyl> zmoDS4;g@8pl_BVs74pNE+^3%#qhQtoHMXp5kSvt7BqQb5P?zTk?g4u+ZTo9-DQ+ng zsss_kvUKA2MI5%4?!qUby6E;!2e(X508{Hb*xexx(o%jfN7WN2|Ez{a`{NK16pW{L z+oEnlHnmiZqoogS(0-}!w6FLfWXe{7!ZZn{?8Z1|YnvLIrc_b={nZ$MeZqGdyG0B) z{eDkxZ3yIx-M6wJ59@!nk_YQ1L8M(b zPu?{N+eaPfJnb>EFYN%LrXipCQ%WQsr%|hWk-T|FSCGP4O&sqp!K{=ug}8_6pm#P2 
zKe;I~&KC#KyiO0zLgulXKX;Kidd;}q?l9(d6~P~KEmD$kk1wU#jYm>bX~kG79+)S{ zY>)cFt5R*mMgEz*lcWNE9FnE!KQEdp3vYlq9Cy5*jpKAmeRRrSE zFC#xtS1(=k?Tg13-8mpTa0U()rb1Uw0_Tm{3h(};(*cFuSYdSmwKt^lPDr1IjxsCQ z=~so@8b@iH>%A z_~G!2?63?0^$<&JEBk<>PZVI;s$X=W)Mv^|oWX2etpT_7;(3a%qoCbslv=foam>X8 zcK0V;V(P_`y+8Ky&7*tiyYUiC$b=g5F6Aj|-fpEa#tUf0k5sfD)TX-SvDg?;Lz;e^ zr4E+kSc?;Pz;C1#OY28T+WuI&cWev!`O*bu%pV|PPdR_Ra2qV-Sd<>E&E%f)5~%Jx zXS!m`7D)JTk8Iy}7<%`Y;kXPB66R7z)o#7TAeCOwdn1GQ3!}k%Rw?AZEyGoJb;+CT zhj`4U4R-c6fZKIfrgYvll>VlIS<;U%#q1?5_7#F;$!Ijx??+#b5t3eMg9RgxaFvxU zE}lCd`k9-s+(`$tWcavhngEll=mi(ttx!#G7bcyrhsBxPY%k0m-``Z^N$9y$;cQBuZK3L0j9i-4t5hc~;7J201^=mM{@EFSc^1?Nb$1xF& z+3+z~fY}`SmsDiAldrlPu-DrLOb_;xXYK?Tjwuf=*Mlp{-LT0>9VdOT2C3p2j944T zpBo+nS#c|IgJUcdo1H)#(;cv5xjN)N6@w#3r(lNL^NOgA#kivCJQg%Pgq<6U`SXhE z5pucyps(lQw&+~;bd?itcb^cu{GkY>zAU7V%vZBxw;K2h#gouB&=X7-9fgR~jiB%_ z9D~F3aLTqb-1p)HPn#T2o^OY9+gr(Im1_|1RZj1vh%!sB>fxt;agrbRg?{65in>Zv z=)5g&(fbMF%K$O9WLh@Y$@Yk92lD9TBTis&aX%5Wl*W-qJb=E{wE4nhY+UpKbKFCr zOht=%_AVCk^bV7n9n)D2llL^T;5JDuodo-Wc@Va=fvOrufbx#TrY~ix$clUFp#1L- z`W@pkP}s?+)gg!jtB=F2+5N=XCY)n-#xuv<`S__|F z^?$IC2*83hQJ5q35)uvTaZ=`2>XQ@?dyQtm>}8?6{>6zPZ|s9J4S&QF%@Vrg{f62m=I3Rcsuf6a<)37u2n@BIeT9TVN@a37CHQIEFlm*x4 z1fW`b1y_k@;GM!o-q8siylYEy$$2j$%)d7emfZKl)-fBBV(}RNbaPryh7a7PlDN56 z5OiP(x)kg1TPCYOiuX_MJgfr21L91W_at`KtZWdRRYpg4-++xnr(w-IZB(uML}g~I zz_566OmJ|d^21W>!J!h8VDg?MMIi=lSj^Rr6kTQO$t3qef1}a&ObYt@k$Z@@c%z_W_V8<|NT+Fd+w!%zq zxiRjZDMOqVPr{p{Z^)(VQJ7dMfzJx#L9^u*c^ntTD|xb=Iw(Ei=HT`yH_IG{T0Rlw z8}r%n!AG$AO9{S}7NXu3qiD|M_pMseLz?1NkUeij(R>NVFTFE{mYuIDe^L>??O9zd zrGFj6YajAMw<|C;o*ytP)d(!td_ePwY1Cvb*LT$Z41Ub$B8Ll#aAoO6yuR@m?p<~l zm7c0V(lr5ex&0YWPrr>f4n(77$`3eu_XFM6z5sfj&tWGWOXfQ%oWlv4gz7E&3tmtA z&}fnt$y7Ckx;g_Gw`x7_qqifY_O+8AXl4PM_RJ(cKEkZiq( znyRvHX*R6V>4{A9w{gtr8eLp@(uhs}{SEeYII`^*q?kE}PLWO%3pf`y9~_Mw@!+Zk z`cO_BPn?(pYkZ7x=H}T@=ROWk%evFba0}+TumpSH0mT666ryqQ0nQTfW`~4lG0)bR zun&D^(q+e!;r7~KDssu4e|GpdYKgVshR1{C@ZmG0Y{7WeV_PX-TV)1XPthj90>chob>){GR8LSW{4fsVlYEk*s;l(`RKEC_EF6 
z4;!$H$M#`=_FQ&WQ9fC3Sm3kY;GM6JRaIQZ`}oiA`1*G$ZU^OoPS zWc6+Ayyt~~?DS#2?Rb`18wqB5e{uHpZY-T`z{qoK;N|u2fM4YUEI02R7G4TfYu|!d zku)#LSsYYeBv2tzi*p{%M$hE~_*%_}9C_GEJxmH9UG)cunQG(V?=s9#wK{u6>HygL zn;9J@z*7Id_9V1Vg2`q5-5Q=-|9mOcZRvwHz^vPzU)Qd3Rm?yH1N6aEtG zU%}M%f*_{REg+JNWJ*9WNEhFrgHK|qpqC)64j9D6?h?4Sbt79oClWMw&Vb~z+3;bi z2^)O02?Pb*`RY$47{?PHuz8{tw1G2Dyz&IrEziX(X3e~%dMW(X{`I^Jyi7dDm%`Tu zWmX-}!R2vr>_XijREeF&C}tGV@vYlgZ*5m*?nejKwD>iIa~VrFNh`G8cE#cnXACd6 zh@z<*@odoqYFPOl{Ww06>aloltc}wZ!pJX=YE0s^tlMiPdCSr|;B?+O^lpsD^@*H! z!163Dxe$Xl_On#>tR(xax)#2FJBtxsHcYxF*RkdijMMrSvzjr>LAfFlL^fDa+;bQt zOJDOkckko>a8AI{$Yv5y{|US9M8l4M>cCD_V-sH+~c%3+IaAsh0-k%Q-ukSLLWPu zfL7H~wr<>8`tRH+92i&zx_4zJdC#-9Yaw}i6xfXwy&!W^f>#<` zfvI`N>BiFi6r57wTeNx&l``Ib`#NzIb@ySHe5R827W)1PTXy6F-&+H)mK{# zCM8jDkA%bb7gwO`dInE?yAh;Ha$L;6l_=uqO2w^{(Z^bn#5AuWc8gz9{sS}igx?tu zEfU3A4?Bp6)?qjOx`Vss=|Z}|OYrW@p)>bALi`hglUB6gh6Wj^zbVF^S}TY1=E&j` z-(I}_ZIs3}6cYiHvphq!BbYtZ03Wuukk#rOlM*e9KU|mbhc+i+@rDjEIM0GHvdQOs z!t=m8EE}b#x#O#^9B(}ogY#=UAxe5L7GLeFTIlzP?|bw6^}>&nEm zKl@U*S5}_T#dR+?nQ(5?tZ_BNt|fvmVzzkQO@jdAKk45*1WgM^}0L#1d^8xE3A+hc<;`zVb`{jq*VlK3NHqhTmfh zch6axAHYdsp4I0kRbpnI8YtV=!qVIXT%sBebCteg;g)t>Z<2$H4(x%I%R{lAyDx1L z`S7{4jLUnifFtt;(fi96cyD2k{-ihPN+{+)6fz#n2KNX^~@0xm-+{9iRNBrdD09m3>&^P)cni_PGj;#?Onsbp} z8gc<&s|nN~P#ylfZpHI`m*ByO2>1?LVTnQr8dX<=x`z;Yq$yL^Iv*@a?E~+QNW3Kd z2%?%ii0`d!=-YV~&K^+2=)Hbq-=3$YM^t^#b-Es2GbzDo6I9{K4|`g-vkFblU#Ig& z{r;zYZ2#$v_|NPA{l19a|L$Y^uYQXEq^kd)`y%}Q^Va{8*Z-?NHg){3<(&NQRsGL; zoc{m*i2o&D-~VSW`hVC-=Xv_y1Op)BgZD CR{P-q literal 0 HcmV?d00001 From da9310d08fc3ee9dd8e24f900d0947c51f8a2b00 Mon Sep 17 00:00:00 2001 From: Mark Kittisopikul Date: Wed, 29 Mar 2023 14:15:58 -0400 Subject: [PATCH 037/108] Port h5dchunk_iter_hsizet_size #2074 to hdf5_1_12 branch (#2329) --- src/H5D.c | 26 -------------------------- src/H5Dchunk.c | 4 ++-- src/H5Dpublic.h | 2 +- test/chunk_info.c | 18 +++++++++--------- 4 files changed, 12 insertions(+), 38 deletions(-) diff --git a/src/H5D.c b/src/H5D.c index 
cafa91c5879..c9c10543bd9 100644 --- a/src/H5D.c +++ b/src/H5D.c @@ -1137,32 +1137,6 @@ H5Dget_chunk_info_by_coord(hid_t dset_id, const hsize_t *offset, unsigned *filte * H5D_chunk_iter_op_t cb IN: User callback function, called for every chunk. * void *op_data IN/OUT: Optional user data passed on to user callback. * - * Callback information: - * H5D_chunk_iter_op_t is defined as: - * - * typedef int (*H5D_chunk_iter_op_t)( - * const hsize_t *offset, - * uint32_t filter_mask, - * haddr_t addr, - * uint32_t size, - * void *op_data); - * - * H5D_chunk_iter_op_t parameters: - * hsize_t *offset; IN/OUT: Array of starting logical coordinates of chunk. - * uint32_t filter_mask; IN: Filter mask of chunk. - * haddr_t addr; IN: Offset in file of chunk data. - * uint32_t nbytes; IN: Size in number of bytes of chunk data in file. - * void *op_data; IN/OUT: Pointer to any user-defined data - * associated with the operation. - * - * The return values from an operator are: - * Zero (H5_ITER_CONT) causes the iterator to continue, returning zero when all - * elements have been processed. - * Positive (H5_ITER_STOP) causes the iterator to immediately return that positive - * value, indicating short-circuit success. - * Negative (H5_ITER_ERROR) causes the iterator to immediately return that value, - * indicating failure. 
- * * Return: Non-negative on success, negative on failure * * Programmer: Gaute Hope diff --git a/src/H5Dchunk.c b/src/H5Dchunk.c index 216ac44ca80..a0f83714627 100644 --- a/src/H5Dchunk.c +++ b/src/H5Dchunk.c @@ -7687,8 +7687,8 @@ H5D__chunk_iter_cb(const H5D_chunk_rec_t *chunk_rec, void *udata) FUNC_ENTER_PACKAGE_NOERR /* Check for callback failure and pass along return value */ - if ((ret_value = (data->op)(offset, chunk_rec->filter_mask, chunk_rec->chunk_addr, chunk_rec->nbytes, - data->op_data)) < 0) + if ((ret_value = (data->op)(offset, (unsigned)chunk_rec->filter_mask, chunk_rec->chunk_addr, + (hsize_t)chunk_rec->nbytes, data->op_data)) < 0) HERROR(H5E_DATASET, H5E_CANTNEXT, "iteration operator failed"); FUNC_LEAVE_NOAPI(ret_value) diff --git a/src/H5Dpublic.h b/src/H5Dpublic.h index e0533b3388e..22e856db914 100644 --- a/src/H5Dpublic.h +++ b/src/H5Dpublic.h @@ -238,7 +238,7 @@ typedef herr_t (*H5D_gather_func_t)(const void *dst_buf, size_t dst_buf_bytes_us * \li A negative (#H5_ITER_ERROR) causes the iterator to immediately * return that value, indicating failure. */ -typedef int (*H5D_chunk_iter_op_t)(const hsize_t *offset, uint32_t filter_mask, haddr_t addr, uint32_t size, +typedef int (*H5D_chunk_iter_op_t)(const hsize_t *offset, unsigned filter_mask, haddr_t addr, hsize_t size, void *op_data); //! 
diff --git a/test/chunk_info.c b/test/chunk_info.c index 68afc1e660f..25b17e57189 100644 --- a/test/chunk_info.c +++ b/test/chunk_info.c @@ -1507,9 +1507,9 @@ test_chunk_info_version2_btrees(const char *filename, hid_t fapl) typedef struct chunk_iter_info_t { hsize_t offset[2]; - uint32_t filter_mask; + unsigned filter_mask; haddr_t addr; - uint32_t nbytes; + hsize_t size; } chunk_iter_info_t; typedef struct chunk_iter_udata_t { @@ -1518,7 +1518,7 @@ typedef struct chunk_iter_udata_t { } chunk_iter_udata_t; static int -iter_cb(const hsize_t *offset, uint32_t filter_mask, haddr_t addr, uint32_t nbytes, void *op_data) +iter_cb(const hsize_t *offset, unsigned filter_mask, haddr_t addr, hsize_t size, void *op_data) { chunk_iter_udata_t *cidata = (chunk_iter_udata_t *)op_data; int idx = cidata->last_index + 1; @@ -1527,7 +1527,7 @@ iter_cb(const hsize_t *offset, uint32_t filter_mask, haddr_t addr, uint32_t nbyt cidata->chunk_info[idx].offset[1] = offset[1]; cidata->chunk_info[idx].filter_mask = filter_mask; cidata->chunk_info[idx].addr = addr; - cidata->chunk_info[idx].nbytes = nbytes; + cidata->chunk_info[idx].size = size; cidata->last_index++; @@ -1535,8 +1535,8 @@ iter_cb(const hsize_t *offset, uint32_t filter_mask, haddr_t addr, uint32_t nbyt } static int -iter_cb_stop(const hsize_t H5_ATTR_UNUSED *offset, uint32_t H5_ATTR_UNUSED filter_mask, - haddr_t H5_ATTR_UNUSED addr, uint32_t H5_ATTR_UNUSED nbytes, void *op_data) +iter_cb_stop(const hsize_t H5_ATTR_UNUSED *offset, unsigned H5_ATTR_UNUSED filter_mask, + haddr_t H5_ATTR_UNUSED addr, hsize_t H5_ATTR_UNUSED size, void *op_data) { chunk_iter_info_t **chunk_info = (chunk_iter_info_t **)op_data; *chunk_info += 1; @@ -1544,8 +1544,8 @@ iter_cb_stop(const hsize_t H5_ATTR_UNUSED *offset, uint32_t H5_ATTR_UNUSED filte } static int -iter_cb_fail(const hsize_t H5_ATTR_UNUSED *offset, uint32_t H5_ATTR_UNUSED filter_mask, - haddr_t H5_ATTR_UNUSED addr, uint32_t H5_ATTR_UNUSED nbytes, void *op_data) +iter_cb_fail(const 
hsize_t H5_ATTR_UNUSED *offset, unsigned H5_ATTR_UNUSED filter_mask, + haddr_t H5_ATTR_UNUSED addr, hsize_t H5_ATTR_UNUSED size, void *op_data) { chunk_iter_info_t **chunk_info = (chunk_iter_info_t **)op_data; *chunk_info += 1; @@ -1717,7 +1717,7 @@ test_basic_query(hid_t fapl) FAIL_PUTS_ERROR("offset[1] mismatch"); if (chunk_infos[0].filter_mask != 0) FAIL_PUTS_ERROR("filter mask mismatch"); - if (chunk_infos[0].nbytes != 96) + if (chunk_infos[0].size != 96) FAIL_PUTS_ERROR("size mismatch"); if (chunk_infos[1].offset[0] != CHUNK_NX) From 70fa97e271ffdfa21b0a57206928c0f9bac89c2b Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Wed, 29 Mar 2023 22:35:56 -0500 Subject: [PATCH 038/108] Fix CMake for version less then 3.18 (#2651) --- bin/batch/ctest.qsub.in.cmake | 14 +++++----- bin/batch/ctestP.lsf.in.cmake | 3 ++- bin/batch/ctestP.sl.in.cmake | 7 ++--- bin/batch/ctestS.lsf.in.cmake | 5 ++-- bin/batch/ctestS.sl.in.cmake | 8 +++--- bin/batch/ctest_parallel.cmake.in | 12 +++++++++ bin/batch/ctest_serial.cmake.in | 12 +++++++++ bin/batch/knl_ctestP.sl.in.cmake | 8 +++--- bin/batch/knl_ctestS.sl.in.cmake | 9 +++---- bin/batch/ray_ctestP.lsf.in.cmake | 4 ++- bin/batch/ray_ctestS.lsf.in.cmake | 5 ++-- config/cmake/UseJavaSymlinks.cmake | 5 +--- config/cmake/scripts/CTestScript.cmake | 37 ++++++++++++++++---------- config/cmake/scripts/HDF5config.cmake | 26 +++++++++++++----- config/cmake/scripts/HDF5options.cmake | 2 +- release_docs/INSTALL_CMake.txt | 8 +++--- release_docs/USING_CMake_Examples.txt | 3 ++- release_docs/USING_HDF5_CMake.txt | 6 ++++- 18 files changed, 112 insertions(+), 62 deletions(-) create mode 100644 bin/batch/ctest_parallel.cmake.in create mode 100644 bin/batch/ctest_serial.cmake.in diff --git a/bin/batch/ctest.qsub.in.cmake b/bin/batch/ctest.qsub.in.cmake index 702fca7f011..da3ae2f01a4 100755 --- a/bin/batch/ctest.qsub.in.cmake +++ b/bin/batch/ctest.qsub.in.cmake @@ -11,11 +11,13 @@ CTEST_CMD=`which ctest` 
cd @HDF5_BINARY_DIR@ if [[ $SUMMARY_FILE == *"ctestS"* ]]; then - CMD="${CTEST_CMD} . -E MPI_TEST_ -C Release -j 32 -T test" + CMD="${CTEST_CMD} -S ctest_serial.cmake" + qsub -t 60 -n 1 -q debug-flat-quad -A ${ACCOUNT_ID} ${CMD} >& ${SUMMARY_FILE} + echo "Done running ctest serial command." + touch ctestS.done else - CMD="${CTEST_CMD} . -R MPI_TEST_ ${SKIP_TESTS} -C Release -T test" + CMD="${CTEST_CMD} -S ctest_parallel.cmake" + qsub -t 60 -n 1 -q debug-flat-quad -A ${ACCOUNT_ID} ${CMD} >& ${SUMMARY_FILE} + echo "Done running ctest parallel command." + touch ctestP.done fi - -qsub -t 60 -n 1 -q debug-flat-quad -A ${ACCOUNT_ID} ${CMD} >& ${SUMMARY_FILE} - -echo "Done running ctest parallel command." diff --git a/bin/batch/ctestP.lsf.in.cmake b/bin/batch/ctestP.lsf.in.cmake index 3fdd5ce99da..8b89c9ca803 100644 --- a/bin/batch/ctestP.lsf.in.cmake +++ b/bin/batch/ctestP.lsf.in.cmake @@ -14,6 +14,7 @@ cd @HDF5_BINARY_DIR@ echo "Run parallel test command. Test output will be in build/ctestP.out" -ctest . -R MPI_TEST_ -C Release -T test >& ctestP.out +ctest -S ctest_parallel.cmake >& ctestP.out echo "Done running ctest parallel command." +touch ctestP.done diff --git a/bin/batch/ctestP.sl.in.cmake b/bin/batch/ctestP.sl.in.cmake index 1069fa90281..599fee82031 100644 --- a/bin/batch/ctestP.sl.in.cmake +++ b/bin/batch/ctestP.sl.in.cmake @@ -8,7 +8,8 @@ #SBATCH --job-name=h5_ctestP cd @HDF5_BINARY_DIR@ -ctest . -R MPI_TEST_ -C Release -T test >& ctestP.out - -echo "Done running ctestP.sl" +echo "Run parallel test command. Test output will be in build/ctestP.out" +ctest -S ctest_parallel.cmake >& ctestP.out +echo "Done running ctest parallel command." +touch ctestP.done diff --git a/bin/batch/ctestS.lsf.in.cmake b/bin/batch/ctestS.lsf.in.cmake index a01d39b00a3..73b17c0dc16 100644 --- a/bin/batch/ctestS.lsf.in.cmake +++ b/bin/batch/ctestS.lsf.in.cmake @@ -11,8 +11,7 @@ cd @HDF5_BINARY_DIR@ echo "Run command. Test output will be in build/ctestS.out" -ctest . 
-E MPI_TEST_ -C Release -j 32 -T test >& ctestS.out +ctest -S ctest_serial.cmake >& ctestS.out -##$CMD >& ctestS.out echo "Done running command." - +touch ctestS.done diff --git a/bin/batch/ctestS.sl.in.cmake b/bin/batch/ctestS.sl.in.cmake index 4f96bb940f9..bf0d6ae7c9b 100644 --- a/bin/batch/ctestS.sl.in.cmake +++ b/bin/batch/ctestS.sl.in.cmake @@ -8,8 +8,8 @@ #SBATCH --job-name=h5_ctestS cd @HDF5_BINARY_DIR@ -CMD="ctest . -E MPI_TEST_ -C Release -j 32 -T test" +echo "Run command. Test output will be in build/ctestS.out" +ctest -S ctest_serial.cmake >& ctestS.out -echo "Run $CMD. Test output will be in build/ctestS.out" -$CMD >& ctestS.out -echo "Done running $CMD" +echo "Done running command." +touch ctestS.done diff --git a/bin/batch/ctest_parallel.cmake.in b/bin/batch/ctest_parallel.cmake.in new file mode 100644 index 00000000000..f35a772459e --- /dev/null +++ b/bin/batch/ctest_parallel.cmake.in @@ -0,0 +1,12 @@ +if(NOT "$ENV{CI_SITE_NAME}" STREQUAL "") + set(CTEST_SITE "$ENV{CI_SITE_NAME}") +endif() +if(NOT "$ENV{CI_BUILD_NAME}" STREQUAL "") + set(CTEST_BUILD_NAME "$ENV{CI_BUILD_NAME}") +endif() + +ctest_start ("$ENV{CI_MODEL}" "@HDF5_SOURCE_DIR@" "@HDF5_BINARY_DIR@" APPEND) +ctest_test (BUILD "@HDF5_BINARY_DIR@" APPEND INCLUDE MPI_TEST_ RETURN_VALUE res) +if (${res} LESS 0 OR ${res} GREATER 0) + file (APPEND ${CTEST_SCRIPT_DIRECTORY}/FailedCTest.txt "Failed Tests: ${res}\n") + endif () diff --git a/bin/batch/ctest_serial.cmake.in b/bin/batch/ctest_serial.cmake.in new file mode 100644 index 00000000000..e54421baed6 --- /dev/null +++ b/bin/batch/ctest_serial.cmake.in @@ -0,0 +1,12 @@ +if(NOT "$ENV{CI_SITE_NAME}" STREQUAL "") + set(CTEST_SITE "$ENV{CI_SITE_NAME}") +endif() +if(NOT "$ENV{CI_BUILD_NAME}" STREQUAL "") + set(CTEST_BUILD_NAME "$ENV{CI_BUILD_NAME}") +endif() + +ctest_start ("$ENV{CI_MODEL}" "@HDF5_SOURCE_DIR@" "@HDF5_BINARY_DIR@" APPEND) +ctest_test (BUILD "@HDF5_BINARY_DIR@" APPEND EXCLUDE MPI_TEST_ PARALLEL_LEVEL 32 RETURN_VALUE res) +if (${res} 
LESS 0 OR ${res} GREATER 0) + file (APPEND ${CTEST_SCRIPT_DIRECTORY}/FailedCTest.txt "Failed Tests: ${res}\n") + endif () diff --git a/bin/batch/knl_ctestP.sl.in.cmake b/bin/batch/knl_ctestP.sl.in.cmake index f985fbb1180..97bd2ad31aa 100644 --- a/bin/batch/knl_ctestP.sl.in.cmake +++ b/bin/batch/knl_ctestP.sl.in.cmake @@ -9,8 +9,8 @@ #SBATCH --job-name=h5_ctestP cd @HDF5_BINARY_DIR@ -#run parallel tests except t_cache_image test -ctest . -R MPI_TEST_ -C Release -T test >& ctestP.out - -echo "Done running $CMD" +echo "Run parallel test command. Test output will be in build/ctestP.out" +ctest -S ctest_parallel.cmake >& ctestP.out +echo "Done running ctest parallel command." +touch ctestP.done diff --git a/bin/batch/knl_ctestS.sl.in.cmake b/bin/batch/knl_ctestS.sl.in.cmake index af6353bae39..87c4a486eea 100644 --- a/bin/batch/knl_ctestS.sl.in.cmake +++ b/bin/batch/knl_ctestS.sl.in.cmake @@ -9,9 +9,8 @@ #SBATCH --job-name=h5_ctestS cd @HDF5_BINARY_DIR@ -CMD="ctest . -E MPI_TEST_ -C Release -j 32 -T test" - -echo "Run $CMD. Test output will be in build/ctestS.out" -$CMD >& ctestS.out -echo "Done running $CMD" +echo "Run command. Test output will be in build/ctestS.out" +ctest -S ctest_serial.cmake >& ctestS.out +echo "Done running command." +touch ctestS.done diff --git a/bin/batch/ray_ctestP.lsf.in.cmake b/bin/batch/ray_ctestP.lsf.in.cmake index 7067a65811e..51425b50c9a 100644 --- a/bin/batch/ray_ctestP.lsf.in.cmake +++ b/bin/batch/ray_ctestP.lsf.in.cmake @@ -15,6 +15,8 @@ cd @HDF5_BINARY_DIR@ echo "Run parallel test command. Test output will be in build/ctestP.out" -ctest . -R 'MPI_TEST_' -C Release -T test >& ctestP.out +ctest -S ctest_parallel.cmake >& ctestP.out echo "Done running ctest parallel command." 
+touch ctestP.done +~ diff --git a/bin/batch/ray_ctestS.lsf.in.cmake b/bin/batch/ray_ctestS.lsf.in.cmake index da2043824e2..7f585b41209 100644 --- a/bin/batch/ray_ctestS.lsf.in.cmake +++ b/bin/batch/ray_ctestS.lsf.in.cmake @@ -11,8 +11,7 @@ cd @HDF5_BINARY_DIR@ echo "Run command. Test output will be in build/ctestS.out" -ctest . -E 'MPI_TEST_' -C Release -j 32 -T test >& ctestS.out +ctest -S ctest_serial.cmake >& ctestS.out -##$CMD >& ctestS.out echo "Done running command." - +touch ctestS.done diff --git a/config/cmake/UseJavaSymlinks.cmake b/config/cmake/UseJavaSymlinks.cmake index cb2e2820952..183a7448855 100644 --- a/config/cmake/UseJavaSymlinks.cmake +++ b/config/cmake/UseJavaSymlinks.cmake @@ -4,10 +4,7 @@ if (UNIX AND _JAVA_TARGET_OUTPUT_LINK) if (_JAVA_TARGET_OUTPUT_NAME) - find_program(LN_EXECUTABLE - NAMES - ln - ) + find_program(LN_EXECUTABLE NAMES ln) execute_process( COMMAND ${LN_EXECUTABLE} -sf "${_JAVA_TARGET_OUTPUT_NAME}" "${_JAVA_TARGET_OUTPUT_LINK}" diff --git a/config/cmake/scripts/CTestScript.cmake b/config/cmake/scripts/CTestScript.cmake index fa149c3fa92..fb9dd57d0fd 100644 --- a/config/cmake/scripts/CTestScript.cmake +++ b/config/cmake/scripts/CTestScript.cmake @@ -261,6 +261,10 @@ if (NOT DEFINED MODEL) set (MODEL "Experimental") endif () +set (ENV{CI_SITE_NAME} ${CTEST_SITE}) +set (ENV{CI_BUILD_NAME} ${CTEST_BUILD_NAME}) +set (ENV{CI_MODEL} ${MODEL}) + #----------------------------------------------------------------------------- ## NORMAL process ## -- LOCAL_UPDATE updates the source folder from svn @@ -269,7 +273,11 @@ endif () ## -- LOCAL_MEMCHECK_TEST executes the Valgrind testing ## -- LOCAL_COVERAGE_TEST executes code coverage process ## -------------------------- - ctest_start (${MODEL} GROUP ${MODEL}) + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.16.0") + ctest_start (${MODEL} GROUP ${MODEL}) + else () + ctest_start (${MODEL} TRACK ${MODEL}) + endif () if (LOCAL_UPDATE) ctest_update (SOURCE "${CTEST_SOURCE_DIRECTORY}") endif () @@ 
-315,19 +323,17 @@ endif () execute_process (COMMAND ${LOCAL_BATCH_SCRIPT_COMMAND} ${LOCAL_BATCH_SCRIPT_ARGS} ${CTEST_BINARY_DIRECTORY}/${LOCAL_BATCH_SCRIPT_NAME}) endif() endif () - message(STATUS "Check for existence of ${CTEST_BINARY_DIRECTORY}/Testing/${TAG_CONTENTS}/Test.xml") - execute_process(COMMAND ls ${CTEST_BINARY_DIRECTORY}/Testing/${TAG_CONTENTS}/Test.xml RESULT_VARIABLE result OUTPUT_QUIET ERROR_QUIET) + message(STATUS "Check for existence of ${CTEST_BINARY_DIRECTORY}/ctestS.done") + execute_process(COMMAND ls ${CTEST_BINARY_DIRECTORY}/ctestS.done RESULT_VARIABLE result OUTPUT_QUIET ERROR_QUIET) while(result) ctest_sleep(60) - execute_process(COMMAND ls ${CTEST_BINARY_DIRECTORY}/Testing/${TAG_CONTENTS}/Test.xml RESULT_VARIABLE result OUTPUT_QUIET ERROR_QUIET) + execute_process(COMMAND ls ${CTEST_BINARY_DIRECTORY}/ctestS.done RESULT_VARIABLE result OUTPUT_QUIET ERROR_QUIET) endwhile(result) + message(STATUS "Serial tests completed.") + if (LOCAL_SUBMIT) + ctest_submit (PARTS Test) + endif () if (LOCAL_BATCH_SCRIPT_PARALLEL_NAME) - if (LOCAL_SUBMIT) - ctest_submit (PARTS Test) - endif () - message(STATUS "Found ${CTEST_BINARY_DIRECTORY}/Testing/${TAG_CONTENTS}/Test.xml for serial tests. 
Renaming to SerialTest.xml") - file (RENAME ${CTEST_BINARY_DIRECTORY}/Testing/${TAG_CONTENTS}/Test.xml ${CTEST_BINARY_DIRECTORY}/Testing/${TAG_CONTENTS}/SerialTest.xml) - file (RENAME ${CTEST_BINARY_DIRECTORY}/Testing/Temporary/LastTest_${TAG_CONTENTS}.log ${CTEST_BINARY_DIRECTORY}/Testing/Temporary/LastTest_${TAG_CONTENTS}_Serial.log) unset(result CACHE) if (LOCAL_BATCH_SCRIPT_COMMAND STREQUAL "raybsub") execute_process (COMMAND ${CTEST_BINARY_DIRECTORY}/${LOCAL_BATCH_SCRIPT_COMMAND} ${LOCAL_BATCH_SCRIPT_ARGS} ${CTEST_BINARY_DIRECTORY}/${LOCAL_BATCH_SCRIPT_PARALLEL_NAME}) @@ -338,13 +344,13 @@ endif () execute_process (COMMAND ${LOCAL_BATCH_SCRIPT_COMMAND} ${LOCAL_BATCH_SCRIPT_ARGS} ${CTEST_BINARY_DIRECTORY}/${LOCAL_BATCH_SCRIPT_PARALLEL_NAME}) endif () endif () - message(STATUS "Check for existence of ${CTEST_BINARY_DIRECTORY}/Testing/${TAG_CONTENTS}/Test.xml") - execute_process(COMMAND ls ${CTEST_BINARY_DIRECTORY}/Testing/${TAG_CONTENTS}/Test.xml RESULT_VARIABLE result OUTPUT_QUIET ERROR_QUIET) + message(STATUS "Check for existence of ${CTEST_BINARY_DIRECTORY}/ctestP.done") + execute_process(COMMAND ls ${CTEST_BINARY_DIRECTORY}/ctestP.done RESULT_VARIABLE result OUTPUT_QUIET ERROR_QUIET) while(result) ctest_sleep(60) - execute_process(COMMAND ls ${CTEST_BINARY_DIRECTORY}/Testing/${TAG_CONTENTS}/Test.xml RESULT_VARIABLE result OUTPUT_QUIET ERROR_QUIET) + execute_process(COMMAND ls ${CTEST_BINARY_DIRECTORY}/ctestP.done RESULT_VARIABLE result OUTPUT_QUIET ERROR_QUIET) endwhile(result) - message(STATUS "Found ${CTEST_BINARY_DIRECTORY}/Testing/${TAG_CONTENTS}/Test.xml for parallel tests.") + message(STATUS "parallel tests completed.") endif() endif () if (LOCAL_SUBMIT) @@ -365,6 +371,9 @@ endif () ctest_submit (PARTS Coverage) endif () endif () + if (LOCAL_SUBMIT) + ctest_submit (PARTS Done) + endif () endif () if (NOT LOCAL_MEMCHECK_TEST AND NOT LOCAL_NO_PACKAGE AND NOT LOCAL_SKIP_BUILD) diff --git a/config/cmake/scripts/HDF5config.cmake 
b/config/cmake/scripts/HDF5config.cmake index daa902edc51..2c9155a040b 100644 --- a/config/cmake/scripts/HDF5config.cmake +++ b/config/cmake/scripts/HDF5config.cmake @@ -11,7 +11,7 @@ # ############################################################################################# ### ${CTEST_SCRIPT_ARG} is of the form OPTION=VALUE ### -### BUILD_GENERATOR required [Unix, VS2019, VS201964, VS2017, VS201764, VS2015, VS201564] ### +### BUILD_GENERATOR required [Unix, VS2022, VS2019, VS201964, VS2017, VS201764, VS2015, VS201564] ### ### ctest -S HDF5config.cmake,BUILD_GENERATOR=VS201764 -C Release -VV -O hdf5.log ### ############################################################################################# @@ -23,14 +23,14 @@ cmake_minimum_required (VERSION 3.12) # BUILD_GENERATOR - The cmake build generator: # MinGW * MinGW Makefiles # Unix * Unix Makefiles +# VS2022 * Visual Studio 17 2022 +# VS202264 * Visual Studio 17 2022 # VS2019 * Visual Studio 16 2019 # VS201964 * Visual Studio 16 2019 # VS2017 * Visual Studio 15 2017 # VS201764 * Visual Studio 15 2017 Win64 # VS2015 * Visual Studio 14 2015 # VS201564 * Visual Studio 14 2015 Win64 -# VS2013 * Visual Studio 12 2013 -# VS201364 * Visual Studio 12 2013 Win64 # # INSTALLDIR - root folder where hdf5 is installed # CTEST_CONFIGURATION_TYPE - Release, Debug, etc @@ -68,7 +68,7 @@ endif () # build generator must be defined if (NOT DEFINED BUILD_GENERATOR) - message (FATAL_ERROR "BUILD_GENERATOR must be defined - Unix, VS2019, VS201964, VS2017, VS201764, VS2015, VS201564") + message (FATAL_ERROR "BUILD_GENERATOR must be defined - Unix, VS2022, VS2019, VS201964, VS2017, VS201764, VS2015, VS201564") endif () ################################################################### @@ -105,12 +105,24 @@ endif () ######### Following describes compiler ############ if (NOT DEFINED HPC) if (NOT DEFINED BUILD_GENERATOR) - message (FATAL_ERROR "BUILD_GENERATOR must be defined - Unix, VS2019, VS201964, VS2017, VS201764, VS2015, 
VS201564") + message (FATAL_ERROR "BUILD_GENERATOR must be defined - Unix, VS2022, VS2019, VS201964, VS2017, VS201764, VS2015, VS201564") endif () if (WIN32 AND NOT MINGW) set (SITE_OS_NAME "Windows") set (SITE_OS_VERSION "WIN10") - if (BUILD_GENERATOR STREQUAL "VS201964") + if (BUILD_GENERATOR STREQUAL "VS202264") + set (CTEST_CMAKE_GENERATOR "Visual Studio 17 2022") + set (CMAKE_GENERATOR_ARCHITECTURE "x64") + set (SITE_OS_BITS "64") + set (SITE_COMPILER_NAME "vs2022") + set (SITE_COMPILER_VERSION "17") + elseif (BUILD_GENERATOR STREQUAL "VS2022") + set (CTEST_CMAKE_GENERATOR "Visual Studio 17 2022") + set (CMAKE_GENERATOR_ARCHITECTURE "Win32") + set (SITE_OS_BITS "32") + set (SITE_COMPILER_NAME "vs2022") + set (SITE_COMPILER_VERSION "17") + elseif (BUILD_GENERATOR STREQUAL "VS201964") set (CTEST_CMAKE_GENERATOR "Visual Studio 16 2019") set (CMAKE_GENERATOR_ARCHITECTURE "x64") set (SITE_OS_BITS "64") @@ -163,7 +175,7 @@ if (NOT DEFINED HPC) set (SITE_COMPILER_NAME "vs2012") set (SITE_COMPILER_VERSION "11") else () - message (FATAL_ERROR "Invalid BUILD_GENERATOR must be - Unix, VS2019, VS201964, VS2017, VS201764, VS2015, VS201564") + message (FATAL_ERROR "Invalid BUILD_GENERATOR must be - Unix, VS2022, VS2019, VS201964, VS2017, VS201764, VS2015, VS201564") endif () ## Set the following to unique id your computer ## set (CTEST_SITE "WIN7${BUILD_GENERATOR}.XXXX") diff --git a/config/cmake/scripts/HDF5options.cmake b/config/cmake/scripts/HDF5options.cmake index e14dfba94a1..92bfd37ecbe 100644 --- a/config/cmake/scripts/HDF5options.cmake +++ b/config/cmake/scripts/HDF5options.cmake @@ -74,7 +74,7 @@ set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ALLOW_EXTERNAL_SUPPORT:STRIN #set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_ENCODING:BOOL=OFF") #### package examples #### -#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_PACK_EXAMPLES:BOOL=ON -DHDF5_EXAMPLES_COMPRESSED:STRING=HDF5Examples-2.0.2-Source.tar.gz 
-DHDF5_EXAMPLES_COMPRESSED_DIR:PATH=${CTEST_SCRIPT_DIRECTORY}") +#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_PACK_EXAMPLES:BOOL=ON -DHDF5_EXAMPLES_COMPRESSED:STRING=HDF5Examples-2.0.3-Source.tar.gz -DHDF5_EXAMPLES_COMPRESSED_DIR:PATH=${CTEST_SCRIPT_DIRECTORY}") ############################################################################################# ### enable parallel builds diff --git a/release_docs/INSTALL_CMake.txt b/release_docs/INSTALL_CMake.txt index 66bd732d28e..db2d37557ba 100644 --- a/release_docs/INSTALL_CMake.txt +++ b/release_docs/INSTALL_CMake.txt @@ -97,6 +97,10 @@ To build HDF5 with the SZIP and ZLIB external libraries you will need to: 5. From the "myhdfstuff" directory execute the CTest Script with the following options: + On 32-bit Windows with Visual Studio 2022, execute: + ctest -S HDF5config.cmake,BUILD_GENERATOR=VS2022 -C Release -VV -O hdf5.log + On 64-bit Windows with Visual Studio 2022, execute: + ctest -S HDF5config.cmake,BUILD_GENERATOR=VS202264 -C Release -VV -O hdf5.log On 32-bit Windows with Visual Studio 2019, execute: ctest -S HDF5config.cmake,BUILD_GENERATOR=VS2019 -C Release -VV -O hdf5.log On 64-bit Windows with Visual Studio 2019, execute: @@ -105,10 +109,6 @@ To build HDF5 with the SZIP and ZLIB external libraries you will need to: ctest -S HDF5config.cmake,BUILD_GENERATOR=VS2017 -C Release -VV -O hdf5.log On 64-bit Windows with Visual Studio 2017, execute: ctest -S HDF5config.cmake,BUILD_GENERATOR=VS201764 -C Release -VV -O hdf5.log - On 32-bit Windows with Visual Studio 2015, execute: - ctest -S HDF5config.cmake,BUILD_GENERATOR=VS2015 -C Release -VV -O hdf5.log - On 64-bit Windows with Visual Studio 2015, execute: - ctest -S HDF5config.cmake,BUILD_GENERATOR=VS201564 -C Release -VV -O hdf5.log On Linux and Mac, execute: ctest -S HDF5config.cmake,BUILD_GENERATOR=Unix -C Release -VV -O hdf5.log The supplied build scripts are versions of the above. 
diff --git a/release_docs/USING_CMake_Examples.txt b/release_docs/USING_CMake_Examples.txt index bed27fbaf8c..35e1036c455 100644 --- a/release_docs/USING_CMake_Examples.txt +++ b/release_docs/USING_CMake_Examples.txt @@ -22,7 +22,8 @@ I. Preconditions 1. We suggest you obtain the latest CMake for your platform from the Kitware web site. The HDF5 1.12.x product requires a minimum CMake version - of 3.12. If you are using VS2019, the minimum version is 3.15. + of 3.12. If you are using VS2019, the minimum version is 3.15. For VS2022, + the minimum version is 3.21. 2. You have installed the HDF5 library built with CMake, by executing the HDF Install Utility (the *.msi file in the binary package for diff --git a/release_docs/USING_HDF5_CMake.txt b/release_docs/USING_HDF5_CMake.txt index 9bfc2ca4208..33031032bc6 100644 --- a/release_docs/USING_HDF5_CMake.txt +++ b/release_docs/USING_HDF5_CMake.txt @@ -39,7 +39,8 @@ I. Preconditions 1. We suggest you obtain the latest CMake for your platform from the Kitware web site. The HDF5 1.12.x product requires a minimum CMake version - of 3.12. If you are using VS2019, the minimum version is 3.15. + of 3.12. If you are using VS2019, the minimum version is 3.15. For VS2022, + the minimum version is 3.21. 2. You have installed the HDF5 library built with CMake, by executing the HDF Install Utility (the *.msi file in the binary package for @@ -126,6 +127,9 @@ These steps are described in more detail below. * Visual Studio 16 2019 * ... in addition VS2019 will need to set the "-A" option, * ... [Win32, x64, ARM, ARM64] + * Visual Studio 17 2022 + * ... in addition VS2022 will need to set the "-A" option, + * ... 
[Win32, x64, ARM, ARM64] is: * BUILD_TESTING:BOOL=ON From 2aa55e242c5dfe925834e79a508d331cd7eb0793 Mon Sep 17 00:00:00 2001 From: Dana Robinson <43805+derobins@users.noreply.github.com> Date: Thu, 13 Apr 2023 09:05:07 -0700 Subject: [PATCH 039/108] * Convert asserts to error handling in efl decode (#2709) The function that decodes external data files object header messages would call assert() when parsing malformed files, causing applications to crash when linked against the debug library. This change converts these assert() calls to HDF5 error checks, so the messages are sanity checked in both release and debug mode and debug mode no longer crashes applications. Also cleaned up some error handling usage and debug checks. * Free memory on H5O efl decode errors * Add buffer size checks to efl msg decode * Add parentheses to math expressions Fixes GitHub #2605 --- src/H5Oefl.c | 109 ++++++++++++++++++++++++++++----------------------- 1 file changed, 61 insertions(+), 48 deletions(-) diff --git a/src/H5Oefl.c b/src/H5Oefl.c index 073822e4c8c..90ef7cf7f5a 100644 --- a/src/H5Oefl.c +++ b/src/H5Oefl.c @@ -67,108 +67,121 @@ const H5O_msg_class_t H5O_MSG_EFL[1] = {{ * Purpose: Decode an external file list message and return a pointer to * the message (and some other data). * - * Return: Success: Ptr to a new message struct. + * We allow zero dimension size starting from the 1.8.7 release. + * The dataset size of external storage can be zero. * + * Return: Success: Pointer to a new message struct * Failure: NULL - * - * Programmer: Robb Matzke - * Tuesday, November 25, 1997 - * - * Modification: - * Raymond Lu - * 11 April 2011 - * We allow zero dimension size starting from the 1.8.7 release. - * The dataset size of external storage can be zero. 
*------------------------------------------------------------------------- */ static void * H5O__efl_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNUSED mesg_flags, - unsigned H5_ATTR_UNUSED *ioflags, size_t H5_ATTR_UNUSED p_size, const uint8_t *p) + unsigned H5_ATTR_UNUSED *ioflags, size_t p_size, const uint8_t *p) { - H5O_efl_t *mesg = NULL; - int version; - const char *s = NULL; - H5HL_t *heap; - size_t u; /* Local index variable */ - void *ret_value = NULL; /* Return value */ + H5O_efl_t *mesg = NULL; + int version; + const uint8_t *p_end = p + p_size - 1; /* pointer to last byte in p */ + const char *s = NULL; + H5HL_t *heap = NULL; + void *ret_value = NULL; /* Return value */ FUNC_ENTER_STATIC /* Check args */ HDassert(f); HDassert(p); + HDassert(p_size > 0); if (NULL == (mesg = (H5O_efl_t *)H5MM_calloc(sizeof(H5O_efl_t)))) - HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, NULL, "memory allocation failed") + HGOTO_ERROR(H5E_OHDR, H5E_NOSPACE, NULL, "memory allocation failed") - /* Version */ + /* Version (1 byte) */ + if ((p + 1 - 1) > p_end) + HGOTO_ERROR(H5E_OHDR, H5E_NOSPACE, NULL, "ran off end of input buffer while decoding") version = *p++; if (version != H5O_EFL_VERSION) HGOTO_ERROR(H5E_OHDR, H5E_CANTLOAD, NULL, "bad version number for external file list message") - /* Reserved */ + /* Reserved (3 bytes) */ + if ((p + 3 - 1) > p_end) + HGOTO_ERROR(H5E_OHDR, H5E_NOSPACE, NULL, "ran off end of input buffer while decoding") p += 3; - /* Number of slots */ + /* Number of slots (2x 2 bytes) */ + if ((p + 4 - 1) > p_end) + HGOTO_ERROR(H5E_OHDR, H5E_NOSPACE, NULL, "ran off end of input buffer while decoding") UINT16DECODE(p, mesg->nalloc); - HDassert(mesg->nalloc > 0); + if (mesg->nalloc <= 0) + HGOTO_ERROR(H5E_OHDR, H5E_CANTLOAD, NULL, "bad number of allocated slots when parsing efl msg") UINT16DECODE(p, mesg->nused); - HDassert(mesg->nused <= mesg->nalloc); + if (mesg->nused > mesg->nalloc) + HGOTO_ERROR(H5E_OHDR, H5E_CANTLOAD, NULL, "bad number 
of in-use slots when parsing efl msg") /* Heap address */ + if ((p + H5F_SIZEOF_ADDR(f) - 1) > p_end) + HGOTO_ERROR(H5E_OHDR, H5E_NOSPACE, NULL, "ran off end of input buffer while decoding") H5F_addr_decode(f, &p, &(mesg->heap_addr)); + if (H5F_addr_defined(mesg->heap_addr) == FALSE) + HGOTO_ERROR(H5E_OHDR, H5E_CANTLOAD, NULL, "bad local heap address when parsing efl msg") -#ifndef NDEBUG - HDassert(H5F_addr_defined(mesg->heap_addr)); + /* Decode the file list */ + mesg->slot = (H5O_efl_entry_t *)H5MM_calloc(mesg->nalloc * sizeof(H5O_efl_entry_t)); + if (NULL == mesg->slot) + HGOTO_ERROR(H5E_OHDR, H5E_NOSPACE, NULL, "memory allocation failed") if (NULL == (heap = H5HL_protect(f, mesg->heap_addr, H5AC__READ_ONLY_FLAG))) - HGOTO_ERROR(H5E_SYM, H5E_NOTFOUND, NULL, "unable to read protect link value") + HGOTO_ERROR(H5E_OHDR, H5E_CANTPROTECT, NULL, "unable to protect local heap") +#ifdef H5O_DEBUG + /* Verify that the name at offset 0 in the local heap is the empty string */ s = (const char *)H5HL_offset_into(heap, 0); - - HDassert(s && !*s); - - if (H5HL_unprotect(heap) < 0) - HGOTO_ERROR(H5E_SYM, H5E_NOTFOUND, NULL, "unable to read unprotect link value") - heap = NULL; + if (s == NULL) + HGOTO_ERROR(H5E_OHDR, H5E_CANTGET, NULL, "could not obtain pointer into local heap") + if (*s != '\0') + HGOTO_ERROR(H5E_OHDR, H5E_CANTGET, NULL, "entry at offset 0 in local heap not an empty string") #endif - /* Decode the file list */ - mesg->slot = (H5O_efl_entry_t *)H5MM_calloc(mesg->nalloc * sizeof(H5O_efl_entry_t)); - if (NULL == mesg->slot) - HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, NULL, "memory allocation failed") - - if (NULL == (heap = H5HL_protect(f, mesg->heap_addr, H5AC__READ_ONLY_FLAG))) - HGOTO_ERROR(H5E_SYM, H5E_NOTFOUND, NULL, "unable to read protect link value") - for (u = 0; u < mesg->nused; u++) { + for (size_t u = 0; u < mesg->nused; u++) { /* Name */ + if ((p + H5F_SIZEOF_SIZE(f) - 1) > p_end) + HGOTO_ERROR(H5E_OHDR, H5E_NOSPACE, NULL, "ran off end of input buffer 
while decoding") H5F_DECODE_LENGTH(f, p, mesg->slot[u].name_offset); if ((s = (const char *)H5HL_offset_into(heap, mesg->slot[u].name_offset)) == NULL) - HGOTO_ERROR(H5E_SYM, H5E_CANTGET, NULL, "unable to get external file name") - if (*s == (char)'\0') - HGOTO_ERROR(H5E_SYM, H5E_CANTGET, NULL, "invalid external file name") + HGOTO_ERROR(H5E_OHDR, H5E_CANTGET, NULL, "unable to get external file name") + if (*s == '\0') + HGOTO_ERROR(H5E_OHDR, H5E_CANTGET, NULL, "invalid external file name") mesg->slot[u].name = H5MM_xstrdup(s); - HDassert(mesg->slot[u].name); + if (mesg->slot[u].name == NULL) + HGOTO_ERROR(H5E_OHDR, H5E_NOSPACE, NULL, "string duplication failed") /* File offset */ + if ((p + H5F_SIZEOF_SIZE(f) - 1) > p_end) + HGOTO_ERROR(H5E_OHDR, H5E_NOSPACE, NULL, "ran off end of input buffer while decoding") H5F_DECODE_LENGTH(f, p, mesg->slot[u].offset); /* Size */ + if ((p + H5F_SIZEOF_SIZE(f) - 1) > p_end) + HGOTO_ERROR(H5E_OHDR, H5E_NOSPACE, NULL, "ran off end of input buffer while decoding") H5F_DECODE_LENGTH(f, p, mesg->slot[u].size); - } /* end for */ + } if (H5HL_unprotect(heap) < 0) - HGOTO_ERROR(H5E_SYM, H5E_NOTFOUND, NULL, "unable to read unprotect link value") - heap = NULL; + HGOTO_ERROR(H5E_OHDR, H5E_CANTUNPROTECT, NULL, "unable to unprotect local heap") /* Set return value */ ret_value = mesg; done: if (ret_value == NULL) - if (mesg != NULL) + if (mesg != NULL) { + if (mesg->slot != NULL) { + for (size_t u = 0; u < mesg->nused; u++) + H5MM_xfree(mesg->slot[u].name); + H5MM_xfree(mesg->slot); + } H5MM_xfree(mesg); + } FUNC_LEAVE_NOAPI(ret_value) } /* end H5O__efl_decode() */ From b8529623eb0eb4fb67f12ae44ecedd5ad447c4b6 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Thu, 13 Apr 2023 11:11:15 -0500 Subject: [PATCH 040/108] Change name of test file to avoid conflict (#2705) --- java/test/TestH5OcopyOld.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/java/test/TestH5OcopyOld.java 
b/java/test/TestH5OcopyOld.java index 6353df554f9..0fa57e650f2 100644 --- a/java/test/TestH5OcopyOld.java +++ b/java/test/TestH5OcopyOld.java @@ -249,7 +249,7 @@ public void testH5OcopyRefsDatasettodiffFile() try { // create new file - H5fid2 = H5.H5Fcreate("copy.h5", HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + H5fid2 = H5.H5Fcreate("copy_old.h5", HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); assertTrue("testH5OcopyRefsDatasettodiffFile.H5Fcreate: ", H5fid2 >= 0); H5.H5Fflush(H5fid2, HDF5Constants.H5F_SCOPE_LOCAL); @@ -288,7 +288,7 @@ public void testH5OcopyRefsDatasettodiffFile() catch (Exception ex) { } } - _deleteFile("copy.h5"); + _deleteFile("copy_old.h5"); } @Test From 0249cead15fdee5fb3984a2bd93beb6ca52b445e Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Thu, 13 Apr 2023 11:11:53 -0500 Subject: [PATCH 041/108] Fix #2598 sanitize leak (#2676) --- release_docs/RELEASE.txt | 7 +++++++ tools/lib/h5trav.c | 8 ++++---- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 97f137d5266..62c7b8b44ce 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -359,6 +359,13 @@ Bug Fixes since HDF5-1.12.1 release Tools ----- + - In the tools traverse function - an error in either visit call + will bypass the cleanup of the local data variables. + + Replaced the H5TOOLS_GOTO_ERROR with just H5TOOLS_ERROR. 
+ + (ADB - 2023/04/06 GH-2598) + - Fix h5repack to only print output when verbose option is selected When timing option was added to h5repack, the check for verbose was diff --git a/tools/lib/h5trav.c b/tools/lib/h5trav.c index 3c2353f979c..a26dac34cad 100644 --- a/tools/lib/h5trav.c +++ b/tools/lib/h5trav.c @@ -149,9 +149,9 @@ trav_token_visited(hid_t loc_id, trav_addr_t *visited, H5O_token_t *token) size_t u; /* Local index variable */ int token_cmp; - /* Look for address */ + /* Look for path associated with token */ for (u = 0; u < visited->nused; u++) { - /* Check for address already in array */ + /* Check for token already in array */ if (H5Otoken_cmp(loc_id, &visited->objs[u].token, token, &token_cmp) < 0) return NULL; if (!token_cmp) @@ -286,13 +286,13 @@ traverse(hid_t file_id, const char *grp_name, hbool_t visit_start, hbool_t recur /* Visit all links in group, recursively */ if (H5Lvisit_by_name2(file_id, grp_name, trav_index_by, trav_index_order, traverse_cb, &udata, H5P_DEFAULT) < 0) - H5TOOLS_GOTO_ERROR((-1), "H5Lvisit_by_name failed"); + H5TOOLS_ERROR((-1), "H5Lvisit_by_name failed"); } /* end if */ else { /* Iterate over links in group */ if (H5Literate_by_name2(file_id, grp_name, trav_index_by, trav_index_order, NULL, traverse_cb, &udata, H5P_DEFAULT) < 0) - H5TOOLS_GOTO_ERROR((-1), "H5Literate_by_name failed"); + H5TOOLS_ERROR((-1), "H5Literate_by_name failed"); } /* end else */ /* Free visited addresses table */ From 1d4c9ac7957bf29bb2c3641d196755e640f2bdd4 Mon Sep 17 00:00:00 2001 From: jhendersonHDF Date: Thu, 13 Apr 2023 11:13:26 -0500 Subject: [PATCH 042/108] Avoid suppressing error output for non-tentative file opens (#2632) (#2669) * Avoid suppressing error output for non-tentative file opens * Update comment about tentative file opens --- src/H5Fint.c | 50 +++++++++++++++++++++++++++++++++++++++++++------- 1 file changed, 43 insertions(+), 7 deletions(-) diff --git a/src/H5Fint.c b/src/H5Fint.c index ba54f7d765d..372537678d7 100644 --- 
a/src/H5Fint.c +++ b/src/H5Fint.c @@ -1830,16 +1830,52 @@ H5F_open(const char *name, unsigned flags, hid_t fcpl_id, hid_t fapl_id) else tent_flags = flags; - if (NULL == (lf = H5FD_open(name, tent_flags, fapl_id, HADDR_UNDEF))) { - if (tent_flags == flags) - HGOTO_ERROR(H5E_FILE, H5E_CANTOPENFILE, NULL, "unable to open file: name = '%s', tent_flags = %x", - name, tent_flags) - H5E_clear_stack(NULL); - tent_flags = flags; + /* + * When performing a tentative open of a file where we have stripped away + * flags such as H5F_ACC_CREAT from the specified file access flags, the + * H5E_BEGIN/END_TRY macros are used to suppress error output since there + * is an expectation that the tentative open might fail. Even though we + * explicitly clear the error stack after such a failure, the underlying + * file driver might maintain its own error stack and choose whether to + * display errors based on whether the library has disabled error reporting. + * Since we wish to suppress that error output as well for the case of + * tentative file opens, surrounding the file open call with the + * H5E_BEGIN/END_TRY macros is an explicit instruction to the file driver + * not to display errors. If the tentative file open call fails, another + * attempt at opening the file will be made without error output being + * suppressed. + * + * However, if stripping away the H5F_ACC_CREAT flag and others left us + * with the same file access flags as before, then we will skip this + * tentative file open and only make a single attempt at opening the file. + * In this case, we don't want to suppress error output since the underlying + * file driver might provide more details on why the file open failed. + */ + if (tent_flags != flags) { + /* Make tentative attempt to open file */ + H5E_BEGIN_TRY + { + lf = H5FD_open(name, tent_flags, fapl_id, HADDR_UNDEF); + } + H5E_END_TRY; + } + + /* + * If a tentative attempt to open the file wasn't necessary, attempt + * to open the file now. 
Otherwise, if the tentative open failed, clear + * the error stack and reset the file access flags, then make another + * attempt at opening the file. + */ + if ((tent_flags == flags) || (lf == NULL)) { + if (tent_flags != flags) { + H5E_clear_stack(NULL); + tent_flags = flags; + } + if (NULL == (lf = H5FD_open(name, tent_flags, fapl_id, HADDR_UNDEF))) HGOTO_ERROR(H5E_FILE, H5E_CANTOPENFILE, NULL, "unable to open file: name = '%s', tent_flags = %x", name, tent_flags) - } /* end if */ + } /* Is the file already open? */ if ((shared = H5F__sfile_search(lf)) != NULL) { From bc4d78e474221a21f966edd148090b9c270577ae Mon Sep 17 00:00:00 2001 From: jhendersonHDF Date: Thu, 13 Apr 2023 11:16:55 -0500 Subject: [PATCH 043/108] CMake - Match Autotools behavior for library instrumentation (#2648) (#2673) Enable library instrumentation by default for parallel debug builds --- CMakeLists.txt | 8 +++++++- release_docs/RELEASE.txt | 10 ++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 888070c3d2f..1e657fa008e 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -655,9 +655,15 @@ option (HDF5_BUILD_GENERATORS "Build Test Generators" OFF) #----------------------------------------------------------------------------- option (HDF5_ENABLE_TRACE "Enable API tracing capability" OFF) mark_as_advanced (HDF5_ENABLE_TRACE) -if (${HDF_CFG_NAME} MATCHES "Debug") +if (${HDF_CFG_NAME} MATCHES "Debug" OR ${HDF_CFG_NAME} MATCHES "Developer") # Enable instrumenting of the library's internal operations option (HDF5_ENABLE_INSTRUMENT "Instrument The library" OFF) + + # Instrumenting is enabled by default for parallel debug builds + if (HDF5_ENABLE_PARALLEL) + set (HDF5_ENABLE_INSTRUMENT ON CACHE BOOL "Instrument The library" FORCE) + endif () + if (HDF5_ENABLE_INSTRUMENT) set (H5_HAVE_INSTRUMENTED_LIBRARY 1) endif () diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 62c7b8b44ce..7b90b8ac1db 100644 --- 
a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -47,6 +47,16 @@ New Features Configuration: ------------- + - Enabled instrumentation of the library by default in CMake for parallel + debug builds + + HDF5 can be configured to instrument portions of the parallel library to + aid in debugging. Autotools builds of HDF5 turn this capability on by + default for parallel debug builds and off by default for other build types. + CMake has been updated to match this behavior. + + (JTH - 2023/03/29) + - Added new option to build libaec and zlib inline with CMake. Using the CMake FetchContent module, the external filters can populate From fcb247afd39499983c5d8e4a32c1bafe8a2e2af5 Mon Sep 17 00:00:00 2001 From: Dana Robinson <43805+derobins@users.noreply.github.com> Date: Fri, 14 Apr 2023 12:31:09 -0700 Subject: [PATCH 044/108] Add Elena to CODEOWNERS for Fortran --- .github/CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 506c668b94d..714ae8aba5d 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -7,5 +7,5 @@ # Order is important. The last matching pattern has the most precedence. # So if a pull request only touches javascript files, only these owners # will be requested to review. -/fortran/ @brtnfld @derobins +/fortran/ @brtnfld @derobins @epourmal /java/ @jhendersonHDF @byrnHDF @derobins From 2b528cc651ee609bb4bc9a0c04615d7dfc47ce65 Mon Sep 17 00:00:00 2001 From: vchoi-hdfgroup <55293060+vchoi-hdfgroup@users.noreply.github.com> Date: Fri, 14 Apr 2023 15:41:41 -0500 Subject: [PATCH 045/108] Fix for github issue #2599: (#2665) (#2735) As indicated in the description, memory leak is detected when running "./h5dump pov". The problem is: when calling H5O__add_cont_msg() from H5O__chunk_deserialize(), memory is allocated for cont_msg_info->msgs. 
Eventually, when the library tries to load the continuation message via H5AC_protect() in H5O_protect(), error is encountered due to illegal info in the continuation message. Due to the error, H5O_protect() exits but the memory allocated for cont_msg_info->msgs is not freed. When we figure out how to handle fuzzed files that we didn't generate, a test needs to be added to run h5dump with the provided "pov" file. --- release_docs/RELEASE.txt | 14 ++++++++++++++ src/H5Oint.c | 7 ++++++- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 7b90b8ac1db..c938ce00ce4 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -223,6 +223,20 @@ Bug Fixes since HDF5-1.12.1 release =================================== Library ------- + + - Memory leak + + Memory leak was detected when running h5dump with "pov". The memory was allocated + via H5FL__malloc() in hdf5/src/H5FL.c + + The fuzzed file "pov" was an HDF5 file containing an illegal continuation message. + When deserializing the object header chunks for the file, memory is allocated for the + array of continuation messages (cont_msg_info->msgs) in continuation message info struct. + As error is encountered in loading the illegal message, the memory allocated for + cont_msg_info->msgs needs to be freed. 
+ + (VC - 2023/04/11 GH-2599) + - Fixed a memory corruption issue that can occur when reading from a dataset using a hyperslab selection in the file dataspace and a point selection in the memory dataspace diff --git a/src/H5Oint.c b/src/H5Oint.c index 48f25c9cb64..16a50280b7e 100644 --- a/src/H5Oint.c +++ b/src/H5Oint.c @@ -1169,9 +1169,14 @@ H5O_protect(const H5O_loc_t *loc, unsigned prot_flags, hbool_t pin_all_chunks) ret_value = oh; done: - if (ret_value == NULL && oh) + if (ret_value == NULL && oh) { + /* Release any continuation messages built up */ + if (cont_msg_info.msgs) + cont_msg_info.msgs = (H5O_cont_t *)H5FL_SEQ_FREE(H5O_cont_t, cont_msg_info.msgs); + if (H5O_unprotect(loc, oh, H5AC__NO_FLAGS_SET) < 0) HDONE_ERROR(H5E_OHDR, H5E_CANTUNPROTECT, NULL, "unable to release object header") + } FUNC_LEAVE_NOAPI_TAG(ret_value) } /* end H5O_protect() */ From ff7e778c1d6787d0f492b635f32220378604998e Mon Sep 17 00:00:00 2001 From: Dana Robinson <43805+derobins@users.noreply.github.com> Date: Fri, 14 Apr 2023 15:13:50 -0700 Subject: [PATCH 046/108] Remove 1.12 release schedule (#2744) There is no need to maintain the release schedule in each branch, so it's being removed from all branches aside from develop. 
Also removes the badges that don't pertain to 1.12 --- README.md | 30 -------------------------- doc/img/release-schedule.plantuml | 34 ------------------------------ doc/img/release-schedule.png | Bin 13977 -> 0 bytes 3 files changed, 64 deletions(-) delete mode 100644 doc/img/release-schedule.plantuml delete mode 100755 doc/img/release-schedule.png diff --git a/README.md b/README.md index eedc4cfa030..99e0fa52c2d 100644 --- a/README.md +++ b/README.md @@ -2,11 +2,7 @@ HDF5 version 1.12.3-1 currently under development ![HDF5 Logo](doxygen/img/HDF5.png) -[![develop build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/main.yml?branch=develop&label=develop)](https://github.com/HDFGroup/hdf5/actions?query=branch%3Adevelop) -[![1.14 build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/main.yml?branch=hdf5_1_14&label=1.14)](https://github.com/HDFGroup/hdf5/actions?query=branch%3Ahdf5_1_14) [![1.12 build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/main.yml?branch=hdf5_1_12&label=1.12)](https://github.com/HDFGroup/hdf5/actions?query=branch%3Ahdf5_1_12) -[![1.10 build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/main.yml?branch=hdf5_1_10&label=1.10)](https://github.com/HDFGroup/hdf5/actions?query=branch%3Ahdf5_1_10) -[![1.8 build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/main.yml?branch=hdf5_1_8&label=1.8)](https://github.com/HDFGroup/hdf5/actions?query=branch%3Ahdf5_1_8) [![BSD](https://img.shields.io/badge/License-BSD-blue.svg)](https://github.com/HDFGroup/hdf5/blob/develop/COPYING) *Please refer to the release_docs/INSTALL file for installation instructions.* @@ -75,32 +71,6 @@ use and configuration https://forum.hdfgroup.org/t/quickstart-guide-welcome-to-the-new-hdf-forum -RELEASE SCHEDULE ----------------- - -![HDF5 release schedule](doc/img/release-schedule.png) - -HDF5 does not release on a regular schedule. 
Instead, releases are driven by -new features and bug fixes, though we try to have at least one release of each -maintenance branch per year. Future HDF5 releases indicated on this schedule -are tentative. - -**NOTE**: HDF5 1.12 is being retired early due to its incomplete and incompatible VOL -layer. - -| Release | New Features | -| ------- | ------------ | -| 1.8.23 | last HDF5 1.8 release | -| 1.10.10 | CVE fixes, performance improvements, H5Dchunk\_iter() | -| 1.12.3 | CVE fixes, performance improvements, H5Dchunk\_iter(), last HDF5 1.12 release | -| 1.14.1 | selection I/O with datatype conversion | -| 2.0.0 | TBD | -| TBD | VFD SWMR | - -This list of feature release versions is also tentative, and the specific release -in which a feature is introduced may change. - - SNAPSHOTS, PREVIOUS RELEASES AND SOURCE CODE -------------------------------------------- Periodically development code snapshots are provided at the following URL: diff --git a/doc/img/release-schedule.plantuml b/doc/img/release-schedule.plantuml deleted file mode 100644 index c724dc98802..00000000000 --- a/doc/img/release-schedule.plantuml +++ /dev/null @@ -1,34 +0,0 @@ -The release timeline was generated on PlantUML (https://plantuml.com) - -The current script: - -@startgantt - -title HDF5 Release Schedule - -projectscale monthly -Project starts 2022-01-01 - -[1.8] starts 2022-01-01 and lasts 57 weeks -[1.8.23] happens 2023-01-31 -[1.8] is colored in #F76969 - -[1.10] starts 2022-01-01 and lasts 104 weeks -[1.10.9] happens 2022-05-31 -[1.10.10] happens 2023-02-28 -[1.10.10] displays on same row as [1.10.9] -[1.10] is colored in #F6DD60 - -[1.12] starts 2022-01-01 and lasts 65 weeks -[1.12.2] happens 2022-04-30 -[1.12.3] happens 2023-03-31 -[1.12.3] displays on same row as [1.12.2] -[1.12] is colored in #88CCEE - -[1.14] starts at 2023-01-01 and lasts 52 weeks -[1.14.0] happens at 2022-12-31 -[1.14.1] happens at 2023-04-30 -[1.14.1] displays on same row as [1.14.0] -[1.14] is colored in #B187CF 
- -@endgantt diff --git a/doc/img/release-schedule.png b/doc/img/release-schedule.png deleted file mode 100755 index b96f741a02953245d03f6420552621181d8aa608..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 13977 zcmeHuWn5L=*6*T}5D+OrNrKC$*xebR*p#^N_UGi(jA+WymRAu z&U>D7-!J#x-}~kM4jeU1!ssCW!a?8K24-Psg{w6tK+x3QV=%?}4HM>yS2M}t_~iYVHRH&Z#w$PF{XE9I`<7kl*1QWceG99{ z-YL_$Hwvq4Oqh_vW#ktTQL#chwKq3Jzkgh!O=}z8p&*@N%@P?g&w4A=U{;2*@TD`D zWh==S#Ti}g+>)!W>pnJ@pbT? zH&vMu^^%)ed|CJAX%l;iLgJpT2iB~WuAa(hQu4yxBw-|PC=AJ8E)KsE*xV@^+{vu- z!P&ZT5%l2w_)w-$R!^gsLs{omw^f4=jrz-nf+_$UT&KbLB{b zNfz~Mrx&xLu!w*>qM*gta`PatewQX->?Q9kO7+KIldhOXdpUBP*|ojBAIBhPOBY+!D9S6n?c=58IL7@b&#Hac$h z3Qp-JcR>_E(ARg8+t+XMCxs~xfd#$r=0}7p z^a+BYB3@r3!O;<~Uy&E_5U<{79W;nn2rCjy9()l|MO?o!{dWW8^HZ=o&w5mCR_EuJ zHe{fa$vCa%m*zka_wEMQ4M>%5eOSr0+AcX?x5?AO!eXT_b*E+}?fZAp$2zyz7Ur}o z&0=?x9!FqcVVRGYEy;0DJFE`eho#`85mNP^3=IvDup4Q)uH{w`M@2;%#z=TD7b*mmj852tB;V)q^F)<@0ud38bUhX|u%aKdwl16b&xIL(9Eh6$3 zaTi+4Z^;@uuv~umo47cU#{31mI=JMeJv}`ixUOF1DWt34lzQscD@EgVbv8R}^z7NYyNb%nPdrW?E>%Ck z_Lr-sohF0u=_~BXB|PDs^i5~eJNf0F7pDaEVgWei#{Ddh9_jD&rSk2=;aS<)Vq#+W zckY}XtnJUZ?fed99IUq0f7x?5rpxNeu*N;>PQxX1b-s^BBcXV^>kct7x6PdJ_DVzb zLO9=c{eE9U!rh?^T%q$%2}||fXjnu8KBi0W8*CVyq;IyvhIz1{4F9+M$Qe$NY2p5? 
zuxG_DD}(ud=MR(`ANO3VA4^nI;MCuK6@HH1FTPm)_W24xsex&L@h#|^M@4Q|Kq<<0SOY+Wp>0!hmu`@9B4--hK+POcsG zYbj`u;y%b2MZVqr{_}E?k!AUPsGL8GUcg!KU_fjFUEsNp8*SQ!WCVqroZO*{7qz5I zuMHVH%8gqL4+ys)&HkT54Ci~fXDw3qu=wF#mn&ZR9@|qqGxaNJ zZnt!?0z*S{jx4LD+idf{>Da$0p@`;6Er%d83XuF}H$5*CNBgH68@1f_d%0`uf5Vq1 zker?;ada+RlyHpy5^rH-X7*Sw$aC5MkZh&LDpVugo*;f#gr?WtBfO`#cj?>KWYtcl ziHOX_W_f@3J??O=!}ZbZ%b5nZvryBU(kUQGKjh}Z>U9%s^AuoEqRh#6P4xRyb%P( zb6?+5>}0#{%rvz5?RW$p&w8cbjERi}x7lu|^om*Lji9xaD7|N0A2ngzm-2gh$FtoR zws@1$T{y{qGFvj@Nb9zJ9|+mNK-Ructl8Pt_V)b;UNuC-#8t-q6x-u9_VnCO{nV?U zvU)dZWjDWC_lqcB`7Y0CI`nRWMj|xhsOV#(*}u=43(mmESm1WN4W_Z&`}60|2*J}1 z7N5Dy019diJ-w{o#t~^=moQigF=O9yZ*tAbk5|@96qg0RA|oTi$@z>73~Yoi;qplv zY|*jL8e9*DhKG|m%_R!`R^ZRfo=Hj$n+*>QnebW~85y01GJZoc{b*WxvbH1r{=w(5 z^Xzegm9lOYcH`rp=)}#oNV0KEzrN*YG3q~ihH4FO`R03b|CzE8Q*6(hT)BXMS!`ZX zE_lbn1yw*!GYELkF>U8WulurLR8)*?dM$yVvPKbP+_aRJ4{sK_%5Ed!Fo|qGt0eGp zRRRUjez^zK1%E$3rHWGRI{9?Li<7;Db*+FXU5`Vxk2($Co12rlt>xD(yJ8smTCD9W-FK@K8>O&!mO+SdVb;vU#87 zHtc%k1^(V&;ReTn?Lu4ClwJ4L<<3=-bFfpUaeo>|h&pxS-4ZQOJg`CWK%89vZztUU z(Eu*+>oe9H1NVYn_5lRtV=IP3&|7E`Z-5=FAB%SF;_@<=$p95I^Ol0w>2Qtx%DdxK z=b27$2z7R@4P-Xg!}iY;m*$X#z3os{Q;H<%x(0WDHM2II!$Fa5YiSvnB7CGCs0pz} zN(w6&7%YOED%hK{?}ybtPZvzpPWL=Db#Xaq3BoV9m@xksW&X_vP0*O98oHf9hC~>! 
zu2f_9Y)e7#cgvh)Mms(!X%`LBZBV5I_E-8>+B)CU*OXOQj7Ph*g_4LRhY+)VI0BJU zKu$@itFFGx-~A2E8(Koof2tbM6-%>ct0XOrQTL-*vsnN!ov$}@b91!>C%#yA3m_7_ z7aX;I6X3Ws+5zF5V*Gmn8C?7{Lp|bQTz0N(yxkQzBx#BB7r^}~EvBlY9(8Alhg5@e zCxuJb>%!42ozFSEVW=*ix0{j}VvzibROzakOJ-!!UBbYl_E22q4T zG5&!0#BzdTkc5cn!MYL*#xs^WTBO0(4_`z63NX)+Ns**l7vAUB-;nKc^YZqOZ6S23 z2ybRpR*5ju*p!s>qe<)Jvsaa7OEtTuUrt6&%lifG)0Q@oy+wGCUv3sk=62)RpP$&K z!G8r%b_pt0fpgV0oQ!7(JRyifaz3X}w;Q+=Jz;~{l5WL9SR};6&dWU{E1Ebg5GsSD zaBHTkYAM;Y*VV<;Q`>pe?rg`6AGWyAEf5-FI|kCy(qqX|Qc|zSj!|(;`;%V~YMT48 z?wFR$FXa~)Rpr&>vo=D0lY3zqQoo$sn=c5g+PPWa1l!_7)v4dNPW;apr^2mK#WAZE z?rrDgzdfO-vcFrpjH%Ra&Vz?Pw|xT4DW`F=my?@X-oqpGIZw%Y*GIG&8zGHinQW(;p% zAmO%3S=Iewq3L@Ao5F4L@$d_;L_v?*7$xoV&-(2Qm!OE{c=C01cTWO1Q&&7q&gogX z6BCnFD_a!`Ztz!PSV88r`4XS2;wsO~d$=Lt; z?gzlgPO)bBHh^m&;=5?-2?z;;yx{U7NupY^>_$Cv6t&;iY}B@Qkc#$tx#2yD?DX;} z)pkoVrqBHpRnSvDGF&OCsLJwCgDg{NaAxUJqEmGKwi_$%#y~dk;q^mT-px<4Z|!Im z7c{JSx}4)ApS^kW#;^1q^8|tN;!<~fa>|NrU*8+k=L>CNDk>_t6#N|*_z#{3MnqKI zI=si3U3(&Q^}9b?BCK{P?p2QG&Av1N^HraFwI>peJ26S3b zDdAvRSs%8Z#JFH*vvm3wyEf2Q3Wmy{x;OXrvBEugtG?kQ;OifMC zt!D>~ICZ4_9o1CR;BdH(+ZN|WBxZ)({aqO9xkz%>`clWPMClBlMsGe#ML38i} zn`Gm~(PWL1q9U8!Vh4}?awrM=^2W!eXKTqaESj)=F zP9ijtl~e*_6&b7)S;9XGlnlWhi=U!#3NA$YXeU^R0kSEq_=yIh!ZC+aAZ*|6~ z@?oGGGu;)#60`Oj$n>=(iSzLX61-IFoUbW*Ocw6VG$oDaNaO49mjlwVJzaONtM&I+ z6x`hqCBN`Hlg}z)D~Fe!uZ&PJ)2z!Q2RNhMKS;0OR!2j$#0NPIGvYbb9*c*JX z6I09Bma&UfKTo{*4G+pd!ie8q%Y-LfZ>kjrF| zZqZOEa@Fa{yru2l#)EZyqQ$QtFY3mdqjgX^1u}A{@hp4-d@Fq#ddGo!LR4>tyluW?fQls|sczPDMRfcK{dRQl?SLME1`QUF`&)UT((x@|exlh-^AvVs&qp%4Zy* z3p&x;%aRmejeAjh5^7Im%aB6c|1)*aF!dxe-) z=e(0XJwY#}okvhCH-N}`H9Wdj^5FylvOP=Pt5+$`d=-p6QI9kb!HL(N&ZkkFs1y<5 z`Z$}cV5RkrEOEMRpO0Nj1{ntYB3|;~hM2WRg=|h!j;u_I3%GK`SJsS$j&7+Vie^fu z(H)j4jn;JdDyL;vqZ+zutbM*WapBqX0EFQC_L`_9;Bq$5R+|emoIee!`2&JIIH%m14=F4cw!L zXZ+p2AHWv-Qy*Jf>3~j`cilf zlFCenKNP6sMK>lt)^8Iyo??If)qA?mIa%-gj(Usc7mLKzJikHLGyqteje3`Lb*LP9 zJkMQ}V-ALd$Es>o^!5Opq>&8I`BpC-lF2@nr$8AjY?n+uAmajp*Y#tTobtB{zLAW0 z=51p#obYtMRuqOqK=JOkfjXO@@Mvvy(yDH2ubnc_ft# 
znHmnx1F4!@JM=v#w#BlFsp)VHW1NGY9DkJ-)hy+M3sp z0HD_DVD|CWWP7T^EnV0Wp_ObtIPae@Z!RZWdjgK_r_o@MBXG(&fHp#4gj_>LGE=6L z$w#~tXv;vbj>i_WuNRaX(*M*zgy)H`UCSb02hBFU2T);NjcbT(Dy!nff0sGw(TgjJ z{c7xa7KJ9La$?WOJICIr;65|RAju#(DvI55{Bw!w0?=OC z==|k)|E@RjS;5r=>feotJoo1)TZ&Kx7tY?v4_BBDS2VeGCa9<`m=fmK&3O@}hZv6# z5b6O$^qyYf!|@&0;oNB2x&~D`KcO*UwfVE4Nd%<(nYVit3=h%{E?m$tc-s#q7mRZf zx`UtY%+ZC{rBtbivL|4*!lpHe4W73;we{b@BzOkfs5~3ivaeeR&^e_5@Fxv}@--k8 z2a{h!LC`@6pF;D=HBB3_?CZ`R_ByAzb;EZQuRKl!f+FAY`+R%t*jH?L8-fg|B3`q- zNM$!C0X&ij z$m4(J{E#U^=JKRn-q!X}X=BoaeObA+$(a1V58{Xa;MqHWyR&_z?C9*Qk;iOzZxMAE zG5bOgmC75K1N_G;eQIjK`fXk_AwZ(6?8`k=b_nivxkRZ z9-{KfcieQzpR#d2_#kFs5&!O;C5=RW?#;hlhz@@8`T5^0ZwJlYTk0!4*Gsot|3{;~ z<|AOot~F_4?ab=x7u!I*CPz>cUJ*}EPgT{C># zOhYm+03)M-!X6wrBn<&#>EnaEPXGyfBlHAxyLViA6_^FZ#i7sofrwiP&j*?%6Vv9` zwGh;}-}0vF`y2QE+iEtZ=+K=t~ODNE*Kulo^+WFhDSp@*oJA42W&Q zY&51cH~pRw5D}Rz;RWTzF~5p-$vO&QZF~xpURVp7)7C_6Obo3f&<+l|9(|1AMS|X5 z(=?MoGa8Aa$jwE(&)`rq>P@ON8O#Fj0&pqgrit$gd7kG;zi%-Dq8L6tKA*#?4M0}I z43Kcf^<&5fhlEHEIBfn@QkNh$8Oe`2sIzc_=hxTs18Jh9Y9@;-s4$8~5>S?&WNubI zzE{Dpqs?)!TDbSPdZsWSbQ^>%AR-GkK~xl3nn;rbR*478-E)GkHeFp<1DUC+>GE_V zCWfHsikOJ`WlUlsZ1d;i@i%vYpep#e4 z#p|S2{O-6-n50Ye2z4Bi_(riVFL9RKueP@dPPN#?-OFrI)N zTacxtju-3KCCnaMAA0GAs?mSHzR(w~jHfdjUpKluIJAkw* zd~0_uhGqR3xC0pclQ0)OtmUXzvbhp2wIOiWd(sg&AjT&FkisLBI!AfMysv+asaDQ^ z>eL;kguP+!c6_rr`p&&Vl|O$2Risdtg4677n7-g^L9Lu>0*FS#WZb%&kd}{C>AQFDew4i8HXTCcCWS;$7L{ziPXz9|@T*om2lGVs9$nN~HiC@>K~eZ0ym8v~{mK z$2V-U)$}l-j4WXawEISuEhdD@nwsOMha1k@Qw1dxPBWQe0SIF$uMLbZ=yqgLz5#@+ zgM0=ChKwWNMvVblm)HQIl3ZUKJzcW6=~zAZ@G)=}l&B(nu+{E8e)*?x6mikO`cwIc zNk}wRRiABz-Qzx59b6d8^tT0}9~g*kwKHB$&&M~piAI&6Sc#XdCgpGUNLqDOLLV{O zoDf(daa-HR7&L`_%AH04=t*HppAsfxs%%+qwaTwzISIJ3zh;dV2GPYDM`1 z&f8j@Si%Fs+<2zhplWp;C&UAaH>kZZ6V|2*p8Km8Vv@@AhZ|$@SYL=;KWcy0^W$jD zQmX@w%hZp!*RH19U<)K9+8T~$lB1)kVMyqtKrd|`= z&VNiZQ2oAXWmG1CCPo16E*2&xnc!F5Iw#B7Mvo;O$OpYdbjXZGtR|NBq3!kBU(W$w z`u?~njTmFuokC~e{LHrtQNkAS%0r&I9xz;70nG*AXs^9iVmekx#6Kr;vmz?9F19CM zXXRdR{s;l&eFgT9S@b@SNp^8@F;MDNe_aF$a&wRM$&x*_q2lA?V`jDl 
z>wP{))zu%x_lR(7}nSs>Y_=Xsf)8Vo8nN~3iy}5D__9#H#lkRC8n~Dnh_a3^k;4b zMz9#?Gn5jR{v_>P-*o`I{yC&_DtvXlGa|>K!)5j2V%-Jtv}&!zx%Z$smeB_hrHN0t z3ofiF+?_x2-bKt@jpNVEbto!VM{;f5%fUhLpf|c!hdtVL+snA0F138Co_)+?Y0qUo z`qg_;YV*Xnf|<$d>Rj<>5fA7KK=$asqlRyMRs9|QRLz<;PDAiD7d&(7e>m+$^x%=)iBCG@r=A4=*<)yCW>Ze zW_j|dAmb|Cj;)4oQR+a#U%yW<@bHW;h!;gxaDE%#(88Dh3&rn$!u}YxMTxrY{UvVB6VbwijPW!7DfgFYv1y-K3RR8)0T{kOtc&8+$I`? zivH5FB_FfXrWnINd#vUG(?76`8xpz`m`eEkyxx#7un6DFrpAN%*xuTj=*82TnL-ji z1CDZqBo68A2JdoWphPP&0+E~wMynBw^-Nq`x~Y3v5&xFtM{T)PKrI1Z1tl1VoR{_t zx*j+N)E*#AbvyRS0;>d+7GsJ{#lBl^I{+z^m6vaM-uyX^1)-C{_xJB{Tan$lLrsho zLC?W)kQ=WP-8MOj+SUga5s0FE9;XMO;&~&*AqgLMwG8NDt)Mh$AFslcXWUvlwYHAy+{wD zMazaI{7WAsC^g`4{`x!I8O0UeMS-dEq5B!+g#kS=2{>t-kLlIW|2LLX_*WY?W-nJ(13;6*A zGVz4v83Gi26)jN*nWe7Z=7)|z-?j48d!~;%a`v@kk!8r5E`Q2C#|FegQbLF;UUb5 zPl`pO_QOAT(aA^a4BGrh)ZO!%4^e{> z6c$8L5pbSq#B7Plfp*6un@DGL>*e*>=a2vEIr>B;;X3c__1)ha_3A~>XXA^BistWq z_od7)2bJu(xmF+o`n~mgS@MuJ!u@(^U)%dW04t&rDHqfhi|74p+XHgxo?Mc4B2R*A zT(5i8FrD^h{r78fNf-Lh#~=hHZZzSqYt1S_Is8P*{$DfsmlXv&@V9myVPQorEhgu; zq^Rwb5IbTECg_EPgjiUtfJI)0GJ^i}5|TokBW5J7T)h@($YbjrLrUEd(LUdf9#)pxs&m!PjAE`h_~=- zfz%Cn6c9K@1m(!a7J0S#d1d4F5PR!-u=zw z8r=U^peGcx4QYUT+fTsM|^wnc9Cd3kFGK zPS3-*YdnX)OeNvEkNlC7|b0XX6Y;Q!}-RlkWXY)x$ zTMm3{*EPELSNwvwNnB{X!NI`*n@rX^KC)I{@H2R! 
zA-=@O%DPiIrc;hn00ALCUGghs1!JKOknk7R8|+6z*XJMEUSyL=|Er^zL6-PAL|1!> zp-B`TxOg{6@J`9q`HGO~dlqE7C{NHAk;vy5)=y7_!KzWss zhAVjDZRIFq3(AMb!Js5r7?-`hJuGjeRUwW4H#mA@d@9&1XFQIl9U2)2fuM;Houhx% z=S^%#hgqu{G_Yx_aBe%f5_uIX~q>&Jo}H!_aB$< zKQ7<@?=D}OdpsTZCx{b_E~z$Kkcr9u)f}Mf`Fjbt#i2n#nKkFuJje*hyc!f5I#uV)%9-Z2 z%@Yw1ypi$^aAIQP;-){frLNQkolbVlFFt);`FsCfe(%-L9BYIyLTHGQM5aQ_Y+3h2 zr3YZw#jYW6q;G@&dbnlDqEnwRaTAv!$I0E@T~jl0|L?=Jw2Ol?c=Khs5%w{J5XC7$ ztB>99+gAospqJ0|{Z&EfaGpZBGr+rj<>$6Ypr4dXu}G4zdvGXMw&n_Sh(=?L*n$p7 zTrSWyA8R<6B|e2uK_Q=fe*7M2$?ZC8)3T{c5m-QL#0?iuOO9ej9ley#^K;v(atE!GppQAC5a{9{MYauo;yuie|QmoxGrWc>QwLnMI zxd$19+(GjiYcvG#EjoI7zz-*vFF|K9jKw{PS+J7GD|Wytt{e-xv4n2&xric`MY0Eo zK(H?GuL2Bb!qbR_=doLqN*DBK3%lnGJf&?Ceit@wi3p0V&zDRFT3TdR_Z0&Ehubbk zg?{3!KLtIwxJ^olkFPLRE6vB8)`r0I&~tJ+g8l~}q6r#z;+P9VcR}YS^;r1B_-r5& zf$pN;!H-G Date: Fri, 14 Apr 2023 19:23:52 -0500 Subject: [PATCH 047/108] H5O__pline_decode() Make more resilient to out-of-bounds read (#2210) (#2733) Malformed hdf5 files may have trunkated content which does not match the expected size. When this function attempts to decode these it may read past the end of the allocated space leading to heap overflows as bounds checking is incomplete. Make sure each element is within bounds before reading. This fixes CVE-2019-8396 / HDFFV-10712 / github bug #2209. --- release_docs/RELEASE.txt | 10 ++++++++++ src/H5Opline.c | 17 +++++++++++++++-- src/H5private.h | 3 +++ 3 files changed, 28 insertions(+), 2 deletions(-) diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index c938ce00ce4..61a2d407c82 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -223,6 +223,16 @@ Bug Fixes since HDF5-1.12.1 release =================================== Library ------- + - Fix for CVE-2019-8396 + + Malformed HDF5 files may have truncated content which does not match + the expected size. 
When H5O__pline_decode() attempts to decode these it + may read past the end of the allocated space leading to heap overflows + as bounds checking is incomplete. + + The fix ensures each element is within bounds before reading. + + (2023/04/13 - HDFFV-10712, CVE-2019-8396, GitHub #2209) - Memory leak diff --git a/src/H5Opline.c b/src/H5Opline.c index 1ef18ee2272..2babcc0a804 100644 --- a/src/H5Opline.c +++ b/src/H5Opline.c @@ -109,6 +109,7 @@ H5FL_DEFINE(H5O_pline_t); * *------------------------------------------------------------------------- */ + static void * H5O__pline_decode(H5F_t H5_ATTR_UNUSED *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNUSED mesg_flags, unsigned H5_ATTR_UNUSED *ioflags, size_t p_size, const uint8_t *p) @@ -130,6 +131,9 @@ H5O__pline_decode(H5F_t H5_ATTR_UNUSED *f, H5O_t H5_ATTR_UNUSED *open_oh, unsign HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, NULL, "memory allocation failed") /* Version */ + if (p + 4 - 1 > p_end) /* 4 byte is minimum for all versions */ + HGOTO_ERROR(H5E_OHDR, H5E_NOSPACE, NULL, "ran off the end of the buffer: current p = %p, p_end = %p", + p + 4, p_end) pline->version = *p++; if (pline->version < H5O_PLINE_VERSION_1 || pline->version > H5O_PLINE_VERSION_LATEST) HGOTO_ERROR(H5E_PLINE, H5E_CANTLOAD, NULL, "bad version number for filter pipeline message") @@ -158,6 +162,9 @@ H5O__pline_decode(H5F_t H5_ATTR_UNUSED *f, H5O_t H5_ATTR_UNUSED *open_oh, unsign /* Decode filters */ for (i = 0, filter = &pline->filter[0]; i < pline->nused; i++, filter++) { /* Filter ID */ + if (p + 6 - 1 > p_end) /* 6 bytes minimum */ + HGOTO_ERROR(H5E_OHDR, H5E_NOSPACE, NULL, + "ran off the end of the buffer: current p = %p, p_end = %p", p + 6, p_end) UINT16DECODE(p, filter->id); /* Length of filter name */ @@ -167,6 +174,9 @@ H5O__pline_decode(H5F_t H5_ATTR_UNUSED *f, H5O_t H5_ATTR_UNUSED *open_oh, unsign UINT16DECODE(p, name_length); if (pline->version == H5O_PLINE_VERSION_1 && name_length % 8) HGOTO_ERROR(H5E_PLINE, H5E_CANTLOAD, NULL, 
"filter name length is not a multiple of eight") + if (p + 4 - 1 > p_end) /* with name_length 4 bytes to go */ + HGOTO_ERROR(H5E_OHDR, H5E_NOSPACE, NULL, + "ran off the end of the buffer: current p = %p, p_end = %p", p + 4, p_end) } /* end if */ /* Filter flags */ @@ -178,9 +188,12 @@ H5O__pline_decode(H5F_t H5_ATTR_UNUSED *f, H5O_t H5_ATTR_UNUSED *open_oh, unsign /* Filter name, if there is one */ if (name_length) { size_t actual_name_length; /* Actual length of name */ - + size_t len = (size_t)(p_end - p + 1); /* Determine actual name length (without padding, but with null terminator) */ - actual_name_length = HDstrlen((const char *)p) + 1; + actual_name_length = HDstrnlen((const char *)p, len); + if (actual_name_length == len) + HGOTO_ERROR(H5E_OHDR, H5E_NOSPACE, NULL, "filter name not null terminated") + actual_name_length += 1; /* include \0 byte */ HDassert(actual_name_length <= name_length); /* Allocate space for the filter name, or use the internal buffer */ diff --git a/src/H5private.h b/src/H5private.h index 3929ac308b6..a82796e006c 100644 --- a/src/H5private.h +++ b/src/H5private.h @@ -1469,6 +1469,9 @@ H5_DLL H5_ATTR_CONST int Nflock(int fd, int operation); #ifndef HDstrlen #define HDstrlen(S) strlen(S) #endif +#ifndef HDstrnlen +#define HDstrnlen(S, L) strnlen(S, L) +#endif #ifndef HDstrncat #define HDstrncat(X, Y, Z) strncat(X, Y, Z) #endif From 895ebf705ea5b830685424cbfe0ebef7cfd90d28 Mon Sep 17 00:00:00 2001 From: jhendersonHDF Date: Fri, 14 Apr 2023 20:30:21 -0500 Subject: [PATCH 048/108] Fix a heap buffer overflow during H5D__compact_readvv (GitHub #2606) (#2664) (#2726) --- release_docs/RELEASE.txt | 19 +++++++++++++++++++ src/H5Dint.c | 27 +++++++++++++++++++++++++++ 2 files changed, 46 insertions(+) diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 61a2d407c82..acc54112c06 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -223,6 +223,25 @@ Bug Fixes since HDF5-1.12.1 release 
=================================== Library ------- + - Fixed a heap buffer overflow that occurs when reading from + a dataset with a compact layout within a malformed HDF5 file + + During opening of a dataset that has a compact layout, the + library allocates a buffer that stores the dataset's raw data. + The dataset's object header that gets written to the file + contains information about how large of a buffer the library + should allocate. If this object header is malformed such that + it causes the library to allocate a buffer that is too small + to hold the dataset's raw data, future I/O to the dataset can + result in heap buffer overflows. To fix this issue, an extra + check is now performed for compact datasets to ensure that + the size of the allocated buffer matches the expected size + of the dataset's raw data (as calculated from the dataset's + dataspace and datatype information). If the two sizes do not + match, opening of the dataset will fail. + + (JTH - 2023/04/04, GH-2606) + - Fix for CVE-2019-8396 Malformed HDF5 files may have truncated content which does not match diff --git a/src/H5Dint.c b/src/H5Dint.c index 24985f38ad9..95be82731e6 100644 --- a/src/H5Dint.c +++ b/src/H5Dint.c @@ -1764,6 +1764,33 @@ H5D__open_oid(H5D_t *dataset, hid_t dapl_id) /* Indicate that the layout information was initialized */ layout_init = TRUE; + /* + * Now that we've read the dataset's datatype, dataspace and + * layout information, perform a quick check for compact datasets + * to ensure that the size of the internal buffer that was + * allocated for the dataset's raw data matches the size of + * the data. A corrupted file can cause a mismatch between the + * two, which might result in buffer overflows during future + * I/O to the dataset. 
+ */ + if (H5D_COMPACT == dataset->shared->layout.type) { + hssize_t dset_nelemts = 0; + size_t dset_type_size = H5T_GET_SIZE(dataset->shared->type); + size_t dset_data_size = 0; + + HDassert(H5D_COMPACT == dataset->shared->layout.storage.type); + + if ((dset_nelemts = H5S_GET_EXTENT_NPOINTS(dataset->shared->space)) < 0) + HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get number of elements in dataset's dataspace") + + dset_data_size = (size_t)dset_nelemts * dset_type_size; + + if (dataset->shared->layout.storage.u.compact.size != dset_data_size) + HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL, + "bad value from dataset header - size of compact dataset's data buffer doesn't match " + "size of dataset data"); + } + /* Set up flush append property */ if (H5D__append_flush_setup(dataset, dapl_id)) HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to set up flush append property") From be02375f5926300e84f0c661b74cfdc7f97c5f26 Mon Sep 17 00:00:00 2001 From: jhendersonHDF Date: Sat, 15 Apr 2023 00:12:52 -0500 Subject: [PATCH 049/108] Add buffer overrun checks to H5O__layout_decode and H5O__sdspace_decode (#2679) (#2729) --- release_docs/RELEASE.txt | 8 ++ src/H5Olayout.c | 256 +++++++++++++++++++++++++++++++++------ src/H5Osdspace.c | 62 +++++++--- src/H5private.h | 5 + 4 files changed, 278 insertions(+), 53 deletions(-) diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index acc54112c06..d547aa6c657 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -223,6 +223,14 @@ Bug Fixes since HDF5-1.12.1 release =================================== Library ------- + - Fixed potential buffer overrun issues in some object header decode routines + + Several checks were added to H5O__layout_decode and H5O__sdspace_decode to + ensure that memory buffers don't get overrun when decoding buffers read from + a (possibly corrupted) HDF5 file. 
+ + (JTH - 2023/04/05) + - Fixed a heap buffer overflow that occurs when reading from a dataset with a compact layout within a malformed HDF5 file diff --git a/src/H5Olayout.c b/src/H5Olayout.c index 2d0250783e5..ed69fb1a559 100644 --- a/src/H5Olayout.c +++ b/src/H5Olayout.c @@ -91,11 +91,11 @@ static void * H5O__layout_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNUSED mesg_flags, unsigned H5_ATTR_UNUSED *ioflags, size_t p_size, const uint8_t *p) { + const uint8_t *p_end = p + p_size - 1; /* End of the p buffer */ H5O_layout_t *mesg = NULL; uint8_t *heap_block = NULL; unsigned u; - const uint8_t *p_end = p + p_size - 1; /* End of the p buffer */ - void *ret_value = NULL; /* Return value */ + void *ret_value = NULL; /* Return value */ FUNC_ENTER_STATIC @@ -105,39 +105,55 @@ H5O__layout_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNU /* decode */ if (NULL == (mesg = H5FL_CALLOC(H5O_layout_t))) - HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, NULL, "memory allocation failed") + HGOTO_ERROR(H5E_RESOURCE, H5E_CANTALLOC, NULL, "memory allocation failed") mesg->storage.type = H5D_LAYOUT_ERROR; + if (H5_IS_BUFFER_OVERFLOW(p, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") mesg->version = *p++; + if (mesg->version < H5O_LAYOUT_VERSION_1 || mesg->version > H5O_LAYOUT_VERSION_4) - HGOTO_ERROR(H5E_OHDR, H5E_CANTLOAD, NULL, "bad version number for layout message") + HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, "bad version number for layout message") if (mesg->version < H5O_LAYOUT_VERSION_3) { unsigned ndims; /* Num dimensions in chunk */ /* Dimensionality */ + if (H5_IS_BUFFER_OVERFLOW(p, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") ndims = *p++; + if (!ndims || ndims > H5O_LAYOUT_NDIMS) - HGOTO_ERROR(H5E_OHDR, H5E_CANTLOAD, NULL, "dimensionality is out of range") + HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, "dimensionality is out of 
range") /* Layout class */ + if (H5_IS_BUFFER_OVERFLOW(p, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") mesg->type = (H5D_layout_t)*p++; - HDassert(H5D_CONTIGUOUS == mesg->type || H5D_CHUNKED == mesg->type || H5D_COMPACT == mesg->type); + + if (H5D_CONTIGUOUS != mesg->type && H5D_CHUNKED != mesg->type && H5D_COMPACT != mesg->type) + HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, "bad layout type for layout message") /* Set the storage type */ mesg->storage.type = mesg->type; /* Reserved bytes */ + if (H5_IS_BUFFER_OVERFLOW(p, 5, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") p += 5; /* Address */ if (mesg->type == H5D_CONTIGUOUS) { + if (H5_IS_BUFFER_OVERFLOW(p, H5F_SIZEOF_ADDR(f), p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") H5F_addr_decode(f, &p, &(mesg->storage.u.contig.addr)); /* Set the layout operations */ mesg->ops = H5D_LOPS_CONTIG; } /* end if */ else if (mesg->type == H5D_CHUNKED) { + if (H5_IS_BUFFER_OVERFLOW(p, H5F_SIZEOF_ADDR(f), p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") H5F_addr_decode(f, &p, &(mesg->storage.u.chunk.idx_addr)); /* Set the layout operations */ @@ -164,27 +180,46 @@ H5O__layout_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNU * size in the dataset code, where we've got the dataspace * information available also. 
- QAK 5/26/04 */ - p += ndims * 4; /* Skip over dimension sizes (32-bit quantities) */ - } /* end if */ + if (H5_IS_BUFFER_OVERFLOW(p, (ndims * sizeof(uint32_t)), p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") + p += ndims * sizeof(uint32_t); /* Skip over dimension sizes */ + } /* end if */ else { + if (ndims < 2) + HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, "bad dimensions for chunked storage") mesg->u.chunk.ndims = ndims; - for (u = 0; u < ndims; u++) + + if (H5_IS_BUFFER_OVERFLOW(p, (ndims * sizeof(uint32_t)), p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") + + for (u = 0; u < ndims; u++) { UINT32DECODE(p, mesg->u.chunk.dim[u]); + /* Just in case that something goes very wrong, such as file corruption. */ + if (mesg->u.chunk.dim[u] == 0) + HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, + "bad chunk dimension value when parsing layout message - chunk dimension " + "must be positive: mesg->u.chunk.dim[%u] = %u", + u, mesg->u.chunk.dim[u]) + } + /* Compute chunk size */ for (u = 1, mesg->u.chunk.size = mesg->u.chunk.dim[0]; u < ndims; u++) mesg->u.chunk.size *= mesg->u.chunk.dim[u]; } /* end if */ if (mesg->type == H5D_COMPACT) { + if (H5_IS_BUFFER_OVERFLOW(p, sizeof(uint32_t), p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") UINT32DECODE(p, mesg->storage.u.compact.size); + if (mesg->storage.u.compact.size > 0) { /* Ensure that size doesn't exceed buffer size, due to possible data corruption */ - if (p + mesg->storage.u.compact.size - 1 > p_end) - HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "storage size exceeds buffer size") + if (H5_IS_BUFFER_OVERFLOW(p, mesg->storage.u.compact.size, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") if (NULL == (mesg->storage.u.compact.buf = H5MM_malloc(mesg->storage.u.compact.size))) - HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, NULL, + 
HGOTO_ERROR(H5E_RESOURCE, H5E_CANTALLOC, NULL, "memory allocation failed for compact data buffer") H5MM_memcpy(mesg->storage.u.compact.buf, p, mesg->storage.u.compact.size); p += mesg->storage.u.compact.size; @@ -193,18 +228,23 @@ H5O__layout_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNU } /* end if */ else { /* Layout & storage class */ + if (H5_IS_BUFFER_OVERFLOW(p, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") mesg->type = mesg->storage.type = (H5D_layout_t)*p++; /* Interpret the rest of the message according to the layout class */ switch (mesg->type) { case H5D_COMPACT: /* Compact data size */ + if (H5_IS_BUFFER_OVERFLOW(p, sizeof(uint16_t), p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") UINT16DECODE(p, mesg->storage.u.compact.size); if (mesg->storage.u.compact.size > 0) { /* Ensure that size doesn't exceed buffer size, due to possible data corruption */ - if (p + mesg->storage.u.compact.size - 1 > p_end) - HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "storage size exceeds buffer size") + if (H5_IS_BUFFER_OVERFLOW(p, mesg->storage.u.compact.size, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") /* Allocate space for compact data */ if (NULL == (mesg->storage.u.compact.buf = H5MM_malloc(mesg->storage.u.compact.size))) @@ -222,9 +262,13 @@ H5O__layout_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNU case H5D_CONTIGUOUS: /* Contiguous storage address */ + if (H5_IS_BUFFER_OVERFLOW(p, H5F_SIZEOF_ADDR(f), p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") H5F_addr_decode(f, &p, &(mesg->storage.u.contig.addr)); /* Contiguous storage size */ + if (H5_IS_BUFFER_OVERFLOW(p, H5F_SIZEOF_SIZE(f), p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") H5F_DECODE_LENGTH(f, p, 
mesg->storage.u.contig.size); /* Set the layout operations */ @@ -237,22 +281,36 @@ H5O__layout_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNU mesg->u.chunk.flags = (uint8_t)0; /* Dimensionality */ + if (H5_IS_BUFFER_OVERFLOW(p, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") mesg->u.chunk.ndims = *p++; + if (mesg->u.chunk.ndims > H5O_LAYOUT_NDIMS) - HGOTO_ERROR(H5E_OHDR, H5E_CANTLOAD, NULL, "dimensionality is too large") + HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, "dimensionality is too large") + if (mesg->u.chunk.ndims < 2) + HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, "bad dimensions for chunked storage") /* B-tree address */ + if (H5_IS_BUFFER_OVERFLOW(p, H5F_SIZEOF_ADDR(f), p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") H5F_addr_decode(f, &p, &(mesg->storage.u.chunk.idx_addr)); + if (H5_IS_BUFFER_OVERFLOW(p, (mesg->u.chunk.ndims * sizeof(uint32_t)), p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") + /* Chunk dimensions */ for (u = 0; u < mesg->u.chunk.ndims; u++) { UINT32DECODE(p, mesg->u.chunk.dim[u]); /* Just in case that something goes very wrong, such as file corruption. 
*/ if (mesg->u.chunk.dim[u] == 0) - HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, NULL, - "chunk dimension must be positive: mesg->u.chunk.dim[%u] = %u", u, - mesg->u.chunk.dim[u]) + HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, + "bad chunk dimension value when parsing layout message - chunk " + "dimension must be positive: mesg->u.chunk.dim[%u] = %u", + u, mesg->u.chunk.dim[u]) } /* end for */ /* Compute chunk size */ @@ -266,6 +324,9 @@ H5O__layout_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNU } /* end if */ else { /* Get the chunked layout flags */ + if (H5_IS_BUFFER_OVERFLOW(p, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") mesg->u.chunk.flags = *p++; /* Check for valid flags */ @@ -276,25 +337,50 @@ H5O__layout_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNU HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, "bad flag value for message") /* Dimensionality */ + if (H5_IS_BUFFER_OVERFLOW(p, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") mesg->u.chunk.ndims = *p++; + if (mesg->u.chunk.ndims > H5O_LAYOUT_NDIMS) HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, "dimensionality is too large") /* Encoded # of bytes for each chunk dimension */ + if (H5_IS_BUFFER_OVERFLOW(p, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") mesg->u.chunk.enc_bytes_per_dim = *p++; + if (mesg->u.chunk.enc_bytes_per_dim == 0 || mesg->u.chunk.enc_bytes_per_dim > 8) HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, "encoded chunk dimension size is too large") + if (H5_IS_BUFFER_OVERFLOW(p, (mesg->u.chunk.ndims * mesg->u.chunk.enc_bytes_per_dim), + p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") + /* Chunk dimensions */ - for (u = 0; u < mesg->u.chunk.ndims; u++) + for (u = 0; u < mesg->u.chunk.ndims; u++) { UINT64DECODE_VAR(p, mesg->u.chunk.dim[u], 
mesg->u.chunk.enc_bytes_per_dim); + /* Just in case that something goes very wrong, such as file corruption. */ + if (mesg->u.chunk.dim[u] == 0) + HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, + "bad chunk dimension value when parsing layout message - chunk " + "dimension must be positive: mesg->u.chunk.dim[%u] = %u", + u, mesg->u.chunk.dim[u]) + } + /* Compute chunk size */ for (u = 1, mesg->u.chunk.size = mesg->u.chunk.dim[0]; u < mesg->u.chunk.ndims; u++) mesg->u.chunk.size *= mesg->u.chunk.dim[u]; /* Chunk index type */ + if (H5_IS_BUFFER_OVERFLOW(p, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") mesg->u.chunk.idx_type = (H5D_chunk_index_t)*p++; + if (mesg->u.chunk.idx_type >= H5D_CHUNK_IDX_NTYPES) HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, "unknown chunk index type") mesg->storage.u.chunk.idx_type = mesg->u.chunk.idx_type; @@ -311,6 +397,9 @@ H5O__layout_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNU case H5D_CHUNK_IDX_SINGLE: /* Single Chunk Index */ if (mesg->u.chunk.flags & H5O_LAYOUT_CHUNK_SINGLE_INDEX_WITH_FILTER) { + if (H5_IS_BUFFER_OVERFLOW(p, H5F_SIZEOF_SIZE(f) + sizeof(uint32_t), p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") H5F_DECODE_LENGTH(f, p, mesg->storage.u.chunk.u.single.nbytes); UINT32DECODE(p, mesg->storage.u.chunk.u.single.filter_mask); } /* end if */ @@ -321,9 +410,13 @@ H5O__layout_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNU case H5D_CHUNK_IDX_FARRAY: /* Fixed array creation parameters */ + if (H5_IS_BUFFER_OVERFLOW(p, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") mesg->u.chunk.u.farray.cparam.max_dblk_page_nelmts_bits = *p++; + if (0 == mesg->u.chunk.u.farray.cparam.max_dblk_page_nelmts_bits) - HGOTO_ERROR(H5E_OHDR, H5E_CANTLOAD, NULL, + HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, "invalid fixed array creation parameter") /* Set the 
chunk operations */ @@ -332,25 +425,49 @@ H5O__layout_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNU case H5D_CHUNK_IDX_EARRAY: /* Extensible array creation parameters */ + if (H5_IS_BUFFER_OVERFLOW(p, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") mesg->u.chunk.u.earray.cparam.max_nelmts_bits = *p++; + if (0 == mesg->u.chunk.u.earray.cparam.max_nelmts_bits) - HGOTO_ERROR(H5E_OHDR, H5E_CANTLOAD, NULL, + HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, "invalid extensible array creation parameter") + + if (H5_IS_BUFFER_OVERFLOW(p, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") mesg->u.chunk.u.earray.cparam.idx_blk_elmts = *p++; + if (0 == mesg->u.chunk.u.earray.cparam.idx_blk_elmts) - HGOTO_ERROR(H5E_OHDR, H5E_CANTLOAD, NULL, + HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, "invalid extensible array creation parameter") + + if (H5_IS_BUFFER_OVERFLOW(p, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") mesg->u.chunk.u.earray.cparam.sup_blk_min_data_ptrs = *p++; + if (0 == mesg->u.chunk.u.earray.cparam.sup_blk_min_data_ptrs) - HGOTO_ERROR(H5E_OHDR, H5E_CANTLOAD, NULL, + HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, "invalid extensible array creation parameter") + + if (H5_IS_BUFFER_OVERFLOW(p, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") mesg->u.chunk.u.earray.cparam.data_blk_min_elmts = *p++; + if (0 == mesg->u.chunk.u.earray.cparam.data_blk_min_elmts) - HGOTO_ERROR(H5E_OHDR, H5E_CANTLOAD, NULL, + HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, "invalid extensible array creation parameter") + + if (H5_IS_BUFFER_OVERFLOW(p, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") mesg->u.chunk.u.earray.cparam.max_dblk_page_nelmts_bits = *p++; + if (0 == 
mesg->u.chunk.u.earray.cparam.max_dblk_page_nelmts_bits) - HGOTO_ERROR(H5E_OHDR, H5E_CANTLOAD, NULL, + HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, "invalid extensible array creation parameter") /* Set the chunk operations */ @@ -358,10 +475,35 @@ H5O__layout_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNU break; case H5D_CHUNK_IDX_BT2: /* v2 B-tree index */ + if (H5_IS_BUFFER_OVERFLOW(p, sizeof(uint32_t), p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") UINT32DECODE(p, mesg->u.chunk.u.btree2.cparam.node_size); + + if (H5_IS_BUFFER_OVERFLOW(p, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") mesg->u.chunk.u.btree2.cparam.split_percent = *p++; + + if (mesg->u.chunk.u.btree2.cparam.split_percent == 0 || + mesg->u.chunk.u.btree2.cparam.split_percent > 100) + HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, + "bad value for v2 B-tree split percent value - must be > 0 and " + "<= 100: split percent = %" PRIu8, + mesg->u.chunk.u.btree2.cparam.split_percent) + + if (H5_IS_BUFFER_OVERFLOW(p, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") mesg->u.chunk.u.btree2.cparam.merge_percent = *p++; + if (mesg->u.chunk.u.btree2.cparam.merge_percent == 0 || + mesg->u.chunk.u.btree2.cparam.merge_percent > 100) + HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, + "bad value for v2 B-tree merge percent value - must be > 0 and " + "<= 100: merge percent = %" PRIu8, + mesg->u.chunk.u.btree2.cparam.merge_percent) + /* Set the chunk operations */ mesg->storage.u.chunk.ops = H5D_COPS_BT2; break; @@ -372,6 +514,9 @@ H5O__layout_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNU } /* end switch */ /* Chunk index address */ + if (H5_IS_BUFFER_OVERFLOW(p, H5F_SIZEOF_ADDR(f), p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") H5F_addr_decode(f, &p, 
&(mesg->storage.u.chunk.idx_addr)); } /* end else */ @@ -385,7 +530,13 @@ H5O__layout_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNU HGOTO_ERROR(H5E_OHDR, H5E_VERSION, NULL, "invalid layout version with virtual layout") /* Heap information */ + if (H5_IS_BUFFER_OVERFLOW(p, H5F_SIZEOF_ADDR(f), p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") H5F_addr_decode(f, &p, &(mesg->storage.u.virt.serial_list_hobjid.addr)); + /* NOTE: virtual mapping global heap entry address could be undefined */ + + if (H5_IS_BUFFER_OVERFLOW(p, sizeof(uint32_t), p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") UINT32DECODE(p, mesg->storage.u.virt.serial_list_hobjid.idx); /* Initialize other fields */ @@ -401,54 +552,88 @@ H5O__layout_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNU /* Decode heap block if it exists */ if (mesg->storage.u.virt.serial_list_hobjid.addr != HADDR_UNDEF) { const uint8_t *heap_block_p; + const uint8_t *heap_block_p_end; uint8_t heap_vers; size_t block_size = 0; size_t tmp_size; hsize_t tmp_hsize; uint32_t stored_chksum; uint32_t computed_chksum; - size_t i; /* Read heap */ if (NULL == (heap_block = (uint8_t *)H5HG_read( f, &(mesg->storage.u.virt.serial_list_hobjid), NULL, &block_size))) HGOTO_ERROR(H5E_OHDR, H5E_READERROR, NULL, "Unable to read global heap block") - heap_block_p = (const uint8_t *)heap_block; + heap_block_p = (const uint8_t *)heap_block; + heap_block_p_end = heap_block_p + block_size - 1; /* Decode the version number of the heap block encoding */ + if (H5_IS_BUFFER_OVERFLOW(heap_block_p, 1, heap_block_p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") heap_vers = (uint8_t)*heap_block_p++; + if ((uint8_t)H5O_LAYOUT_VDS_GH_ENC_VERS != heap_vers) HGOTO_ERROR(H5E_OHDR, H5E_VERSION, NULL, "bad version # of encoded VDS heap information, expected %u, got %u", 
(unsigned)H5O_LAYOUT_VDS_GH_ENC_VERS, (unsigned)heap_vers) /* Number of entries */ + if (H5_IS_BUFFER_OVERFLOW(heap_block_p, H5F_SIZEOF_SIZE(f), heap_block_p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") H5F_DECODE_LENGTH(f, heap_block_p, tmp_hsize) /* Allocate entry list */ if (NULL == (mesg->storage.u.virt.list = (H5O_storage_virtual_ent_t *)H5MM_calloc( (size_t)tmp_hsize * sizeof(H5O_storage_virtual_ent_t)))) - HGOTO_ERROR(H5E_OHDR, H5E_RESOURCE, NULL, "unable to allocate heap block") + HGOTO_ERROR(H5E_OHDR, H5E_CANTALLOC, NULL, "unable to allocate heap block") mesg->storage.u.virt.list_nalloc = (size_t)tmp_hsize; mesg->storage.u.virt.list_nused = (size_t)tmp_hsize; /* Decode each entry */ - for (i = 0; i < mesg->storage.u.virt.list_nused; i++) { + for (size_t i = 0; i < mesg->storage.u.virt.list_nused; i++) { + ptrdiff_t avail_buffer_space; + + avail_buffer_space = heap_block_p_end - heap_block_p + 1; + if (avail_buffer_space <= 0) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") + /* Source file name */ - tmp_size = HDstrlen((const char *)heap_block_p) + 1; + tmp_size = HDstrnlen((const char *)heap_block_p, (size_t)avail_buffer_space); + if (tmp_size == (size_t)avail_buffer_space) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding - unterminated source " + "file name string") + else + tmp_size += 1; /* Add space for NUL terminator */ + if (NULL == (mesg->storage.u.virt.list[i].source_file_name = (char *)H5MM_malloc(tmp_size))) - HGOTO_ERROR(H5E_OHDR, H5E_RESOURCE, NULL, + HGOTO_ERROR(H5E_OHDR, H5E_CANTALLOC, NULL, "unable to allocate memory for source file name") H5MM_memcpy(mesg->storage.u.virt.list[i].source_file_name, heap_block_p, tmp_size); heap_block_p += tmp_size; + avail_buffer_space = heap_block_p_end - heap_block_p + 1; + if (avail_buffer_space <= 0) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input 
buffer while decoding") + /* Source dataset name */ - tmp_size = HDstrlen((const char *)heap_block_p) + 1; + tmp_size = HDstrnlen((const char *)heap_block_p, (size_t)avail_buffer_space); + if (tmp_size == (size_t)avail_buffer_space) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding - unterminated source " + "dataset name string") + else + tmp_size += 1; /* Add space for NUL terminator */ + if (NULL == (mesg->storage.u.virt.list[i].source_dset_name = (char *)H5MM_malloc(tmp_size))) - HGOTO_ERROR(H5E_OHDR, H5E_RESOURCE, NULL, + HGOTO_ERROR(H5E_OHDR, H5E_CANTALLOC, NULL, "unable to allocate memory for source dataset name") H5MM_memcpy(mesg->storage.u.virt.list[i].source_dset_name, heap_block_p, tmp_size); heap_block_p += tmp_size; @@ -531,6 +716,9 @@ H5O__layout_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNU } /* end for */ /* Read stored checksum */ + if (H5_IS_BUFFER_OVERFLOW(heap_block_p, sizeof(uint32_t), heap_block_p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, + "ran off end of input buffer while decoding") UINT32DECODE(heap_block_p, stored_chksum) /* Compute checksum */ @@ -554,7 +742,7 @@ H5O__layout_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNU case H5D_LAYOUT_ERROR: case H5D_NLAYOUTS: default: - HGOTO_ERROR(H5E_OHDR, H5E_CANTLOAD, NULL, "Invalid layout class") + HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, "Invalid layout class") } /* end switch */ } /* end else */ diff --git a/src/H5Osdspace.c b/src/H5Osdspace.c index 0819480578d..79e4e586733 100644 --- a/src/H5Osdspace.c +++ b/src/H5Osdspace.c @@ -107,11 +107,11 @@ static void * H5O__sdspace_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNUSED mesg_flags, unsigned H5_ATTR_UNUSED *ioflags, size_t p_size, const uint8_t *p) { - H5S_extent_t *sdim = NULL; /* New extent dimensionality structure */ + const uint8_t *p_end = p + p_size - 1; /* End of the p buffer */ + H5S_extent_t *sdim = NULL; /* New extent 
dimensionality structure */ unsigned flags, version; - unsigned i; /* Local counting variable */ - const uint8_t *p_end = p + p_size - 1; /* End of the p buffer */ - void *ret_value = NULL; /* Return value */ + unsigned i; + void *ret_value = NULL; /* Return value */ FUNC_ENTER_STATIC @@ -121,25 +121,37 @@ H5O__sdspace_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UN /* decode */ if (NULL == (sdim = H5FL_CALLOC(H5S_extent_t))) - HGOTO_ERROR(H5E_DATASPACE, H5E_NOSPACE, NULL, "dataspace structure allocation failed") + HGOTO_ERROR(H5E_DATASPACE, H5E_CANTALLOC, NULL, "dataspace structure allocation failed") + sdim->type = H5S_NO_CLASS; /* Check version */ + if (H5_IS_BUFFER_OVERFLOW(p, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") version = *p++; + if (version < H5O_SDSPACE_VERSION_1 || version > H5O_SDSPACE_VERSION_2) - HGOTO_ERROR(H5E_OHDR, H5E_CANTINIT, NULL, "wrong version number in dataspace message") + HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, "wrong version number in dataspace message") sdim->version = version; /* Get rank */ + if (H5_IS_BUFFER_OVERFLOW(p, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") sdim->rank = *p++; + if (sdim->rank > H5S_MAX_RANK) - HGOTO_ERROR(H5E_OHDR, H5E_CANTINIT, NULL, "simple dataspace dimensionality is too large") + HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, NULL, "simple dataspace dimensionality is too large") /* Get dataspace flags for later */ + if (H5_IS_BUFFER_OVERFLOW(p, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") flags = *p++; /* Get or determine the type of the extent */ if (version >= H5O_SDSPACE_VERSION_2) { + if (H5_IS_BUFFER_OVERFLOW(p, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") sdim->type = (H5S_class_t)*p++; + if (sdim->type != H5S_SIMPLE && sdim->rank > 0) HGOTO_ERROR(H5E_OHDR, 
H5E_BADVALUE, NULL, "invalid rank for scalar or NULL dataspace") } /* end if */ @@ -151,36 +163,48 @@ H5O__sdspace_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UN sdim->type = H5S_SCALAR; /* Increment past reserved byte */ + if (H5_IS_BUFFER_OVERFLOW(p, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") p++; } /* end else */ HDassert(sdim->type != H5S_NULL || sdim->version >= H5O_SDSPACE_VERSION_2); /* Only Version 1 has these reserved bytes */ - if (version == H5O_SDSPACE_VERSION_1) + if (version == H5O_SDSPACE_VERSION_1) { + if (H5_IS_BUFFER_OVERFLOW(p, 4, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") p += 4; /*reserved*/ + } /* Decode dimension sizes */ if (sdim->rank > 0) { - /* Ensure that rank doesn't cause reading passed buffer's end, - due to possible data corruption */ uint8_t sizeof_size = H5F_SIZEOF_SIZE(f); - if (p + (sizeof_size * sdim->rank - 1) > p_end) { - HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "rank might cause reading passed buffer's end") - } + + /* + * Ensure that decoding doesn't cause reading past buffer's end, + * due to possible data corruption - check that we have space to + * decode a "sdim->rank" number of hsize_t values + */ + if (H5_IS_BUFFER_OVERFLOW(p, (sizeof_size * sdim->rank), p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") if (NULL == (sdim->size = (hsize_t *)H5FL_ARR_MALLOC(hsize_t, (size_t)sdim->rank))) - HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, NULL, "memory allocation failed") + HGOTO_ERROR(H5E_RESOURCE, H5E_CANTALLOC, NULL, "memory allocation failed") for (i = 0; i < sdim->rank; i++) H5F_DECODE_LENGTH(f, p, sdim->size[i]); if (flags & H5S_VALID_MAX) { if (NULL == (sdim->max = (hsize_t *)H5FL_ARR_MALLOC(hsize_t, (size_t)sdim->rank))) - HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, NULL, "memory allocation failed") - - /* Ensure that rank doesn't cause reading 
passed buffer's end */ - if (p + (sizeof_size * sdim->rank - 1) > p_end) - HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "rank might cause reading passed buffer's end") + HGOTO_ERROR(H5E_RESOURCE, H5E_CANTALLOC, NULL, "memory allocation failed") + + /* + * Ensure that decoding doesn't cause reading past buffer's end, + * due to possible data corruption - check that we have space to + * decode a "sdim->rank" number of hsize_t values + */ + if (H5_IS_BUFFER_OVERFLOW(p, (sizeof_size * sdim->rank), p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") for (i = 0; i < sdim->rank; i++) H5F_DECODE_LENGTH(f, p, sdim->max[i]); diff --git a/src/H5private.h b/src/H5private.h index a82796e006c..7d15312cb7a 100644 --- a/src/H5private.h +++ b/src/H5private.h @@ -398,6 +398,11 @@ /* Raise an integer to a power of 2 */ #define H5_EXP2(n) (1 << (n)) +/* Check if a read of size bytes starting at ptr would overflow past + * the last valid byte, pointed to by buffer_end. + */ +#define H5_IS_BUFFER_OVERFLOW(ptr, size, buffer_end) (((ptr) + (size)-1) > (buffer_end)) + /* * HDF Boolean type. */ From 5fcffd1eb197d18bba6618cca357313f4113b149 Mon Sep 17 00:00:00 2001 From: Dana Robinson <43805+derobins@users.noreply.github.com> Date: Sun, 16 Apr 2023 12:29:20 -0700 Subject: [PATCH 050/108] Remove author and date from RELEASE.txt entries (#2752) --- release_docs/RELEASE.txt | 58 +++++++++++----------------------------- 1 file changed, 15 insertions(+), 43 deletions(-) diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index d547aa6c657..53df50c271e 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -55,8 +55,6 @@ New Features default for parallel debug builds and off by default for other build types. CMake has been updated to match this behavior. - (JTH - 2023/03/29) - - Added new option to build libaec and zlib inline with CMake. 
Using the CMake FetchContent module, the external filters can populate @@ -80,8 +78,6 @@ New Features See the CMakeFilters.cmake and config/cmake/cacheinit.cmake files for usage. - (ADB - 2023/02/21) - - Add new CMake configuration variable HDF5_USE_GNU_DIRS HDF5_USE_GNU_DIRS (default OFF) selects the use of GNU Coding Standard install @@ -90,8 +86,6 @@ New Features sets various PATH variables for use during the build, test and install processes. By default, the historical settings for these variables will be used. - (ADB - 2022/10/21, GH-2175, GH-1716) - - Correct the usage of CMAKE_Fortran_MODULE_DIRECTORY and where to install Fortran mod files. @@ -106,8 +100,6 @@ New Features CMake will use "mod" folder by default unless overridden by the CMake variable; HDF5_INSTALL_MODULE_DIR. - (ADB - 2022/07/21) - Library: -------- @@ -137,15 +129,11 @@ New Features object among those HDF5 files which are open. This is no longer valid in future HDF5 releases. - (ADB - 2023/03/27) - - Added version of H5Rget_name to return the name as a Java string. Other functions that get_name process the get_size then get the name within the JNI implementation. Now H5Rget_name has a H5Rget_name_string. - (ADB - 2022/07/12) - - Added reference support to H5A and H5D read write vlen JNI functions. Added the implementation to handle VL references as an Array of Lists @@ -155,7 +143,7 @@ New Features structures. The wrappers use the specified datatype arguments for the List type translation, it is expected that the Java type is correct. - (ADB - 2022/07/11, HDFFV-11318) + Fixes Jira issue HDFFV-11318 - H5A and H5D read write vlen JNI functions were incorrect. @@ -169,7 +157,7 @@ New Features structures. The wrappers use the specified datatype arguments for the List type translation, it is expected that the Java type is correct. - (ADB - 2022/07/07, HDFFV-11310) + Fixes Jira issue HDFFV-11310 - H5A and H5D read write JNI functions had flawed vlen datatype check. 
@@ -177,8 +165,6 @@ New Features to a single check and variable. The variable can then be used to call the H5Treclaim function. Adjusted existing test and added new test. - (ADB - 2022/06/22) - Tools: ------ @@ -187,8 +173,6 @@ New Features Ported 1.10 tools display function to provide ability to inspect and display 1.10 reference data. - (ADB - 2022/06/22) - High-Level APIs: ---------------- @@ -211,8 +195,6 @@ New Features The resulting documentation files will be in the share/html subdirectory of the HDF5 install directory. - (ADB - 2022/08/09) - Support for new platforms, languages and compilers ================================================== @@ -229,8 +211,6 @@ Bug Fixes since HDF5-1.12.1 release ensure that memory buffers don't get overrun when decoding buffers read from a (possibly corrupted) HDF5 file. - (JTH - 2023/04/05) - - Fixed a heap buffer overflow that occurs when reading from a dataset with a compact layout within a malformed HDF5 file @@ -248,7 +228,7 @@ Bug Fixes since HDF5-1.12.1 release dataspace and datatype information). If the two sizes do not match, opening of the dataset will fail. - (JTH - 2023/04/04, GH-2606) + Fixes GitHub issue #2606 - Fix for CVE-2019-8396 @@ -259,7 +239,7 @@ Bug Fixes since HDF5-1.12.1 release The fix ensures each element is within bounds before reading. - (2023/04/13 - HDFFV-10712, CVE-2019-8396, GitHub #2209) + Fixes Jira issue HDFFV-10712, CVE-2019-8396, GitHub issue #2209 - Memory leak @@ -272,7 +252,7 @@ Bug Fixes since HDF5-1.12.1 release As error is encountered in loading the illegal message, the memory allocated for cont_msg_info->msgs needs to be freed. - (VC - 2023/04/11 GH-2599) + Fixes GitHub issue #2599 - Fixed a memory corruption issue that can occur when reading from a dataset using a hyperslab selection in the file @@ -286,8 +266,6 @@ Bug Fixes since HDF5-1.12.1 release being copied when projecting the point selection onto the hyperslab selection's dataspace. 
- (JTH - 2023/03/23) - - Fix CVE-2021-37501 / GHSA-rfgw-5vq3-wrjf Check for overflow when calculating on-disk attribute data size. @@ -299,7 +277,7 @@ Bug Fixes since HDF5-1.12.1 release The test case was crafted in a way that the overflow caused the size to be 0. - (EFE - 2023/02/11 GH-2458) + Fixes GitHub issue #2458 - Seg fault on file close @@ -318,7 +296,7 @@ Bug Fixes since HDF5-1.12.1 release It will be freed later in H5VL__native_file_close() when the library exits and terminates the file package. - (VC - 2022/12/14, HDFFV-11052, CVE-2020-10812) + Fixes Jira issue HDFFV-11052, CVE-2020-10812 - Fixed an issue with variable length attributes @@ -328,15 +306,11 @@ Bug Fixes since HDF5-1.12.1 release closed, attempting to write to the attribute through the first handle would cause an error. - (NAF - 2022/10/24) - - Fixed an issue with hyperslab selections Previously, when combining hyperslab selections, it was possible for the library to produce an incorrect combined selection. - (NAF - 2022/09/25) - - Fixed an issue with attribute type conversion with compound datatypes Previously, when performing type conversion for attribute I/O with a @@ -344,7 +318,7 @@ Bug Fixes since HDF5-1.12.1 release the contents of the destination, potentially causing data to be lost when only writing to a subset of the compound fields. - (NAF - 2022/08/22, GitHub #2016) + Fixes GitHub issue #2016 - Modified H5Fstart_swmr_write() to preserve DAPL properties @@ -354,7 +328,7 @@ Bug Fixes since HDF5-1.12.1 release properties. Modified the library to instead save these properties and use them when reopening the file. - (NAF - 2022/07/18, HDFFV-11308) + Fixes Jira issue HDFFV-11308 - Converted an assertion on (possibly corrupt) file contents to a normal error check @@ -365,7 +339,7 @@ Bug Fixes since HDF5-1.12.1 release in debug builds of HDF5. In production builds, this situation could cause either a library error or a crash, depending on the platform. 
- (JTH - 2022/07/08, HDFFV-11316, HDFFV-11317) + Fixes Jira issues HDFFV-11316 & HDFFV-11317 - Memory leak @@ -378,7 +352,7 @@ Bug Fixes since HDF5-1.12.1 release is therefore lost. A buffer is now allocated regardless so that the element in fill->buf can later be reclaimed. - (VC - 2022/10/10, HDFFV-10840) + Fixes Jira issue HDFFV-10840 Java Library @@ -399,7 +373,7 @@ Bug Fixes since HDF5-1.12.1 release supported pkconfig files. Still recommend that the CMake config file method be used for building projects with CMake. - (ADB - 2023/02/16 GH-1546,GH-2259) + Fixes GitHub issues #1546 & #2259 - Change the settings of the *pc files to use the correct format @@ -407,7 +381,7 @@ Bug Fixes since HDF5-1.12.1 release settings. Changing the set to use 'lib-name = version' instead 'lib-name-version' fixes the issue - (ADB - 2022/12/06 HDFFV-11355) + Fixes Jira issue HDFFV-11355 - Move MPI libraries link from PRIVATE to PUBLIC @@ -415,8 +389,6 @@ Bug Fixes since HDF5-1.12.1 release an application or library was built with the C library. Also updated the CMake target link command to use the newer style MPI::MPI_C link variable. - (ADB - 2022/20/27) - Tools ----- @@ -425,14 +397,14 @@ Bug Fixes since HDF5-1.12.1 release Replaced the H5TOOLS_GOTO_ERROR with just H5TOOLS_ERROR. - (ADB - 2023/04/06 GH-2598) + Fixes GitHub issue #2598 - Fix h5repack to only print output when verbose option is selected When timing option was added to h5repack, the check for verbose was incorrectly implemented. 
- (ADB - 2022/12/02, GH #2270) + Fixes GitHub issue #2270 Performance From 1ffe6afce83b18ec608827089a080ef671a27e8f Mon Sep 17 00:00:00 2001 From: jhendersonHDF Date: Mon, 17 Apr 2023 21:09:44 -0500 Subject: [PATCH 051/108] Check for invalid AAPL in H5Aopen (#2712) (#2771) --- src/H5VLnative_attr.c | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/src/H5VLnative_attr.c b/src/H5VLnative_attr.c index c6099da5896..65241b23b43 100644 --- a/src/H5VLnative_attr.c +++ b/src/H5VLnative_attr.c @@ -104,12 +104,13 @@ H5VL__native_attr_create(void *obj, const H5VL_loc_params_t *loc_params, const c *------------------------------------------------------------------------- */ void * -H5VL__native_attr_open(void *obj, const H5VL_loc_params_t *loc_params, const char *attr_name, - hid_t H5_ATTR_UNUSED aapl_id, hid_t H5_ATTR_UNUSED dxpl_id, void H5_ATTR_UNUSED **req) +H5VL__native_attr_open(void *obj, const H5VL_loc_params_t *loc_params, const char *attr_name, hid_t aapl_id, + hid_t H5_ATTR_UNUSED dxpl_id, void H5_ATTR_UNUSED **req) { - H5G_loc_t loc; /* Object location */ - H5A_t *attr = NULL; /* Attribute opened */ - void *ret_value; + H5P_genplist_t *plist; + H5G_loc_t loc; /* Object location */ + H5A_t *attr = NULL; /* Attribute opened */ + void *ret_value; FUNC_ENTER_PACKAGE @@ -117,6 +118,9 @@ H5VL__native_attr_open(void *obj, const H5VL_loc_params_t *loc_params, const cha if (H5G_loc_real(obj, loc_params->obj_type, &loc) < 0) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, NULL, "not a file or file object") + if (NULL == (plist = H5P_object_verify(aapl_id, H5P_ATTRIBUTE_ACCESS))) + HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, NULL, "AAPL is not an attribute access property list") + if (loc_params->type == H5VL_OBJECT_BY_SELF) { /* H5Aopen */ /* Open the attribute */ From 547fb23bb2be18787156f72af637656b2c20232b Mon Sep 17 00:00:00 2001 From: bmribler <39579120+bmribler@users.noreply.github.com> Date: Tue, 18 Apr 2023 14:22:48 -0400 Subject: [PATCH 052/108] Fixed 
GH-2603, heap-buffer-overflow in H5O__linfo_decode (#2763) Verified with valgrind -v --tool=memcheck --leak-check=full h5dump POV-GH-2603 The several invalid reads shown originally are now gone. --- release_docs/RELEASE.txt | 11 ++++++++++- src/H5Olinfo.c | 30 +++++++++++++++++++++++------- 2 files changed, 33 insertions(+), 8 deletions(-) diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 53df50c271e..7de2a61720a 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -201,10 +201,19 @@ Support for new platforms, languages and compilers - -Bug Fixes since HDF5-1.12.1 release +Bug Fixes since HDF5-1.12.2 release =================================== Library ------- + - Fixed potential heap buffer overflow in decoding of link info message + + Detections of buffer overflow were added for decoding version, index + flags, link creation order value, and the next three addresses. The + checkings will remove the potential invalid read of any of these + values that could be triggered by a malformed file. 
+ + (GH-2603 - 2023/04/16) + - Fixed potential buffer overrun issues in some object header decode routines Several checks were added to H5O__layout_decode and H5O__sdspace_decode to diff --git a/src/H5Olinfo.c b/src/H5Olinfo.c index f7e4b108c09..46987f91bb1 100644 --- a/src/H5Olinfo.c +++ b/src/H5Olinfo.c @@ -105,11 +105,13 @@ H5FL_DEFINE_STATIC(H5O_linfo_t); */ static void * H5O__linfo_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNUSED mesg_flags, - unsigned H5_ATTR_UNUSED *ioflags, size_t H5_ATTR_UNUSED p_size, const uint8_t *p) + unsigned H5_ATTR_UNUSED *ioflags, size_t p_size, const uint8_t *p) { - H5O_linfo_t *linfo = NULL; /* Link info */ - unsigned char index_flags; /* Flags for encoding link index info */ - void *ret_value = NULL; /* Return value */ + const uint8_t *p_end = p + p_size - 1; /* End of the p buffer */ + H5O_linfo_t *linfo = NULL; /* Link info */ + unsigned char index_flags; /* Flags for encoding link index info */ + uint8_t addr_size = H5F_SIZEOF_ADDR(f); /* Temp var */ + void *ret_value = NULL; /* Return value */ FUNC_ENTER_STATIC @@ -117,13 +119,17 @@ H5O__linfo_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNUS HDassert(f); HDassert(p); + /* Check input buffer before decoding version and index flags */ + if (H5_IS_BUFFER_OVERFLOW(p, 2, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") + /* Version of message */ if (*p++ != H5O_LINFO_VERSION) HGOTO_ERROR(H5E_OHDR, H5E_CANTLOAD, NULL, "bad version number for message") /* Allocate space for message */ if (NULL == (linfo = H5FL_MALLOC(H5O_linfo_t))) - HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, NULL, "memory allocation failed") + HGOTO_ERROR(H5E_RESOURCE, H5E_CANTALLOC, NULL, "memory allocation failed") /* Get the index flags for the group */ index_flags = *p++; @@ -136,11 +142,18 @@ H5O__linfo_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNUS linfo->nlinks = HSIZET_MAX; /* Max. 
link creation order value for the group, if tracked */ - if (linfo->track_corder) + if (linfo->track_corder) { + if (H5_IS_BUFFER_OVERFLOW(p, 8, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") INT64DECODE(p, linfo->max_corder) + } else linfo->max_corder = 0; + /* Check input buffer before decoding the next two addresses */ + if (H5_IS_BUFFER_OVERFLOW(p, addr_size + addr_size, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") + /* Address of fractal heap to store "dense" links */ H5F_addr_decode(f, &p, &(linfo->fheap_addr)); @@ -148,8 +161,11 @@ H5O__linfo_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNUS H5F_addr_decode(f, &p, &(linfo->name_bt2_addr)); /* Address of v2 B-tree to index creation order of links, if there is one */ - if (linfo->index_corder) + if (linfo->index_corder) { + if (H5_IS_BUFFER_OVERFLOW(p, addr_size, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding") H5F_addr_decode(f, &p, &(linfo->corder_bt2_addr)); + } else linfo->corder_bt2_addr = HADDR_UNDEF; From 7d24d61ea26c522f673c3e97ae68aa1f3f530f01 Mon Sep 17 00:00:00 2001 From: Larry Knox Date: Tue, 18 Apr 2023 15:07:32 -0500 Subject: [PATCH 053/108] Add new version of COPYING_LBNL_HDF5. 
(#2776) --- COPYING_LBNL_HDF5 | 108 +++++++++++++++++++++++++--------------------- 1 file changed, 60 insertions(+), 48 deletions(-) diff --git a/COPYING_LBNL_HDF5 b/COPYING_LBNL_HDF5 index 16fba5d1768..ebc00df1b0c 100644 --- a/COPYING_LBNL_HDF5 +++ b/COPYING_LBNL_HDF5 @@ -1,49 +1,61 @@ -Copyright Notice and License Terms for -HDF5 (Hierarchical Data Format 5) Software Library and Utilities ------------------------------------------------------------------------------ - -HDF5 (Hierarchical Data Format 5) -Copyright (c) 2016, The Regents of the University of California, through -Lawrence Berkeley National Laboratory (subject to receipt of any required -approvals from the U.S. Dept. of Energy). - -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, - this list of conditions, and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions, and the following disclaimer in the documentation - and/or materials provided with the distribution. - -3. Neither the name of the University of California, Lawrence Berkeley -National Laboratory, U.S. Dept. of Energy nor the names of its contributors -may be used to endorse or promote products derived from this software without -specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF -THE POSSIBILITY OF SUCH DAMAGE. - -You are under no obligation whatsoever to provide any bug fixes, patches, -or upgrades to the features, functionality or performance of the source -code ("Enhancements") to anyone; however, if you choose to make your -Enhancements available either publicly, or directly to Lawrence Berkeley -National Laboratory, without imposing a separate written license agreement -for such Enhancements, then you hereby grant the following license: -a non-exclusive, royalty-free perpetual license to install, use, modify, -prepare derivative works, incorporate into other computer software, -distribute, and sublicense such enhancements or derivative works thereof, -in binary and source code form. +**************************** +*** Copyright Notice *** +Hierarchical Data Format 5 (HDF5) v1.12.0 Copyright (c) 2020, HDF Group and The +Regents of the University of California, through Lawrence Berkeley National +Laboratory (subject to receipt of any required approvals from the U.S. Dept. of +Energy). All rights reserved. + +If you have questions about your rights to use or distribute this software, +please contact Berkeley Lab's Intellectual Property Office at IPO@lbl.gov. + +NOTICE. This Software was partially developed under funding from the U.S. +Department of Energy and the U.S. Government consequently retains certain +rights. As such, the U.S. 
Government has been granted for itself and others +acting on its behalf a paid-up, nonexclusive, irrevocable, worldwide license in +the Software to reproduce, distribute copies to the public, prepare derivative +works, and perform publicly and display publicly, and to permit others to do so. + +**************************** +*** License Agreement *** + +Hierarchical Data Format 5 (HDF5) v1.12.0 Copyright (c) 2020, HDF Group and The +Regents of the University of California, through Lawrence Berkeley National +Laboratory (subject to receipt of any required approvals from the U.S. Dept. of +Energy). All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +(1) Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +(2) Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +(3) Neither the name of the HDF Group, University of California, Lawrence +Berkeley National Laboratory, U.S. Dept. of Energy, nor the names of its +contributors may be used to endorse or promote products derived from this +software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +You are under no obligation whatsoever to provide any bug fixes, patches, or +upgrades to the features, functionality or performance of the source code +("Enhancements") to anyone; however, if you choose to make your Enhancements +available either publicly, or directly to Lawrence Berkeley National Laboratory, +without imposing a separate written license agreement for such Enhancements, +then you hereby grant the following license: a non-exclusive, royalty-free +perpetual license to install, use, modify, prepare derivative works, incorporate +into other computer software, distribute, and sublicense such enhancements or +derivative works thereof, in binary and source code form. 
From 39099bd397ac8d27965b70e7bfc644abc27fd5df Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Wed, 19 Apr 2023 07:44:56 -0500 Subject: [PATCH 054/108] Add no subsets option to h5diff like h5dump (#2760) * reorder argument in alpha order --- release_docs/RELEASE.txt | 7 + tools/lib/h5diff.c | 4 +- tools/lib/h5diff.h | 45 +++--- tools/lib/h5tools_utils.c | 122 ++++++++++++++++ tools/lib/h5tools_utils.h | 3 + tools/src/h5diff/h5diff_common.c | 157 +++------------------ tools/src/h5dump/h5dump.c | 117 +-------------- tools/test/h5diff/testfiles/h5diff_10.txt | 2 + tools/test/h5diff/testfiles/h5diff_600.txt | 2 + tools/test/h5diff/testfiles/h5diff_603.txt | 2 + tools/test/h5diff/testfiles/h5diff_606.txt | 2 + tools/test/h5diff/testfiles/h5diff_612.txt | 2 + tools/test/h5diff/testfiles/h5diff_615.txt | 2 + tools/test/h5diff/testfiles/h5diff_621.txt | 2 + tools/test/h5diff/testfiles/h5diff_622.txt | 2 + tools/test/h5diff/testfiles/h5diff_623.txt | 2 + tools/test/h5diff/testfiles/h5diff_624.txt | 2 + 17 files changed, 205 insertions(+), 270 deletions(-) diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 7de2a61720a..a5eb5851a03 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -401,6 +401,13 @@ Bug Fixes since HDF5-1.12.2 release Tools ----- + - Names of objects with square brackets will have trouble without the + special argument, --no-compact-subset, on the h5dump command line. + + h5diff did not have this option and now it has been added. + + Fixes GitHub issue #2682 + - In the tools traverse function - an error in either visit call will bypass the cleanup of the local data variables. 
diff --git a/tools/lib/h5diff.c b/tools/lib/h5diff.c index a0cd8db3608..9b49be9cd21 100644 --- a/tools/lib/h5diff.c +++ b/tools/lib/h5diff.c @@ -181,7 +181,7 @@ is_exclude_path(char *path, h5trav_type_t type, diff_opt_t *opts) /* search objects in exclude list */ while (NULL != exclude_path_ptr) { - /* if exclude path is is group, exclude its members as well */ + /* if exclude path is in group, exclude its members as well */ if (exclude_path_ptr->obj_type == H5TRAV_TYPE_GROUP) { ret_cmp = HDstrncmp(exclude_path_ptr->obj_path, path, HDstrlen(exclude_path_ptr->obj_path)); if (ret_cmp == 0) { /* found matching members */ @@ -245,7 +245,7 @@ is_exclude_attr(const char *path, h5trav_type_t type, diff_opt_t *opts) /* search objects in exclude list */ while (NULL != exclude_ptr) { - /* if exclude path is is group, exclude its members as well */ + /* if exclude path is in group, exclude its members as well */ if (exclude_ptr->obj_type == H5TRAV_TYPE_GROUP) { ret_cmp = HDstrncmp(exclude_ptr->obj_path, path, HDstrlen(exclude_ptr->obj_path)); if (ret_cmp == 0) { /* found matching members */ diff --git a/tools/lib/h5diff.h b/tools/lib/h5diff.h index d67d22419f3..0aec94fb33f 100644 --- a/tools/lib/h5diff.h +++ b/tools/lib/h5diff.h @@ -51,28 +51,29 @@ typedef struct { int mode_quiet; /* quiet mode: no output at all */ int mode_report; /* report mode: print the data */ int mode_verbose; /* verbose mode: print the data, list of objcets, warnings */ - int mode_verbose_level; /* control verbose details */ - int mode_list_not_cmp; /* list not comparable messages */ - int print_header; /* print header */ - int print_percentage; /* print percentage */ - int print_dims; /* print dimension index */ - int delta_bool; /* delta, absolute value to compare */ - double delta; /* delta value */ - int use_system_epsilon; /* flag to use system epsilon (1 or 0) */ - int percent_bool; /* relative error to compare*/ - double percent; /* relative error value */ - hbool_t follow_links; /* follow 
symbolic links */ - int no_dangle_links; /* return error when find dangling link */ - int cmn_objs; /* do we have common objects */ - int not_cmp; /* are the objects comparable */ - int contents; /* equal contents */ - int do_nans; /* consider Nans while diffing floats */ - int exclude_path; /* exclude path to an object */ - int exclude_attr_path; /* exclude path to an object */ - struct exclude_path_list *exclude; /* keep exclude path list */ - struct exclude_path_list *exclude_attr; /* keep exclude attribute list */ - int count_bool; /* count, compare up to count */ - hsize_t count; /* count value */ + int mode_verbose_level; /* control verbose details */ + int mode_list_not_cmp; /* list not comparable messages */ + int print_header; /* print header */ + int print_percentage; /* print percentage */ + int print_dims; /* print dimension index */ + int delta_bool; /* delta, absolute value to compare */ + double delta; /* delta value */ + int use_system_epsilon; /* flag to use system epsilon (1 or 0) */ + int percent_bool; /* relative error to compare*/ + double percent; /* relative error value */ + hbool_t follow_links; /* follow symbolic links */ + int no_dangle_links; /* return error when find dangling link */ + int cmn_objs; /* do we have common objects */ + int not_cmp; /* are the objects comparable */ + int contents; /* equal contents */ + int do_nans; /* consider Nans while diffing floats */ + int disable_compact_subset; /* disable compact form of subset notation */ + int exclude_path; /* exclude path to an object */ + int exclude_attr_path; /* exclude path to an object */ + struct exclude_path_list *exclude; /* keep exclude path list */ + struct exclude_path_list *exclude_attr; /* keep exclude attribute list */ + int count_bool; /* count, compare up to count */ + hsize_t count; /* count value */ diff_err_t err_stat; /* an error occurred (2, error, 1, differences, 0, no error) */ hsize_t nelmts; /* total number of elements */ hsize_t hs_nelmts; /* number of 
elements to read at a time*/ diff --git a/tools/lib/h5tools_utils.c b/tools/lib/h5tools_utils.c index 1f4345cd29d..17609c7e1f3 100644 --- a/tools/lib/h5tools_utils.c +++ b/tools/lib/h5tools_utils.c @@ -161,6 +161,128 @@ help_ref_msg(FILE *output) HDfprintf(output, "see the <%s> entry in the 'HDF5 Reference Manual'.\n", h5tools_getprogname()); } +/*------------------------------------------------------------------------- + * Function: parse_hsize_list + * + * Purpose: Parse a list of comma or space separated integers and return + * them in a list. The string being passed into this function + * should be at the start of the list you want to parse. You are + * responsible for freeing the array returned from here. + * + * Lists in the so-called "terse" syntax are separated by + * semicolons (;). The lists themselves can be separated by + * either commas (,) or white spaces. + * + * Return: + *------------------------------------------------------------------------- + */ +void +parse_hsize_list(const char *h_list, subset_d *d) +{ + hsize_t *p_list; + const char *ptr; + unsigned int size_count = 0; + unsigned int i = 0; + unsigned int last_digit = 0; + + if (!h_list || !*h_list || *h_list == ';') + return; + + H5TOOLS_START_DEBUG(" - h_list:%s", h_list); + /* count how many integers do we have */ + for (ptr = h_list; ptr && *ptr && *ptr != ';' && *ptr != ']'; ptr++) + if (HDisdigit(*ptr)) { + if (!last_digit) + /* the last read character wasn't a digit */ + size_count++; + + last_digit = 1; + } + else + last_digit = 0; + + if (size_count == 0) { + /* there aren't any integers to read */ + H5TOOLS_ENDDEBUG("No integers to read"); + return; + } + H5TOOLS_DEBUG("Number integers to read=%ld", size_count); + + /* allocate an array for the integers in the list */ + if ((p_list = (hsize_t *)HDcalloc(size_count, sizeof(hsize_t))) == NULL) + H5TOOLS_INFO("Unable to allocate space for subset data"); + + for (ptr = h_list; i < size_count && ptr && *ptr && *ptr != ';' && *ptr != 
']'; ptr++) + if (HDisdigit(*ptr)) { + /* we should have an integer now */ + p_list[i++] = (hsize_t)HDstrtoull(ptr, NULL, 0); + + while (HDisdigit(*ptr)) + /* scroll to end of integer */ + ptr++; + } + d->data = p_list; + d->len = size_count; + H5TOOLS_ENDDEBUG(" "); +} + +/*------------------------------------------------------------------------- + * Function: parse_subset_params + * + * Purpose: Parse the so-called "terse" syntax for specifying subsetting parameters. + * + * Return: Success: struct subset_t object + * Failure: NULL + *------------------------------------------------------------------------- + */ +struct subset_t * +parse_subset_params(const char *dset) +{ + struct subset_t *s = NULL; + char *brace; + const char *q_dset; + + H5TOOLS_START_DEBUG(" - dset:%s", dset); + /* if dset name is quoted wait till after second quote to look for subset brackets */ + if (*dset == '"') + q_dset = HDstrchr(dset, '"'); + else + q_dset = dset; + if ((brace = HDstrrchr(q_dset, '[')) != NULL) { + *brace++ = '\0'; + + s = (struct subset_t *)HDcalloc(1, sizeof(struct subset_t)); + parse_hsize_list(brace, &s->start); + + while (*brace && *brace != ';') + brace++; + + if (*brace) + brace++; + + parse_hsize_list(brace, &s->stride); + + while (*brace && *brace != ';') + brace++; + + if (*brace) + brace++; + + parse_hsize_list(brace, &s->count); + + while (*brace && *brace != ';') + brace++; + + if (*brace) + brace++; + + parse_hsize_list(brace, &s->block); + } + H5TOOLS_ENDDEBUG(" "); + + return s; +} + /*------------------------------------------------------------------------- * Function: get_option * diff --git a/tools/lib/h5tools_utils.h b/tools/lib/h5tools_utils.h index b714b7ddd38..bf8ce75b73f 100644 --- a/tools/lib/h5tools_utils.h +++ b/tools/lib/h5tools_utils.h @@ -114,9 +114,12 @@ typedef struct find_objs_t { H5TOOLS_DLLVAR unsigned h5tools_nCols; /*max number of columns for outputting */ /* Definitions of useful routines */ +H5TOOLS_DLL struct subset_t 
*parse_subset_params(const char *dset); + H5TOOLS_DLL void indentation(unsigned); H5TOOLS_DLL void print_version(const char *progname); H5TOOLS_DLL void parallel_print(const char *format, ...); +H5TOOLS_DLL void parse_hsize_list(const char *h_list, subset_d *d); H5TOOLS_DLL herr_t parse_tuple(const char *start, int sep, char **cpy_out, unsigned *nelems, char ***ptrs_out); H5TOOLS_DLL void error_msg(const char *fmt, ...); diff --git a/tools/src/h5diff/h5diff_common.c b/tools/src/h5diff/h5diff_common.c index 293c55aa2b7..e38c33867bf 100644 --- a/tools/src/h5diff/h5diff_common.c +++ b/tools/src/h5diff/h5diff_common.c @@ -24,23 +24,24 @@ static int check_d_input(const char *); * Command-line options: The user can specify short or long-named * parameters. */ -static const char *s_opts = "hVrv*qn:d:p:NcelxE:A:S*"; -static struct long_options l_opts[] = {{"help", no_arg, 'h'}, - {"version", no_arg, 'V'}, - {"report", no_arg, 'r'}, - {"verbose", optional_arg, 'v'}, - {"quiet", no_arg, 'q'}, - {"count", require_arg, 'n'}, +static const char *s_opts = "cd:ehln:p:qrv*xA:CE:NS*V"; +static struct long_options l_opts[] = {{"compare", no_arg, 'c'}, {"delta", require_arg, 'd'}, - {"relative", require_arg, 'p'}, - {"nan", no_arg, 'N'}, - {"compare", no_arg, 'c'}, {"use-system-epsilon", no_arg, 'e'}, + {"help", no_arg, 'h'}, {"follow-symlinks", no_arg, 'l'}, + {"count", require_arg, 'n'}, + {"relative", require_arg, 'p'}, + {"quiet", no_arg, 'q'}, + {"report", no_arg, 'r'}, + {"verbose", optional_arg, 'v'}, {"no-dangling-links", no_arg, 'x'}, - {"exclude-path", require_arg, 'E'}, {"exclude-attribute", require_arg, 'A'}, + {"no-compact-subset", no_arg, 'C'}, + {"exclude-path", require_arg, 'E'}, + {"nan", no_arg, 'N'}, {"enable-error-stack", optional_arg, 'S'}, + {"version", no_arg, 'V'}, {"vol-value-1", require_arg, '1'}, {"vol-name-1", require_arg, '2'}, {"vol-info-1", require_arg, '3'}, @@ -75,122 +76,6 @@ check_options(diff_opt_t *opts) } } 
-/*------------------------------------------------------------------------- - * Function: parse_hsize_list - * - * Purpose: Parse a list of comma or space separated integers and return - * them in a list. The string being passed into this function - * should be at the start of the list you want to parse. You are - * responsible for freeing the array returned from here. - * - * Lists in the so-called "terse" syntax are separated by - * semicolons (;). The lists themselves can be separated by - * either commas (,) or white spaces. - * - * Return: - *------------------------------------------------------------------------- - */ -static void -parse_hsize_list(const char *h_list, subset_d *d) -{ - hsize_t *p_list; - const char *ptr; - unsigned int size_count = 0; - unsigned int i = 0; - unsigned int last_digit = 0; - - if (!h_list || !*h_list || *h_list == ';') - return; - - H5TOOLS_START_DEBUG(" - h_list:%s", h_list); - /* count how many integers do we have */ - for (ptr = h_list; ptr && *ptr && *ptr != ';' && *ptr != ']'; ptr++) - if (HDisdigit(*ptr)) { - if (!last_digit) - /* the last read character wasn't a digit */ - size_count++; - - last_digit = 1; - } - else - last_digit = 0; - - if (size_count == 0) { - /* there aren't any integers to read */ - H5TOOLS_ENDDEBUG("No integers to read"); - return; - } - H5TOOLS_DEBUG("Number integers to read=%ld", size_count); - - /* allocate an array for the integers in the list */ - if ((p_list = (hsize_t *)HDcalloc(size_count, sizeof(hsize_t))) == NULL) - H5TOOLS_INFO("Unable to allocate space for subset data"); - - for (ptr = h_list; i < size_count && ptr && *ptr && *ptr != ';' && *ptr != ']'; ptr++) - if (HDisdigit(*ptr)) { - /* we should have an integer now */ - p_list[i++] = (hsize_t)HDstrtoull(ptr, NULL, 0); - - while (HDisdigit(*ptr)) - /* scroll to end of integer */ - ptr++; - } - d->data = p_list; - d->len = size_count; - H5TOOLS_ENDDEBUG(" "); -} - 
-/*------------------------------------------------------------------------- - * Function: parse_subset_params - * - * Purpose: Parse the so-called "terse" syntax for specifying subsetting parameters. - * - * Return: Success: struct subset_t object - * Failure: NULL - *------------------------------------------------------------------------- - */ -static struct subset_t * -parse_subset_params(const char *dset) -{ - struct subset_t *s = NULL; - char *brace; - - H5TOOLS_START_DEBUG(" - dset:%s", dset); - if ((brace = HDstrrchr(dset, '[')) != NULL) { - *brace++ = '\0'; - - s = (struct subset_t *)HDcalloc(1, sizeof(struct subset_t)); - parse_hsize_list(brace, &s->start); - - while (*brace && *brace != ';') - brace++; - - if (*brace) - brace++; - - parse_hsize_list(brace, &s->stride); - - while (*brace && *brace != ';') - brace++; - - if (*brace) - brace++; - - parse_hsize_list(brace, &s->count); - - while (*brace && *brace != ';') - brace++; - - if (*brace) - brace++; - - parse_hsize_list(brace, &s->block); - } - H5TOOLS_ENDDEBUG(" "); - - return s; -} - /*------------------------------------------------------------------------- * Function: parse_command_line * @@ -327,6 +212,10 @@ parse_command_line(int argc, const char *const *argv, const char **fname1, const } break; + case 'C': + opts->disable_compact_subset = TRUE; + break; + case 'A': opts->exclude_attr_path = 1; @@ -476,13 +365,10 @@ parse_command_line(int argc, const char *const *argv, const char **fname1, const } H5TOOLS_DEBUG("objname2 = %s", *objname2); - /* - * TRILABS_227 is complete except for an issue with printing indices - * the following calls will enable subsetting - */ - opts->sset[0] = parse_subset_params(*objname1); - - opts->sset[1] = parse_subset_params(*objname2); + if (!opts->disable_compact_subset) { + opts->sset[0] = parse_subset_params(*objname1); + opts->sset[1] = parse_subset_params(*objname2); + } H5TOOLS_ENDDEBUG(" "); } @@ -823,6 +709,9 @@ usage(void) * the following will be needed for 
subsetting */ PRINTVALSTREAM(rawoutstream, " Subsetting options:\n"); + PRINTVALSTREAM(rawoutstream, + " --no-compact-subset Disable compact form of subsetting and allow the use\n"); + PRINTVALSTREAM(rawoutstream, " of \"[\" in dataset names.\n"); PRINTVALSTREAM(rawoutstream, " Subsetting is available by using the fcompact form of subsetting, as follows:\n"); PRINTVALSTREAM(rawoutstream, " obj1 /foo/mydataset[START;STRIDE;COUNT;BLOCK]\n"); diff --git a/tools/src/h5dump/h5dump.c b/tools/src/h5dump/h5dump.c index f5f6fbf98a7..3c0ed1af2c4 100644 --- a/tools/src/h5dump/h5dump.c +++ b/tools/src/h5dump/h5dump.c @@ -538,114 +538,6 @@ set_sort_order(const char *form) return iter_order; } -/*------------------------------------------------------------------------- - * Function: parse_hsize_list - * - * Purpose: Parse a list of comma or space separated integers and return - * them in a list. The string being passed into this function - * should be at the start of the list you want to parse. You are - * responsible for freeing the array returned from here. - * - * Lists in the so-called "terse" syntax are separated by - * semicolons (;). The lists themselves can be separated by - * either commas (,) or white spaces. 
- * - * Return: - *------------------------------------------------------------------------- - */ -static void -parse_hsize_list(const char *h_list, subset_d *d) -{ - hsize_t *p_list; - const char *ptr; - unsigned int size_count = 0; - unsigned int i = 0; - unsigned int last_digit = 0; - - if (!h_list || !*h_list || *h_list == ';') - return; - - /* count how many integers do we have */ - for (ptr = h_list; ptr && *ptr && *ptr != ';' && *ptr != ']'; ptr++) - if (HDisdigit(*ptr)) { - if (!last_digit) - /* the last read character wasn't a digit */ - size_count++; - - last_digit = 1; - } - else - last_digit = 0; - - if (size_count == 0) - /* there aren't any integers to read */ - return; - - /* allocate an array for the integers in the list */ - p_list = (hsize_t *)HDcalloc(size_count, sizeof(hsize_t)); - - for (ptr = h_list; i < size_count && ptr && *ptr && *ptr != ';' && *ptr != ']'; ptr++) - if (HDisdigit(*ptr)) { - /* we should have an integer now */ - p_list[i++] = (hsize_t)HDstrtoull(ptr, NULL, 0); - - while (HDisdigit(*ptr)) - /* scroll to end of integer */ - ptr++; - } - d->data = p_list; - d->len = size_count; -} - -/*------------------------------------------------------------------------- - * Function: parse_subset_params - * - * Purpose: Parse the so-called "terse" syntax for specifying subsetting parameters. 
- * - * Return: Success: struct subset_t object - * Failure: NULL - *------------------------------------------------------------------------- - */ -static struct subset_t * -parse_subset_params(const char *dset) -{ - struct subset_t *s = NULL; - char *brace; - - if (!dump_opts.disable_compact_subset && ((brace = HDstrrchr(dset, '[')) != NULL)) { - *brace++ = '\0'; - - s = (struct subset_t *)HDcalloc(1, sizeof(struct subset_t)); - parse_hsize_list(brace, &s->start); - - while (*brace && *brace != ';') - brace++; - - if (*brace) - brace++; - - parse_hsize_list(brace, &s->stride); - - while (*brace && *brace != ';') - brace++; - - if (*brace) - brace++; - - parse_hsize_list(brace, &s->count); - - while (*brace && *brace != ';') - brace++; - - if (*brace) - brace++; - - parse_hsize_list(brace, &s->block); - } - - return s; -} - /*------------------------------------------------------------------------- * Function: parse_mask_list * @@ -931,10 +823,11 @@ parse_command_line(int argc, const char *const *argv) for (i = 0; i < argc; i++) if (!hand[i].func) { - hand[i].func = handle_datasets; - hand[i].obj = HDstrdup(opt_arg); - hand[i].subset_info = parse_subset_params(hand[i].obj); - last_dset = &hand[i]; + hand[i].func = handle_datasets; + hand[i].obj = HDstrdup(opt_arg); + if (!dump_opts.disable_compact_subset) + hand[i].subset_info = parse_subset_params(hand[i].obj); + last_dset = &hand[i]; break; } diff --git a/tools/test/h5diff/testfiles/h5diff_10.txt b/tools/test/h5diff/testfiles/h5diff_10.txt index 26418eb6478..3a238aa3668 100644 --- a/tools/test/h5diff/testfiles/h5diff_10.txt +++ b/tools/test/h5diff/testfiles/h5diff_10.txt @@ -145,6 +145,8 @@ usage: h5diff [OPTIONS] file1 file2 [obj1[ obj2]] symbolic links are compared.). Subsetting options: + --no-compact-subset Disable compact form of subsetting and allow the use + of "[" in dataset names. 
Subsetting is available by using the fcompact form of subsetting, as follows: obj1 /foo/mydataset[START;STRIDE;COUNT;BLOCK] It is not required to use all parameters, but until the last parameter value used, diff --git a/tools/test/h5diff/testfiles/h5diff_600.txt b/tools/test/h5diff/testfiles/h5diff_600.txt index ae61123c9b6..1d114b342a0 100644 --- a/tools/test/h5diff/testfiles/h5diff_600.txt +++ b/tools/test/h5diff/testfiles/h5diff_600.txt @@ -145,6 +145,8 @@ usage: h5diff [OPTIONS] file1 file2 [obj1[ obj2]] symbolic links are compared.). Subsetting options: + --no-compact-subset Disable compact form of subsetting and allow the use + of "[" in dataset names. Subsetting is available by using the fcompact form of subsetting, as follows: obj1 /foo/mydataset[START;STRIDE;COUNT;BLOCK] It is not required to use all parameters, but until the last parameter value used, diff --git a/tools/test/h5diff/testfiles/h5diff_603.txt b/tools/test/h5diff/testfiles/h5diff_603.txt index 48c80a377e9..81b2d6c31f8 100644 --- a/tools/test/h5diff/testfiles/h5diff_603.txt +++ b/tools/test/h5diff/testfiles/h5diff_603.txt @@ -146,6 +146,8 @@ usage: h5diff [OPTIONS] file1 file2 [obj1[ obj2]] symbolic links are compared.). Subsetting options: + --no-compact-subset Disable compact form of subsetting and allow the use + of "[" in dataset names. Subsetting is available by using the fcompact form of subsetting, as follows: obj1 /foo/mydataset[START;STRIDE;COUNT;BLOCK] It is not required to use all parameters, but until the last parameter value used, diff --git a/tools/test/h5diff/testfiles/h5diff_606.txt b/tools/test/h5diff/testfiles/h5diff_606.txt index 9f37b117fe5..f367a7bfa89 100644 --- a/tools/test/h5diff/testfiles/h5diff_606.txt +++ b/tools/test/h5diff/testfiles/h5diff_606.txt @@ -146,6 +146,8 @@ usage: h5diff [OPTIONS] file1 file2 [obj1[ obj2]] symbolic links are compared.). 
Subsetting options: + --no-compact-subset Disable compact form of subsetting and allow the use + of "[" in dataset names. Subsetting is available by using the fcompact form of subsetting, as follows: obj1 /foo/mydataset[START;STRIDE;COUNT;BLOCK] It is not required to use all parameters, but until the last parameter value used, diff --git a/tools/test/h5diff/testfiles/h5diff_612.txt b/tools/test/h5diff/testfiles/h5diff_612.txt index e616e51fd49..5e2728705db 100644 --- a/tools/test/h5diff/testfiles/h5diff_612.txt +++ b/tools/test/h5diff/testfiles/h5diff_612.txt @@ -146,6 +146,8 @@ usage: h5diff [OPTIONS] file1 file2 [obj1[ obj2]] symbolic links are compared.). Subsetting options: + --no-compact-subset Disable compact form of subsetting and allow the use + of "[" in dataset names. Subsetting is available by using the fcompact form of subsetting, as follows: obj1 /foo/mydataset[START;STRIDE;COUNT;BLOCK] It is not required to use all parameters, but until the last parameter value used, diff --git a/tools/test/h5diff/testfiles/h5diff_615.txt b/tools/test/h5diff/testfiles/h5diff_615.txt index 39e0458774e..da4e1ee5a15 100644 --- a/tools/test/h5diff/testfiles/h5diff_615.txt +++ b/tools/test/h5diff/testfiles/h5diff_615.txt @@ -146,6 +146,8 @@ usage: h5diff [OPTIONS] file1 file2 [obj1[ obj2]] symbolic links are compared.). Subsetting options: + --no-compact-subset Disable compact form of subsetting and allow the use + of "[" in dataset names. 
Subsetting is available by using the fcompact form of subsetting, as follows: obj1 /foo/mydataset[START;STRIDE;COUNT;BLOCK] It is not required to use all parameters, but until the last parameter value used, diff --git a/tools/test/h5diff/testfiles/h5diff_621.txt b/tools/test/h5diff/testfiles/h5diff_621.txt index 8bc019e5bbd..4166ce26b26 100644 --- a/tools/test/h5diff/testfiles/h5diff_621.txt +++ b/tools/test/h5diff/testfiles/h5diff_621.txt @@ -146,6 +146,8 @@ usage: h5diff [OPTIONS] file1 file2 [obj1[ obj2]] symbolic links are compared.). Subsetting options: + --no-compact-subset Disable compact form of subsetting and allow the use + of "[" in dataset names. Subsetting is available by using the fcompact form of subsetting, as follows: obj1 /foo/mydataset[START;STRIDE;COUNT;BLOCK] It is not required to use all parameters, but until the last parameter value used, diff --git a/tools/test/h5diff/testfiles/h5diff_622.txt b/tools/test/h5diff/testfiles/h5diff_622.txt index 39fb8dfc419..98e4c284586 100644 --- a/tools/test/h5diff/testfiles/h5diff_622.txt +++ b/tools/test/h5diff/testfiles/h5diff_622.txt @@ -146,6 +146,8 @@ usage: h5diff [OPTIONS] file1 file2 [obj1[ obj2]] symbolic links are compared.). Subsetting options: + --no-compact-subset Disable compact form of subsetting and allow the use + of "[" in dataset names. Subsetting is available by using the fcompact form of subsetting, as follows: obj1 /foo/mydataset[START;STRIDE;COUNT;BLOCK] It is not required to use all parameters, but until the last parameter value used, diff --git a/tools/test/h5diff/testfiles/h5diff_623.txt b/tools/test/h5diff/testfiles/h5diff_623.txt index 8250bf4ca20..c886870288e 100644 --- a/tools/test/h5diff/testfiles/h5diff_623.txt +++ b/tools/test/h5diff/testfiles/h5diff_623.txt @@ -146,6 +146,8 @@ usage: h5diff [OPTIONS] file1 file2 [obj1[ obj2]] symbolic links are compared.). 
Subsetting options: + --no-compact-subset Disable compact form of subsetting and allow the use + of "[" in dataset names. Subsetting is available by using the fcompact form of subsetting, as follows: obj1 /foo/mydataset[START;STRIDE;COUNT;BLOCK] It is not required to use all parameters, but until the last parameter value used, diff --git a/tools/test/h5diff/testfiles/h5diff_624.txt b/tools/test/h5diff/testfiles/h5diff_624.txt index b362749a296..f0ca01ac991 100644 --- a/tools/test/h5diff/testfiles/h5diff_624.txt +++ b/tools/test/h5diff/testfiles/h5diff_624.txt @@ -146,6 +146,8 @@ usage: h5diff [OPTIONS] file1 file2 [obj1[ obj2]] symbolic links are compared.). Subsetting options: + --no-compact-subset Disable compact form of subsetting and allow the use + of "[" in dataset names. Subsetting is available by using the fcompact form of subsetting, as follows: obj1 /foo/mydataset[START;STRIDE;COUNT;BLOCK] It is not required to use all parameters, but until the last parameter value used, From ba9ebcbb4d677f11051a07b5a4346b1191b78eed Mon Sep 17 00:00:00 2001 From: Dave Allured Date: Thu, 20 Apr 2023 15:52:07 -0600 Subject: [PATCH 055/108] H5Spoint.c: Comment fix (#2782) Comment should be "point", not "hyperslab". 
--- src/H5Spoint.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/H5Spoint.c b/src/H5Spoint.c index 22feaf83b40..c6fdecd3681 100644 --- a/src/H5Spoint.c +++ b/src/H5Spoint.c @@ -122,7 +122,7 @@ const H5S_select_class_t H5S_sel_point[1] = {{ H5S__point_iter_init, }}; -/* Format version bounds for dataspace hyperslab selection */ +/* Format version bounds for dataspace point selection */ const unsigned H5O_sds_point_ver_bounds[] = { H5S_POINT_VERSION_1, /* H5F_LIBVER_EARLIEST */ H5S_POINT_VERSION_1, /* H5F_LIBVER_V18 */ From 30d3b1e48e55c15217d6a400b3ac8c927d5003d3 Mon Sep 17 00:00:00 2001 From: jhendersonHDF Date: Sun, 30 Apr 2023 13:56:22 -0500 Subject: [PATCH 056/108] Fix v1 object header gap bug in H5Ocopy (#2785) (#2833) --- release_docs/RELEASE.txt | 12 ++++++++ src/H5Ocopy.c | 9 ++++-- test/objcopy.c | 64 ++++++++++++++++++++++++++++++++++++++++ 3 files changed, 83 insertions(+), 2 deletions(-) diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index a5eb5851a03..1e0af89a987 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -205,6 +205,18 @@ Bug Fixes since HDF5-1.12.2 release =================================== Library ------- + - Fixed a bug in H5Ocopy that could generate invalid HDF5 files + + H5Ocopy was missing a check to determine whether the new object's + object header version is greater than version 1. Without this check, + copying of objects with object headers that are smaller than a + certain size would cause H5Ocopy to create an object header for the + new object that has a gap in the header data. According to the + HDF5 File Format Specification, this is not allowed for version + 1 of the object header format. 
+ + Fixes GitHub issue #2653 + - Fixed potential heap buffer overflow in decoding of link info message Detections of buffer overflow were added for decoding version, index diff --git a/src/H5Ocopy.c b/src/H5Ocopy.c index abd1f0b2cf5..90c7ed82ae5 100644 --- a/src/H5Ocopy.c +++ b/src/H5Ocopy.c @@ -664,10 +664,15 @@ H5O__copy_header_real(const H5O_loc_t *oloc_src, H5O_loc_t *oloc_dst /*out*/, H5 HDassert((oh_dst->flags & H5O_HDR_CHUNK0_SIZE) == H5O_HDR_CHUNK0_1); /* Determine whether to create gap or NULL message */ - if (delta < H5O_SIZEOF_MSGHDR_OH(oh_dst)) + if ((oh_dst->version > H5O_VERSION_1) && (delta < H5O_SIZEOF_MSGHDR_OH(oh_dst))) dst_oh_gap = delta; - else + else { + /* NULL message must be at least size of message header */ + if (delta < H5O_SIZEOF_MSGHDR_OH(oh_dst)) + delta = H5O_SIZEOF_MSGHDR_OH(oh_dst); + dst_oh_null = delta; + } /* Increase destination object header size */ dst_oh_size += delta; diff --git a/test/objcopy.c b/test/objcopy.c index 03e5b68a1aa..5a5b8f9bf8f 100644 --- a/test/objcopy.c +++ b/test/objcopy.c @@ -16391,6 +16391,68 @@ test_copy_iterate_cb(hid_t loc_id, const char *name, const H5L_info2_t H5_ATTR_U return (H5_ITER_ERROR); } /* end test_copy_iterate_cb */ +/* + * Test for a bug with copying of v1 object headers where the + * new object header would end up with a gap in the header data, + * which v1 object header shouldn't have. 
+ */ +static int +test_copy_cdt_v1_header_bug(hid_t fcpl_src, hid_t src_fapl) +{ + hid_t file_id = H5I_INVALID_HID; + hid_t type_id = H5I_INVALID_HID; + hid_t ocpypl_id = H5I_INVALID_HID; + char src_filename[NAME_BUF_SIZE]; + + TESTING("H5Ocopy(): bug with copying v1 object headers"); + + /* Initialize the filenames */ + h5_fixname(FILENAME[0], src_fapl, src_filename, sizeof src_filename); + + if ((file_id = H5Fcreate(src_filename, H5F_ACC_TRUNC, fcpl_src, src_fapl)) < 0) + TEST_ERROR; + + if ((type_id = H5Tcreate(H5T_STRING, 385)) < 0) + TEST_ERROR; + if (H5Tset_strpad(type_id, H5T_STR_NULLPAD) < 0) + TEST_ERROR; + if (H5Tset_cset(type_id, H5T_CSET_ASCII) < 0) + TEST_ERROR; + + if (H5Tcommit2(file_id, "committed_str_type", type_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT) < 0) + TEST_ERROR; + + if ((ocpypl_id = H5Pcreate(H5P_OBJECT_COPY)) < 0) + TEST_ERROR; + if (H5Pset_copy_object(ocpypl_id, H5O_COPY_WITHOUT_ATTR_FLAG) < 0) + TEST_ERROR; + + if (H5Ocopy(file_id, "committed_str_type", file_id, "committed_str_type2", ocpypl_id, H5P_DEFAULT) < 0) + TEST_ERROR; + + if (H5Tclose(type_id) < 0) + TEST_ERROR; + if (H5Pclose(ocpypl_id) < 0) + TEST_ERROR; + if (H5Fclose(file_id) < 0) + TEST_ERROR; + + PASSED(); + + return 0; + +error: + H5E_BEGIN_TRY + { + H5Tclose(type_id); + H5Pclose(ocpypl_id); + H5Fclose(file_id); + } + H5E_END_TRY; + + return 1; +} + static int test_copy_iterate(hid_t fcpl_src, hid_t fcpl_dst, hid_t src_fapl, hid_t dst_fapl) { @@ -17555,6 +17617,8 @@ main(void) nerrors += test_copy_null_ref(fcpl_src, fcpl_dst, src_fapl, dst_fapl); nerrors += test_copy_null_ref_open(fcpl_src, fcpl_dst, src_fapl, dst_fapl); + nerrors += test_copy_cdt_v1_header_bug(fcpl_src, src_fapl); + nerrors += test_copy_iterate(fcpl_src, fcpl_dst, src_fapl, dst_fapl); } /* end if */ From e7f9e24d0a9c61bbd7c685054ef83bc4f832e938 Mon Sep 17 00:00:00 2001 From: glennsong09 <43005495+glennsong09@users.noreply.github.com> Date: Sun, 30 Apr 2023 15:09:41 -0500 Subject: [PATCH 057/108] Clean 
up memory allocated when reading messages in H5Dlayout on error (#2811) --- release_docs/RELEASE.txt | 10 ++++++++++ src/H5Dlayout.c | 21 ++++++++++++++++----- 2 files changed, 26 insertions(+), 5 deletions(-) diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 1e0af89a987..5856d817cbd 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -123,6 +123,16 @@ New Features Java Library: ------------- + - Fixed memory leaks that could occur when reading a dataset from a + malformed file + + When attempting to read layout, pline, and efl information for a + dataset, memory leaks could occur if attempting to read pline/efl + information threw an error, which is due to the memory that was + allocated for pline and efl not being properly cleaned up on error. + + Fixes Github issue #2602 + - HDF5GroupInfo class has been deprecated. This class assumes that an object can contain four values which uniquely identify an diff --git a/src/H5Dlayout.c b/src/H5Dlayout.c index 53943b467e5..019ead1e7cd 100644 --- a/src/H5Dlayout.c +++ b/src/H5Dlayout.c @@ -591,7 +591,9 @@ herr_t H5D__layout_oh_read(H5D_t *dataset, hid_t dapl_id, H5P_genplist_t *plist) { htri_t msg_exists; /* Whether a particular type of message exists */ + hbool_t pline_copied = FALSE; /* Flag to indicate that dcpl_cache.pline's message was copied */ hbool_t layout_copied = FALSE; /* Flag to indicate that layout message was copied */ + hbool_t efl_copied = FALSE; /* Flag to indicate that the EFL message was copied */ herr_t ret_value = SUCCEED; /* Return value */ FUNC_ENTER_PACKAGE @@ -607,6 +609,7 @@ H5D__layout_oh_read(H5D_t *dataset, hid_t dapl_id, H5P_genplist_t *plist) /* Retrieve the I/O pipeline message */ if (NULL == H5O_msg_read(&(dataset->oloc), H5O_PLINE_ID, &dataset->shared->dcpl_cache.pline)) HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't retrieve message") + pline_copied = TRUE; /* Set the I/O pipeline info in the property list */ if (H5P_set(plist, 
H5O_CRT_PIPELINE_NAME, &dataset->shared->dcpl_cache.pline) < 0) @@ -627,9 +630,10 @@ H5D__layout_oh_read(H5D_t *dataset, hid_t dapl_id, H5P_genplist_t *plist) if ((msg_exists = H5O_msg_exists(&(dataset->oloc), H5O_EFL_ID)) < 0) HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't check if message exists") if (msg_exists) { - /* Retrieve the EFL message */ + /* Retrieve the EFL message */ if (NULL == H5O_msg_read(&(dataset->oloc), H5O_EFL_ID, &dataset->shared->dcpl_cache.efl)) HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't retrieve message") + efl_copied = TRUE; /* Set the EFL info in the property list */ if (H5P_set(plist, H5D_CRT_EXT_FILE_LIST_NAME, &dataset->shared->dcpl_cache.efl) < 0) @@ -661,10 +665,17 @@ H5D__layout_oh_read(H5D_t *dataset, hid_t dapl_id, H5P_genplist_t *plist) HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL, "unable to set chunk sizes") done: - if (ret_value < 0 && layout_copied) - if (H5O_msg_reset(H5O_LAYOUT_ID, &dataset->shared->layout) < 0) - HDONE_ERROR(H5E_DATASET, H5E_CANTRESET, FAIL, "unable to reset layout info") - + if (ret_value < 0) { + if (pline_copied) + if (H5O_msg_reset(H5O_PLINE_ID, &dataset->shared->dcpl_cache.pline) < 0) + HDONE_ERROR(H5E_DATASET, H5E_CANTRESET, FAIL, "unable to reset pipeline info") + if (layout_copied) + if (H5O_msg_reset(H5O_LAYOUT_ID, &dataset->shared->layout) < 0) + HDONE_ERROR(H5E_DATASET, H5E_CANTRESET, FAIL, "unable to reset layout info") + if (efl_copied) + if (H5O_msg_reset(H5O_EFL_ID, &dataset->shared->dcpl_cache.efl) < 0) + HDONE_ERROR(H5E_DATASET, H5E_CANTRESET, FAIL, "unable to reset efl message") + } FUNC_LEAVE_NOAPI(ret_value) } /* end H5D__layout_oh_read() */ From a92b768a939a0f3afed2fe66d14848b28abf3fd7 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Sun, 30 Apr 2023 15:11:30 -0500 Subject: [PATCH 058/108] Add support for CMakePresets and fix example download (#2819) --- .gitignore | 1 + CMakeInstallation.cmake | 55 ++- CMakePresets.json | 248 
++++++++++++ config/cmake-presets/hidden-presets.json | 491 +++++++++++++++++++++++ release_docs/INSTALL_CMake.txt | 101 ++++- release_docs/RELEASE.txt | 9 + 6 files changed, 893 insertions(+), 12 deletions(-) create mode 100644 CMakePresets.json create mode 100644 config/cmake-presets/hidden-presets.json diff --git a/.gitignore b/.gitignore index 3caf16a1c67..cbaccb29d32 100644 --- a/.gitignore +++ b/.gitignore @@ -41,3 +41,4 @@ src/H5overflow.h src/H5version.h /.classpath +/CMakeUserPresets.json diff --git a/CMakeInstallation.cmake b/CMakeInstallation.cmake index 018a36462d1..54034a7373a 100644 --- a/CMakeInstallation.cmake +++ b/CMakeInstallation.cmake @@ -146,17 +146,56 @@ if (HDF5_PACK_EXAMPLES) DESTINATION ${HDF5_INSTALL_DATA_DIR} COMPONENT hdfdocuments ) - if (EXISTS "${HDF5_EXAMPLES_COMPRESSED_DIR}/${HDF5_EXAMPLES_COMPRESSED}") + + option (EXAMPLES_USE_RELEASE_NAME "Use the released examples artifact name" OFF) + option (EXAMPLES_DOWNLOAD "Download to use released examples files" OFF) + if (EXAMPLES_DOWNLOAD) + if (NOT EXAMPLES_USE_LOCALCONTENT) + set (EXAMPLES_URL ${EXAMPLES_TGZ_ORIGPATH}/${EXAMPLES_TGZ_ORIGNAME}) + else () + set (EXAMPLES_URL ${TGZPATH}/${EXAMPLES_TGZ_ORIGNAME}) + endif () + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "Examples file is ${EXAMPLES_URL}") + endif () + file (DOWNLOAD ${EXAMPLES_URL} ${HDF5_BINARY_DIR}/${HDF5_EXAMPLES_COMPRESSED}) + if (EXISTS "${HDF5_BINARY_DIR}/${HDF5_EXAMPLES_COMPRESSED}") + execute_process( + COMMAND ${CMAKE_COMMAND} -E tar xzf ${HDF5_EXAMPLES_COMPRESSED} + WORKING_DIRECTORY ${HDF5_BINARY_DIR} + COMMAND_ECHO STDOUT + ) + endif () + set (EXAMPLES_USE_RELEASE_NAME ON CACHE BOOL "" FORCE) + else () + if (EXISTS "${HDF5_EXAMPLES_COMPRESSED_DIR}/${HDF5_EXAMPLES_COMPRESSED}") + execute_process( + COMMAND ${CMAKE_COMMAND} -E tar xzf ${HDF5_EXAMPLES_COMPRESSED_DIR}/${HDF5_EXAMPLES_COMPRESSED} + WORKING_DIRECTORY ${HDF5_BINARY_DIR} + COMMAND_ECHO STDOUT + ) + endif () + endif () + if 
(EXAMPLES_USE_RELEASE_NAME) + get_filename_component (EX_LAST_EXT ${HDF5_EXAMPLES_COMPRESSED} LAST_EXT) + if (${EX_LAST_EXT} STREQUAL ".zip") + get_filename_component (EX_DIR_NAME ${HDF5_EXAMPLES_COMPRESSED} NAME_WLE) + else () + get_filename_component (EX_DIR_NAME ${HDF5_EXAMPLES_COMPRESSED} NAME_WLE) + get_filename_component (EX_DIR_NAME ${EX_DIR_NAME} NAME_WLE) + endif () execute_process( - COMMAND ${CMAKE_COMMAND} -E tar xzf ${HDF5_EXAMPLES_COMPRESSED_DIR}/${HDF5_EXAMPLES_COMPRESSED} - ) - install ( - DIRECTORY ${HDF5_BINARY_DIR}/HDF5Examples - DESTINATION ${HDF5_INSTALL_DATA_DIR} - USE_SOURCE_PERMISSIONS - COMPONENT hdfdocuments + COMMAND ${CMAKE_COMMAND} -E rename ${EX_DIR_NAME} HDF5Examples + WORKING_DIRECTORY ${HDF5_BINARY_DIR} + COMMAND_ECHO STDOUT ) endif () + install ( + DIRECTORY ${HDF5_BINARY_DIR}/HDF5Examples + DESTINATION ${HDF5_INSTALL_DATA_DIR} + USE_SOURCE_PERMISSIONS + COMPONENT hdfdocuments + ) install ( FILES ${HDF5_SOURCE_DIR}/release_docs/USING_CMake_Examples.txt diff --git a/CMakePresets.json b/CMakePresets.json new file mode 100644 index 00000000000..d806cdaa1da --- /dev/null +++ b/CMakePresets.json @@ -0,0 +1,248 @@ +{ + "version": 6, + "include": [ + "config/cmake-presets/hidden-presets.json" + ], + "configurePresets": [ + { + "name": "ci-base-tgz", + "hidden": true, + "inherits": "ci-base", + "cacheVariables": { + "HDF5_ALLOW_EXTERNAL_SUPPORT": "NO", + "TGZPATH": {"type": "STRING", "value": "${sourceParentDir}/temp"} + } + }, + { + "name": "ci-StdCompression", + "hidden": true, + "inherits": "ci-base-tgz", + "cacheVariables": { + "HDF5_ENABLE_Z_LIB_SUPPORT": "ON", + "HDF5_ENABLE_SZIP_SUPPORT": "ON", + "HDF5_ENABLE_SZIP_ENCODING": "ON", + "BUILD_ZLIB_WITH_FETCHCONTENT": "ON", + "ZLIB_PACKAGE_NAME": {"type": "STRING", "value": "zlib"}, + "ZLIB_TGZ_ORIGPATH": {"type": "STRING", "value": "https://github.com/madler/zlib/releases/download/v1.2.13"}, + "ZLIB_TGZ_ORIGNAME": {"type": "STRING", "value": "zlib-1.2.13.tar.gz"}, + 
"ZLIB_USE_LOCALCONTENT": "OFF", + "BUILD_SZIP_WITH_FETCHCONTENT": "ON", + "LIBAEC_PACKAGE_NAME": {"type": "STRING", "value": "libaec"}, + "LIBAEC_TGZ_ORIGPATH": {"type": "STRING", "value": "https://github.com/MathisRosenhauer/libaec/releases/download/v1.0.6"}, + "LIBAEC_TGZ_ORIGNAME": {"type": "STRING", "value": "libaec-1.0.6.tar.gz"}, + "LIBAEC_USE_LOCALCONTENT": "OFF" + } + }, + { + "name": "ci-base-plugins", + "hidden": true, + "inherits": "ci-base-tgz", + "cacheVariables": { + "PLUGIN_TGZ_NAME": {"type": "STRING", "value": "hdf5_plugins-1.14.0.tar.gz"}, + "PLUGIN_PACKAGE_NAME": {"type": "STRING", "value": "pl"}, + "BSHUF_TGZ_NAME": {"type": "STRING", "value": "bitshuffle.tar.gz"}, + "BSHUF_PACKAGE_NAME": {"type": "STRING", "value": "bshuf"}, + "BLOSC_TGZ_NAME": {"type": "STRING", "value": "c-blosc.tar.gz"}, + "BLOSC_PACKAGE_NAME": {"type": "STRING", "value": "blosc"}, + "BLOSC_ZLIB_TGZ_NAME": {"type": "STRING", "value": "ZLib.tar.gz"}, + "BLOSC_ZLIB_PACKAGE_NAME": {"type": "STRING", "value": "zlib"}, + "BZ2_TGZ_NAME": {"type": "STRING", "value": "BZ2.tar.gz"}, + "BZ2_PACKAGE_NAME": {"type": "STRING", "value": "bz2"}, + "FPZIP_TGZ_NAME": {"type": "STRING", "value": "fpzip.tar.gz"}, + "FPZIP_PACKAGE_NAME": {"type": "STRING", "value": "fpzip"}, + "JPEG_TGZ_NAME": {"type": "STRING", "value": "JPEG.tar.gz"}, + "JPEG_PACKAGE_NAME": {"type": "STRING", "value": "jpeg"}, + "BUILD_LZ4_LIBRARY_SOURCE": "ON", + "LZ4_TGZ_NAME": {"type": "STRING", "value": "lz4.tar.gz"}, + "LZ4_PACKAGE_NAME": {"type": "STRING", "value": "lz4"}, + "LZF_TGZ_NAME": {"type": "STRING", "value": "lzf.tar.gz"}, + "LZF_PACKAGE_NAME": {"type": "STRING", "value": "lzf"}, + "SZ_TGZ_NAME": {"type": "STRING", "value": "szf.tar.gz"}, + "SZ_PACKAGE_NAME": {"type": "STRING", "value": "SZ"}, + "ZFP_TGZ_NAME": {"type": "STRING", "value": "zfp.tar.gz"}, + "ZFP_PACKAGE_NAME": {"type": "STRING", "value": "zfp"}, + "ZSTD_TGZ_NAME": {"type": "STRING", "value": "zstd.tar.gz"}, + "ZSTD_PACKAGE_NAME": {"type": 
"STRING", "value": "zstd"} + } + }, + { + "name": "ci-StdPlugins", + "hidden": true, + "inherits": ["ci-base-plugins", "ci-base-tgz"], + "cacheVariables": { + "HDF5_ENABLE_PLUGIN_SUPPORT": "ON", + "PLUGIN_TGZ_ORIGPATH": {"type": "STRING", "value": "https://github.com/HDFGroup/hdf5_plugins/archive/refs/tags"}, + "PLUGIN_TGZ_ORIGNAME": {"type": "STRING", "value": "hdf5_plugins-1.14.0.tar.gz"} + } + }, + { + "name": "ci-StdExamples", + "hidden": true, + "inherits": "ci-base", + "cacheVariables": { + "HDF5_PACK_EXAMPLES": "ON", + "HDF5_EXAMPLES_COMPRESSED": {"type": "STRING", "value": "hdf5-examples-2.0.3.tar.gz"}, + "HDF5_EXAMPLES_COMPRESSED_DIR": {"type": "STRING", "value": "${sourceParentDir}/temp"}, + "EXAMPLES_TGZ_ORIGPATH": {"type": "STRING", "value": "https://github.com/HDFGroup/hdf5-examples/archive/refs/tags/"}, + "EXAMPLES_TGZ_ORIGNAME": {"type": "STRING", "value": "2.0.3.tar.gz"}, + "EXAMPLES_DOWNLOAD": "ON" + } + }, + { + "name": "ci-StdShar", + "hidden": true, + "inherits": "ci-StdCompression", + "cacheVariables": { + "HDF_PACKAGE_NAMESPACE": {"type": "STRING", "value": "hdf5::"}, + "HDF5_INSTALL_MOD_FORTRAN": "NO", + "HDF5_BUILD_GENERATORS": "ON", + "HDF5_ENABLE_ALL_WARNINGS": "ON", + "HDF5_MINGW_STATIC_GCC_LIBS": "ON", + "HDF_TEST_EXPRESS": "2" + } + }, + { + "name": "ci-StdShar-MSVC", + "description": "MSVC Standard Config for x64 (Release)", + "inherits": [ + "ci-x64-Release-MSVC", + "ci-CPP", + "ci-Java", + "ci-StdShar", + "ci-StdExamples" + ] + }, + { + "name": "ci-StdShar-MSVC-Fortran", + "description": "MSVC Standard Config for x64 (Release)", + "inherits": [ + "ci-x64-Release-MSVC", + "ci-CPP", + "ci-Fortran", + "ci-Java", + "ci-StdShar", + "ci-StdExamples" + ] + }, + { + "name": "ci-StdShar-Clang", + "description": "Clang Standard Config for x64 (Release)", + "inherits": [ + "ci-x64-Release-Clang", + "ci-CPP", + "ci-Fortran", + "ci-Java", + "ci-StdShar", + "ci-StdExamples" + ] + }, + { + "name": "ci-StdShar-GNUC", + "description": "GNUC Standard 
Config for x64 (Release)", + "inherits": [ + "ci-x64-Release-GNUC", + "ci-CPP", + "ci-Fortran", + "ci-Java", + "ci-StdShar", + "ci-StdExamples" + ] + } + ], + "buildPresets": [ + { + "name": "ci-StdShar-MSVC", + "description": "MSVC Standard Build for x64 (Release)", + "configurePreset": "ci-StdShar-MSVC", + "inherits": [ + "ci-x64-Release-MSVC" + ] + }, + { + "name": "ci-StdShar-Clang", + "description": "Clang Standard Build for x64 (Release)", + "configurePreset": "ci-StdShar-Clang", + "inherits": [ + "ci-x64-Release-Clang" + ] + }, + { + "name": "ci-StdShar-GNUC", + "description": "GNUC Standard Build for x64 (Release)", + "configurePreset": "ci-StdShar-GNUC", + "verbose": false, + "inherits": [ + "ci-x64-Release-GNUC" + ] + } + ], + "testPresets": [ + { + "name": "ci-StdShar-MSVC", + "configurePreset": "ci-StdShar-MSVC", + "inherits": [ + "ci-x64-Release-MSVC" + ] + }, + { + "name": "ci-StdShar-Clang", + "configurePreset": "ci-StdShar-Clang", + "inherits": [ + "ci-x64-Release-Clang" + ] + }, + { + "name": "ci-StdShar-GNUC", + "configurePreset": "ci-StdShar-GNUC", + "inherits": [ + "ci-x64-Release-GNUC" + ] + } + ], + "packagePresets": [ + { + "name": "ci-StdShar-MSVC", + "configurePreset": "ci-StdShar-MSVC", + "inherits": "ci-x64-Release-MSVC" + }, + { + "name": "ci-StdShar-Clang", + "configurePreset": "ci-StdShar-Clang", + "inherits": "ci-x64-Release-Clang" + }, + { + "name": "ci-StdShar-GNUC", + "configurePreset": "ci-StdShar-GNUC", + "inherits": "ci-x64-Release-GNUC" + } + ], + "workflowPresets": [ + { + "name": "ci-StdShar-MSVC", + "steps": [ + {"type": "configure", "name": "ci-StdShar-MSVC"}, + {"type": "build", "name": "ci-StdShar-MSVC"}, + {"type": "test", "name": "ci-StdShar-MSVC"}, + {"type": "package", "name": "ci-StdShar-MSVC"} + ] + }, + { + "name": "ci-StdShar-Clang", + "steps": [ + {"type": "configure", "name": "ci-StdShar-Clang"}, + {"type": "build", "name": "ci-StdShar-Clang"}, + {"type": "test", "name": "ci-StdShar-Clang"}, + {"type": 
"package", "name": "ci-StdShar-Clang"} + ] + }, + { + "name": "ci-StdShar-GNUC", + "steps": [ + {"type": "configure", "name": "ci-StdShar-GNUC"}, + {"type": "build", "name": "ci-StdShar-GNUC"}, + {"type": "test", "name": "ci-StdShar-GNUC"}, + {"type": "package", "name": "ci-StdShar-GNUC"} + ] + } + ] +} \ No newline at end of file diff --git a/config/cmake-presets/hidden-presets.json b/config/cmake-presets/hidden-presets.json new file mode 100644 index 00000000000..c616e7d1f4b --- /dev/null +++ b/config/cmake-presets/hidden-presets.json @@ -0,0 +1,491 @@ +{ + "version": 6, + "configurePresets": [ + { + "name": "ci-base", + "displayName": "Basic Config", + "description": "Basic build using Ninja generator", + "generator": "Ninja", + "hidden": true, + "binaryDir": "${sourceParentDir}/build/${presetName}", + "installDir": "${sourceParentDir}/install/${presetName}" + }, + { + "name": "ci-x64", + "architecture": { + "value": "x64", + "strategy": "external" + }, + "hidden": true + }, + { + "name": "ci-x86", + "architecture": { + "value": "x86", + "strategy": "external" + }, + "hidden": true + }, + { + "name": "ci-Debug", + "cacheVariables": { + "CMAKE_BUILD_TYPE": "Debug" + }, + "hidden": true + }, + { + "name": "ci-Release", + "cacheVariables": { + "CMAKE_BUILD_TYPE": "RelWithDebInfo", + "HDF5_BUILD_DOC": "ON" + }, + "hidden": true + }, + { + "name": "ci-MSVC", + "hidden": true, + "cacheVariables": { + "CMAKE_C_COMPILER": "cl", + "CMAKE_CXX_COMPILER": "cl" + }, + "toolset": { + "value": "host=x64", + "strategy": "external" + }, + "condition": { + "type": "equals", + "lhs": "${hostSystemName}", + "rhs": "Windows" + } + }, + { + "name": "ci-Clang", + "hidden": true, + "cacheVariables": { + "CMAKE_TOOLCHAIN_FILE": "config/toolchain/clang.cmake" + }, + "toolset": { + "value": "host=x64", + "strategy": "external" + } + }, + { + "name": "ci-GNUC", + "hidden": true, + "cacheVariables": { + "CMAKE_TOOLCHAIN_FILE": "config/toolchain/gcc.cmake" + }, + "condition": { + "type": 
"equals", + "lhs": "${hostSystemName}", + "rhs": "Linux" + }, + "toolset": { + "value": "host=x64", + "strategy": "external" + } + }, + { + "name": "ci-Intel", + "hidden": true, + "cacheVariables": { + "CMAKE_TOOLCHAIN_FILE": "config/toolchain/intel.cmake" + }, + "toolset": { + "value": "host=x64", + "strategy": "external" + } + }, + { + "name": "ci-Fortran-Clang", + "hidden": true, + "cacheVariables": { + "CMAKE_Fortran_COMPILER": "gfortran" + }, + "condition": { + "type": "matches", + "string": "${presetName}", + "regex": ".*-Clang" + } + }, + { + "name": "ci-Fortran", + "hidden": true, + "inherits": "ci-Fortran-Clang", + "cacheVariables": { + "HDF5_BUILD_FORTRAN": "ON" + }, + "toolset": { + "value": "host=x64", + "strategy": "external" + } + }, + { + "name": "ci-CPP", + "hidden": true, + "cacheVariables": { + "HDF5_BUILD_CPP_LIB": "ON" + } + }, + { + "name": "ci-Java", + "hidden": true, + "cacheVariables": { + "HDF5_BUILD_JAVA": "ON" + }, + "toolset": { + "value": "host=x64", + "strategy": "external" + } + }, + { + "name": "ci-x64-Debug-MSVC", + "description": "MSVC for x64 (Debug)", + "hidden": true, + "inherits": [ + "ci-base", + "ci-x64", + "ci-Debug", + "ci-MSVC" + ] + }, + { + "name": "ci-x64-Release-MSVC", + "description": "MSVC for x64 (Release)", + "hidden": true, + "inherits": [ + "ci-base", + "ci-x64", + "ci-Release", + "ci-MSVC" + ] + }, + { + "name": "ci-x64-Debug-Clang", + "description": "Clang/LLVM for x64 (Debug)", + "hidden": true, + "inherits": [ + "ci-base", + "ci-x64", + "ci-Debug", + "ci-Clang" + ] + }, + { + "name": "ci-x64-Release-Clang", + "description": "Clang/LLVM for x64 (Release)", + "hidden": true, + "inherits": [ + "ci-base", + "ci-x64", + "ci-Release", + "ci-Clang" + ] + }, + { + "name": "ci-x64-Debug-GNUC", + "description": "GNUC for x64 (Debug)", + "hidden": true, + "inherits": [ + "ci-base", + "ci-x64", + "ci-Debug", + "ci-GNUC" + ] + }, + { + "name": "ci-x64-Release-GNUC", + "description": "GNUC for x64 (Release)", + "hidden": 
true, + "inherits": [ + "ci-base", + "ci-x64", + "ci-Release", + "ci-GNUC" + ] + }, + { + "name": "ci-x64-Debug-MSVC-asan", + "description": "x64-Debug-MSVC with /fsanitize=address", + "hidden": true, + "inherits": "ci-x64-Debug-MSVC", + "cacheVariables": { + "USE_SANITIZER": "Address", + "HDF5_ENABLE_SANITIZERS": "ON" + } + }, + { + "name": "ci-x64-Debug-GNUC-asan", + "hidden": true, + "inherits": "ci-x64-Debug-GNUC", + "cacheVariables": { + "USE_SANITIZER": "Address", + "HDF5_ENABLE_SANITIZERS": "ON" + } + }, + { + "name": "ci-x64-Debug-GNUC-tsan", + "hidden": true, + "inherits": "ci-x64-Debug-GNUC", + "cacheVariables": { + "USE_SANITIZER": "Thread", + "HDF5_ENABLE_SANITIZERS": "ON" + } + }, + { + "name": "ci-x64-Debug-GNUC-lsan", + "hidden": true, + "inherits": "ci-x64-Debug-GNUC", + "cacheVariables": { + "USE_SANITIZER": "Leak", + "HDF5_ENABLE_SANITIZERS": "ON" + } + }, + { + "name": "ci-x64-Debug-GNUC-ubsan", + "hidden": true, + "inherits": "ci-x64-Debug-GNUC", + "cacheVariables": { + "USE_SANITIZER": "Undefined", + "HDF5_ENABLE_SANITIZERS": "ON" + } + } + ], + "buildPresets": [ + { + "name": "ci-base", + "configurePreset": "ci-base", + "hidden": true, + "verbose": true, + "jobs": 8 + }, + { + "name": "ci-x64-Debug-MSVC", + "configurePreset": "ci-x64-Debug-MSVC", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-x64-Release-MSVC", + "configurePreset": "ci-x64-Release-MSVC", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-x64-Debug-Clang", + "configurePreset": "ci-x64-Debug-Clang", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-x64-Release-Clang", + "configurePreset": "ci-x64-Release-Clang", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-x64-Debug-GNUC", + "configurePreset": "ci-x64-Debug-GNUC", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-x64-Release-GNUC", + "configurePreset": "ci-x64-Release-GNUC", + "hidden": true, + "inherits": [ 
+ "ci-base" + ] + }, + { + "name": "ci-x64-Debug-MSVC-asan", + "configurePreset": "ci-x64-Debug-MSVC-asan", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-x64-Debug-GNUC-asan", + "configurePreset": "ci-x64-Debug-GNUC-asan", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-x64-Debug-GNUC-tsan", + "configurePreset": "ci-x64-Debug-GNUC-tsan", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-x64-Debug-GNUC-lsan", + "configurePreset": "ci-x64-Debug-GNUC-lsan", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-x64-Debug-GNUC-ubsan", + "configurePreset": "ci-x64-Debug-GNUC-ubsan", + "hidden": true, + "inherits": [ + "ci-base" + ] + } + ], + "testPresets": [ + { + "name": "ci-base", + "configurePreset": "ci-base", + "output": { + "outputOnFailure": false, + "shortProgress": true, + "verbosity": "verbose" + }, + "hidden": true, + "execution": { + "noTestsAction": "error", + "timeout": 180, + "jobs": 8 + } + }, + { + "name": "ci-x64-Debug-MSVC", + "configurePreset": "ci-x64-Debug-MSVC", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-x64-Release-MSVC", + "configurePreset": "ci-x64-Release-MSVC", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-x64-Debug-Clang", + "configurePreset": "ci-x64-Debug-Clang", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-x64-Release-Clang", + "configurePreset": "ci-x64-Release-Clang", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-x64-Debug-GNUC", + "configurePreset": "ci-x64-Debug-GNUC", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-x64-Release-GNUC", + "configurePreset": "ci-x64-Release-GNUC", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-x64-Debug-MSVC-asan", + "configurePreset": "ci-x64-Debug-MSVC-asan", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": 
"ci-x64-Debug-GNUC-asan", + "configurePreset": "ci-x64-Debug-GNUC-asan", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-x64-Debug-GNUC-tsan", + "configurePreset": "ci-x64-Debug-GNUC-tsan", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-x64-Debug-GNUC-lsan", + "configurePreset": "ci-x64-Debug-GNUC-lsan", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-x64-Debug-GNUC-ubsan", + "configurePreset": "ci-x64-Debug-GNUC-ubsan", + "inherits": [ + "ci-base" + ] + } + ], + "packagePresets": [ + { + "name": "ci-base", + "hidden": true, + "output": { + "verbose": true + } + }, + { + "name": "ci-x64-Release-MSVC", + "configurePreset": "ci-x64-Release-MSVC", + "hidden": true, + "inherits": "ci-base", + "generators": [ + "ZIP" + ] + }, + { + "name": "ci-x64-Release-Clang", + "configurePreset": "ci-x64-Release-Clang", + "hidden": true, + "inherits": "ci-base", + "generators": [ + "TGZ" + ] + }, + { + "name": "ci-x64-Release-GNUC", + "configurePreset": "ci-x64-Release-GNUC", + "hidden": true, + "inherits": "ci-base", + "generators": [ + "TGZ" + ] + } + ] +} \ No newline at end of file diff --git a/release_docs/INSTALL_CMake.txt b/release_docs/INSTALL_CMake.txt index db2d37557ba..361e3e6d415 100644 --- a/release_docs/INSTALL_CMake.txt +++ b/release_docs/INSTALL_CMake.txt @@ -14,6 +14,7 @@ Section VI: CMake option defaults for HDF5 Section VII: User Defined Options for HDF5 Libraries with CMake Section VIII: User Defined Compile Flags for HDF5 Libraries with CMake Section IX: Considerations for cross-compiling +Section X: Using CMakePresets.json for compiling ************************************************************************ @@ -210,10 +211,10 @@ Notes: This short set of instructions is written for users who want to 5. 
Configure the C library, tools and tests with one of the following commands: On Windows 32 bit - cmake -G "Visual Studio 12 2013" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_SHARED_LIBS:BOOL=OFF -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..\hdf5-1.12."X" + cmake -G "Visual Studio 16 2019" -A Win32 -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_SHARED_LIBS:BOOL=OFF -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..\hdf5-1.12."X" On Windows 64 bit - cmake -G "Visual Studio 12 2013 Win64" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_SHARED_LIBS:BOOL=OFF -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..\hdf5-1.12."X" + cmake -G "Visual Studio 16 2019" -A x64 -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_SHARED_LIBS:BOOL=OFF -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..\hdf5-1.12."X" On Linux and Mac cmake -G "Unix Makefiles" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_SHARED_LIBS:BOOL=OFF -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ../hdf5-1.12."X" @@ -616,7 +617,6 @@ These five steps are described in detail below. set (ZFP_TGZ_NAME "zfp.tar.gz" CACHE STRING "Use ZFP from compressed file" FORCE) set (ZFP_PACKAGE_NAME "zfp" CACHE STRING "Name of ZFP package" FORCE) - 2. Configure the cache settings 2.1 Visual CMake users, click the Configure button. If this is the first time you are @@ -639,7 +639,7 @@ These five steps are described in detail below. 2.2 Preferred command line example on Windows in c:\MyHDFstuff\hdf5\build directory: - cmake -C ../config/cmake/cacheinit.cmake -G "Visual Studio 12 2013" \ + cmake -C ../config/cmake/cacheinit.cmake -G "Visual Studio 16 2019" "-Ax64"\ -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF -DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=OFF \ -DCMAKE_BUILD_TYPE:STRING=Release .. 
@@ -1020,6 +1020,99 @@ The HDF5 CMake variables; HDF5_USE_PREGEN: set this to true HDF5_USE_PREGEN_DIR: set this path to the preset H5Tinit.c file + +======================================================================== +X: Using CMakePresets.json for compiling +======================================================================== + +One problem that CMake users often face is sharing settings with other people for common +ways to configure a project. This may be done to support CI builds, or for users who +frequently use the same build. CMake supports two main files, CMakePresets.json and CMakeUserPresets.json, +that allow users to specify common configure options and share them with others. CMake also supports +files included with the include field. + +CMakePresets.json and CMakeUserPresets.json live in the project's root directory. They +both have exactly the same format, and both are optional (though at least one must be +present if --preset is specified). CMakePresets.json is meant to specify project-wide build +details, while CMakeUserPresets.json is meant for developers to specify their own local build details. + +See CMake documentation for details: https://cmake.org/cmake/help/latest/manual/cmake-presets.7.html + +HDF-provided CMakePresets.json +------------------------------- +The CMakePresets.json provided by HDF requires CMake version 3.25, which supports package +and workflow presets, and ninja build system. The top-level configuration group is intended to be +a standard set of options to produce a package of shared and static libraries and tools. Other configurations +used for inheriting settings are in the included json file in "config/cmake-presets/hidden-presets.json".
+ +Available configuration presets can be displayed by executing: + cmake -S <path-to-source> --list-presets + +Using individual command presets (where <compiler-type> is GNUC or MSVC or Clang): + change directory to the hdf5 source folder + cmake --preset=ci-StdShar-<compiler-type> + cmake --build --preset=ci-StdShar-<compiler-type> + ctest --preset=ci-StdShar-<compiler-type> + cpack --preset=ci-StdShar-<compiler-type> + + +Using the workflow preset to configure, build, test and package the standard configuration is: + change directory to the hdf5 source folder + execute "cmake --workflow --preset=ci-StdShar-<compiler-type> --fresh" + where <compiler-type> is GNUC or MSVC or Clang + +Creating your own configurations +-------------------------------- +The quickest way is to copy CMakePresets.json to CMakeUserPresets.json and +edit CMakeUserPresets.json configuration names from ci-* to my-*. Change the +"configurePresets" section "inherits" field only for those that you have alternate +options. Then change the "configurePreset" field entries in the "buildPresets", +"testPresets", "packagePresets" sections to match your my-StdShar-<compiler-type>. +And finally the names settings in the "workflowPresets" steps will also need the ci-* to my-* change. + +For instance, to change the support files to use a local directory, edit CMakeUserPresets.json: +...... + { + "name": "my-base-tgz", + "hidden": true, + "inherits": "ci-base", + "cacheVariables": { + "HDF5_ALLOW_EXTERNAL_SUPPORT": {"type": "STRING", "value": "TGZ"}, + "TGZPATH": {"type": "STRING", "value": "${sourceParentDir}/temp"} + } + }, + { + "name": "my-StdCompression", + "hidden": true, + "inherits": "my-base-tgz", + "cacheVariables": { +...... + { + "name": "my-StdShar", + "hidden": true, + "inherits": "my-StdCompression", + "cacheVariables": { +...... + { + "name": "my-StdShar-GNUC", + "description": "GNUC Standard Config for x64 (Release)", + "inherits": [ + "ci-x64-Release-GNUC", + "ci-CPP", + "ci-Fortran", + "ci-Java", + "my-StdShar", + "my-StdExamples" + ] + } +...... + + +Then you can change or add options for your specific case.
+ + + + ======================================================================== For further assistance, send email to help@hdfgroup.org ======================================================================== diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 5856d817cbd..1c14ab790af 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -47,6 +47,15 @@ New Features Configuration: ------------- + - Added support for CMake presets file. + + CMake supports two main files, CMakePresets.json and CMakeUserPresets.json, + that allow users to specify common configure options and share them with others. + HDF added a CMakePresets.json file of a typical configuration and support + file, config/cmake-presets/hidden-presets.json. + Also added a section to INSTALL_CMake.txt with very basic explanation of the + process to use CMakePresets. + - Enabled instrumentation of the library by default in CMake for parallel debug builds From 922ff5b8c705c650b8dc01fa1296b1e98cb4d564 Mon Sep 17 00:00:00 2001 From: jhendersonHDF Date: Sun, 30 Apr 2023 15:13:31 -0500 Subject: [PATCH 059/108] Add bug note to H5Dget_space_status documentation (#2788) (#2836) --- src/H5Dpublic.h | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/src/H5Dpublic.h b/src/H5Dpublic.h index 22e856db914..13a6d3400af 100644 --- a/src/H5Dpublic.h +++ b/src/H5Dpublic.h @@ -423,6 +423,20 @@ H5_DLL hid_t H5Dget_space(hid_t dset_id); * \details H5Dget_space_status() determines whether space has been allocated * for the dataset \p dset_id. * + * \note \Bold{BUG:} Prior to the HDF5 1.14.0, 1.12.2 and 1.10.9 releases, + * H5Dget_space_status() may return incorrect space allocation status + * values for datasets with filters applied to them. 
+ * H5Dget_space_status() calculated the space allocation status by + * comparing the sum of the sizes of all the allocated chunks in the + * dataset against the total data size of the dataset, as calculated by + * the number of elements in the dataset's dataspace multiplied by the + * dataset's datatype size. If the dataset had any compression filters + * applied to it and the dataset chunks were successfully compressed, + * the sum of the sizes of the allocated dataset chunks would generally + * always be less than the total data size of the dataset, and + * H5Dget_space_status() wouldn't ever return + * `H5D_SPACE_STATUS_ALLOCATED`. + * * \since 1.6.0 * */ From 6bd9f780620bc221b9a9521eca736e1a762844d0 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Sun, 30 Apr 2023 15:14:54 -0500 Subject: [PATCH 060/108] Correct compression install files (#2847) --- config/cmake/LIBAEC/CMakeLists.txt | 12 ++++++++---- config/cmake/ZLIB/CMakeLists.txt | 18 ++++++++++++++---- 2 files changed, 22 insertions(+), 8 deletions(-) diff --git a/config/cmake/LIBAEC/CMakeLists.txt b/config/cmake/LIBAEC/CMakeLists.txt index 212c9bfe886..fb650ec04b5 100644 --- a/config/cmake/LIBAEC/CMakeLists.txt +++ b/config/cmake/LIBAEC/CMakeLists.txt @@ -369,6 +369,10 @@ if (WIN32) find_program (WIX_EXECUTABLE candle PATHS "${CPACK_WIX_ROOT}/bin") endif () +configure_file (${LIBAEC_SOURCE_DIR}/LICENSE.txt ${LIBAEC_BINARY_DIR}/LIBAEC_LICENSE.txt @ONLY) +configure_file (${LIBAEC_SOURCE_DIR}/README.SZIP ${LIBAEC_BINARY_DIR}/LIBAEC_README.SZIP @ONLY) +configure_file (${LIBAEC_SOURCE_DIR}/README.md ${LIBAEC_BINARY_DIR}/LIBAEC_README.md @ONLY) + #----------------------------------------------------------------------------- # Set the cpack variables #----------------------------------------------------------------------------- @@ -383,9 +387,9 @@ if (NOT LIBAEC_EXTERNALLY_CONFIGURED) set (CPACK_PACKAGE_VERSION_MAJOR "${LIBAEC_PACKAGE_VERSION_MAJOR}") set 
(CPACK_PACKAGE_VERSION_MINOR "${LIBAEC_PACKAGE_VERSION_MINOR}") set (CPACK_PACKAGE_VERSION_PATCH "") - set (CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_SOURCE_DIR}/LICENSE.txt") - set (CPACK_PACKAGE_DESCRIPTION_FILE "${CMAKE_CURRENT_SOURCE_DIR}/README.SZIP") - set (CPACK_RESOURCE_FILE_README "${CMAKE_CURRENT_SOURCE_DIR}/README.md") + set (CPACK_RESOURCE_FILE_LICENSE "${LIBAEC_BINARY_DIR}/LIBAEC_LICENSE.txt") + set (CPACK_PACKAGE_DESCRIPTION_FILE "${LIBAEC_BINARY_DIR}/LIBAEC_README.SZIP") + set (CPACK_RESOURCE_FILE_README "${LIBAEC_BINARY_DIR}/LIBAEC_README.md") set (CPACK_PACKAGE_RELOCATABLE TRUE) set (CPACK_PACKAGE_DESCRIPTION_SUMMARY "libaec - Adaptive Entropy Coding library by Deutsches Klimarechenzentrum GmbH") set (CPACK_PACKAGE_INSTALL_DIRECTORY "${CPACK_PACKAGE_VENDOR}/${CPACK_PACKAGE_NAME}/${CPACK_PACKAGE_VERSION}") @@ -419,7 +423,7 @@ if (NOT LIBAEC_EXTERNALLY_CONFIGURED) endif () #WiX variables set (CPACK_WIX_UNINSTALL "1") - set (CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_SOURCE_DIR}/LICENSE.txt") + set (CPACK_RESOURCE_FILE_LICENSE "${LIBAEC_BINARY_DIR}/LIBAEC_LICENSE.txt") elseif (APPLE) list (APPEND CPACK_GENERATOR "STGZ") list (APPEND CPACK_GENERATOR "DragNDrop") diff --git a/config/cmake/ZLIB/CMakeLists.txt b/config/cmake/ZLIB/CMakeLists.txt index c74ecea9dd9..5e42fb2576c 100644 --- a/config/cmake/ZLIB/CMakeLists.txt +++ b/config/cmake/ZLIB/CMakeLists.txt @@ -422,6 +422,16 @@ if (WIN32) find_program (WIX_EXECUTABLE candle PATHS "${CPACK_WIX_ROOT}/bin") endif () +#----------------------------------------------------------------------------- +# Configure the LICENSE.txt file for the windows binary package +#----------------------------------------------------------------------------- +if (WIN32) + configure_file (${ZLIB_SOURCE_DIR}/LICENSE ${ZLIB_BINARY_DIR}/ZLIB_LICENSE.txt @ONLY) +else () + configure_file (${ZLIB_SOURCE_DIR}/LICENSE ${ZLIB_BINARY_DIR}/ZLIB_LICENSE @ONLY) +endif () +configure_file (${ZLIB_SOURCE_DIR}/README 
${ZLIB_BINARY_DIR}/ZLIB_README @ONLY) + #----------------------------------------------------------------------------- # Set the cpack variables #----------------------------------------------------------------------------- @@ -436,9 +446,9 @@ if (NOT ZLIB_EXTERNALLY_CONFIGURED) set (CPACK_PACKAGE_VERSION_MAJOR "${ZLIB_PACKAGE_VERSION_MAJOR}") set (CPACK_PACKAGE_VERSION_MINOR "${ZLIB_PACKAGE_VERSION_MINOR}") set (CPACK_PACKAGE_VERSION_PATCH "") - set (CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_SOURCE_DIR}/README") - set (CPACK_PACKAGE_DESCRIPTION_FILE "${CMAKE_CURRENT_SOURCE_DIR}/README") - set (CPACK_RESOURCE_FILE_README "${CMAKE_CURRENT_SOURCE_DIR}/README") + set (CPACK_RESOURCE_FILE_LICENSE "${ZLIB_BINARY_DIR}/ZLIB_LICENSE") + set (CPACK_PACKAGE_DESCRIPTION_FILE "${ZLIB_BINARY_DIR}/ZLIB_README") + set (CPACK_RESOURCE_FILE_README "${ZLIB_BINARY_DIR}/ZLIB_README") set (CPACK_PACKAGE_RELOCATABLE TRUE) set (CPACK_PACKAGE_DESCRIPTION_SUMMARY "zlib Installation") set (CPACK_PACKAGE_INSTALL_DIRECTORY "${CPACK_PACKAGE_VENDOR}/${CPACK_PACKAGE_NAME}/${CPACK_PACKAGE_VERSION}") @@ -472,7 +482,7 @@ if (NOT ZLIB_EXTERNALLY_CONFIGURED) endif () #WiX variables set (CPACK_WIX_UNINSTALL "1") - set (CPACK_RESOURCE_FILE_LICENSE "${JPEG_BINARY_DIR}/README") + set (CPACK_RESOURCE_FILE_LICENSE "${ZLIB_BINARY_DIR}/ZLIB_LICENSE.txt") elseif (APPLE) list (APPEND CPACK_GENERATOR "STGZ") list (APPEND CPACK_GENERATOR "DragNDrop") From 94a3d86fb867182f90e60f7955c3653b3ac53b8e Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Tue, 2 May 2023 14:54:17 -0500 Subject: [PATCH 061/108] Correct usage of CMAKE_BUILD_TYPE (#2868) --- config/cmake/libhdf5.settings.cmake.in | 2 +- config/cmake_ext_mod/HDFLibMacros.cmake | 8 +++--- config/cmake_ext_mod/HDFMacros.cmake | 33 +++++++++++-------------- 3 files changed, 20 insertions(+), 23 deletions(-) diff --git a/config/cmake/libhdf5.settings.cmake.in b/config/cmake/libhdf5.settings.cmake.in index 
d80b0f8c5a9..0e2c0134562 100644 --- a/config/cmake/libhdf5.settings.cmake.in +++ b/config/cmake/libhdf5.settings.cmake.in @@ -13,7 +13,7 @@ General Information: Compiling Options: ------------------ - Build Mode: @CMAKE_BUILD_TYPE@ + Build Mode: @HDF_CFG_NAME@ Debugging Symbols: @HDF5_ENABLE_SYMBOLS@ Asserts: @HDF5_ENABLE_ASSERTS@ Profiling: @HDF5_ENABLE_PROFILING@ diff --git a/config/cmake_ext_mod/HDFLibMacros.cmake b/config/cmake_ext_mod/HDFLibMacros.cmake index d2f2660d0a2..6d77ab54e4f 100644 --- a/config/cmake_ext_mod/HDFLibMacros.cmake +++ b/config/cmake_ext_mod/HDFLibMacros.cmake @@ -89,7 +89,7 @@ macro (EXTERNAL_SZIP_LIBRARY compress_type encoding) -DBUILD_SHARED_LIBS:BOOL=OFF -DSZIP_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT} -DSZIP_EXTERNALLY_CONFIGURED:BOOL=OFF - -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE} + -DCMAKE_BUILD_TYPE:STRING=${HDF_CFG_NAME} -DCMAKE_DEBUG_POSTFIX:STRING=${CMAKE_DEBUG_POSTFIX} -DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX} -DCMAKE_RUNTIME_OUTPUT_DIRECTORY:PATH=${CMAKE_RUNTIME_OUTPUT_DIRECTORY} @@ -111,7 +111,7 @@ macro (EXTERNAL_SZIP_LIBRARY compress_type encoding) -DBUILD_SHARED_LIBS:BOOL=OFF -DSZIP_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT} -DSZIP_EXTERNALLY_CONFIGURED:BOOL=OFF - -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE} + -DCMAKE_BUILD_TYPE:STRING=${HDF_CFG_NAME} -DCMAKE_DEBUG_POSTFIX:STRING=${CMAKE_DEBUG_POSTFIX} -DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX} -DCMAKE_RUNTIME_OUTPUT_DIRECTORY:PATH=${CMAKE_RUNTIME_OUTPUT_DIRECTORY} @@ -184,7 +184,7 @@ macro (EXTERNAL_ZLIB_LIBRARY compress_type) -DBUILD_SHARED_LIBS:BOOL=OFF -DZLIB_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT} -DZLIB_EXTERNALLY_CONFIGURED:BOOL=OFF - -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE} + -DCMAKE_BUILD_TYPE:STRING=${HDF_CFG_NAME} -DCMAKE_DEBUG_POSTFIX:STRING=${CMAKE_DEBUG_POSTFIX} -DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX} -DCMAKE_RUNTIME_OUTPUT_DIRECTORY:PATH=${CMAKE_RUNTIME_OUTPUT_DIRECTORY} @@ -205,7 +205,7 @@ macro (EXTERNAL_ZLIB_LIBRARY 
compress_type) -DBUILD_SHARED_LIBS:BOOL=OFF -DZLIB_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT} -DZLIB_EXTERNALLY_CONFIGURED:BOOL=OFF - -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE} + -DCMAKE_BUILD_TYPE:STRING=${HDF_CFG_NAME} -DCMAKE_DEBUG_POSTFIX:STRING=${CMAKE_DEBUG_POSTFIX} -DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX} -DCMAKE_RUNTIME_OUTPUT_DIRECTORY:PATH=${CMAKE_RUNTIME_OUTPUT_DIRECTORY} diff --git a/config/cmake_ext_mod/HDFMacros.cmake b/config/cmake_ext_mod/HDFMacros.cmake index 7483f3cad8b..b369ab1acb1 100644 --- a/config/cmake_ext_mod/HDFMacros.cmake +++ b/config/cmake_ext_mod/HDFMacros.cmake @@ -14,28 +14,23 @@ macro (SET_HDF_BUILD_TYPE) get_property (_isMultiConfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG) if (_isMultiConfig) - set (HDF_CFG_NAME ${CMAKE_BUILD_TYPE}) - set (HDF_BUILD_TYPE ${CMAKE_CFG_INTDIR}) + # HDF_CFG_BUILD_TYPE is used in the Fortran install commands for the build location of the .mod files set (HDF_CFG_BUILD_TYPE \${CMAKE_INSTALL_CONFIG_NAME}) + if (CMAKE_BUILD_TYPE) + # set the default to the specified command line define + set (HDF_CFG_NAME ${CMAKE_BUILD_TYPE}) + else () + # set the default to the MultiConfig variable + set (HDF_CFG_NAME ${CMAKE_CFG_INTDIR}) + endif () else () set (HDF_CFG_BUILD_TYPE ".") if (CMAKE_BUILD_TYPE) set (HDF_CFG_NAME ${CMAKE_BUILD_TYPE}) - set (HDF_BUILD_TYPE ${CMAKE_BUILD_TYPE}) else () set (HDF_CFG_NAME "Release") - set (HDF_BUILD_TYPE "Release") endif () endif () - if (NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "Setting build type to 'RelWithDebInfo' as none was specified.") - endif() - set (CMAKE_BUILD_TYPE RelWithDebInfo CACHE STRING "Choose the type of build." 
FORCE) - # Set the possible values of build type for cmake-gui - set_property (CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Debug" "Release" - "MinSizeRel" "RelWithDebInfo") - endif () endmacro () #------------------------------------------------------------------------------- @@ -75,9 +70,11 @@ endmacro () #------------------------------------------------------------------------------- macro (INSTALL_TARGET_PDB libtarget targetdestination targetcomponent) + option (DISABLE_PDB_FILES "Do not install PDB files" OFF) + mark_as_advanced (DISABLE_PDB_FILES) if (WIN32 AND MSVC AND NOT DISABLE_PDB_FILES) get_target_property (target_type ${libtarget} TYPE) - if (${libtype} MATCHES "SHARED") + if (${target_type} MATCHES "SHARED") set (targetfilename $) else () get_property (target_name TARGET ${libtarget} PROPERTY $,OUTPUT_NAME_DEBUG,OUTPUT_NAME_RELWITHDEBINFO>) @@ -173,8 +170,8 @@ macro (HDF_IMPORT_SET_LIB_OPTIONS libtarget libname libtype libversion) ) else () set_target_properties (${libtarget} PROPERTIES - IMPORTED_IMPLIB "${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/${HDF_BUILD_TYPE}/${CMAKE_IMPORT_LIBRARY_PREFIX}${IMPORT_LIB_NAME}${CMAKE_IMPORT_LIBRARY_SUFFIX}" - IMPORTED_LOCATION "${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/${HDF_BUILD_TYPE}/${CMAKE_IMPORT_LIBRARY_PREFIX}${IMPORT_LIB_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX}" + IMPORTED_IMPLIB "${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/${HDF_CFG_NAME}/${CMAKE_IMPORT_LIBRARY_PREFIX}${IMPORT_LIB_NAME}${CMAKE_IMPORT_LIBRARY_SUFFIX}" + IMPORTED_LOCATION "${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/${HDF_CFG_NAME}/${CMAKE_IMPORT_LIBRARY_PREFIX}${IMPORT_LIB_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX}" ) endif () else () @@ -199,7 +196,7 @@ macro (HDF_IMPORT_SET_LIB_OPTIONS libtarget libname libtype libversion) else () if (WIN32 AND NOT MINGW) set_target_properties (${libtarget} PROPERTIES - IMPORTED_LOCATION "${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/${HDF_BUILD_TYPE}/${IMPORT_LIB_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX}" + IMPORTED_LOCATION 
"${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/${HDF_CFG_NAME}/${IMPORT_LIB_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX}" IMPORTED_LINK_INTERFACE_LANGUAGES "C" ) else () @@ -453,7 +450,7 @@ macro (HDF_DIR_PATHS package_prefix) ) get_property(_isMultiConfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG) if(_isMultiConfig) - set (CMAKE_TEST_OUTPUT_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${CMAKE_BUILD_TYPE}) + set (CMAKE_TEST_OUTPUT_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${HDF_CFG_NAME}) set (CMAKE_PDB_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/bin CACHE PATH "Single Directory for all pdb files." ) From b9cc2ebac85b5026a2306f98e852ecdd58437894 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Wed, 3 May 2023 10:31:44 -0500 Subject: [PATCH 062/108] Add configure_file commands for batch (#2882) --- src/CMakeLists.txt | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 0088ae23704..75505a67d66 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -987,6 +987,14 @@ if (BUILD_SHARED_LIBS) endif () if (LOCAL_BATCH_TEST) + configure_file ( + ${HDF5_SOURCE_DIR}/bin/batch/ctest_serial.cmake.in + ${HDF5_BINARY_DIR}/ctest_serial.cmake ESCAPE_QUOTES @ONLY + ) + configure_file ( + ${HDF5_SOURCE_DIR}/bin/batch/ctest_parallel.cmake.in + ${HDF5_BINARY_DIR}/ctest_parallel.cmake ESCAPE_QUOTES @ONLY + ) if (LOCAL_BATCH_SCRIPT_COMMAND STREQUAL "raybsub") configure_file ( ${HDF5_SOURCE_DIR}/bin/batch/${LOCAL_BATCH_SCRIPT_COMMAND} From f45caff637232482a0fe74ed7628bd248a41596a Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Thu, 11 May 2023 19:06:32 -0500 Subject: [PATCH 063/108] Fix CMake generator expression syntax (#2940) --- CMakePresets.json | 7 ++++++- c++/src/CMakeLists.txt | 4 ++-- c++/test/CMakeLists.txt | 2 +- config/cmake_ext_mod/HDFMacros.cmake | 6 +++--- fortran/examples/CMakeLists.txt | 4 ++-- fortran/src/CMakeLists.txt | 4 ++-- 
fortran/test/CMakeLists.txt | 24 ++++++++++++------------ release_docs/RELEASE.txt | 15 ++++++++++++++- src/CMakeLists.txt | 19 ++++++++++--------- test/CMakeLists.txt | 6 +++--- testpar/CMakeLists.txt | 4 ++-- 11 files changed, 57 insertions(+), 38 deletions(-) diff --git a/CMakePresets.json b/CMakePresets.json index d806cdaa1da..66f31a4745d 100644 --- a/CMakePresets.json +++ b/CMakePresets.json @@ -182,7 +182,12 @@ "configurePreset": "ci-StdShar-MSVC", "inherits": [ "ci-x64-Release-MSVC" - ] + ], + "filter": { + "exclude": { + "name": "H5DUMP-tfloatsattrs" + } + } }, { "name": "ci-StdShar-Clang", diff --git a/c++/src/CMakeLists.txt b/c++/src/CMakeLists.txt index 656f4bf48fb..6bf0e8d4ca1 100644 --- a/c++/src/CMakeLists.txt +++ b/c++/src/CMakeLists.txt @@ -85,7 +85,7 @@ if (BUILD_STATIC_LIBS) ) target_compile_options(${HDF5_CPP_LIB_TARGET} PRIVATE "${HDF5_CMAKE_CXX_FLAGS}") target_compile_definitions(${HDF5_CPP_LIB_TARGET} - PRIVATE $<$:MPICH_SKIP_MPICXX;MPICH_IGNORE_CXX_SEEK># Parallel/MPI, prevent spurious cpp/cxx warnings + PRIVATE "$<$:MPICH_SKIP_MPICXX;MPICH_IGNORE_CXX_SEEK>"# Parallel/MPI, prevent spurious cpp/cxx warnings ) TARGET_C_PROPERTIES (${HDF5_CPP_LIB_TARGET} STATIC) target_link_libraries (${HDF5_CPP_LIB_TARGET} PUBLIC ${HDF5_LIB_TARGET}) @@ -104,7 +104,7 @@ if (BUILD_SHARED_LIBS) target_compile_options(${HDF5_CPP_LIBSH_TARGET} PRIVATE "${HDF5_CMAKE_CXX_FLAGS}") target_compile_definitions(${HDF5_CPP_LIBSH_TARGET} PUBLIC "H5_BUILT_AS_DYNAMIC_LIB" - PRIVATE $<$:MPICH_SKIP_MPICXX;MPICH_IGNORE_CXX_SEEK># Parallel/MPI, prevent spurious cpp/cxx warnings + PRIVATE "$<$:MPICH_SKIP_MPICXX;MPICH_IGNORE_CXX_SEEK>"# Parallel/MPI, prevent spurious cpp/cxx warnings ) TARGET_C_PROPERTIES (${HDF5_CPP_LIBSH_TARGET} SHARED) target_link_libraries (${HDF5_CPP_LIBSH_TARGET} diff --git a/c++/test/CMakeLists.txt b/c++/test/CMakeLists.txt index 1255e39d3db..331cada33d4 100644 --- a/c++/test/CMakeLists.txt +++ b/c++/test/CMakeLists.txt @@ -41,7 +41,7 @@ add_executable 
(cpp_testhdf5 ${CPP_TEST_SOURCES} ${HDF5_CPP_TEST_SOURCE_DIR}/h5c target_include_directories (cpp_testhdf5 PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};${HDF5_TEST_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(cpp_testhdf5 PRIVATE "${HDF5_CMAKE_CXX_FLAGS}") target_compile_definitions(cpp_testhdf5 - PRIVATE $<$:MPICH_SKIP_MPICXX;MPICH_IGNORE_CXX_SEEK># Parallel/MPI, prevent spurious cpp/cxx warnings + PRIVATE "$<$:MPICH_SKIP_MPICXX;MPICH_IGNORE_CXX_SEEK>"# Parallel/MPI, prevent spurious cpp/cxx warnings ) if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (cpp_testhdf5 STATIC) diff --git a/config/cmake_ext_mod/HDFMacros.cmake b/config/cmake_ext_mod/HDFMacros.cmake index b369ab1acb1..ef910080eb9 100644 --- a/config/cmake_ext_mod/HDFMacros.cmake +++ b/config/cmake_ext_mod/HDFMacros.cmake @@ -77,7 +77,7 @@ macro (INSTALL_TARGET_PDB libtarget targetdestination targetcomponent) if (${target_type} MATCHES "SHARED") set (targetfilename $) else () - get_property (target_name TARGET ${libtarget} PROPERTY $,OUTPUT_NAME_DEBUG,OUTPUT_NAME_RELWITHDEBINFO>) + get_property (target_name TARGET ${libtarget} PROPERTY "$,OUTPUT_NAME_DEBUG,OUTPUT_NAME_RELWITHDEBINFO>") set (targetfilename $/${target_name}.pdb) endif () install ( @@ -211,8 +211,8 @@ endmacro () #------------------------------------------------------------------------------- macro (TARGET_C_PROPERTIES wintarget libtype) target_compile_options(${wintarget} PRIVATE - $<$:${WIN_COMPILE_FLAGS}> - $<$:${WIN_COMPILE_FLAGS}> + "$<$:${WIN_COMPILE_FLAGS}>" + "$<$:${WIN_COMPILE_FLAGS}>" ) if(MSVC) set_property(TARGET ${wintarget} APPEND PROPERTY LINK_FLAGS "${WIN_LINK_FLAGS}") diff --git a/fortran/examples/CMakeLists.txt b/fortran/examples/CMakeLists.txt index 14ed5fbcea3..793df8d7447 100644 --- a/fortran/examples/CMakeLists.txt +++ b/fortran/examples/CMakeLists.txt @@ -137,7 +137,7 @@ if (H5_HAVE_PARALLEL AND MPI_Fortran_FOUND) PRIVATE ${HDF5_F90_LIB_TARGET} ${HDF5_LIB_TARGET} - $<$:${MPI_Fortran_LIBRARIES}> + 
"$<$:${MPI_Fortran_LIBRARIES}>" ) set_target_properties (f90_ex_ph5example PROPERTIES LINKER_LANGUAGE Fortran @@ -158,7 +158,7 @@ if (H5_HAVE_PARALLEL AND MPI_Fortran_FOUND) PRIVATE ${HDF5_F90_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} - $<$:${MPI_Fortran_LIBRARIES}> + "$<$:${MPI_Fortran_LIBRARIES}>" ) set_target_properties (f90_ex_ph5example PROPERTIES LINKER_LANGUAGE Fortran diff --git a/fortran/src/CMakeLists.txt b/fortran/src/CMakeLists.txt index 00d7ca5c8db..c253c2c9b23 100644 --- a/fortran/src/CMakeLists.txt +++ b/fortran/src/CMakeLists.txt @@ -332,7 +332,7 @@ if (BUILD_STATIC_LIBS) PUBLIC ${HDF5_F90_C_LIB_TARGET} PRIVATE ${LINK_Fortran_LIBS} - $<$:${MPI_Fortran_LIBRARIES}> + "$<$:${MPI_Fortran_LIBRARIES}>" ) # set_property(TARGET ${HDF5_F90_LIB_TARGET} APPEND PROPERTY LINK_FLAGS $<$:"-SUBSYSTEM:CONSOLE">) # set_property(TARGET ${HDF5_F90_LIB_TARGET} APPEND PROPERTY LINK_FLAGS $<$:${WIN_LINK_FLAGS}>) @@ -365,7 +365,7 @@ if (BUILD_SHARED_LIBS) ) target_link_libraries (${HDF5_F90_LIBSH_TARGET} PUBLIC ${HDF5_F90_C_LIBSH_TARGET} - PRIVATE ${LINK_Fortran_LIBS} $<$:${MPI_Fortran_LIBRARIES}> + PRIVATE ${LINK_Fortran_LIBS} "$<$:${MPI_Fortran_LIBRARIES}>" ) # set_property(TARGET ${HDF5_F90_LIBSH_TARGET} APPEND PROPERTY LINK_FLAGS $<$:"-SUBSYSTEM:CONSOLE">) # set_property(TARGET ${HDF5_F90_LIBSH_TARGET} APPEND PROPERTY LINK_FLAGS $<$:${WIN_LINK_FLAGS}>) diff --git a/fortran/test/CMakeLists.txt b/fortran/test/CMakeLists.txt index 1e879e73d8c..2d29497ef26 100644 --- a/fortran/test/CMakeLists.txt +++ b/fortran/test/CMakeLists.txt @@ -224,7 +224,7 @@ if(MSVC) endif() if (NOT BUILD_SHARED_LIBS) target_include_directories (testhdf5_fortran PRIVATE "${CMAKE_Fortran_MODULE_DIRECTORY}/static;${HDF5_F90_BINARY_DIR};${HDF5_F90_BINARY_DIR}/static") - target_link_libraries (testhdf5_fortran PRIVATE ${HDF5_F90_TEST_LIB_TARGET} ${HDF5_F90_LIB_TARGET} ${HDF5_LIB_TARGET} $<$:ws2_32.lib>) + target_link_libraries (testhdf5_fortran PRIVATE ${HDF5_F90_TEST_LIB_TARGET} ${HDF5_F90_LIB_TARGET} 
${HDF5_LIB_TARGET} "$<$:ws2_32.lib>") set_target_properties (testhdf5_fortran PROPERTIES LINKER_LANGUAGE Fortran FOLDER test/fortran @@ -233,7 +233,7 @@ if (NOT BUILD_SHARED_LIBS) add_dependencies (testhdf5_fortran ${HDF5_F90_TEST_LIB_TARGET}) else () target_include_directories (testhdf5_fortran PRIVATE "${CMAKE_Fortran_MODULE_DIRECTORY}/shared;${HDF5_F90_BINARY_DIR};${HDF5_F90_BINARY_DIR}/shared") - target_link_libraries (testhdf5_fortran PRIVATE ${HDF5_F90_TEST_LIBSH_TARGET} ${HDF5_F90_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} $<$:ws2_32.lib>) + target_link_libraries (testhdf5_fortran PRIVATE ${HDF5_F90_TEST_LIBSH_TARGET} ${HDF5_F90_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:ws2_32.lib>") set_target_properties (testhdf5_fortran PROPERTIES LINKER_LANGUAGE Fortran FOLDER test/fortran @@ -263,7 +263,7 @@ if(MSVC) endif() if (NOT BUILD_SHARED_LIBS) target_include_directories (testhdf5_fortran_1_8 PRIVATE "${CMAKE_Fortran_MODULE_DIRECTORY}/static;${HDF5_F90_BINARY_DIR};${HDF5_F90_BINARY_DIR}/static") - target_link_libraries (testhdf5_fortran_1_8 PRIVATE ${HDF5_F90_TEST_LIB_TARGET} ${HDF5_F90_LIB_TARGET} ${HDF5_LIB_TARGET} $<$:ws2_32.lib>) + target_link_libraries (testhdf5_fortran_1_8 PRIVATE ${HDF5_F90_TEST_LIB_TARGET} ${HDF5_F90_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:ws2_32.lib>") set_target_properties (testhdf5_fortran_1_8 PROPERTIES LINKER_LANGUAGE Fortran FOLDER test/fortran @@ -272,7 +272,7 @@ if (NOT BUILD_SHARED_LIBS) add_dependencies (testhdf5_fortran_1_8 ${HDF5_F90_TEST_LIB_TARGET}) else () target_include_directories (testhdf5_fortran_1_8 PRIVATE "${CMAKE_Fortran_MODULE_DIRECTORY}/shared;${HDF5_F90_BINARY_DIR};${HDF5_F90_BINARY_DIR}/shared") - target_link_libraries (testhdf5_fortran_1_8 PRIVATE ${HDF5_F90_TEST_LIBSH_TARGET} ${HDF5_F90_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} $<$:ws2_32.lib>) + target_link_libraries (testhdf5_fortran_1_8 PRIVATE ${HDF5_F90_TEST_LIBSH_TARGET} ${HDF5_F90_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:ws2_32.lib>") set_target_properties 
(testhdf5_fortran_1_8 PROPERTIES LINKER_LANGUAGE Fortran FOLDER test/fortran @@ -304,7 +304,7 @@ if(MSVC) endif() if (NOT BUILD_SHARED_LIBS) target_include_directories (fortranlib_test_F03 PRIVATE "${CMAKE_Fortran_MODULE_DIRECTORY}/static;${HDF5_F90_BINARY_DIR};${HDF5_F90_BINARY_DIR}/static") - target_link_libraries (fortranlib_test_F03 PRIVATE ${HDF5_F90_TEST_LIB_TARGET} ${HDF5_F90_LIB_TARGET} ${HDF5_LIB_TARGET} $<$:ws2_32.lib>) + target_link_libraries (fortranlib_test_F03 PRIVATE ${HDF5_F90_TEST_LIB_TARGET} ${HDF5_F90_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:ws2_32.lib>") set_target_properties (fortranlib_test_F03 PROPERTIES LINKER_LANGUAGE Fortran FOLDER test/fortran @@ -313,7 +313,7 @@ if (NOT BUILD_SHARED_LIBS) add_dependencies (fortranlib_test_F03 ${HDF5_F90_TEST_LIB_TARGET}) else () target_include_directories (fortranlib_test_F03 PRIVATE "${CMAKE_Fortran_MODULE_DIRECTORY}/shared;${HDF5_F90_BINARY_DIR};${HDF5_F90_BINARY_DIR}/shared") - target_link_libraries (fortranlib_test_F03 PRIVATE ${HDF5_F90_TEST_LIBSH_TARGET} ${HDF5_F90_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} $<$:ws2_32.lib>) + target_link_libraries (fortranlib_test_F03 PRIVATE ${HDF5_F90_TEST_LIBSH_TARGET} ${HDF5_F90_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:ws2_32.lib>") set_target_properties (fortranlib_test_F03 PROPERTIES LINKER_LANGUAGE Fortran FOLDER test/fortran @@ -336,7 +336,7 @@ if(MSVC) endif() if (NOT BUILD_SHARED_LIBS) target_include_directories (fflush1 PRIVATE ${CMAKE_Fortran_MODULE_DIRECTORY}/static) - target_link_libraries (fflush1 PRIVATE ${HDF5_F90_LIB_TARGET} ${HDF5_F90_TEST_LIB_TARGET} ${HDF5_LIB_TARGET} $<$:ws2_32.lib>) + target_link_libraries (fflush1 PRIVATE ${HDF5_F90_LIB_TARGET} ${HDF5_F90_TEST_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:ws2_32.lib>") set_target_properties (fflush1 PROPERTIES LINKER_LANGUAGE Fortran FOLDER test/fortran @@ -345,7 +345,7 @@ if (NOT BUILD_SHARED_LIBS) add_dependencies (fflush1 ${HDF5_F90_TEST_LIB_TARGET}) else () target_include_directories (fflush1 PRIVATE 
${CMAKE_Fortran_MODULE_DIRECTORY}/shared) - target_link_libraries (fflush1 PRIVATE ${HDF5_F90_LIBSH_TARGET} ${HDF5_F90_TEST_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} $<$:ws2_32.lib>) + target_link_libraries (fflush1 PRIVATE ${HDF5_F90_LIBSH_TARGET} ${HDF5_F90_TEST_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:ws2_32.lib>") set_target_properties (fflush1 PROPERTIES LINKER_LANGUAGE Fortran FOLDER test/fortran @@ -368,7 +368,7 @@ if(MSVC) endif() if (NOT BUILD_SHARED_LIBS) target_include_directories (fflush2 PRIVATE ${CMAKE_Fortran_MODULE_DIRECTORY}/static) - target_link_libraries (fflush2 PRIVATE ${HDF5_F90_TEST_LIB_TARGET} ${HDF5_F90_LIB_TARGET} ${HDF5_LIB_TARGET} $<$:ws2_32.lib>) + target_link_libraries (fflush2 PRIVATE ${HDF5_F90_TEST_LIB_TARGET} ${HDF5_F90_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:ws2_32.lib>") set_target_properties (fflush2 PROPERTIES LINKER_LANGUAGE Fortran FOLDER test/fortran @@ -377,7 +377,7 @@ if (NOT BUILD_SHARED_LIBS) add_dependencies (fflush2 ${HDF5_F90_TEST_LIB_TARGET}) else () target_include_directories (fflush2 PRIVATE ${CMAKE_Fortran_MODULE_DIRECTORY}/shared) - target_link_libraries (fflush2 PRIVATE ${HDF5_F90_TEST_LIBSH_TARGET} ${HDF5_F90_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} $<$:ws2_32.lib>) + target_link_libraries (fflush2 PRIVATE ${HDF5_F90_TEST_LIBSH_TARGET} ${HDF5_F90_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:ws2_32.lib>") set_target_properties (fflush2 PROPERTIES LINKER_LANGUAGE Fortran FOLDER test/fortran @@ -400,7 +400,7 @@ if(MSVC) endif() if (NOT BUILD_SHARED_LIBS) target_include_directories (vol_connector PRIVATE ${CMAKE_Fortran_MODULE_DIRECTORY}/static) - target_link_libraries (vol_connector PRIVATE ${HDF5_F90_LIB_TARGET} ${HDF5_F90_TEST_LIB_TARGET} ${HDF5_LIB_TARGET} $<$:ws2_32.lib>) + target_link_libraries (vol_connector PRIVATE ${HDF5_F90_LIB_TARGET} ${HDF5_F90_TEST_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:ws2_32.lib>") set_target_properties (vol_connector PROPERTIES LINKER_LANGUAGE Fortran FOLDER test/fortran @@ -409,7 +409,7 @@ if (NOT 
BUILD_SHARED_LIBS) add_dependencies (vol_connector ${HDF5_F90_TEST_LIB_TARGET}) else () target_include_directories (vol_connector PRIVATE ${CMAKE_Fortran_MODULE_DIRECTORY}/shared) - target_link_libraries (vol_connector PRIVATE ${HDF5_F90_LIBSH_TARGET} ${HDF5_F90_TEST_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} $<$:ws2_32.lib>) + target_link_libraries (vol_connector PRIVATE ${HDF5_F90_LIBSH_TARGET} ${HDF5_F90_TEST_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:ws2_32.lib>") set_target_properties (vol_connector PROPERTIES LINKER_LANGUAGE Fortran FOLDER test/fortran diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 1c14ab790af..7f44441442c 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -402,6 +402,19 @@ Bug Fixes since HDF5-1.12.2 release Configuration ------------- + - Fixed syntax of generator expressions used by CMake + + Adding quotes around the generator expression should allow CMake to + correctly parse the expression. Generator expressions are typically + parsed after command arguments. If a generator expression contains + spaces, new lines, semicolons or other characters that may be + interpreted as command argument separators, the whole expression + should be surrounded by quotes when passed to a command. Failure to + do so may result in the expression being split and it may no longer + be recognized as a generator expression. + + Fixes GitHub issue #2906 + - Correct the CMake generated pkg-config file The pkg-config file generated by CMake had the order and placement of the @@ -413,7 +426,7 @@ Bug Fixes since HDF5-1.12.2 release supported pkconfig files. Still recommend that the CMake config file method be used for building projects with CMake. 
- Fixes GitHub issues #1546 & #2259 + Fixes GitHub issues #1546 and #2259 - Change the settings of the *pc files to use the correct format diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 75505a67d66..2f30e97d708 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -1182,17 +1182,17 @@ if (BUILD_STATIC_LIBS) ${HDF_EXTRA_C_FLAGS} ${HDF_EXTRA_FLAGS} PRIVATE - $<$:H5_DEBUG_API> # Enable tracing of the API - $<$:H5Z_DEBUG;H5T_DEBUG;H5S_DEBUG;H5O_DEBUG;H5I_DEBUG;H5HL_DEBUG;H5F_DEBUG;H5D_DEBUG;H5B2_DEBUG;H5AC_DEBUG> + "$<$:H5_DEBUG_API>" # Enable tracing of the API + "$<$:H5Z_DEBUG;H5T_DEBUG;H5S_DEBUG;H5O_DEBUG;H5I_DEBUG;H5HL_DEBUG;H5F_DEBUG;H5D_DEBUG;H5B2_DEBUG;H5AC_DEBUG>" ) TARGET_C_PROPERTIES (${HDF5_LIB_TARGET} STATIC) target_link_libraries (${HDF5_LIB_TARGET} PRIVATE ${LINK_LIBS} ${LINK_COMP_LIBS} - PUBLIC $<$>:${CMAKE_DL_LIBS}> "$<$:MPI::MPI_C>" + PUBLIC "$<$>:${CMAKE_DL_LIBS}>" "$<$:MPI::MPI_C>" ) if (NOT WIN32) target_link_libraries (${HDF5_LIB_TARGET} - PRIVATE $<$:Threads::Threads> + PRIVATE "$<$:Threads::Threads>" ) endif () set_global_variable (HDF5_LIBRARIES_TO_EXPORT ${HDF5_LIB_TARGET}) @@ -1223,14 +1223,15 @@ if (BUILD_SHARED_LIBS) ${HDF_EXTRA_C_FLAGS} ${HDF_EXTRA_FLAGS} PRIVATE - $<$:H5_HAVE_THREADSAFE> - $<$:H5_DEBUG_API> # Enable tracing of the API - $<$:H5Z_DEBUG;H5T_DEBUG;H5S_DEBUG;H5O_DEBUG;H5I_DEBUG;H5HL_DEBUG;H5F_DEBUG;H5D_DEBUG;H5B2_DEBUG;H5AC_DEBUG> + "$<$:H5_HAVE_THREADSAFE>" + "$<$:H5_DEBUG_API>" # Enable tracing of the API + "$<$:H5Z_DEBUG;H5T_DEBUG;H5S_DEBUG;H5O_DEBUG;H5I_DEBUG;H5HL_DEBUG;H5F_DEBUG;H5D_DEBUG;H5B2_DEBUG;H5AC_DEBUG>" ) TARGET_C_PROPERTIES (${HDF5_LIBSH_TARGET} SHARED) target_link_libraries (${HDF5_LIBSH_TARGET} - PRIVATE ${LINK_LIBS} ${LINK_COMP_LIBS} $<$:Threads::Threads> - PUBLIC $<$>:${CMAKE_DL_LIBS}> "$<$:MPI::MPI_C>" + PRIVATE ${LINK_LIBS} ${LINK_COMP_LIBS} + "$<$:Threads::Threads>" + PUBLIC "$<$>:${CMAKE_DL_LIBS}>" "$<$:MPI::MPI_C>" ) set_global_variable (HDF5_LIBRARIES_TO_EXPORT 
"${HDF5_LIBRARIES_TO_EXPORT};${HDF5_LIBSH_TARGET}") H5_SET_LIB_OPTIONS (${HDF5_LIBSH_TARGET} ${HDF5_LIB_NAME} SHARED "LIB") diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt index 5c0dc9a6f68..3102f9a72d1 100644 --- a/test/CMakeLists.txt +++ b/test/CMakeLists.txt @@ -40,7 +40,7 @@ if (BUILD_STATIC_LIBS) TARGET_C_PROPERTIES (${HDF5_TEST_LIB_TARGET} STATIC) target_link_libraries (${HDF5_TEST_LIB_TARGET} PUBLIC ${LINK_LIBS} ${HDF5_LIB_TARGET} - PRIVATE $<$,$>:ws2_32.lib> + PRIVATE "$<$,$>:ws2_32.lib>" ) if (MINGW) target_link_libraries (${HDF5_TEST_LIB_TARGET} PRIVATE "wsock32.lib") @@ -473,7 +473,7 @@ if (NOT BUILD_SHARED_LIBS) target_link_libraries (ttsafe PRIVATE ${HDF5_TEST_LIB_TARGET}) if (NOT WIN32) target_link_libraries (ttsafe - PRIVATE $<$:Threads::Threads> + PRIVATE "$<$:Threads::Threads>" ) endif () else () @@ -499,7 +499,7 @@ if (NOT BUILD_SHARED_LIBS) target_link_libraries (thread_id PRIVATE ${HDF5_TEST_LIB_TARGET}) if (NOT WIN32) target_link_libraries (thread_id - PRIVATE $<$:Threads::Threads> + PRIVATE "$<$:Threads::Threads>" ) endif () else () diff --git a/testpar/CMakeLists.txt b/testpar/CMakeLists.txt index 4d9e65bd49e..eb5b8cd5852 100644 --- a/testpar/CMakeLists.txt +++ b/testpar/CMakeLists.txt @@ -56,13 +56,13 @@ macro (ADD_H5P_EXE file) TARGET_C_PROPERTIES (${file} STATIC) target_link_libraries (${file} PRIVATE ${HDF5_TEST_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:MPI::MPI_C>" - $<$,$>:ws2_32.lib> + "$<$,$>:ws2_32.lib>" ) else () TARGET_C_PROPERTIES (${file} SHARED) target_link_libraries (${file} PRIVATE ${HDF5_TEST_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:MPI::MPI_C>" - $<$,$>:ws2_32.lib> + "$<$,$>:ws2_32.lib>" ) endif () set_target_properties (${file} PROPERTIES FOLDER test/par) From d5143567c8f08f6f6de80ffcffa3ede98634ca78 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Thu, 11 May 2023 19:07:02 -0500 Subject: [PATCH 064/108] Update action uses version (#2939) --- .github/workflows/clang-format-fix.yml 
| 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/clang-format-fix.yml b/.github/workflows/clang-format-fix.yml index feaa3d0014e..00d23529cbd 100644 --- a/.github/workflows/clang-format-fix.yml +++ b/.github/workflows/clang-format-fix.yml @@ -27,7 +27,7 @@ jobs: inplace: True style: file exclude: './config ./hl/src/H5LTanalyze.c ./hl/src/H5LTparse.c ./hl/src/H5LTparse.h ./src/H5Epubgen.h ./src/H5Einit.h ./src/H5Eterm.h ./src/H5Edefin.h ./src/H5version.h ./src/H5overflow.h' - - uses: EndBug/add-and-commit@v7 + - uses: EndBug/add-and-commit@v9 with: author_name: github-actions author_email: 41898282+github-actions[bot]@users.noreply.github.com From 8dcc7dc17ff4e797edc5570a6eaa1d8da691b97e Mon Sep 17 00:00:00 2001 From: mattjala <124107509+mattjala@users.noreply.github.com> Date: Thu, 18 May 2023 11:13:34 -0500 Subject: [PATCH 065/108] Prevent buffer overrun in H5S_select_deserialize (#2956) --- src/H5Odtype.c | 503 +++++++++++++++++++++++++++++++++-------------- src/H5Olayout.c | 20 +- src/H5Rint.c | 19 +- src/H5S.c | 5 +- src/H5Sall.c | 18 +- src/H5Shyper.c | 80 ++++++-- src/H5Snone.c | 17 +- src/H5Spkg.h | 3 +- src/H5Spoint.c | 76 +++++-- src/H5Sprivate.h | 4 +- src/H5Sselect.c | 19 +- src/H5private.h | 9 + 12 files changed, 560 insertions(+), 213 deletions(-) diff --git a/src/H5Odtype.c b/src/H5Odtype.c index c13f80d61ec..b6aaa7a9c6c 100644 --- a/src/H5Odtype.c +++ b/src/H5Odtype.c @@ -108,35 +108,46 @@ const H5O_msg_class_t H5O_MSG_DTYPE[1] = {{ }}; /*------------------------------------------------------------------------- - * Function: H5O__dtype_decode_helper + * Function: H5O__dtype_decode_helper * - * Purpose: Decodes a datatype + * Purpose: Decodes a datatype * - * Return: TRUE if we can upgrade the parent type's version even + * Return: TRUE if we can upgrade the parent type's version even * with strict format checks * FALSE if we cannot - * Negative on failure - * - * Programmer: Robb Matzke - * Monday, December 8, 1997 
- * + * NEGATIVE on failure *------------------------------------------------------------------------- */ static htri_t -H5O__dtype_decode_helper(unsigned *ioflags /*in,out*/, const uint8_t **pp, H5T_t *dt) +H5O__dtype_decode_helper(unsigned *ioflags /*in,out*/, const uint8_t **pp, H5T_t *dt, hbool_t skip, + const uint8_t *p_end) { - unsigned flags, version; - unsigned i; - size_t z; - htri_t ret_value = FALSE; /* Return value */ + unsigned flags; + unsigned version; + htri_t ret_value = FALSE; - FUNC_ENTER_STATIC + FUNC_ENTER_PACKAGE - /* check args */ HDassert(pp && *pp); HDassert(dt && dt->shared); + /* XXX NOTE! + * + * H5Tencode() does not take a buffer size, so normal bounds checking in + * that case is impossible. + * + * Instead of using our normal H5_IS_BUFFER_OVERFLOW macro, use + * H5_IS_KNOWN_BUFFER_OVERFLOW, which will skip the check when the + * we're decoding a buffer from H5Tconvert(). + * + * Even if this is fixed at some point in the future, as long as we + * support the old, size-less API call, we will need to use the modified + * macros. 
+ */ + /* Version, class & flags */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, 4, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding"); UINT32DECODE(*pp, flags); version = (flags >> 4) & 0x0f; if (version < H5O_DTYPE_VERSION_1 || version > H5O_DTYPE_VERSION_LATEST) @@ -146,8 +157,14 @@ H5O__dtype_decode_helper(unsigned *ioflags /*in,out*/, const uint8_t **pp, H5T_t flags >>= 8; /* Size */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, 4, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding"); UINT32DECODE(*pp, dt->shared->size); + /* Check for invalid datatype size */ + if (dt->shared->size == 0) + HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, FAIL, "invalid datatype size") + switch (dt->shared->type) { case H5T_INTEGER: /* @@ -157,6 +174,8 @@ H5O__dtype_decode_helper(unsigned *ioflags /*in,out*/, const uint8_t **pp, H5T_t dt->shared->u.atomic.lsb_pad = (flags & 0x2) ? H5T_PAD_ONE : H5T_PAD_ZERO; dt->shared->u.atomic.msb_pad = (flags & 0x4) ? H5T_PAD_ONE : H5T_PAD_ZERO; dt->shared->u.atomic.u.i.sign = (flags & 0x8) ? H5T_SGN_2 : H5T_SGN_NONE; + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, 2 + 2, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding"); UINT16DECODE(*pp, dt->shared->u.atomic.offset); UINT16DECODE(*pp, dt->shared->u.atomic.prec); break; @@ -174,7 +193,7 @@ H5O__dtype_decode_helper(unsigned *ioflags /*in,out*/, const uint8_t **pp, H5T_t /* VAX order if both 1st and 6th bits are turned on*/ if (flags & 0x40) dt->shared->u.atomic.order = H5T_ORDER_VAX; - } /* end if */ + } dt->shared->u.atomic.lsb_pad = (flags & 0x2) ? H5T_PAD_ONE : H5T_PAD_ZERO; dt->shared->u.atomic.msb_pad = (flags & 0x4) ? H5T_PAD_ONE : H5T_PAD_ZERO; dt->shared->u.atomic.u.f.pad = (flags & 0x8) ? 
H5T_PAD_ONE : H5T_PAD_ZERO; @@ -193,21 +212,40 @@ H5O__dtype_decode_helper(unsigned *ioflags /*in,out*/, const uint8_t **pp, H5T_t default: HGOTO_ERROR(H5E_DATATYPE, H5E_UNSUPPORTED, FAIL, "unknown floating-point normalization") - } /* end switch */ + } dt->shared->u.atomic.u.f.sign = (flags >> 8) & 0xff; + + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, 2 + 2, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding"); UINT16DECODE(*pp, dt->shared->u.atomic.offset); UINT16DECODE(*pp, dt->shared->u.atomic.prec); + + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, 1 + 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding"); dt->shared->u.atomic.u.f.epos = *(*pp)++; dt->shared->u.atomic.u.f.esize = *(*pp)++; - HDassert(dt->shared->u.atomic.u.f.esize > 0); + if (dt->shared->u.atomic.u.f.esize == 0) + HGOTO_ERROR(H5E_DATATYPE, H5E_BADVALUE, FAIL, "exponent size can't be zero") + + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, 1 + 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding"); dt->shared->u.atomic.u.f.mpos = *(*pp)++; dt->shared->u.atomic.u.f.msize = *(*pp)++; - HDassert(dt->shared->u.atomic.u.f.msize > 0); + if (dt->shared->u.atomic.u.f.msize == 0) + HGOTO_ERROR(H5E_DATATYPE, H5E_BADVALUE, FAIL, "mantissa size can't be zero") + + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, 4, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding"); UINT32DECODE(*pp, dt->shared->u.atomic.u.f.ebias); break; - case H5T_TIME: /* Time datatypes */ + case H5T_TIME: + /* + * Time datatypes... + */ dt->shared->u.atomic.order = (flags & 0x1) ? 
H5T_ORDER_BE : H5T_ORDER_LE; + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, 2, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding"); UINT16DECODE(*pp, dt->shared->u.atomic.prec); break; @@ -232,29 +270,42 @@ H5O__dtype_decode_helper(unsigned *ioflags /*in,out*/, const uint8_t **pp, H5T_t dt->shared->u.atomic.order = (flags & 0x1) ? H5T_ORDER_BE : H5T_ORDER_LE; dt->shared->u.atomic.lsb_pad = (flags & 0x2) ? H5T_PAD_ONE : H5T_PAD_ZERO; dt->shared->u.atomic.msb_pad = (flags & 0x4) ? H5T_PAD_ONE : H5T_PAD_ZERO; + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, 2 + 2, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding"); UINT16DECODE(*pp, dt->shared->u.atomic.offset); UINT16DECODE(*pp, dt->shared->u.atomic.prec); break; - case H5T_OPAQUE: + case H5T_OPAQUE: { + size_t z; + /* * Opaque types... */ + + /* The opaque tag flag field must be aligned */ z = flags & (H5T_OPAQUE_TAG_MAX - 1); - HDassert(0 == (z & 0x7)); /*must be aligned*/ + if (0 != (z & 0x7)) + HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, FAIL, "opaque flag field must be aligned") + if (NULL == (dt->shared->u.opaque.tag = (char *)H5MM_malloc(z + 1))) HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed") + + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, z, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding"); H5MM_memcpy(dt->shared->u.opaque.tag, *pp, z); dt->shared->u.opaque.tag[z] = '\0'; + *pp += z; break; + } case H5T_COMPOUND: { + unsigned nmembs; /* Number of compound members */ unsigned offset_nbytes; /* Size needed to encode member offsets */ size_t max_memb_pos = 0; /* Maximum member covered, so far */ unsigned max_version = 0; /* Maximum member version */ unsigned upgrade_to = 0; /* Version number we can "soft" upgrade to */ - unsigned j; /* Compute the # of bytes required to store a member offset */ offset_nbytes = H5VM_limit_enc_size((uint64_t)dt->shared->size); @@ 
-262,76 +313,146 @@ H5O__dtype_decode_helper(unsigned *ioflags /*in,out*/, const uint8_t **pp, H5T_t /* * Compound datatypes... */ - dt->shared->u.compnd.nmembs = flags & 0xffff; - if (dt->shared->u.compnd.nmembs == 0) - HGOTO_ERROR(H5E_DATATYPE, H5E_BADVALUE, FAIL, "invalid number of members: %u", - dt->shared->u.compnd.nmembs) - dt->shared->u.compnd.nalloc = dt->shared->u.compnd.nmembs; - dt->shared->u.compnd.memb = - (H5T_cmemb_t *)H5MM_calloc(dt->shared->u.compnd.nalloc * sizeof(H5T_cmemb_t)); - dt->shared->u.compnd.memb_size = 0; - if (NULL == dt->shared->u.compnd.memb) + nmembs = flags & 0xffff; + if (nmembs == 0) + HGOTO_ERROR(H5E_DATATYPE, H5E_BADVALUE, FAIL, "invalid number of members: %u", nmembs) + if (NULL == + (dt->shared->u.compnd.memb = (H5T_cmemb_t *)H5MM_calloc(nmembs * sizeof(H5T_cmemb_t)))) HGOTO_ERROR(H5E_DATATYPE, H5E_CANTALLOC, FAIL, "memory allocation failed") - for (i = 0; i < dt->shared->u.compnd.nmembs; i++) { - unsigned ndims = 0; /* Number of dimensions of the array field */ - htri_t can_upgrade; /* Whether we can upgrade this type's version */ - hsize_t dim[H5O_LAYOUT_NDIMS]; /* Dimensions of the array */ - H5T_t *array_dt; /* Temporary pointer to the array datatype */ - H5T_t *temp_type; /* Temporary pointer to the field's datatype */ + dt->shared->u.compnd.nalloc = nmembs; + + if (dt->shared->u.compnd.memb_size != 0) + HGOTO_ERROR(H5E_DATATYPE, H5E_BADVALUE, FAIL, "member size not initialized to zero") + + for (dt->shared->u.compnd.nmembs = 0; dt->shared->u.compnd.nmembs < nmembs; + dt->shared->u.compnd.nmembs++) { + + size_t actual_name_length = 0; /* Actual length of name */ + unsigned ndims = 0; /* Number of dimensions of the array field */ + htri_t can_upgrade; /* Whether we can upgrade this type's version */ + hsize_t dim[H5O_LAYOUT_NDIMS]; /* Dimensions of the array */ + H5T_t *array_dt; /* Temporary pointer to the array datatype */ + H5T_t *temp_type; /* Temporary pointer to the field's datatype */ + + /* Get the length of the 
field name */ + if (!skip) { + /* There is a realistic buffer end, so check bounds */ + + size_t max = (size_t)(p_end - *pp + 1); /* Max possible name length */ + + actual_name_length = HDstrnlen((const char *)*pp, max); + if (actual_name_length == max) + HGOTO_ERROR(H5E_OHDR, H5E_NOSPACE, FAIL, "field name not null terminated") + } + else { + /* The buffer end can't be determined when it's an unbounded buffer + * passed via H5Tdecode(), so don't bounds check and hope for + * the best. + */ + actual_name_length = HDstrlen((const char *)*pp); + } + + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, actual_name_length, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding"); /* Decode the field name */ - dt->shared->u.compnd.memb[i].name = H5MM_xstrdup((const char *)*pp); + if (NULL == (dt->shared->u.compnd.memb[dt->shared->u.compnd.nmembs].name = + H5MM_xstrdup((const char *)*pp))) + HGOTO_ERROR(H5E_RESOURCE, H5E_CANTCOPY, FAIL, + "can't duplicate compound member name string") /* Version 3 of the datatype message eliminated the padding to multiple of 8 bytes */ - if (version >= H5O_DTYPE_VERSION_3) + if (version >= H5O_DTYPE_VERSION_3) { /* Advance past name, including null terminator */ - *pp += HDstrlen((const char *)*pp) + 1; - else + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, actual_name_length + 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, + "ran off end of input buffer while decoding"); + *pp += actual_name_length + 1; + } + else { /* Advance multiple of 8 w/ null terminator */ - *pp += ((HDstrlen((const char *)*pp) + 8) / 8) * 8; + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, ((actual_name_length + 8) / 8) * 8, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, + "ran off end of input buffer while decoding"); + *pp += ((actual_name_length + 8) / 8) * 8; + } /* Decode the field offset */ /* (starting with version 3 of the datatype message, use the minimum # of bytes required) */ - if (version >= H5O_DTYPE_VERSION_3) 
- UINT32DECODE_VAR(*pp, dt->shared->u.compnd.memb[i].offset, offset_nbytes) - else - UINT32DECODE(*pp, dt->shared->u.compnd.memb[i].offset) + if (version >= H5O_DTYPE_VERSION_3) { + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, offset_nbytes, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, + "ran off end of input buffer while decoding"); + UINT32DECODE_VAR(*pp, dt->shared->u.compnd.memb[dt->shared->u.compnd.nmembs].offset, + offset_nbytes) + } + else { + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, 4, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, + "ran off end of input buffer while decoding"); + UINT32DECODE(*pp, dt->shared->u.compnd.memb[dt->shared->u.compnd.nmembs].offset) + } /* Older versions of the library allowed a field to have * intrinsic 'arrayness'. Newer versions of the library * use the separate array datatypes. */ if (version == H5O_DTYPE_VERSION_1) { /* Decode the number of dimensions */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, + "ran off end of input buffer while decoding"); ndims = *(*pp)++; /* Check that ndims is valid */ - if (ndims > 4) + if (ndims > 4) { + dt->shared->u.compnd.memb[dt->shared->u.compnd.nmembs].name = + H5MM_xfree(dt->shared->u.compnd.memb[dt->shared->u.compnd.nmembs].name); HGOTO_ERROR(H5E_DATATYPE, H5E_BADTYPE, FAIL, "invalid number of dimensions for array") + } - *pp += 3; /*reserved bytes */ + /* Skip reserved bytes */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, 3, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, + "ran off end of input buffer while decoding"); + *pp += 3; /* Skip dimension permutation */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, 4, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, + "ran off end of input buffer while decoding"); *pp += 4; /* Skip reserved bytes */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, 4, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, + "ran off end of input buffer while decoding"); *pp += 4; /* Decode array 
dimension sizes */ - for (j = 0; j < 4; j++) - UINT32DECODE(*pp, dim[j]); - } /* end if */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, (4 * 4), p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, + "ran off end of input buffer while decoding"); + for (int i = 0; i < 4; i++) + UINT32DECODE(*pp, dim[i]); + } /* Allocate space for the field's datatype */ - if (NULL == (temp_type = H5T__alloc())) + if (NULL == (temp_type = H5T__alloc())) { + dt->shared->u.compnd.memb[dt->shared->u.compnd.nmembs].name = + H5MM_xfree(dt->shared->u.compnd.memb[dt->shared->u.compnd.nmembs].name); HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed") + } /* Decode the field's datatype information */ - if ((can_upgrade = H5O__dtype_decode_helper(ioflags, pp, temp_type)) < 0) { - for (j = 0; j <= i; j++) - H5MM_xfree(dt->shared->u.compnd.memb[j].name); - H5MM_xfree(dt->shared->u.compnd.memb); + if ((can_upgrade = H5O__dtype_decode_helper(ioflags, pp, temp_type, skip, p_end)) < 0) { + dt->shared->u.compnd.memb[dt->shared->u.compnd.nmembs].name = + H5MM_xfree(dt->shared->u.compnd.memb[dt->shared->u.compnd.nmembs].name); + if (H5T_close_real(temp_type) < 0) + HDONE_ERROR(H5E_DATATYPE, H5E_CANTRELEASE, FAIL, "can't release datatype info") HGOTO_ERROR(H5E_DATATYPE, H5E_CANTDECODE, FAIL, "unable to decode member type") - } /* end if */ + } + if (temp_type->shared->size == 0) + HGOTO_ERROR(H5E_DATATYPE, H5E_CANTDECODE, FAIL, "type size can't be zero") /* Upgrade the version if we can and it is necessary */ if (can_upgrade && temp_type->shared->version > version) { @@ -339,7 +460,7 @@ H5O__dtype_decode_helper(unsigned *ioflags /*in,out*/, const uint8_t **pp, H5T_t /* Pass "can_upgrade" flag down to parent type */ ret_value = TRUE; - } /* end if */ + } /* Go create the array datatype now, for older versions of the datatype message */ if (version == H5O_DTYPE_VERSION_1) { @@ -347,15 +468,21 @@ H5O__dtype_decode_helper(unsigned *ioflags /*in,out*/, const uint8_t **pp, H5T_t if (ndims 
> 0) { /* Create the array datatype for the field */ if ((array_dt = H5T__array_create(temp_type, ndims, dim)) == NULL) { - for (j = 0; j <= i; j++) - H5MM_xfree(dt->shared->u.compnd.memb[j].name); - H5MM_xfree(dt->shared->u.compnd.memb); + dt->shared->u.compnd.memb[dt->shared->u.compnd.nmembs].name = + H5MM_xfree(dt->shared->u.compnd.memb[dt->shared->u.compnd.nmembs].name); + if (H5T_close_real(temp_type) < 0) + HDONE_ERROR(H5E_DATATYPE, H5E_CANTRELEASE, FAIL, + "can't release datatype info") HGOTO_ERROR(H5E_DATATYPE, H5E_CANTREGISTER, FAIL, "unable to create array datatype") - } /* end if */ + } /* Close the base type for the array */ - (void)H5T_close_real(temp_type); + if (H5T_close_real(temp_type) < 0) { + dt->shared->u.compnd.memb[dt->shared->u.compnd.nmembs].name = + H5MM_xfree(dt->shared->u.compnd.memb[dt->shared->u.compnd.nmembs].name); + HGOTO_ERROR(H5E_DATATYPE, H5E_CANTRELEASE, FAIL, "can't release datatype info") + } /* Make the array type the type that is set for the field */ temp_type = array_dt; @@ -371,43 +498,51 @@ H5O__dtype_decode_helper(unsigned *ioflags /*in,out*/, const uint8_t **pp, H5T_t /* Set the return value to indicate that we should freely * upgrade parent types */ ret_value = TRUE; - } /* end else */ - } /* end if */ - } /* end if */ + } + } + } /* Keep track of the maximum member version found */ if (temp_type->shared->version > max_version) max_version = temp_type->shared->version; - /* - * Set the "force conversion" flag if VL datatype fields exist in this + /* Set the "force conversion" flag if VL datatype fields exist in this * type or any component types */ if (temp_type->shared->force_conv == TRUE) dt->shared->force_conv = TRUE; /* Member size */ - dt->shared->u.compnd.memb[i].size = temp_type->shared->size; + dt->shared->u.compnd.memb[dt->shared->u.compnd.nmembs].size = temp_type->shared->size; dt->shared->u.compnd.memb_size += temp_type->shared->size; /* Set the field datatype (finally :-) */ - 
dt->shared->u.compnd.memb[i].type = temp_type; - - /* Check if this field overlaps with a prior field */ - /* (probably indicates that the file is corrupt) */ - if (i > 0 && dt->shared->u.compnd.memb[i].offset < max_memb_pos) { - for (j = 0; j < i; j++) - if (dt->shared->u.compnd.memb[i].offset >= dt->shared->u.compnd.memb[j].offset && - dt->shared->u.compnd.memb[i].offset < - (dt->shared->u.compnd.memb[j].offset + dt->shared->u.compnd.memb[j].size)) + dt->shared->u.compnd.memb[dt->shared->u.compnd.nmembs].type = temp_type; + + /* Check if this field overlaps with a prior field + * (probably indicates that the file is corrupt) + */ + if (dt->shared->u.compnd.nmembs > 0 && + dt->shared->u.compnd.memb[dt->shared->u.compnd.nmembs].offset < max_memb_pos) { + for (unsigned u = 0; u < dt->shared->u.compnd.nmembs; u++) + if ((dt->shared->u.compnd.memb[dt->shared->u.compnd.nmembs].offset >= + dt->shared->u.compnd.memb[u].offset && + dt->shared->u.compnd.memb[dt->shared->u.compnd.nmembs].offset < + (dt->shared->u.compnd.memb[u].offset + dt->shared->u.compnd.memb[u].size)) || + (dt->shared->u.compnd.memb[dt->shared->u.compnd.nmembs].offset < + dt->shared->u.compnd.memb[u].offset && + (dt->shared->u.compnd.memb[dt->shared->u.compnd.nmembs].offset + + dt->shared->u.compnd.memb[dt->shared->u.compnd.nmembs].size) > + dt->shared->u.compnd.memb[u].offset)) HGOTO_ERROR(H5E_DATATYPE, H5E_CANTDECODE, FAIL, "member overlaps with previous member") - } /* end if */ + } /* Update the maximum member position covered */ - max_memb_pos = MAX(max_memb_pos, - (dt->shared->u.compnd.memb[i].offset + dt->shared->u.compnd.memb[i].size)); - } /* end for */ + max_memb_pos = + MAX(max_memb_pos, (dt->shared->u.compnd.memb[dt->shared->u.compnd.nmembs].offset + + dt->shared->u.compnd.memb[dt->shared->u.compnd.nmembs].size)); + } /* Check if the compound type is packed */ H5T__update_packed(dt); @@ -420,14 +555,17 @@ H5O__dtype_decode_helper(unsigned *ioflags /*in,out*/, const uint8_t **pp, H5T_t /* We 
won't mark the message dirty since there were no * errors in the file, simply type versions that we will no * longer encode. */ - } /* end if */ + } /* Check that no member of this compound has a version greater * than the compound itself. */ H5O_DTYPE_CHECK_VERSION(dt, version, max_version, ioflags, "compound", FAIL) } break; - case H5T_REFERENCE: /* Reference datatypes... */ + case H5T_REFERENCE: + /* + * Reference datatypes... + */ dt->shared->u.atomic.order = H5T_ORDER_NONE; dt->shared->u.atomic.prec = 8 * dt->shared->size; dt->shared->u.atomic.offset = 0; @@ -461,57 +599,102 @@ H5O__dtype_decode_helper(unsigned *ioflags /*in,out*/, const uint8_t **pp, H5T_t HGOTO_ERROR(H5E_DATATYPE, H5E_CANTINIT, FAIL, "invalid datatype location") break; - case H5T_ENUM: + case H5T_ENUM: { + unsigned nmembs; + /* * Enumeration datatypes... */ - dt->shared->u.enumer.nmembs = dt->shared->u.enumer.nalloc = flags & 0xffff; + nmembs = flags & 0xffff; if (NULL == (dt->shared->parent = H5T__alloc())) - HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed") - if (H5O__dtype_decode_helper(ioflags, pp, dt->shared->parent) < 0) + HGOTO_ERROR(H5E_RESOURCE, H5E_CANTALLOC, FAIL, "can't allocate parent datatype") + if (H5O__dtype_decode_helper(ioflags, pp, dt->shared->parent, skip, p_end) < 0) HGOTO_ERROR(H5E_DATATYPE, H5E_CANTDECODE, FAIL, "unable to decode parent datatype") + if (dt->shared->parent->shared->size != dt->shared->size) + HGOTO_ERROR(H5E_DATATYPE, H5E_BADSIZE, FAIL, "ENUM datatype size does not match parent") /* Check if the parent of this enum has a version greater than the * enum itself. 
*/ H5O_DTYPE_CHECK_VERSION(dt, version, dt->shared->parent->shared->version, ioflags, "enum", FAIL) - if (NULL == (dt->shared->u.enumer.name = - (char **)H5MM_calloc(dt->shared->u.enumer.nalloc * sizeof(char *))) || - NULL == (dt->shared->u.enumer.value = (uint8_t *)H5MM_calloc( - dt->shared->u.enumer.nalloc * dt->shared->parent->shared->size))) - HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed") + /* Allocate name and value arrays */ + if (NULL == (dt->shared->u.enumer.name = (char **)H5MM_calloc(nmembs * sizeof(char *))) || + NULL == (dt->shared->u.enumer.value = + (uint8_t *)H5MM_calloc(nmembs * dt->shared->parent->shared->size))) + HGOTO_ERROR(H5E_RESOURCE, H5E_CANTALLOC, FAIL, "memory allocation failed") + dt->shared->u.enumer.nalloc = nmembs; /* Names */ - for (i = 0; i < dt->shared->u.enumer.nmembs; i++) { - dt->shared->u.enumer.name[i] = H5MM_xstrdup((const char *)*pp); + for (dt->shared->u.enumer.nmembs = 0; dt->shared->u.enumer.nmembs < nmembs; + dt->shared->u.enumer.nmembs++) { + + size_t actual_name_length = 0; /* Actual length of name */ + + /* Get the length of the enum name */ + if (!skip) { + /* There is a realistic buffer end, so check bounds */ + + size_t max = (size_t)(p_end - *pp + 1); /* Max possible name length */ + + actual_name_length = HDstrnlen((const char *)*pp, max); + if (actual_name_length == max) + HGOTO_ERROR(H5E_OHDR, H5E_NOSPACE, FAIL, "enum name not null terminated") + } + else { + /* The buffer end can't be determined when it's an unbounded buffer + * passed via H5Tdecode(), so don't bounds check and hope for + * the best. 
+ */ + actual_name_length = HDstrlen((const char *)*pp); + } + + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, actual_name_length, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding"); + if (NULL == (dt->shared->u.enumer.name[dt->shared->u.enumer.nmembs] = + H5MM_xstrdup((const char *)*pp))) + HGOTO_ERROR(H5E_RESOURCE, H5E_CANTCOPY, FAIL, "can't duplicate enum name string") /* Version 3 of the datatype message eliminated the padding to multiple of 8 bytes */ - if (version >= H5O_DTYPE_VERSION_3) + if (version >= H5O_DTYPE_VERSION_3) { /* Advance past name, including null terminator */ - *pp += HDstrlen((const char *)*pp) + 1; - else + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, actual_name_length + 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, + "ran off end of input buffer while decoding"); + *pp += actual_name_length + 1; + } + else { /* Advance multiple of 8 w/ null terminator */ - *pp += ((HDstrlen((const char *)*pp) + 8) / 8) * 8; - } /* end for */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, ((actual_name_length + 8) / 8) * 8, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, + "ran off end of input buffer while decoding"); + *pp += ((actual_name_length + 8) / 8) * 8; + } + } + if (dt->shared->u.enumer.nmembs != nmembs) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, "incorrect number of enum members decoded"); /* Values */ - H5MM_memcpy(dt->shared->u.enumer.value, *pp, - dt->shared->u.enumer.nmembs * dt->shared->parent->shared->size); - *pp += dt->shared->u.enumer.nmembs * dt->shared->parent->shared->size; - break; + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, nmembs * dt->shared->parent->shared->size, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding"); + H5MM_memcpy(dt->shared->u.enumer.value, *pp, nmembs * dt->shared->parent->shared->size); + *pp += nmembs * dt->shared->parent->shared->size; + } break; - case H5T_VLEN: /* Variable length datatypes... 
*/ + case H5T_VLEN: + /* + * Variable length datatypes... + */ /* Set the type of VL information, either sequence or string */ dt->shared->u.vlen.type = (H5T_vlen_type_t)(flags & 0x0f); if (dt->shared->u.vlen.type == H5T_VLEN_STRING) { dt->shared->u.vlen.pad = (H5T_str_t)((flags >> 4) & 0x0f); dt->shared->u.vlen.cset = (H5T_cset_t)((flags >> 8) & 0x0f); - } /* end if */ + } /* Decode base type of VL information */ if (NULL == (dt->shared->parent = H5T__alloc())) HGOTO_ERROR(H5E_DATATYPE, H5E_NOSPACE, FAIL, "memory allocation failed") - if (H5O__dtype_decode_helper(ioflags, pp, dt->shared->parent) < 0) + if (H5O__dtype_decode_helper(ioflags, pp, dt->shared->parent, skip, p_end) < 0) HGOTO_ERROR(H5E_DATATYPE, H5E_CANTDECODE, FAIL, "unable to decode VL parent type") /* Check if the parent of this vlen has a version greater than the @@ -526,8 +709,13 @@ H5O__dtype_decode_helper(unsigned *ioflags /*in,out*/, const uint8_t **pp, H5T_t HGOTO_ERROR(H5E_DATATYPE, H5E_CANTINIT, FAIL, "invalid datatype location") break; - case H5T_ARRAY: /* Array datatypes */ + case H5T_ARRAY: + /* + * Array datatypes... 
+ */ /* Decode the number of dimensions */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, 1, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding"); dt->shared->u.array.ndims = *(*pp)++; /* Double-check the number of dimensions */ @@ -535,23 +723,32 @@ H5O__dtype_decode_helper(unsigned *ioflags /*in,out*/, const uint8_t **pp, H5T_t HGOTO_ERROR(H5E_DATATYPE, H5E_CANTLOAD, FAIL, "too many dimensions for array datatype") /* Skip reserved bytes, if version has them */ - if (version < H5O_DTYPE_VERSION_3) + if (version < H5O_DTYPE_VERSION_3) { + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, 3, p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding"); *pp += 3; + } /* Decode array dimension sizes & compute number of elements */ - for (i = 0, dt->shared->u.array.nelem = 1; i < (unsigned)dt->shared->u.array.ndims; i++) { - UINT32DECODE(*pp, dt->shared->u.array.dim[i]); - dt->shared->u.array.nelem *= dt->shared->u.array.dim[i]; - } /* end for */ + dt->shared->u.array.nelem = 1; + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, (dt->shared->u.array.ndims * 4), p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding"); + for (unsigned u = 0; u < dt->shared->u.array.ndims; u++) { + UINT32DECODE(*pp, dt->shared->u.array.dim[u]); + dt->shared->u.array.nelem *= dt->shared->u.array.dim[u]; + } /* Skip array dimension permutations, if version has them */ - if (version < H5O_DTYPE_VERSION_3) + if (version < H5O_DTYPE_VERSION_3) { + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *pp, (dt->shared->u.array.ndims * 4), p_end)) + HGOTO_ERROR(H5E_OHDR, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding"); *pp += dt->shared->u.array.ndims * 4; + } /* Decode base type of array */ if (NULL == (dt->shared->parent = H5T__alloc())) HGOTO_ERROR(H5E_DATATYPE, H5E_NOSPACE, FAIL, "memory allocation failed") - if (H5O__dtype_decode_helper(ioflags, pp, dt->shared->parent) < 0) + if 
(H5O__dtype_decode_helper(ioflags, pp, dt->shared->parent, skip, p_end) < 0) HGOTO_ERROR(H5E_DATATYPE, H5E_CANTDECODE, FAIL, "unable to decode array parent type") /* Check if the parent of this array has a version greater than the @@ -561,8 +758,7 @@ H5O__dtype_decode_helper(unsigned *ioflags /*in,out*/, const uint8_t **pp, H5T_t /* There should be no array datatypes with version < 2. */ H5O_DTYPE_CHECK_VERSION(dt, version, H5O_DTYPE_VERSION_2, ioflags, "array", FAIL) - /* - * Set the "force conversion" flag if a VL base datatype is used or + /* Set the "force conversion" flag if a VL base datatype is used or * or if any components of the base datatype are VL types. */ if (dt->shared->parent->shared->force_conv == TRUE) @@ -573,17 +769,15 @@ H5O__dtype_decode_helper(unsigned *ioflags /*in,out*/, const uint8_t **pp, H5T_t case H5T_NCLASSES: default: HGOTO_ERROR(H5E_DATATYPE, H5E_UNSUPPORTED, FAIL, "unknown datatype class found") - } /* end switch */ + } done: + /* Cleanup on error */ if (ret_value < 0) - if (dt != NULL) { - if (dt->shared != NULL) { - HDassert(!dt->shared->owned_vol_obj); - dt->shared = H5FL_FREE(H5T_shared_t, dt->shared); - } /* end if */ - dt = H5FL_FREE(H5T_t, dt); - } /* end if */ + /* Release (reset) dt but do not free it - leave it as an empty datatype as was the case on + * function entry */ + if (H5T__free(dt) < 0) + HDONE_ERROR(H5E_DATATYPE, H5E_CANTRELEASE, FAIL, "can't release datatype info") FUNC_LEAVE_NOAPI(ret_value) } /* end H5O__dtype_decode_helper() */ @@ -613,7 +807,7 @@ H5O__dtype_encode_helper(uint8_t **pp, const H5T_t *dt) size_t n, z; herr_t ret_value = SUCCEED; /* Return value */ - FUNC_ENTER_STATIC + FUNC_ENTER_PACKAGE /* check args */ HDassert(pp && *pp); @@ -1113,33 +1307,48 @@ H5O__dtype_encode_helper(uint8_t **pp, const H5T_t *dt) Pointer to the new message in native order on success, NULL on failure DESCRIPTION This function decodes the "raw" disk form of a simple datatype message - into a struct in memory native format. 
The struct is allocated within this - function using malloc() and is returned to the caller. + into a struct in memory native format. The struct is allocated within this + function using malloc() and is returned to the caller. --------------------------------------------------------------------------*/ static void * H5O__dtype_decode(H5F_t H5_ATTR_UNUSED *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNUSED mesg_flags, - unsigned *ioflags /*in,out*/, size_t H5_ATTR_UNUSED p_size, const uint8_t *p) + unsigned *ioflags /*in,out*/, size_t p_size, const uint8_t *p) { - H5T_t *dt = NULL; - void *ret_value = NULL; /* Return value */ + hbool_t skip; + H5T_t *dt = NULL; + const uint8_t *p_end = p + p_size - 1; + void *ret_value = NULL; - FUNC_ENTER_STATIC + FUNC_ENTER_PACKAGE - /* check args */ + HDassert(f); HDassert(p); /* Allocate datatype message */ if (NULL == (dt = H5T__alloc())) HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, NULL, "memory allocation failed") + /* If we are decoding a buffer from H5Tdecode(), we won't have the size + * of the buffer and bounds checking will be impossible. In this case, + * the library will have set p_size to SIZE_MAX and we can use that + * as a signal to skip bounds checking. + */ + skip = (p_size == SIZE_MAX ? 
TRUE : FALSE); + /* Perform actual decode of message */ - if (H5O__dtype_decode_helper(ioflags, &p, dt) < 0) + if (H5O__dtype_decode_helper(ioflags, &p, dt, skip, p_end) < 0) HGOTO_ERROR(H5E_DATATYPE, H5E_CANTDECODE, NULL, "can't decode type") /* Set return value */ ret_value = dt; done: + /* Cleanup on error */ + if (!ret_value) + /* Free dt */ + if (H5T_close_real(dt) < 0) + HDONE_ERROR(H5E_DATATYPE, H5E_CANTRELEASE, NULL, "can't release datatype info") + FUNC_LEAVE_NOAPI(ret_value) } /* end H5O__dtype_decode() */ @@ -1166,7 +1375,7 @@ H5O__dtype_encode(H5F_t H5_ATTR_UNUSED *f, uint8_t *p, const void *mesg) const H5T_t *dt = (const H5T_t *)mesg; herr_t ret_value = SUCCEED; /* Return value */ - FUNC_ENTER_STATIC + FUNC_ENTER_PACKAGE /* check args */ HDassert(f); @@ -1205,7 +1414,7 @@ H5O__dtype_copy(const void *_src, void *_dst) H5T_t *dst; void *ret_value = NULL; /* Return value */ - FUNC_ENTER_STATIC + FUNC_ENTER_PACKAGE /* check args */ HDassert(src); @@ -1251,7 +1460,7 @@ H5O__dtype_size(const H5F_t *f, const void *_mesg) unsigned u; /* Local index variable */ size_t ret_value = 0; /* Return value */ - FUNC_ENTER_STATIC_NOERR + FUNC_ENTER_PACKAGE_NOERR HDassert(f); HDassert(dt); @@ -1379,7 +1588,7 @@ H5O__dtype_reset(void *_mesg) { H5T_t *dt = (H5T_t *)_mesg; - FUNC_ENTER_STATIC_NOERR + FUNC_ENTER_PACKAGE_NOERR if (dt) H5T__free(dt); @@ -1404,7 +1613,7 @@ H5O__dtype_free(void *mesg) { herr_t ret_value = SUCCEED; /* Return value */ - FUNC_ENTER_STATIC + FUNC_ENTER_PACKAGE /* Sanity check */ HDassert(mesg); @@ -1435,7 +1644,7 @@ H5O__dtype_set_share(void *_mesg /*in,out*/, const H5O_shared_t *sh) H5T_t *dt = (H5T_t *)_mesg; herr_t ret_value = SUCCEED; - FUNC_ENTER_STATIC + FUNC_ENTER_PACKAGE HDassert(dt); HDassert(sh); @@ -1491,7 +1700,7 @@ H5O__dtype_can_share(const void *_mesg) htri_t tri_ret; htri_t ret_value = TRUE; - FUNC_ENTER_STATIC + FUNC_ENTER_PACKAGE HDassert(mesg); @@ -1534,7 +1743,7 @@ H5O__dtype_pre_copy_file(H5F_t *file_src, const void *mesg_src, 
hbool_t H5_ATTR_ H5D_copy_file_ud_t *udata = (H5D_copy_file_ud_t *)_udata; /* Dataset copying user data */ herr_t ret_value = SUCCEED; /* Return value */ - FUNC_ENTER_STATIC + FUNC_ENTER_PACKAGE /* check args */ HDassert(file_src); @@ -1589,7 +1798,7 @@ H5O__dtype_copy_file(H5F_t H5_ATTR_UNUSED *file_src, const H5O_msg_class_t *mesg H5T_t *dst_mesg; /* Destination datatype */ void *ret_value = NULL; /* Return value */ - FUNC_ENTER_STATIC + FUNC_ENTER_PACKAGE /* Perform a normal copy of the object header message */ if (NULL == (dst_mesg = (H5T_t *)H5O__dtype_copy(native_src, NULL))) @@ -1629,7 +1838,7 @@ H5O__dtype_shared_post_copy_upd(const H5O_loc_t H5_ATTR_UNUSED *src_oloc, const H5T_t *dt_dst = (H5T_t *)mesg_dst; /* Destination datatype */ herr_t ret_value = SUCCEED; /* Return value */ - FUNC_ENTER_STATIC + FUNC_ENTER_PACKAGE if (dt_dst->sh_loc.type == H5O_SHARE_TYPE_COMMITTED) { HDassert(H5T_is_named(dt_dst)); @@ -1673,7 +1882,7 @@ H5O__dtype_debug(H5F_t *f, const void *mesg, FILE *stream, int indent, int fwidt unsigned i; size_t k; - FUNC_ENTER_STATIC_NOERR + FUNC_ENTER_PACKAGE_NOERR /* check args */ HDassert(f); @@ -2139,4 +2348,4 @@ H5O__dtype_debug(H5F_t *f, const void *mesg, FILE *stream, int indent, int fwidt } /* end else */ FUNC_LEAVE_NOAPI(SUCCEED) -} /* end H5O__dtype_debug() */ +} /* end H5O__dtype_debug() */ \ No newline at end of file diff --git a/src/H5Olayout.c b/src/H5Olayout.c index ed69fb1a559..0c5c993f566 100644 --- a/src/H5Olayout.c +++ b/src/H5Olayout.c @@ -639,13 +639,27 @@ H5O__layout_decode(H5F_t *f, H5O_t H5_ATTR_UNUSED *open_oh, unsigned H5_ATTR_UNU heap_block_p += tmp_size; /* Source selection */ - if (H5S_SELECT_DESERIALIZE(&mesg->storage.u.virt.list[i].source_select, - &heap_block_p) < 0) + avail_buffer_space = heap_block_p_end - heap_block_p + 1; + + if (avail_buffer_space <= 0) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, NULL, + "buffer overflow while decoding layout") + + if 
(H5S_SELECT_DESERIALIZE(&mesg->storage.u.virt.list[i].source_select, &heap_block_p, + (size_t)(avail_buffer_space)) < 0) HGOTO_ERROR(H5E_OHDR, H5E_CANTDECODE, NULL, "can't decode source space selection") /* Virtual selection */ + + /* Buffer space must be updated after previous deserialization */ + avail_buffer_space = heap_block_p_end - heap_block_p + 1; + + if (avail_buffer_space <= 0) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, NULL, + "buffer overflow while decoding layout") + if (H5S_SELECT_DESERIALIZE(&mesg->storage.u.virt.list[i].source_dset.virtual_select, - &heap_block_p) < 0) + &heap_block_p, (size_t)(avail_buffer_space)) < 0) HGOTO_ERROR(H5E_OHDR, H5E_CANTDECODE, NULL, "can't decode virtual space selection") diff --git a/src/H5Rint.c b/src/H5Rint.c index 656df7ba6ea..d46bbe5e355 100644 --- a/src/H5Rint.c +++ b/src/H5Rint.c @@ -1252,6 +1252,7 @@ static herr_t H5R__decode_region(const unsigned char *buf, size_t *nbytes, H5S_t **space_ptr) { const uint8_t *p = (const uint8_t *)buf; + const uint8_t *p_end = p + *nbytes - 1; size_t buf_size = 0; unsigned rank; H5S_t *space; @@ -1284,7 +1285,11 @@ H5R__decode_region(const unsigned char *buf, size_t *nbytes, H5S_t **space_ptr) HGOTO_ERROR(H5E_REFERENCE, H5E_CANTDECODE, FAIL, "Buffer size is too small") if (H5S_set_extent_simple(space, rank, NULL, NULL) < 0) HGOTO_ERROR(H5E_REFERENCE, H5E_CANTSET, FAIL, "can't set extent rank for selection") - if (H5S_SELECT_DESERIALIZE(&space, &p) < 0) + + if (p - 1 > p_end) + HGOTO_ERROR(H5E_REFERENCE, H5E_CANTDECODE, FAIL, "Ran off end of buffer while decoding") + + if (H5S_SELECT_DESERIALIZE(&space, &p, (size_t)(p_end - p + 1)) < 0) HGOTO_ERROR(H5E_REFERENCE, H5E_CANTDECODE, FAIL, "can't deserialize selection") *nbytes = buf_size; @@ -1547,7 +1552,8 @@ H5R__decode_token_region_compat(H5F_t *f, const unsigned char *buf, size_t *nbyt unsigned char *data = NULL; H5O_token_t token = {0}; size_t data_size; - const uint8_t *p; + const uint8_t *p = NULL; + const uint8_t *p_end = 
NULL; H5S_t *space = NULL; herr_t ret_value = SUCCEED; @@ -1563,7 +1569,8 @@ H5R__decode_token_region_compat(H5F_t *f, const unsigned char *buf, size_t *nbyt HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier") /* Get object address */ - p = (const uint8_t *)data; + p = (const uint8_t *)data; + p_end = p + data_size - 1; H5MM_memcpy(&token, p, token_size); p += token_size; @@ -1583,7 +1590,11 @@ H5R__decode_token_region_compat(H5F_t *f, const unsigned char *buf, size_t *nbyt HGOTO_ERROR(H5E_REFERENCE, H5E_NOTFOUND, FAIL, "not found") /* Unserialize the selection */ - if (H5S_SELECT_DESERIALIZE(&space, &p) < 0) + + if (p - 1 >= p_end) + HGOTO_ERROR(H5E_REFERENCE, H5E_CANTDECODE, FAIL, "Ran off end of buffer while deserializing") + + if (H5S_SELECT_DESERIALIZE(&space, &p, (size_t)(p_end - p + 1)) < 0) HGOTO_ERROR(H5E_REFERENCE, H5E_CANTDECODE, FAIL, "can't deserialize selection") *space_ptr = space; diff --git a/src/H5S.c b/src/H5S.c index 25e1158ab05..008b173c9b1 100644 --- a/src/H5S.c +++ b/src/H5S.c @@ -1695,9 +1695,10 @@ H5S_decode(const unsigned char **p) if (H5S_select_all(ds, FALSE) < 0) HGOTO_ERROR(H5E_DATASPACE, H5E_CANTSET, NULL, "unable to set all selection") - /* Decode the select part of dataspace. I believe this part always exists. */ + /* Decode the select part of dataspace. + * Because size of buffer is unknown, assume arbitrarily large buffer to allow decoding. 
*/ *p = pp; - if (H5S_SELECT_DESERIALIZE(&ds, p) < 0) + if (H5S_SELECT_DESERIALIZE(&ds, p, SIZE_MAX) < 0) HGOTO_ERROR(H5E_DATASPACE, H5E_CANTDECODE, NULL, "can't decode space selection") /* Set return value */ diff --git a/src/H5Sall.c b/src/H5Sall.c index a1ae04eea06..00ab1e23193 100644 --- a/src/H5Sall.c +++ b/src/H5Sall.c @@ -50,7 +50,7 @@ static herr_t H5S__all_release(H5S_t *space); static htri_t H5S__all_is_valid(const H5S_t *space); static hssize_t H5S__all_serial_size(H5S_t *space); static herr_t H5S__all_serialize(H5S_t *space, uint8_t **p); -static herr_t H5S__all_deserialize(H5S_t **space, const uint8_t **p); +static herr_t H5S__all_deserialize(H5S_t **space, const uint8_t **p, const size_t p_size, hbool_t skip); static herr_t H5S__all_bounds(const H5S_t *space, hsize_t *start, hsize_t *end); static herr_t H5S__all_offset(const H5S_t *space, hsize_t *off); static int H5S__all_unlim_dim(const H5S_t *space); @@ -637,13 +637,13 @@ H5S__all_serialize(H5S_t *space, uint8_t **p) REVISION LOG --------------------------------------------------------------------------*/ static herr_t -H5S__all_deserialize(H5S_t **space, const uint8_t **p) +H5S__all_deserialize(H5S_t **space, const uint8_t **p, const size_t p_size, hbool_t skip) { - uint32_t version; /* Version number */ - H5S_t *tmp_space = NULL; /* Pointer to actual dataspace to use, - either *space or a newly allocated one */ - herr_t ret_value = SUCCEED; /* return value */ - + uint32_t version; /* Version number */ + H5S_t *tmp_space = NULL; /* Pointer to actual dataspace to use, + either *space or a newly allocated one */ + herr_t ret_value = SUCCEED; /* return value */ + const uint8_t *p_end = *p + p_size - 1; /* Pointer to last valid byte in buffer */ FUNC_ENTER_STATIC HDassert(p); @@ -663,12 +663,16 @@ H5S__all_deserialize(H5S_t **space, const uint8_t **p) tmp_space = *space; /* Decode version */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *p, sizeof(uint32_t), p_end)) + HGOTO_ERROR(H5E_DATASPACE, 
H5E_OVERFLOW, FAIL, "buffer overflow while decoding selection version") UINT32DECODE(*p, version); if (version < H5S_ALL_VERSION_1 || version > H5S_ALL_VERSION_LATEST) HGOTO_ERROR(H5E_DATASPACE, H5E_BADVALUE, FAIL, "bad version number for all selection") /* Skip over the remainder of the header */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *p, 8, p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, "buffer overflow while decoding header") *p += 8; /* Change to "all" selection */ diff --git a/src/H5Shyper.c b/src/H5Shyper.c index 1f614ba95eb..db8d5736c22 100644 --- a/src/H5Shyper.c +++ b/src/H5Shyper.c @@ -174,7 +174,7 @@ static htri_t H5S__hyper_is_valid(const H5S_t *space); static hsize_t H5S__hyper_span_nblocks(H5S_hyper_span_info_t *spans); static hssize_t H5S__hyper_serial_size(H5S_t *space); static herr_t H5S__hyper_serialize(H5S_t *space, uint8_t **p); -static herr_t H5S__hyper_deserialize(H5S_t **space, const uint8_t **p); +static herr_t H5S__hyper_deserialize(H5S_t **space, const uint8_t **p, const size_t p_size, hbool_t skip); static herr_t H5S__hyper_bounds(const H5S_t *space, hsize_t *start, hsize_t *end); static herr_t H5S__hyper_offset(const H5S_t *space, hsize_t *offset); static int H5S__hyper_unlim_dim(const H5S_t *space); @@ -4220,21 +4220,21 @@ H5S__hyper_serialize(H5S_t *space, uint8_t **p) REVISION LOG --------------------------------------------------------------------------*/ static herr_t -H5S__hyper_deserialize(H5S_t **space, const uint8_t **p) +H5S__hyper_deserialize(H5S_t **space, const uint8_t **p, const size_t p_size, hbool_t skip) { - H5S_t *tmp_space = NULL; /* Pointer to actual dataspace to use, - either *space or a newly allocated one */ - hsize_t dims[H5S_MAX_RANK]; /* Dimension sizes */ - hsize_t start[H5S_MAX_RANK]; /* hyperslab start information */ - hsize_t block[H5S_MAX_RANK]; /* hyperslab block information */ - uint32_t version; /* Version number */ - uint8_t flags = 0; /* Flags */ - uint8_t enc_size = 0; /* Encoded size of 
selection info */ - unsigned rank; /* rank of points */ - const uint8_t *pp; /* Local pointer for decoding */ - unsigned u; /* Local counting variable */ - herr_t ret_value = FAIL; /* return value */ - + H5S_t *tmp_space = NULL; /* Pointer to actual dataspace to use, + either *space or a newly allocated one */ + hsize_t dims[H5S_MAX_RANK]; /* Dimension sizes */ + hsize_t start[H5S_MAX_RANK]; /* hyperslab start information */ + hsize_t block[H5S_MAX_RANK]; /* hyperslab block information */ + uint32_t version; /* Version number */ + uint8_t flags = 0; /* Flags */ + uint8_t enc_size = 0; /* Encoded size of selection info */ + unsigned rank; /* rank of points */ + const uint8_t *pp; /* Local pointer for decoding */ + unsigned u; /* Local counting variable */ + herr_t ret_value = FAIL; /* return value */ + const uint8_t *p_end = *p + p_size - 1; /* Pointer to last valid byte in buffer */ FUNC_ENTER_STATIC /* Check args */ @@ -4255,6 +4255,8 @@ H5S__hyper_deserialize(H5S_t **space, const uint8_t **p) tmp_space = *space; /* Decode version */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, pp, sizeof(uint32_t), p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, "buffer overflow while decoding selection version") UINT32DECODE(pp, version); if (version < H5S_HYPER_VERSION_1 || version > H5S_HYPER_VERSION_LATEST) @@ -4262,13 +4264,22 @@ H5S__hyper_deserialize(H5S_t **space, const uint8_t **p) if (version >= (uint32_t)H5S_HYPER_VERSION_2) { /* Decode flags */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, pp, 1, p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, "buffer overflow while decoding selection flags") flags = *(pp)++; - if (version >= (uint32_t)H5S_HYPER_VERSION_3) + if (version >= (uint32_t)H5S_HYPER_VERSION_3) { /* decode size of offset info */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, pp, 1, p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, + "buffer overflow while decoding selection encoding size") enc_size = *(pp)++; + } else { /* Skip over the remainder of 
the header */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, pp, 4, p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, + "buffer overflow while decoding selection header") pp += 4; enc_size = H5S_SELECT_INFO_ENC_SIZE_8; } /* end else */ @@ -4279,6 +4290,8 @@ H5S__hyper_deserialize(H5S_t **space, const uint8_t **p) } else { /* Skip over the remainder of the header */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, pp, 8, p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, "buffer overflow while decoding selection header") pp += 8; enc_size = H5S_SELECT_INFO_ENC_SIZE_4; } /* end else */ @@ -4288,6 +4301,8 @@ H5S__hyper_deserialize(H5S_t **space, const uint8_t **p) HGOTO_ERROR(H5E_DATASPACE, H5E_CANTLOAD, FAIL, "unknown size of point/offset info for selection") /* Decode the rank of the point selection */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, pp, sizeof(uint32_t), p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, "buffer overflow while decoding selection rank") UINT32DECODE(pp, rank); if (!*space) { @@ -4314,6 +4329,10 @@ H5S__hyper_deserialize(H5S_t **space, const uint8_t **p) switch (enc_size) { case H5S_SELECT_INFO_ENC_SIZE_2: for (u = 0; u < tmp_space->extent.rank; u++) { + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, pp, 4 * sizeof(uint16_t), p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, + "buffer overflow while decoding selection ranks") + UINT16DECODE(pp, start[u]); UINT16DECODE(pp, stride[u]); @@ -4329,6 +4348,10 @@ H5S__hyper_deserialize(H5S_t **space, const uint8_t **p) case H5S_SELECT_INFO_ENC_SIZE_4: for (u = 0; u < tmp_space->extent.rank; u++) { + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, pp, 4 * sizeof(uint32_t), p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, + "buffer overflow while decoding selection ranks") + UINT32DECODE(pp, start[u]); UINT32DECODE(pp, stride[u]); @@ -4344,6 +4367,10 @@ H5S__hyper_deserialize(H5S_t **space, const uint8_t **p) case H5S_SELECT_INFO_ENC_SIZE_8: for (u = 0; u < tmp_space->extent.rank; u++) { + if 
(H5_IS_KNOWN_BUFFER_OVERFLOW(skip, pp, 4 * sizeof(uint64_t), p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, + "buffer overflow while decoding selection ranks") + UINT64DECODE(pp, start[u]); UINT64DECODE(pp, stride[u]); @@ -4379,14 +4406,23 @@ H5S__hyper_deserialize(H5S_t **space, const uint8_t **p) /* Decode the number of blocks */ switch (enc_size) { case H5S_SELECT_INFO_ENC_SIZE_2: + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, pp, sizeof(uint16_t), p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, + "buffer overflow while decoding number of selection blocks") UINT16DECODE(pp, num_elem); break; case H5S_SELECT_INFO_ENC_SIZE_4: + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, pp, sizeof(uint32_t), p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, + "buffer overflow while decoding number of selection blocks") UINT32DECODE(pp, num_elem); break; case H5S_SELECT_INFO_ENC_SIZE_8: + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, pp, sizeof(uint64_t), p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, + "buffer overflow while decoding number of selection blocks") UINT64DECODE(pp, num_elem); break; @@ -4403,6 +4439,10 @@ H5S__hyper_deserialize(H5S_t **space, const uint8_t **p) /* Decode the starting and ending points */ switch (enc_size) { case H5S_SELECT_INFO_ENC_SIZE_2: + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, pp, rank * 2 * sizeof(uint16_t), p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, + "buffer overflow while decoding selection coordinates") + for (tstart = start, v = 0; v < rank; v++, tstart++) UINT16DECODE(pp, *tstart); for (tend = end, v = 0; v < rank; v++, tend++) @@ -4410,6 +4450,10 @@ H5S__hyper_deserialize(H5S_t **space, const uint8_t **p) break; case H5S_SELECT_INFO_ENC_SIZE_4: + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, pp, rank * 2 * sizeof(uint32_t), p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, + "buffer overflow while decoding selection coordinates") + for (tstart = start, v = 0; v < rank; v++, tstart++) UINT32DECODE(pp, *tstart); 
for (tend = end, v = 0; v < rank; v++, tend++) @@ -4417,6 +4461,10 @@ H5S__hyper_deserialize(H5S_t **space, const uint8_t **p) break; case H5S_SELECT_INFO_ENC_SIZE_8: + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, pp, rank * 2 * sizeof(uint64_t), p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, + "buffer overflow while decoding selection coordinates") + for (tstart = start, v = 0; v < rank; v++, tstart++) UINT64DECODE(pp, *tstart); for (tend = end, v = 0; v < rank; v++, tend++) diff --git a/src/H5Snone.c b/src/H5Snone.c index de52370a540..116396b9db1 100644 --- a/src/H5Snone.c +++ b/src/H5Snone.c @@ -50,7 +50,7 @@ static herr_t H5S__none_release(H5S_t *space); static htri_t H5S__none_is_valid(const H5S_t *space); static hssize_t H5S__none_serial_size(H5S_t *space); static herr_t H5S__none_serialize(H5S_t *space, uint8_t **p); -static herr_t H5S__none_deserialize(H5S_t **space, const uint8_t **p); +static herr_t H5S__none_deserialize(H5S_t **space, const uint8_t **p, const size_t p_size, hbool_t skip); static herr_t H5S__none_bounds(const H5S_t *space, hsize_t *start, hsize_t *end); static herr_t H5S__none_offset(const H5S_t *space, hsize_t *off); static int H5S__none_unlim_dim(const H5S_t *space); @@ -593,12 +593,13 @@ H5S__none_serialize(H5S_t *space, uint8_t **p) REVISION LOG --------------------------------------------------------------------------*/ static herr_t -H5S__none_deserialize(H5S_t **space, const uint8_t **p) +H5S__none_deserialize(H5S_t **space, const uint8_t **p, const size_t p_size, hbool_t skip) { - H5S_t *tmp_space = NULL; /* Pointer to actual dataspace to use, - either *space or a newly allocated one */ - uint32_t version; /* Version number */ - herr_t ret_value = SUCCEED; /* return value */ + H5S_t *tmp_space = NULL; /* Pointer to actual dataspace to use, + either *space or a newly allocated one */ + uint32_t version; /* Version number */ + herr_t ret_value = SUCCEED; /* return value */ + const uint8_t *p_end = *p + p_size - 1; /* Pointer to 
last valid byte in buffer */ FUNC_ENTER_STATIC @@ -618,12 +619,16 @@ H5S__none_deserialize(H5S_t **space, const uint8_t **p) tmp_space = *space; /* Decode version */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *p, sizeof(uint32_t), p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, "buffer overflow while decoding selection version") UINT32DECODE(*p, version); if (version < H5S_NONE_VERSION_1 || version > H5S_NONE_VERSION_LATEST) HGOTO_ERROR(H5E_DATASPACE, H5E_BADVALUE, FAIL, "bad version number for none selection") /* Skip over the remainder of the header */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *p, 8, p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, "buffer overflow while decoding selection header") *p += 8; /* Change to "none" selection */ diff --git a/src/H5Spkg.h b/src/H5Spkg.h index 5bd82199b2c..46631f69eb2 100644 --- a/src/H5Spkg.h +++ b/src/H5Spkg.h @@ -245,7 +245,8 @@ typedef hssize_t (*H5S_sel_serial_size_func_t)(H5S_t *space); /* Method to store current selection in "serialized" form (a byte sequence suitable for storing on disk) */ typedef herr_t (*H5S_sel_serialize_func_t)(H5S_t *space, uint8_t **p); /* Method to create selection from "serialized" form (a byte sequence suitable for storing on disk) */ -typedef herr_t (*H5S_sel_deserialize_func_t)(H5S_t **space, const uint8_t **p); +typedef herr_t (*H5S_sel_deserialize_func_t)(H5S_t **space, const uint8_t **p, const size_t p_size, + hbool_t skip); /* Method to determine smallest n-D bounding box containing the current selection */ typedef herr_t (*H5S_sel_bounds_func_t)(const H5S_t *space, hsize_t *start, hsize_t *end); /* Method to determine linear offset of initial element in selection within dataspace */ diff --git a/src/H5Spoint.c b/src/H5Spoint.c index c6fdecd3681..7ef7c38dcba 100644 --- a/src/H5Spoint.c +++ b/src/H5Spoint.c @@ -60,7 +60,7 @@ static herr_t H5S__point_release(H5S_t *space); static htri_t H5S__point_is_valid(const H5S_t *space); static hssize_t H5S__point_serial_size(H5S_t 
*space); static herr_t H5S__point_serialize(H5S_t *space, uint8_t **p); -static herr_t H5S__point_deserialize(H5S_t **space, const uint8_t **p); +static herr_t H5S__point_deserialize(H5S_t **space, const uint8_t **p, const size_t p_size, hbool_t skip); static herr_t H5S__point_bounds(const H5S_t *space, hsize_t *start, hsize_t *end); static herr_t H5S__point_offset(const H5S_t *space, hsize_t *off); static int H5S__point_unlim_dim(const H5S_t *space); @@ -1350,20 +1350,20 @@ H5S__point_serialize(H5S_t *space, uint8_t **p) REVISION LOG --------------------------------------------------------------------------*/ static herr_t -H5S__point_deserialize(H5S_t **space, const uint8_t **p) +H5S__point_deserialize(H5S_t **space, const uint8_t **p, const size_t p_size, hbool_t skip) { - H5S_t *tmp_space = NULL; /* Pointer to actual dataspace to use, - either *space or a newly allocated one */ - hsize_t dims[H5S_MAX_RANK]; /* Dimension sizes */ - uint32_t version; /* Version number */ - uint8_t enc_size = 0; /* Encoded size of selection info */ - hsize_t *coord = NULL, *tcoord; /* Pointer to array of elements */ - const uint8_t *pp; /* Local pointer for decoding */ - uint64_t num_elem = 0; /* Number of elements in selection */ - unsigned rank; /* Rank of points */ - unsigned i, j; /* local counting variables */ - herr_t ret_value = SUCCEED; /* Return value */ - + H5S_t *tmp_space = NULL; /* Pointer to actual dataspace to use, + either *space or a newly allocated one */ + hsize_t dims[H5S_MAX_RANK]; /* Dimension sizes */ + uint32_t version; /* Version number */ + uint8_t enc_size = 0; /* Encoded size of selection info */ + hsize_t *coord = NULL, *tcoord; /* Pointer to array of elements */ + const uint8_t *pp; /* Local pointer for decoding */ + uint64_t num_elem = 0; /* Number of elements in selection */ + unsigned rank; /* Rank of points */ + unsigned i, j; /* local counting variables */ + herr_t ret_value = SUCCEED; /* Return value */ + const uint8_t *p_end = *p + p_size - 1; 
/* Pointer to last valid byte in buffer */ FUNC_ENTER_STATIC /* Check args */ @@ -1384,16 +1384,23 @@ H5S__point_deserialize(H5S_t **space, const uint8_t **p) tmp_space = *space; /* Decode version */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, pp, sizeof(uint32_t), p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, "buffer overflow while decoding selection version") UINT32DECODE(pp, version); if (version < H5S_POINT_VERSION_1 || version > H5S_POINT_VERSION_LATEST) HGOTO_ERROR(H5E_DATASPACE, H5E_BADVALUE, FAIL, "bad version number for point selection") - if (version >= (uint32_t)H5S_POINT_VERSION_2) + if (version >= (uint32_t)H5S_POINT_VERSION_2) { /* Decode size of point info */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, pp, 1, p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, "buffer overflow while decoding point info") enc_size = *(pp)++; + } else { /* Skip over the remainder of the header */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, pp, 8, p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, "buffer overflow while decoding selection headers") pp += 8; enc_size = H5S_SELECT_INFO_ENC_SIZE_4; } @@ -1403,6 +1410,8 @@ H5S__point_deserialize(H5S_t **space, const uint8_t **p) HGOTO_ERROR(H5E_DATASPACE, H5E_CANTLOAD, FAIL, "unknown size of point/offset info for selection") /* Decode the rank of the point selection */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, pp, sizeof(uint32_t), p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, "buffer overflow while decoding selection rank") UINT32DECODE(pp, rank); if (!*space) { @@ -1420,12 +1429,24 @@ H5S__point_deserialize(H5S_t **space, const uint8_t **p) /* decode the number of points */ switch (enc_size) { case H5S_SELECT_INFO_ENC_SIZE_2: + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, pp, sizeof(uint16_t), p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, + "buffer overflow while decoding number of points") + UINT16DECODE(pp, num_elem); break; case H5S_SELECT_INFO_ENC_SIZE_4: + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, 
pp, sizeof(uint32_t), p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, + "buffer overflow while decoding number of points") + UINT32DECODE(pp, num_elem); break; case H5S_SELECT_INFO_ENC_SIZE_8: + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, pp, sizeof(uint64_t), p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, + "buffer overflow while decoding number of points") + UINT64DECODE(pp, num_elem); break; default: @@ -1437,6 +1458,29 @@ H5S__point_deserialize(H5S_t **space, const uint8_t **p) if (NULL == (coord = (hsize_t *)H5MM_malloc(num_elem * rank * sizeof(hsize_t)))) HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "can't allocate coordinate information") + /* Determine necessary size of buffer for coordinates */ + size_t enc_type_size = 0; + + switch (enc_size) { + case H5S_SELECT_INFO_ENC_SIZE_2: + enc_type_size = sizeof(uint16_t); + break; + case H5S_SELECT_INFO_ENC_SIZE_4: + enc_type_size = sizeof(uint32_t); + break; + case H5S_SELECT_INFO_ENC_SIZE_8: + enc_type_size = sizeof(uint64_t); + break; + default: + HGOTO_ERROR(H5E_DATASPACE, H5E_UNSUPPORTED, FAIL, "unknown point info size") + break; + } + + size_t coordinate_buffer_requirement = num_elem * rank * enc_type_size; + + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, pp, coordinate_buffer_requirement, p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, "buffer overflow while decoding selection coordinates") + /* Retrieve the coordinates from the buffer */ for (tcoord = coord, i = 0; i < num_elem; i++) for (j = 0; j < (unsigned)rank; j++, tcoord++) @@ -1444,11 +1488,9 @@ H5S__point_deserialize(H5S_t **space, const uint8_t **p) case H5S_SELECT_INFO_ENC_SIZE_2: UINT16DECODE(pp, *tcoord); break; - case H5S_SELECT_INFO_ENC_SIZE_4: UINT32DECODE(pp, *tcoord); break; - case H5S_SELECT_INFO_ENC_SIZE_8: UINT64DECODE(pp, *tcoord); break; diff --git a/src/H5Sprivate.h b/src/H5Sprivate.h index db627fb2143..c2fd2241d7e 100644 --- a/src/H5Sprivate.h +++ b/src/H5Sprivate.h @@ -190,7 +190,7 @@ typedef struct 
H5S_sel_iter_op_t { #define H5S_SELECT_SHAPE_SAME(S1, S2) (H5S_select_shape_same(S1, S2)) #define H5S_SELECT_INTERSECT_BLOCK(S, START, END) (H5S_select_intersect_block(S, START, END)) #define H5S_SELECT_RELEASE(S) (H5S_select_release(S)) -#define H5S_SELECT_DESERIALIZE(S, BUF) (H5S_select_deserialize(S, BUF)) +#define H5S_SELECT_DESERIALIZE(S, BUF, BUF_SIZE) (H5S_select_deserialize(S, BUF, BUF_SIZE)) /* Forward declaration of structs used below */ struct H5O_t; @@ -229,7 +229,7 @@ H5_DLL htri_t H5S_extent_equal(const H5S_t *ds1, const H5S_t *ds2); H5_DLL herr_t H5S_extent_copy(H5S_t *dst, const H5S_t *src); /* Operations on selections */ -H5_DLL herr_t H5S_select_deserialize(H5S_t **space, const uint8_t **p); +H5_DLL herr_t H5S_select_deserialize(H5S_t **space, const uint8_t **p, const size_t p_size); H5_DLL H5S_sel_type H5S_get_select_type(const H5S_t *space); H5_DLL herr_t H5S_select_iterate(void *buf, const H5T_t *type, H5S_t *space, const H5S_sel_iter_op_t *op, void *op_data); diff --git a/src/H5Sselect.c b/src/H5Sselect.c index 70f4cef9e19..2a485f5e54b 100644 --- a/src/H5Sselect.c +++ b/src/H5Sselect.c @@ -527,11 +527,12 @@ H5S_select_valid(const H5S_t *space) REVISION LOG --------------------------------------------------------------------------*/ herr_t -H5S_select_deserialize(H5S_t **space, const uint8_t **p) +H5S_select_deserialize(H5S_t **space, const uint8_t **p, const size_t p_size) { - uint32_t sel_type; /* Pointer to the selection type */ - herr_t ret_value = FAIL; /* Return value */ - + uint32_t sel_type; /* Pointer to the selection type */ + herr_t ret_value = FAIL; /* Return value */ + const uint8_t *p_end = *p + p_size - 1; /* Pointer to last valid byte in buffer */ + hbool_t skip = (p_size == SIZE_MAX ? TRUE : FALSE); /* If p_size is unknown, skip buffer checks */ FUNC_ENTER_NOAPI(FAIL) HDassert(space); @@ -539,24 +540,26 @@ H5S_select_deserialize(H5S_t **space, const uint8_t **p) /* Selection-type specific coding is moved to the callbacks. 
*/ /* Decode selection type */ + if (H5_IS_KNOWN_BUFFER_OVERFLOW(skip, *p, sizeof(uint32_t), p_end)) + HGOTO_ERROR(H5E_DATASPACE, H5E_OVERFLOW, FAIL, "buffer overflow while decoding selection type") UINT32DECODE(*p, sel_type); /* Make routine for selection type */ switch (sel_type) { case H5S_SEL_POINTS: /* Sequence of points selected */ - ret_value = (*H5S_sel_point->deserialize)(space, p); + ret_value = (*H5S_sel_point->deserialize)(space, p, p_size - sizeof(uint32_t), skip); break; case H5S_SEL_HYPERSLABS: /* Hyperslab selection defined */ - ret_value = (*H5S_sel_hyper->deserialize)(space, p); + ret_value = (*H5S_sel_hyper->deserialize)(space, p, p_size - sizeof(uint32_t), skip); break; case H5S_SEL_ALL: /* Entire extent selected */ - ret_value = (*H5S_sel_all->deserialize)(space, p); + ret_value = (*H5S_sel_all->deserialize)(space, p, p_size - sizeof(uint32_t), skip); break; case H5S_SEL_NONE: /* Nothing selected */ - ret_value = (*H5S_sel_none->deserialize)(space, p); + ret_value = (*H5S_sel_none->deserialize)(space, p, p_size - sizeof(uint32_t), skip); break; default: diff --git a/src/H5private.h b/src/H5private.h index 7d15312cb7a..36a40adb3a2 100644 --- a/src/H5private.h +++ b/src/H5private.h @@ -403,6 +403,15 @@ */ #define H5_IS_BUFFER_OVERFLOW(ptr, size, buffer_end) (((ptr) + (size)-1) > (buffer_end)) +/* Variant of H5_IS_BUFFER_OVERFLOW, used with functions such as H5Tdecode() + * that don't take a size parameter, where we need to skip the bounds checks. + * + * This is a separate macro since we don't want to inflict that behavior on + * the entire library. + */ +#define H5_IS_KNOWN_BUFFER_OVERFLOW(skip, ptr, size, buffer_end) \ + (skip ? FALSE : ((ptr) + (size)-1) > (buffer_end)) + /* * HDF Boolean type. 
*/ From 6fa5b7a04e73a514edbedab75d3914536939f7ef Mon Sep 17 00:00:00 2001 From: Larry Knox Date: Thu, 18 May 2023 11:30:08 -0500 Subject: [PATCH 066/108] Update 1.12 release_docs/README_HPC (#2976) --- release_docs/README_HPC | 26 ++++++++++++-------------- 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/release_docs/README_HPC b/release_docs/README_HPC index 6b31ce6c392..7fa4895c056 100644 --- a/release_docs/README_HPC +++ b/release_docs/README_HPC @@ -39,9 +39,6 @@ If no branch is specified, then the 'develop' version will be checked out. If no source directory is specified, then the source will be located in the 'hdf5' directory. The CMake scripts expect the source to be in a directory named hdf5-, where 'version string' uses the format '1.xx.xx'. -For example, for the current 'develop' version, the "hdf5" directory should -be renamed "hdf5-1.13.0", or for the first hdf5_1_12_0 pre-release version, -it should be renamed "hdf5-1.12.0-5". If the version number is not known a priori, the version string can be obtained by running bin/h5vers in the top level directory of the source clone, and @@ -65,15 +62,15 @@ scripts on compute nodes and to cross-compile for compute node hardware using a cross-compiling emulator. The setup steps will make default settings for parallel or serial only builds available to the CMake command. - 1. For the current 'develop' version the "hdf5" directory should be renamed - "hdf5-1.13.0". + 1. The "hdf5" directory should be renamed hdf5-. For + further explanation see section II. 2. 
Three cmake script files need to be copied to the working directory, or have symbolic links to them, created in the working directory: - hdf5-1.13.0/config/cmake/scripts/HDF5config.cmake - hdf5-1.13.0/config/cmake/scripts/CTestScript.cmake - hdf5-1.13.0/config/cmake/scripts/HDF5options.cmake + hdf5-/config/cmake/scripts/HDF5config.cmake + hdf5-/config/cmake/scripts/CTestScript.cmake + hdf5-/config/cmake/scripts/HDF5options.cmake should be copied to the working directory. @@ -82,7 +79,7 @@ parallel or serial only builds available to the CMake command. CTestScript.cmake HDF5config.cmake HDF5options.cmake - hdf5-1.13.0 + hdf5- Additionally, when the ctest command runs [1], it will add a build directory in the working directory. @@ -145,7 +142,8 @@ cori, another CrayXC40, that line is replaced by "#SBATCH -C knl,quad,cache". For cori (and other machines), the values in LOCAL_BATCH_SCRIPT_NAME and LOCAL_BATCH_SCRIPT_PARALLEL_NAME in the config/cmake/scripts/HPC/sbatch-HDF5options.cmake file can be replaced by cori_knl_ctestS.sl and cori_knl_ctestS.sl, or the lines -can be edited in the batch files in hdf5-1.13.0/bin/batch. +can be edited in the batch files in hdf5-/bin/batch (see section II +for version string explanation). ======================================================================== V. Manual alternatives @@ -153,11 +151,11 @@ V. 
Manual alternatives If using ctest is undesirable, one can create a build directory and run the cmake configure command, for example -"/projects/Mutrino/hpcsoft/cle6.0/common/cmake/3.12/bin/cmake" --C "/hdf5-1.13.0/config/cmake/cacheinit.cmake" +"/projects/Mutrino/hpcsoft/cle6.0/common/cmake//bin/cmake" +-C "/hdf5-/config/cmake/cacheinit.cmake" -DCMAKE_BUILD_TYPE:STRING=Release -DHDF5_BUILD_FORTRAN:BOOL=ON -DHDF5_BUILD_JAVA:BOOL=OFF --DCMAKE_INSTALL_PREFIX:PATH=/HDF_Group/HDF5/1.13.0 +-DCMAKE_INSTALL_PREFIX:PATH=/HDF_Group/HDF5/ -DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=OFF -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF -DHDF5_ENABLE_PARALLEL:BOOL=ON -DHDF5_BUILD_CPP_LIB:BOOL=OFF -DHDF5_BUILD_JAVA:BOOL=OFF -DHDF5_ENABLE_THREADSAFE:BOOL=OFF @@ -168,7 +166,7 @@ configure command, for example -DLOCAL_BATCH_SCRIPT_NAME:STRING=knl_ctestS.sl -DLOCAL_BATCH_SCRIPT_PARALLEL_NAME:STRING=knl_ctestP.sl -DSITE:STRING=mutrino -DBUILDNAME:STRING=par-knl_GCC493-SHARED-Linux-4.4.156-94.61.1.16335.0.PTF.1107299-default-x86_64 -"-GUnix Makefiles" "" "/hdf5-1.13.0" +"-GUnix Makefiles" "" "/hdf5-" followed by make and batch jobs to run tests. 
From 2b70d0dfdfa810e6c7b4e6d35922e58574c487f6 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Thu, 18 May 2023 11:41:05 -0500 Subject: [PATCH 067/108] Add h5copy help test and verify consistency (#2903) --- tools/src/h5copy/h5copy.c | 8 --- tools/src/h5import/h5import.c | 19 +++---- tools/src/h5stat/h5stat.c | 29 +---------- tools/test/h5copy/CMakeTests.cmake | 32 ++++++++++++ tools/test/h5copy/testfiles/h5copy_help1.ddl | 49 +++++++++++++++++++ tools/test/h5copy/testfiles/h5copy_help2.ddl | 49 +++++++++++++++++++ tools/test/h5stat/testfiles/h5stat_help1.ddl | 2 +- tools/test/h5stat/testfiles/h5stat_help2.ddl | 2 +- tools/test/h5stat/testfiles/h5stat_nofile.ddl | 2 +- 9 files changed, 142 insertions(+), 50 deletions(-) create mode 100644 tools/test/h5copy/testfiles/h5copy_help1.ddl create mode 100644 tools/test/h5copy/testfiles/h5copy_help2.ddl diff --git a/tools/src/h5copy/h5copy.c b/tools/src/h5copy/h5copy.c index c4e42751fdf..37346289d63 100644 --- a/tools/src/h5copy/h5copy.c +++ b/tools/src/h5copy/h5copy.c @@ -46,8 +46,6 @@ char *str_flag = NULL; * Programmer: Quincey Koziol * Saturday, 31. 
January 2004 * - * Modifications: - * *------------------------------------------------------------------------- */ static void @@ -77,8 +75,6 @@ leave(int ret) * * Programmer: Pedro Vicente Nunes, 7/8/2006 * - * Modifications: - * *------------------------------------------------------------------------- */ static void @@ -159,8 +155,6 @@ usage(void) * * Programmer: Pedro Vicente Nunes, 7/8/2006 * - * Modifications: - * *------------------------------------------------------------------------- */ @@ -207,8 +201,6 @@ parse_flag(const char *s_flag, unsigned *flag) * * Programmer: Pedro Vicente Nunes * - * Modifications: - * *------------------------------------------------------------------------- */ diff --git a/tools/src/h5import/h5import.c b/tools/src/h5import/h5import.c index bda7ea0318c..3984b5cb58d 100644 --- a/tools/src/h5import/h5import.c +++ b/tools/src/h5import/h5import.c @@ -342,9 +342,6 @@ gtoken(char *s) * * Programmer: pkmat * - * Modifications: pvn - * 7/23/2007. Added support for STR type, extra parameter FILE_ID - * *------------------------------------------------------------------------- */ @@ -455,7 +452,7 @@ processDataFile(char *infile, struct Input *in, hid_t file_id) error: if (strm) HDfclose(strm); - return (retval); + return retval; } static int @@ -1428,10 +1425,10 @@ processConfigurationFile(char *infile, struct Input *in) const char *err19 = "Unable to get integer value.\n"; const char *err20 = "Unable to get subset values.\n"; - /* create vector to map which keywords have been found - check vector after each keyword to check for violation - at the end check vector to see if required fields have been provided - process the output file according to the options + /* - create vector to map which keywords have been found + * - check vector after each keyword to check for violation + * - at the end check vector to see if required fields have been provided + * - process the output file according to the options */ /* Initialize machine endian 
*/ @@ -2434,7 +2431,7 @@ processConfigurationFile(char *infile, struct Input *in) error: if (strm) HDfclose(strm); - return (retval); + return retval; } static int @@ -4742,7 +4739,7 @@ process(struct Options *opt) } /* STR */ H5Fclose(file_id); - return (0); + return 0; } uint16_t @@ -5084,7 +5081,7 @@ help(char *name) void usage(char *name) { - (void)HDfprintf(stdout, "\nUsage:\t%s -h[elp], OR\n", name); + (void)HDfprintf(stdout, "\nusage:\t%s -h[elp], OR\n", name); (void)HDfprintf(stdout, "\t%s -c[onfig] \ [ -c[config] ...] -o[utfile] \n\n", name); diff --git a/tools/src/h5stat/h5stat.c b/tools/src/h5stat/h5stat.c index 7ffb8619e5b..ec742e821c5 100644 --- a/tools/src/h5stat/h5stat.c +++ b/tools/src/h5stat/h5stat.c @@ -208,7 +208,7 @@ static void usage(const char *prog) { HDfflush(stdout); - HDfprintf(stdout, "Usage: %s [OPTIONS] file\n", prog); + HDfprintf(stdout, "usage: %s [OPTIONS] file\n", prog); HDfprintf(stdout, "\n"); HDfprintf(stdout, " ERROR\n"); HDfprintf(stdout, " --enable-error-stack Prints messages from the HDF5 error stack as they occur\n"); @@ -335,18 +335,6 @@ attribute_stats(iter_t *iter, const H5O_info2_t *oi, const H5O_native_info_t *na * Programmer: Quincey Koziol * Tuesday, August 16, 2005 * - * Modifications: Refactored code from the walk_function - * EIP, Wednesday, August 16, 2006 - * - * Vailin Choi 12 July 2007 - * 1. Gathered storage info for btree and heap - * (groups and attributes) - * 2. 
Gathered info for attributes - * - * Vailin Choi 14 July 2007 - * Cast "num_objs" and "num_attrs" to size_t - * Due to the -Mbounds problem for the pgi-32 bit compiler on indexing - * *------------------------------------------------------------------------- */ static herr_t @@ -1103,8 +1091,6 @@ iter_free(iter_t *iter) * Programmer: Elena Pourmal * Saturday, August 12, 2006 * - * Modifications: - * *------------------------------------------------------------------------- */ static herr_t @@ -1190,11 +1176,6 @@ print_file_metadata(const iter_t *iter) * Programmer: Elena Pourmal * Saturday, August 12, 2006 * - * Modifications: - * bug #1253; Oct 6th 2008; Vailin Choi - * Fixed segmentation fault: print iter->group_bins[0] when - * there is iter->group_nbins - * *------------------------------------------------------------------------- */ static herr_t @@ -1636,8 +1617,6 @@ print_file_statistics(const iter_t *iter) * Programmer: Elena Pourmal * Thursday, August 17, 2006 * - * Modifications: - * *------------------------------------------------------------------------- */ static void @@ -1658,8 +1637,6 @@ print_object_statistics(const char *name) * Programmer: Elena Pourmal * Thursday, August 17, 2006 * - * Modifications: - * *------------------------------------------------------------------------- */ static void @@ -1674,10 +1651,6 @@ print_statistics(const char *name, const iter_t *iter) /*------------------------------------------------------------------------- * Function: main * - * Modifications: - * 2/2010; Vailin Choi - * Get the size of user block - * *------------------------------------------------------------------------- */ int diff --git a/tools/test/h5copy/CMakeTests.cmake b/tools/test/h5copy/CMakeTests.cmake index 154ca15389a..e00d305e5ff 100644 --- a/tools/test/h5copy/CMakeTests.cmake +++ b/tools/test/h5copy/CMakeTests.cmake @@ -35,6 +35,8 @@ ${HDF5_TOOLS_TEST_H5COPY_SOURCE_DIR}/testfiles/tudfilter.h5_ERR.txt 
${HDF5_TOOLS_TEST_H5COPY_SOURCE_DIR}/testfiles/h5copy_plugin_fail_ERR.out.h5.txt ${HDF5_TOOLS_TEST_H5COPY_SOURCE_DIR}/testfiles/h5copy_plugin_test.out.h5.txt + ${HDF5_TOOLS_TEST_H5COPY_SOURCE_DIR}/testfiles/h5copy_help1.ddl + ${HDF5_TOOLS_TEST_H5COPY_SOURCE_DIR}/testfiles/h5copy_help2.ddl ) file (MAKE_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles") @@ -376,6 +378,32 @@ endif () endmacro () + macro (ADD_SIMPLE_TEST resultfile resultcode) + # If using memchecker add tests without using scripts + if (HDF5_ENABLE_USING_MEMCHECKER) + add_test (NAME H5COPY-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN}) + if (${resultcode}) + set_tests_properties (H5COPY-${resultfile} PROPERTIES WILL_FAIL "true") + endif () + else (HDF5_ENABLE_USING_MEMCHECKER) + add_test ( + NAME H5COPY-${resultfile} + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS=${ARGN}" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" + -D "TEST_OUTPUT=./testfiles/${resultfile}.out" + -D "TEST_EXPECT=${resultcode}" + -D "TEST_REFERENCE=./testfiles/${resultfile}.ddl" + -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + ) + endif () + set_tests_properties (H5COPY-${resultfile} PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}" + ) + endmacro () + ############################################################################## ############################################################################## ### T H E T E S T S ### @@ -448,6 +476,10 @@ set (USE_FILTER_SZIP "true") endif () +# Test for help flag + ADD_SIMPLE_TEST (h5copy_help1 0 -h) + ADD_SIMPLE_TEST (h5copy_help2 0 --help) + # "Test copying various forms of datasets" ADD_H5_TEST (simple 0 ${HDF_FILE1}.h5 -v -s simple -d simple) ADD_H5_TEST (chunk 0 ${HDF_FILE1}.h5 -v -s chunk -d chunk) diff --git a/tools/test/h5copy/testfiles/h5copy_help1.ddl b/tools/test/h5copy/testfiles/h5copy_help1.ddl new file mode 100644 index 00000000000..418faea77d6 --- /dev/null +++ 
b/tools/test/h5copy/testfiles/h5copy_help1.ddl @@ -0,0 +1,49 @@ + +usage: h5copy [OPTIONS] [OBJECTS...] + OBJECTS + -i, --input input file name + -o, --output output file name + -s, --source source object name + -d, --destination destination object name + ERROR + --enable-error-stack Prints messages from the HDF5 error stack as they occur. + Optional value 2 also prints file open errors. + OPTIONS + -h, --help Print a usage message and exit + -p, --parents No error if existing, make parent groups as needed + -v, --verbose Print information about OBJECTS and OPTIONS + -V, --version Print version number and exit + -f, --flag Flag type + + Flag type is one of the following strings: + + shallow Copy only immediate members for groups + + soft Expand soft links into new objects + + ext Expand external links into new objects + + ref Copy references and any referenced objects, i.e., objects + that the references point to. + Referenced objects are copied in addition to the objects + specified on the command line and reference datasets are + populated with correct reference values. Copies of referenced + datasets outside the copy range specified on the command line + will normally have a different name from the original. + (Default:Without this option, reference value(s) in any + reference datasets are set to NULL and referenced objects are + not copied unless they are otherwise within the copy range + specified on the command line.) 
+ + noattr Copy object without copying attributes + + allflags Switches all flags from the default to the non-default setting + + These flag types correspond to the following API symbols + + H5O_COPY_SHALLOW_HIERARCHY_FLAG + H5O_COPY_EXPAND_SOFT_LINK_FLAG + H5O_COPY_EXPAND_EXT_LINK_FLAG + H5O_COPY_EXPAND_REFERENCE_FLAG + H5O_COPY_WITHOUT_ATTR_FLAG + H5O_COPY_ALL diff --git a/tools/test/h5copy/testfiles/h5copy_help2.ddl b/tools/test/h5copy/testfiles/h5copy_help2.ddl new file mode 100644 index 00000000000..418faea77d6 --- /dev/null +++ b/tools/test/h5copy/testfiles/h5copy_help2.ddl @@ -0,0 +1,49 @@ + +usage: h5copy [OPTIONS] [OBJECTS...] + OBJECTS + -i, --input input file name + -o, --output output file name + -s, --source source object name + -d, --destination destination object name + ERROR + --enable-error-stack Prints messages from the HDF5 error stack as they occur. + Optional value 2 also prints file open errors. + OPTIONS + -h, --help Print a usage message and exit + -p, --parents No error if existing, make parent groups as needed + -v, --verbose Print information about OBJECTS and OPTIONS + -V, --version Print version number and exit + -f, --flag Flag type + + Flag type is one of the following strings: + + shallow Copy only immediate members for groups + + soft Expand soft links into new objects + + ext Expand external links into new objects + + ref Copy references and any referenced objects, i.e., objects + that the references point to. + Referenced objects are copied in addition to the objects + specified on the command line and reference datasets are + populated with correct reference values. Copies of referenced + datasets outside the copy range specified on the command line + will normally have a different name from the original. + (Default:Without this option, reference value(s) in any + reference datasets are set to NULL and referenced objects are + not copied unless they are otherwise within the copy range + specified on the command line.) 
+ + noattr Copy object without copying attributes + + allflags Switches all flags from the default to the non-default setting + + These flag types correspond to the following API symbols + + H5O_COPY_SHALLOW_HIERARCHY_FLAG + H5O_COPY_EXPAND_SOFT_LINK_FLAG + H5O_COPY_EXPAND_EXT_LINK_FLAG + H5O_COPY_EXPAND_REFERENCE_FLAG + H5O_COPY_WITHOUT_ATTR_FLAG + H5O_COPY_ALL diff --git a/tools/test/h5stat/testfiles/h5stat_help1.ddl b/tools/test/h5stat/testfiles/h5stat_help1.ddl index 1f65f0d1914..54d6a318eda 100644 --- a/tools/test/h5stat/testfiles/h5stat_help1.ddl +++ b/tools/test/h5stat/testfiles/h5stat_help1.ddl @@ -1,4 +1,4 @@ -Usage: h5stat [OPTIONS] file +usage: h5stat [OPTIONS] file ERROR --enable-error-stack Prints messages from the HDF5 error stack as they occur diff --git a/tools/test/h5stat/testfiles/h5stat_help2.ddl b/tools/test/h5stat/testfiles/h5stat_help2.ddl index 1f65f0d1914..54d6a318eda 100644 --- a/tools/test/h5stat/testfiles/h5stat_help2.ddl +++ b/tools/test/h5stat/testfiles/h5stat_help2.ddl @@ -1,4 +1,4 @@ -Usage: h5stat [OPTIONS] file +usage: h5stat [OPTIONS] file ERROR --enable-error-stack Prints messages from the HDF5 error stack as they occur diff --git a/tools/test/h5stat/testfiles/h5stat_nofile.ddl b/tools/test/h5stat/testfiles/h5stat_nofile.ddl index 1f65f0d1914..54d6a318eda 100644 --- a/tools/test/h5stat/testfiles/h5stat_nofile.ddl +++ b/tools/test/h5stat/testfiles/h5stat_nofile.ddl @@ -1,4 +1,4 @@ -Usage: h5stat [OPTIONS] file +usage: h5stat [OPTIONS] file ERROR --enable-error-stack Prints messages from the HDF5 error stack as they occur From 6787a5bbef0e69aacadc9182f773ffc8a6d12127 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Thu, 18 May 2023 13:07:19 -0500 Subject: [PATCH 068/108] Hdf5 1 12 dt action (#2965) --- .github/workflows/cmake-ctest.yml | 217 ++++++++++++++++++++++++++++++ .github/workflows/daily-build.yml | 28 ++++ .github/workflows/tarball.yml | 105 +++++++++++++++ 
config/toolchain/aarch64.cmake | 8 +- 4 files changed, 354 insertions(+), 4 deletions(-) create mode 100644 .github/workflows/cmake-ctest.yml create mode 100644 .github/workflows/daily-build.yml create mode 100644 .github/workflows/tarball.yml diff --git a/.github/workflows/cmake-ctest.yml b/.github/workflows/cmake-ctest.yml new file mode 100644 index 00000000000..365039ec290 --- /dev/null +++ b/.github/workflows/cmake-ctest.yml @@ -0,0 +1,217 @@ +name: hdf5 1.12 ctest runs + +# Controls when the action will run. Triggers the workflow on a schedule +on: + workflow_call: + inputs: + file_base: + description: "The common base name of the source tarballs" + required: true + type: string + +# A workflow run is made up of one or more jobs that can run sequentially or +# in parallel +jobs: + build_and_test_win: + # Windows w/ MSVC + CMake + # + name: "Windows MSVC CTest" + runs-on: windows-latest + steps: + - name: Install Dependencies (Windows) + run: choco install ninja + + - name: Enable Developer Command Prompt + uses: ilammy/msvc-dev-cmd@v1.12.1 + + - name: Set file base name (Windows) + id: set-file-base + run: | + FILE_NAME_BASE=$(echo "${{ inputs.file_base }}") + echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT + shell: bash + + # Get files created by release script + - name: Get zip-tarball (Windows) + uses: actions/download-artifact@v3 + with: + name: zip-tarball + path: ${{ github.workspace }} + + - name: using powershell + shell: pwsh + run: Get-Location + + - name: List files for the space (Windows) + run: | + Get-ChildItem -Path ${{ github.workspace }} + Get-ChildItem -Path ${{ runner.workspace }} + shell: pwsh + + - name: Uncompress source (Windows) + working-directory: ${{ github.workspace }} + run: 7z x ${{ steps.set-file-base.outputs.FILE_BASE }}.zip + shell: bash + + - name: Run ctest (Windows) + run: | + cd "${{ runner.workspace }}/hdf5/hdfsrc" + cmake --workflow --preset=ci-StdShar-MSVC --fresh + shell: bash + + - name: Publish binary (Windows) + 
id: publish-ctest-binary + run: | + mkdir "${{ runner.workspace }}/build" + mkdir "${{ runner.workspace }}/build/hdf5" + Copy-Item -Path ${{ runner.workspace }}/hdf5/hdfsrc/COPYING -Destination ${{ runner.workspace }}/build/hdf5/ + Copy-Item -Path ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 -Destination ${{ runner.workspace }}/build/hdf5/ + Copy-Item -Path ${{ runner.workspace }}/hdf5/hdfsrc/README.md -Destination ${{ runner.workspace }}/build/hdf5/ + Copy-Item -Path ${{ runner.workspace }}/hdf5/build/ci-StdShar-MSVC/* -Destination ${{ runner.workspace }}/build/hdf5/ -Include *.zip + cd "${{ runner.workspace }}/build" + 7z a -tzip ${{ steps.set-file-base.outputs.FILE_BASE }}-win_vs2022.zip hdf5 + shell: pwsh + + - name: List files in the space (Windows) + run: | + Get-ChildItem -Path ${{ github.workspace }} + Get-ChildItem -Path ${{ runner.workspace }} + shell: pwsh + + # Save files created by ctest script + - name: Save published binary (Windows) + uses: actions/upload-artifact@v3 + with: + name: zip-vs2022-binary + path: ${{ runner.workspace }}/build/${{ steps.set-file-base.outputs.FILE_BASE }}-win_vs2022.zip + if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` + + build_and_test_linux: + # Linux (Ubuntu) w/ gcc + CMake + # + name: "Ubuntu gcc CMake" + runs-on: ubuntu-latest + steps: + - name: Install CMake Dependencies (Linux) + run: sudo apt-get install ninja-build + + - name: Set file base name (Linux) + id: set-file-base + run: | + FILE_NAME_BASE=$(echo "${{ inputs.file_base }}") + echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT + + # Get files created by release script + - name: Get tgz-tarball (Linux) + uses: actions/download-artifact@v3 + with: + name: tgz-tarball + path: ${{ github.workspace }} + + - name: List files for the space (Linux) + run: | + ls ${{ github.workspace }} + ls ${{ runner.workspace }} + + - name: Uncompress source (Linux) + run: tar -zxvf ${{ github.workspace }}/${{ 
steps.set-file-base.outputs.FILE_BASE }}.tar.gz + + - name: Run ctest (Linux) + run: | + cd "${{ runner.workspace }}/hdf5/hdfsrc" + cmake --workflow --preset=ci-StdShar-GNUC --fresh + shell: bash + + - name: Publish binary (Linux) + id: publish-ctest-binary + run: | + mkdir "${{ runner.workspace }}/build" + mkdir "${{ runner.workspace }}/build/hdf5" + cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING ${{ runner.workspace }}/build/hdf5 + cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 ${{ runner.workspace }}/build/hdf5 + cp ${{ runner.workspace }}/hdf5/hdfsrc/README.md ${{ runner.workspace }}/build/hdf5 + cp ${{ runner.workspace }}/hdf5/build/ci-StdShar-GNUC/*.tar.gz ${{ runner.workspace }}/build/hdf5 + cd "${{ runner.workspace }}/build" + tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204.tar.gz hdf5 + shell: bash + + - name: List files in the space (Linux) + run: | + ls ${{ github.workspace }} + ls ${{ runner.workspace }} + + # Save files created by ctest script + - name: Save published binary (Linux) + uses: actions/upload-artifact@v3 + with: + name: tgz-ubuntu-2204-binary + path: ${{ runner.workspace }}/build/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204.tar.gz + if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` + + build_and_test_mac: + # MacOS w/ Clang + CMake + # + name: "MacOS Clang CMake" + runs-on: macos-11 + steps: + - name: Install Dependencies (MacOS) + run: brew install ninja + + - name: Set file base name (MacOS) + id: set-file-base + run: | + FILE_NAME_BASE=$(echo "${{ inputs.file_base }}") + echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT + + # Get files created by release script + - name: Get tgz-tarball (MacOS) + uses: actions/download-artifact@v3 + with: + name: tgz-tarball + path: ${{ github.workspace }} + + - name: List files for the space (MacOS) + run: | + ls ${{ github.workspace }} + ls ${{ runner.workspace }} + + - name: Uncompress source (MacOS) + run: tar -zxvf ${{ 
github.workspace }}/${{ steps.set-file-base.outputs.FILE_BASE }}.tar.gz + + # symlinks the compiler executables to a common location + - name: Setup GNU Fortran + uses: modflowpy/install-gfortran-action@v1 + + - name: Run ctest (MacOS) + id: run-ctest + run: | + cd "${{ runner.workspace }}/hdf5/hdfsrc" + cmake --workflow --preset=ci-StdShar-Clang --fresh + shell: bash + + - name: Publish binary (MacOS) + id: publish-ctest-binary + run: | + mkdir "${{ runner.workspace }}/build" + mkdir "${{ runner.workspace }}/build/hdf5" + cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING ${{ runner.workspace }}/build/hdf5 + cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 ${{ runner.workspace }}/build/hdf5 + cp ${{ runner.workspace }}/hdf5/hdfsrc/README.md ${{ runner.workspace }}/build/hdf5 + cp ${{ runner.workspace }}/hdf5/build/ci-StdShar-Clang/*.tar.gz ${{ runner.workspace }}/build/hdf5 + cd "${{ runner.workspace }}/build" + tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-osx12.tar.gz hdf5 + shell: bash + + - name: List files in the space (MacOS) + run: | + ls ${{ github.workspace }} + ls ${{ runner.workspace }} + + # Save files created by ctest script + - name: Save published binary (MacOS) + uses: actions/upload-artifact@v3 + with: + name: tgz-osx12-binary + path: ${{ runner.workspace }}/build/${{ steps.set-file-base.outputs.FILE_BASE }}-osx12.tar.gz + if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` + diff --git a/.github/workflows/daily-build.yml b/.github/workflows/daily-build.yml new file mode 100644 index 00000000000..63b7dabe76a --- /dev/null +++ b/.github/workflows/daily-build.yml @@ -0,0 +1,28 @@ +name: hdf5 1.12 daily build + +# Controls when the action will run. Triggers the workflow on a schedule +on: + workflow_dispatch: + schedule: + - cron: "8 0 * * *" + +# A workflow run is made up of one or more jobs that can run sequentially or +# in parallel. 
+jobs: + call-workflow-tarball: + uses: ./.github/workflows/tarball.yml + + call-workflow-ctest: + needs: call-workflow-tarball + uses: ./.github/workflows/cmake-ctest.yml + with: + file_base: ${{ needs.call-workflow-tarball.outputs.file_base }} + if: ${{ needs.call-workflow-tarball.outputs.has_changes == 'true' }} + + call-workflow-release: + needs: call-workflow-ctest + uses: ./.github/workflows/release.yml + with: + file_base: ${{ needs.call-workflow-tarball.outputs.file_base }} + if: ${{ needs.call-workflow-tarball.outputs.has_changes == 'true' }} + diff --git a/.github/workflows/tarball.yml b/.github/workflows/tarball.yml new file mode 100644 index 00000000000..62fd2707cf8 --- /dev/null +++ b/.github/workflows/tarball.yml @@ -0,0 +1,105 @@ +name: hdf5 1.12 tarball + +# Controls when the action will run. Triggers the workflow on a schedule +on: + workflow_call: + outputs: + has_changes: + description: "Whether there were changes the previous day" + value: ${{ jobs.check_commits.outputs.has_changes }} + file_base: + description: "The common base name of the source tarballs" + value: ${{ jobs.create_tarball.outputs.file_base }} + +# A workflow run is made up of one or more jobs that can run sequentially or +# in parallel +jobs: + check_commits: + name: Check for recent commits + runs-on: ubuntu-latest + outputs: + has_changes: ${{ steps.check-new-commits.outputs.has-new-commits }} + branch_ref: ${{ steps.get-branch-name.outputs.BRANCH_REF }} + branch_sha: ${{ steps.get-branch-sha.outputs.BRANCH_SHA }} + steps: + - name: Get branch name + id: get-branch-name + env: + GITHUB_REF: ${{ github.ref }} + GITHUB_REF_NAME: ${{ github.ref_name }} + GITHUB_HEAD_REF: ${{ github.head_ref }} + #run: echo "${{ env.GITHUB_REF_NAME }} | grep -P '[0-9]+/merge' &> /dev/null && BRANCH_REF=${{ env.GITHUB_HEAD_REF }} || BRANCH_REF=${{ env.GITHUB_REF_NAME }}" >> $GITHUB_OUTPUT + run: echo "BRANCH_REF=${{ env.GITHUB_HEAD_REF || env.GITHUB_REF_NAME }}" >> $GITHUB_OUTPUT + + - name: Get 
branch sha + id: get-branch-sha + env: + GITHUB_SHA: ${{ github.sha }} + GITHUB_WF_SHA: ${{ github.workflow_sha }} + run: | + SHORT_SHA=$(echo "${{ env.GITHUB_WF_SHA }}" | cut -c1-7) + echo "BRANCH_SHA=$SHORT_SHA" >> $GITHUB_OUTPUT + + - name: Check for changed source + id: check-new-commits + uses: adriangl/check-new-commits-action@v1 + with: + seconds: 86400 # One day in seconds + branch: '${{ steps.get-branch-name.outputs.branch_ref }}' + + - run: echo "You have ${{ steps.check-new-commits.outputs.new-commits-number }} new commit(s) in ${{ steps.get-branch-name.outputs.BRANCH_REF }} ✅!" + if: ${{ steps.check-new-commits.outputs.has-new-commits == 'true' }} + + - run: echo "Short commit sha is ${{ steps.get-branch-sha.outputs.BRANCH_SHA }}!" + + create_tarball: + name: Create a source tarball + runs-on: ubuntu-latest + needs: check_commits + if: ${{ needs.check_commits.outputs.has_changes == 'true' }} + outputs: + file_base: ${{ steps.set-file-base.outputs.FILE_BASE }} + steps: + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - name: Get Sources + uses: actions/checkout@v3 + with: + path: hdfsrc + + - name: Install Autotools Dependencies (Linux, serial) + run: | + sudo apt update + sudo apt install automake autoconf libtool libtool-bin gzip dos2unix + + - name: Set file base name + id: set-file-base + run: | + FILE_NAME_BASE=$(echo "hdf5-${{ needs.check_commits.outputs.branch_ref }}-${{ needs.check_commits.outputs.branch_sha }}") + echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT + + - name: Run release script + id: run-release-script + run: | + cd "$GITHUB_WORKSPACE/hdfsrc" + bin/bbrelease -d $GITHUB_WORKSPACE --branch ${{ needs.check_commits.outputs.branch_ref }} --revision gzip zip + shell: bash + + - name: List files in the repository + run: | + ls ${{ github.workspace }} + ls $GITHUB_WORKSPACE + + # Save files created by release script + - name: Save tgz-tarball + uses: actions/upload-artifact@v3 + with: + name: 
tgz-tarball + path: ${{ steps.set-file-base.outputs.FILE_BASE }}.tar.gz + if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` + + - name: Save zip-tarball + uses: actions/upload-artifact@v3 + with: + name: zip-tarball + path: ${{ steps.set-file-base.outputs.FILE_BASE }}.zip + if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` diff --git a/config/toolchain/aarch64.cmake b/config/toolchain/aarch64.cmake index 69968336f78..fd216a99bb2 100644 --- a/config/toolchain/aarch64.cmake +++ b/config/toolchain/aarch64.cmake @@ -1,7 +1,7 @@ set(TOOLCHAIN_PREFIX aarch64-linux-gnu) -set(ANDROID_NDK /opt/android-ndk-linux) -set (CMAKE_SYSTEM_NAME Android) -set (CMAKE_ANDROID_ARCH_ABI x86_64) +set(CMAKE_SYSTEM_NAME Linux) +set(CMAKE_SYSTEM_PROCESSOR aarch64) +#set (CMAKE_ANDROID_ARCH_ABI x86_64) #set (CMAKE_ANDROID_STANDALONE_TOOLCHAIN ${ANDROID_NDK}/build/cmake/android.toolchain.cmake) set (CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}-gcc) set (CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}-g++) @@ -12,7 +12,7 @@ set (CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) set (CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) set (CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) set (CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY) -set (CMAKE_CROSSCOMPILING_EMULATOR qemu-aarch64) +set(CMAKE_CROSSCOMPILING_EMULATOR "qemu-aarch64-static;-L;/usr/aarch64-linux-gnu/" CACHE FILEPATH "Path to the emulator for the target system.") include_directories(/usr/${TOOLCHAIN_PREFIX}/include) From cd18732c2ee6e06384d5dfb033f9a9dc1c60ab0d Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Mon, 5 Jun 2023 18:22:10 -0500 Subject: [PATCH 069/108] Remove most unused options which are defaulted #2998 (#3046) * Remove most unused options which are defaulted #2998 * Correct doxygen predefined list * Correct doxygen list of predefineds --- configure.ac | 6 +- doxygen/CMakeLists.txt | 2 +- doxygen/Doxyfile.in | 896 ------------------ doxygen/aliases | 4 +- 
doxygen/dox/LearnBasics2.dox | 2 +- doxygen/dox/LearnBasics3.dox | 2 +- doxygen/dox/Overview.dox | 3 +- doxygen/dox/ReferenceManual.dox | 2 +- doxygen/dox/ViewTools.dox | 2 +- doxygen/examples/H5.format.1.0.html | 2 +- doxygen/examples/H5.format.1.1.html | 2 +- .../H5Pget_metadata_read_attempts.2.c | 4 +- .../H5Pget_metadata_read_attempts.3.c | 4 +- .../examples/H5Pset_metadata_read_attempts.c | 4 +- doxygen/examples/ImageSpec.html | 12 +- doxygen/examples/tables/propertyLists.dox | 4 +- doxygen/hdf5_navtree_hacks.js | 2 +- 17 files changed, 30 insertions(+), 923 deletions(-) diff --git a/configure.ac b/configure.ac index 28afd6421e9..e1ef78b44ae 100644 --- a/configure.ac +++ b/configure.ac @@ -1237,9 +1237,11 @@ if test "X$HDF5_DOXYGEN" = "Xyes"; then DOXYGEN_PACKAGE=${PACKAGE_NAME} DOXYGEN_VERSION_STRING=${PACKAGE_VERSION} DOXYGEN_DIR='$(SRCDIR)/doxygen' + DOXYGEN_INCLUDE_ALIASES_PATH='$(SRCDIR)/doxygen' DOXYGEN_INCLUDE_ALIASES='$(SRCDIR)/doxygen/aliases' + DOXYGEN_VERBATIM_VARS='DOXYGEN_INCLUDE_ALIASES' DOXYGEN_PROJECT_LOGO='$(SRCDIR)/doxygen/img/HDFG-logo.png' - DOXYGEN_PROJECT_BRIEF='' + DOXYGEN_PROJECT_BRIEF='API Reference' DOXYGEN_INPUT_DIRECTORY='$(SRCDIR) $(SRCDIR)/doxygen/dox' DOXYGEN_OPTIMIZE_OUTPUT_FOR_C=YES DOXYGEN_MACRO_EXPANSION=YES @@ -1256,7 +1258,7 @@ if test "X$HDF5_DOXYGEN" = "Xyes"; then DOXYGEN_SEARCHENGINE_URL= DOXYGEN_STRIP_FROM_PATH='$(SRCDIR)' DOXYGEN_STRIP_FROM_INC_PATH='$(SRCDIR)' - DOXYGEN_PREDEFINED='H5_HAVE_DIRECT H5_HAVE_LIBHDFS H5_HAVE_MAP_API H5_HAVE_PARALLEL H5_HAVE_ROS3_VFD H5_DOXYGEN_FORTRAN' + DOXYGEN_PREDEFINED='H5_HAVE_DIRECT H5_HAVE_LIBHDFS H5_HAVE_MAP_API H5_HAVE_PARALLEL H5_HAVE_ROS3_VFD H5_DOXYGEN_FORTRAN H5_HAVE_MIRROR_VFD' DX_INIT_DOXYGEN([HDF5], [./doxygen/Doxyfile], [hdf5lib_docs]) fi diff --git a/doxygen/CMakeLists.txt b/doxygen/CMakeLists.txt index fda7e7d6e03..aa241fb9c85 100644 --- a/doxygen/CMakeLists.txt +++ b/doxygen/CMakeLists.txt @@ -29,7 +29,7 @@ if (DOXYGEN_FOUND) set (DOXYGEN_SEARCHENGINE_URL) set 
(DOXYGEN_STRIP_FROM_PATH ${HDF5_SOURCE_DIR}) set (DOXYGEN_STRIP_FROM_INC_PATH ${HDF5_SOURCE_DIR}) - set (DOXYGEN_PREDEFINED "H5_HAVE_DIRECT H5_HAVE_LIBHDFS H5_HAVE_MAP_API H5_HAVE_PARALLEL H5_HAVE_ROS3_VFD H5_DOXYGEN_FORTRAN") + set (DOXYGEN_PREDEFINED "H5_HAVE_DIRECT H5_HAVE_LIBHDFS H5_HAVE_MAP_API H5_HAVE_PARALLEL H5_HAVE_ROS3_VFD H5_DOXYGEN_FORTRAN H5_HAVE_MIRROR_VFD") # This configure and individual custom targets work together # Replace variables inside @@ with the current values diff --git a/doxygen/Doxyfile.in b/doxygen/Doxyfile.in index d9b0fe9aa55..66e4826be51 100644 --- a/doxygen/Doxyfile.in +++ b/doxygen/Doxyfile.in @@ -60,16 +60,6 @@ PROJECT_LOGO = @DOXYGEN_PROJECT_LOGO@ OUTPUT_DIRECTORY = @DOXYGEN_OUTPUT_DIRECTORY@ -# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub- -# directories (in 2 levels) under the output directory of each output format and -# will distribute the generated files over these directories. Enabling this -# option can be useful when feeding doxygen a huge amount of source files, where -# putting all generated files in the same directory would otherwise causes -# performance problems for the file system. -# The default value is: NO. - -CREATE_SUBDIRS = NO - # If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII # characters to appear in the names of generated files. If set to NO, non-ASCII # characters will be escaped, for example _xE3_x81_x84 will be used for Unicode @@ -93,30 +83,6 @@ ALLOW_UNICODE_NAMES = NO OUTPUT_LANGUAGE = English -# The OUTPUT_TEXT_DIRECTION tag is used to specify the direction in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all generated output in the proper direction. -# Possible values are: None, LTR, RTL and Context. -# The default value is: None. 
- -OUTPUT_TEXT_DIRECTION = None - -# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member -# descriptions after the members that are listed in the file and class -# documentation (similar to Javadoc). Set to NO to disable this. -# The default value is: YES. - -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief -# description of a member or function before the detailed description -# -# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. -# The default value is: YES. - -REPEAT_BRIEF = YES - # This tag implements a quasi-intelligent brief description abbreviator that is # used to form the text in various listings. Each string in this list, if found # as the leading text of the brief description, will be stripped from the text @@ -138,21 +104,6 @@ ABBREVIATE_BRIEF = "The $name class" \ an \ the -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# doxygen will generate a detailed section even if there is only a brief -# description. -# The default value is: NO. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. -# The default value is: NO. - -INLINE_INHERITED_MEMB = NO - # If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path # before files name in the file list and in the header files. If set to NO the # shortest path that makes the file name unique will be used @@ -181,40 +132,6 @@ STRIP_FROM_PATH = @DOXYGEN_STRIP_FROM_PATH@ STRIP_FROM_INC_PATH = @DOXYGEN_STRIP_FROM_INC_PATH@ -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but -# less readable) file names. 
This can be useful is your file systems doesn't -# support long names like on DOS, Mac, or CD-ROM. -# The default value is: NO. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the -# first line (until the first dot) of a Javadoc-style comment as the brief -# description. If set to NO, the Javadoc-style will behave just like regular Qt- -# style comments (thus requiring an explicit @brief command for a brief -# description.) -# The default value is: NO. - -JAVADOC_AUTOBRIEF = NO - -# If the JAVADOC_BANNER tag is set to YES then doxygen will interpret a line -# such as -# /*************** -# as being the beginning of a Javadoc-style comment "banner". If set to NO, the -# Javadoc-style will behave just like regular comments and it will not be -# interpreted by doxygen. -# The default value is: NO. - -JAVADOC_BANNER = NO - -# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first -# line (until the first dot) of a Qt-style comment as the brief description. If -# set to NO, the Qt-style will behave just like regular Qt-style comments (thus -# requiring an explicit \brief command for a brief description.) -# The default value is: NO. - -QT_AUTOBRIEF = NO - # The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a # multi-line C++ special comment block (i.e. a block of //! or /// comments) as # a brief description. This used to be the default behavior. The new default is @@ -227,19 +144,6 @@ QT_AUTOBRIEF = NO MULTILINE_CPP_IS_BRIEF = NO -# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the -# documentation from any documented member that it re-implements. -# The default value is: YES. - -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new -# page for each member. If set to NO, the documentation of a member will be part -# of the file/class/namespace that contains it. -# The default value is: NO. 
- -SEPARATE_MEMBER_PAGES = NO - # The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen # uses this value to replace tabs by spaces in code fragments. # Minimum value: 1, maximum value: 16, default value: 4. @@ -288,20 +192,6 @@ OPTIMIZE_OUTPUT_JAVA = YES OPTIMIZE_FOR_FORTRAN = YES -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for VHDL. -# The default value is: NO. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Set the OPTIMIZE_OUTPUT_SLICE tag to YES if your project consists of Slice -# sources only. Doxygen will then generate output that is more tailored for that -# language. For instance, namespaces will be presented as modules, types will be -# separated into more groups, etc. -# The default value is: NO. - -OPTIMIZE_OUTPUT_SLICE = NO - # Doxygen selects the parser to use depending on the extension of the files it # parses. With this tag you can assign which parser to use for a given # extension. Doxygen has a built-in mapping, but you can override or extend it @@ -349,48 +239,6 @@ TOC_INCLUDE_HEADINGS = 5 AUTOLINK_SUPPORT = YES -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should set this -# tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); -# versus func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. -# The default value is: NO. - -BUILTIN_STL_SUPPORT = NO - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. -# The default value is: NO. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip (see: -# https://www.riverbankcomputing.com/software/sip/intro) sources only. 
Doxygen -# will parse them like normal C++ but will assume all classes use public instead -# of private inheritance when no explicit protection keyword is present. -# The default value is: NO. - -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate -# getter and setter methods for a property. Setting this option to YES will make -# doxygen to replace the get and set methods by a property in the documentation. -# This will only work if the methods are indeed getting or setting a simple -# type. If this is not the case, or you want to show the methods anyway, you -# should set this option to NO. -# The default value is: YES. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. -# The default value is: NO. - -DISTRIBUTE_GROUP_DOC = NO - # If one adds a struct or class to a group and this option is enabled, then also # any nested class or struct is added to the same group. By default this option # is disabled and one has to add nested compounds explicitly via \ingroup. @@ -740,27 +588,6 @@ FILE_VERSION_FILTER = LAYOUT_FILE = @DOXYGEN_LAYOUT_FILE@ -# The CITE_BIB_FILES tag can be used to specify one or more bib files containing -# the reference definitions. This must be a list of .bib files. The .bib -# extension is automatically appended if omitted. This requires the bibtex tool -# to be installed. See also https://en.wikipedia.org/wiki/BibTeX for more info. -# For LaTeX the style of the bibliography can be controlled using -# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the -# search path. See also \cite for info how to create references. 
- -CITE_BIB_FILES = - -#--------------------------------------------------------------------------- -# Configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated to -# standard output by doxygen. If QUIET is set to YES this implies that the -# messages are off. -# The default value is: NO. - -QUIET = NO - # The WARNINGS tag can be used to turn on/off the warning messages that are # generated to standard error (stderr) by doxygen. If WARNINGS is set to YES # this implies that the warnings are on. @@ -1545,81 +1372,6 @@ EXT_LINKS_IN_WINDOW = NO HTML_FORMULA_FORMAT = png -# Use this tag to change the font size of LaTeX formulas included as images in -# the HTML documentation. When you change the font size after a successful -# doxygen run you need to manually remove any form_*.png images from the HTML -# output directory to force them to be regenerated. -# Minimum value: 8, maximum value: 50, default value: 10. -# This tag requires that the tag GENERATE_HTML is set to YES. - -FORMULA_FONTSIZE = 10 - -# Use the FORMULA_TRANSPARENT tag to determine whether or not the images -# generated for formulas are transparent PNGs. Transparent PNGs are not -# supported properly for IE 6.0, but are supported on all modern browsers. -# -# Note that when changing this option you need to delete any form_*.png files in -# the HTML output directory before the changes have effect. -# The default value is: YES. -# This tag requires that the tag GENERATE_HTML is set to YES. - -FORMULA_TRANSPARENT = YES - -# The FORMULA_MACROFILE can contain LaTeX \newcommand and \renewcommand commands -# to create new LaTeX commands to be used in formulas as building blocks. See -# the section "Including formulas" for details. 
- -FORMULA_MACROFILE = - -# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see -# https://www.mathjax.org) which uses client side JavaScript for the rendering -# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX -# installed or if you want to formulas look prettier in the HTML output. When -# enabled you may also need to install MathJax separately and configure the path -# to it using the MATHJAX_RELPATH option. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTML is set to YES. - -USE_MATHJAX = NO - -# When MathJax is enabled you can set the default output format to be used for -# the MathJax output. See the MathJax site (see: -# http://docs.mathjax.org/en/latest/output.html) for more details. -# Possible values are: HTML-CSS (which is slower, but has the best -# compatibility), NativeMML (i.e. MathML) and SVG. -# The default value is: HTML-CSS. -# This tag requires that the tag USE_MATHJAX is set to YES. - -MATHJAX_FORMAT = HTML-CSS - -# When MathJax is enabled you need to specify the location relative to the HTML -# output directory using the MATHJAX_RELPATH option. The destination directory -# should contain the MathJax.js script. For instance, if the mathjax directory -# is located at the same level as the HTML output directory, then -# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax -# Content Delivery Network so you can quickly see the result without installing -# MathJax. However, it is strongly recommended to install a local copy of -# MathJax from https://www.mathjax.org before deployment. -# The default value is: https://cdn.jsdelivr.net/npm/mathjax@2. -# This tag requires that the tag USE_MATHJAX is set to YES. - -MATHJAX_RELPATH = https://cdn.jsdelivr.net/npm/mathjax@2 - -# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax -# extension names that should be enabled during MathJax rendering. 
For example -# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols -# This tag requires that the tag USE_MATHJAX is set to YES. - -MATHJAX_EXTENSIONS = - -# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces -# of code that will be used on startup of the MathJax code. See the MathJax site -# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an -# example see the documentation. -# This tag requires that the tag USE_MATHJAX is set to YES. - -MATHJAX_CODEFILE = - # When the SEARCHENGINE tag is enabled doxygen will generate a search box for # the HTML output. The underlying search engine uses javascript and DHTML and # should work on any modern browser. Note that when using HTML help @@ -1715,188 +1467,6 @@ EXTRA_SEARCH_MAPPINGS = GENERATE_LATEX = NO -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. If a -# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of -# it. -# The default directory is: latex. -# This tag requires that the tag GENERATE_LATEX is set to YES. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. -# -# Note that when not enabling USE_PDFLATEX the default is latex when enabling -# USE_PDFLATEX the default is pdflatex and when in the later case latex is -# chosen this is overwritten by pdflatex. For specific output languages the -# default can have been set differently, this depends on the implementation of -# the output language. -# This tag requires that the tag GENERATE_LATEX is set to YES. - -LATEX_CMD_NAME = - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate -# index for LaTeX. -# Note: This tag is used in the Makefile / make.bat. -# See also: LATEX_MAKEINDEX_CMD for the part in the generated output file -# (.tex). -# The default file is: makeindex. -# This tag requires that the tag GENERATE_LATEX is set to YES. 
- -MAKEINDEX_CMD_NAME = makeindex - -# The LATEX_MAKEINDEX_CMD tag can be used to specify the command name to -# generate index for LaTeX. In case there is no backslash (\) as first character -# it will be automatically added in the LaTeX code. -# Note: This tag is used in the generated output file (.tex). -# See also: MAKEINDEX_CMD_NAME for the part in the Makefile / make.bat. -# The default value is: makeindex. -# This tag requires that the tag GENERATE_LATEX is set to YES. - -LATEX_MAKEINDEX_CMD = makeindex - -# If the COMPACT_LATEX tag is set to YES, doxygen generates more compact LaTeX -# documents. This may be useful for small projects and may help to save some -# trees in general. -# The default value is: NO. -# This tag requires that the tag GENERATE_LATEX is set to YES. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used by the -# printer. -# Possible values are: a4 (210 x 297 mm), letter (8.5 x 11 inches), legal (8.5 x -# 14 inches) and executive (7.25 x 10.5 inches). -# The default value is: a4. -# This tag requires that the tag GENERATE_LATEX is set to YES. - -PAPER_TYPE = a4 - -# The EXTRA_PACKAGES tag can be used to specify one or more LaTeX package names -# that should be included in the LaTeX output. The package can be specified just -# by its name or with the correct syntax as to be used with the LaTeX -# \usepackage command. To get the times font for instance you can specify : -# EXTRA_PACKAGES=times or EXTRA_PACKAGES={times} -# To use the option intlimits with the amsmath package you can specify: -# EXTRA_PACKAGES=[intlimits]{amsmath} -# If left blank no extra packages will be included. -# This tag requires that the tag GENERATE_LATEX is set to YES. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for the -# generated LaTeX document. The header should contain everything until the first -# chapter. If it is left blank doxygen will generate a standard header. 
See -# section "Doxygen usage" for information on how to let doxygen write the -# default header to a separate file. -# -# Note: Only use a user-defined header if you know what you are doing! The -# following commands have a special meaning inside the header: $title, -# $datetime, $date, $doxygenversion, $projectname, $projectnumber, -# $projectbrief, $projectlogo. Doxygen will replace $title with the empty -# string, for the replacement values of the other commands the user is referred -# to HTML_HEADER. -# This tag requires that the tag GENERATE_LATEX is set to YES. - -LATEX_HEADER = - -# The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for the -# generated LaTeX document. The footer should contain everything after the last -# chapter. If it is left blank doxygen will generate a standard footer. See -# LATEX_HEADER for more information on how to generate a default footer and what -# special commands can be used inside the footer. -# -# Note: Only use a user-defined footer if you know what you are doing! -# This tag requires that the tag GENERATE_LATEX is set to YES. - -LATEX_FOOTER = - -# The LATEX_EXTRA_STYLESHEET tag can be used to specify additional user-defined -# LaTeX style sheets that are included after the standard style sheets created -# by doxygen. Using this option one can overrule certain style aspects. Doxygen -# will copy the style sheet files to the output directory. -# Note: The order of the extra style sheet files is of importance (e.g. the last -# style sheet in the list overrules the setting of the previous ones in the -# list). -# This tag requires that the tag GENERATE_LATEX is set to YES. - -LATEX_EXTRA_STYLESHEET = - -# The LATEX_EXTRA_FILES tag can be used to specify one or more extra images or -# other source files which should be copied to the LATEX_OUTPUT output -# directory. Note that the files will be copied as-is; there are no commands or -# markers available. 
-# This tag requires that the tag GENERATE_LATEX is set to YES. - -LATEX_EXTRA_FILES = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated is -# prepared for conversion to PDF (using ps2pdf or pdflatex). The PDF file will -# contain links (just like the HTML output) instead of page references. This -# makes the output suitable for online browsing using a PDF viewer. -# The default value is: YES. -# This tag requires that the tag GENERATE_LATEX is set to YES. - -PDF_HYPERLINKS = YES - -# If the USE_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate -# the PDF file directly from the LaTeX files. Set this option to YES, to get a -# higher quality PDF documentation. -# The default value is: YES. -# This tag requires that the tag GENERATE_LATEX is set to YES. - -USE_PDFLATEX = YES - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \batchmode -# command to the generated LaTeX files. This will instruct LaTeX to keep running -# if errors occur, instead of asking the user for help. This option is also used -# when generating formulas in HTML. -# The default value is: NO. -# This tag requires that the tag GENERATE_LATEX is set to YES. - -LATEX_BATCHMODE = NO - -# If the LATEX_HIDE_INDICES tag is set to YES then doxygen will not include the -# index chapters (such as File Index, Compound Index, etc.) in the output. -# The default value is: NO. -# This tag requires that the tag GENERATE_LATEX is set to YES. - -LATEX_HIDE_INDICES = NO - -# If the LATEX_SOURCE_CODE tag is set to YES then doxygen will include source -# code with syntax highlighting in the LaTeX output. -# -# Note that which sources are shown also depends on other settings such as -# SOURCE_BROWSER. -# The default value is: NO. -# This tag requires that the tag GENERATE_LATEX is set to YES. - -LATEX_SOURCE_CODE = NO - -# The LATEX_BIB_STYLE tag can be used to specify the style to use for the -# bibliography, e.g. plainnat, or ieeetr. 
See -# https://en.wikipedia.org/wiki/BibTeX and \cite for more info. -# The default value is: plain. -# This tag requires that the tag GENERATE_LATEX is set to YES. - -LATEX_BIB_STYLE = plain - -# If the LATEX_TIMESTAMP tag is set to YES then the footer of each generated -# page will contain the date and time when the page was generated. Setting this -# to NO can help when comparing the output of multiple runs. -# The default value is: NO. -# This tag requires that the tag GENERATE_LATEX is set to YES. - -LATEX_TIMESTAMP = NO - -# The LATEX_EMOJI_DIRECTORY tag is used to specify the (relative or absolute) -# path from which the emoji images will be read. If a relative path is entered, -# it will be relative to the LATEX_OUTPUT directory. If left blank the -# LATEX_OUTPUT directory will be used. -# This tag requires that the tag GENERATE_LATEX is set to YES. - -LATEX_EMOJI_DIRECTORY = - #--------------------------------------------------------------------------- # Configuration options related to the RTF output #--------------------------------------------------------------------------- @@ -1908,61 +1478,6 @@ LATEX_EMOJI_DIRECTORY = GENERATE_RTF = NO -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. If a -# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of -# it. -# The default directory is: rtf. -# This tag requires that the tag GENERATE_RTF is set to YES. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES, doxygen generates more compact RTF -# documents. This may be useful for small projects and may help to save some -# trees in general. -# The default value is: NO. -# This tag requires that the tag GENERATE_RTF is set to YES. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated will -# contain hyperlink fields. The RTF file will contain links (just like the HTML -# output) instead of page references. 
This makes the output suitable for online -# browsing using Word or some other Word compatible readers that support those -# fields. -# -# Note: WordPad (write) and others do not support links. -# The default value is: NO. -# This tag requires that the tag GENERATE_RTF is set to YES. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. Syntax is similar to doxygen's -# configuration file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. -# -# See also section "Doxygen usage" for information on how to generate the -# default style sheet that doxygen normally uses. -# This tag requires that the tag GENERATE_RTF is set to YES. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an RTF document. Syntax is -# similar to doxygen's configuration file. A template extensions file can be -# generated using doxygen -e rtf extensionFile. -# This tag requires that the tag GENERATE_RTF is set to YES. - -RTF_EXTENSIONS_FILE = - -# If the RTF_SOURCE_CODE tag is set to YES then doxygen will include source code -# with syntax highlighting in the RTF output. -# -# Note that which sources are shown also depends on other settings such as -# SOURCE_BROWSER. -# The default value is: NO. -# This tag requires that the tag GENERATE_RTF is set to YES. - -RTF_SOURCE_CODE = NO - #--------------------------------------------------------------------------- # Configuration options related to the man page output #--------------------------------------------------------------------------- @@ -1973,40 +1488,6 @@ RTF_SOURCE_CODE = NO GENERATE_MAN = NO -# The MAN_OUTPUT tag is used to specify where the man pages will be put. If a -# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of -# it. A directory man3 will be created inside the directory specified by -# MAN_OUTPUT. -# The default directory is: man. 
-# This tag requires that the tag GENERATE_MAN is set to YES. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to the generated -# man pages. In case the manual section does not start with a number, the number -# 3 is prepended. The dot (.) at the beginning of the MAN_EXTENSION tag is -# optional. -# The default value is: .3. -# This tag requires that the tag GENERATE_MAN is set to YES. - -MAN_EXTENSION = .3 - -# The MAN_SUBDIR tag determines the name of the directory created within -# MAN_OUTPUT in which the man pages are placed. If defaults to man followed by -# MAN_EXTENSION with the initial . removed. -# This tag requires that the tag GENERATE_MAN is set to YES. - -MAN_SUBDIR = - -# If the MAN_LINKS tag is set to YES and doxygen generates man output, then it -# will generate one additional man file for each entity documented in the real -# man page(s). These additional files only source the real man page, but without -# them the man command would be unable to find the correct page. -# The default value is: NO. -# This tag requires that the tag GENERATE_MAN is set to YES. - -MAN_LINKS = NO - #--------------------------------------------------------------------------- # Configuration options related to the XML output #--------------------------------------------------------------------------- @@ -2017,30 +1498,6 @@ MAN_LINKS = NO GENERATE_XML = NO -# The XML_OUTPUT tag is used to specify where the XML pages will be put. If a -# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of -# it. -# The default directory is: xml. -# This tag requires that the tag GENERATE_XML is set to YES. - -XML_OUTPUT = xml - -# If the XML_PROGRAMLISTING tag is set to YES, doxygen will dump the program -# listings (including syntax highlighting and cross-referencing information) to -# the XML output. Note that enabling this will significantly increase the size -# of the XML output. -# The default value is: YES. 
-# This tag requires that the tag GENERATE_XML is set to YES. - -XML_PROGRAMLISTING = YES - -# If the XML_NS_MEMB_FILE_SCOPE tag is set to YES, doxygen will include -# namespace members in file scope as well, matching the HTML output. -# The default value is: NO. -# This tag requires that the tag GENERATE_XML is set to YES. - -XML_NS_MEMB_FILE_SCOPE = NO - #--------------------------------------------------------------------------- # Configuration options related to the DOCBOOK output #--------------------------------------------------------------------------- @@ -2051,23 +1508,6 @@ XML_NS_MEMB_FILE_SCOPE = NO GENERATE_DOCBOOK = NO -# The DOCBOOK_OUTPUT tag is used to specify where the Docbook pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be put in -# front of it. -# The default directory is: docbook. -# This tag requires that the tag GENERATE_DOCBOOK is set to YES. - -DOCBOOK_OUTPUT = docbook - -# If the DOCBOOK_PROGRAMLISTING tag is set to YES, doxygen will include the -# program listings (including syntax highlighting and cross-referencing -# information) to the DOCBOOK output. Note that enabling this will significantly -# increase the size of the DOCBOOK output. -# The default value is: NO. -# This tag requires that the tag GENERATE_DOCBOOK is set to YES. - -DOCBOOK_PROGRAMLISTING = NO - #--------------------------------------------------------------------------- # Configuration options for the AutoGen Definitions output #--------------------------------------------------------------------------- @@ -2092,32 +1532,6 @@ GENERATE_AUTOGEN_DEF = NO GENERATE_PERLMOD = NO -# If the PERLMOD_LATEX tag is set to YES, doxygen will generate the necessary -# Makefile rules, Perl scripts and LaTeX code to be able to generate PDF and DVI -# output from the Perl module output. -# The default value is: NO. -# This tag requires that the tag GENERATE_PERLMOD is set to YES. 
- -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES, the Perl module output will be nicely -# formatted so it can be parsed by a human reader. This is useful if you want to -# understand what is going on. On the other hand, if this tag is set to NO, the -# size of the Perl module output will be much smaller and Perl will parse it -# just the same. -# The default value is: YES. -# This tag requires that the tag GENERATE_PERLMOD is set to YES. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file are -# prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. This is useful -# so different doxyrules.make files included by the same Makefile don't -# overwrite each other's variables. -# This tag requires that the tag GENERATE_PERLMOD is set to YES. - -PERLMOD_MAKEVAR_PREFIX = - #--------------------------------------------------------------------------- # Configuration options related to the preprocessor #--------------------------------------------------------------------------- @@ -2241,313 +1655,3 @@ EXTERNAL_GROUPS = YES # The default value is: YES. EXTERNAL_PAGES = YES - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES, doxygen will generate a class diagram -# (in HTML and LaTeX) for classes with base or super classes. Setting the tag to -# NO turns the diagrams off. Note that this option also works with HAVE_DOT -# disabled, but it is recommended to install and use dot, since it yields more -# powerful graphs. -# The default value is: YES. - -CLASS_DIAGRAMS = YES - -# You can include diagrams made with dia in doxygen documentation. Doxygen will -# then run dia to produce the diagram and insert it in the documentation. The -# DIA_PATH tag allows you to specify the directory where the dia binary resides. 
-# If left empty dia is assumed to be found in the default search path. - -DIA_PATH = - -# If set to YES the inheritance and collaboration graphs will hide inheritance -# and usage relations if the target is undocumented or is not a class. -# The default value is: YES. - -HIDE_UNDOC_RELATIONS = YES - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz (see: -# http://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent -# Bell Labs. The other options in this section have no effect if this option is -# set to NO -# The default value is: NO. - -HAVE_DOT = NO - -# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is allowed -# to run in parallel. When set to 0 doxygen will base this on the number of -# processors available in the system. You can set it explicitly to a value -# larger than 0 to get control over the balance between CPU load and processing -# speed. -# Minimum value: 0, maximum value: 32, default value: 0. -# This tag requires that the tag HAVE_DOT is set to YES. - -DOT_NUM_THREADS = 0 - -# When you want a differently looking font in the dot files that doxygen -# generates you can specify the font name using DOT_FONTNAME. You need to make -# sure dot is able to find the font, which can be done by putting it in a -# standard location or by setting the DOTFONTPATH environment variable or by -# setting DOT_FONTPATH to the directory containing the font. -# The default value is: Helvetica. -# This tag requires that the tag HAVE_DOT is set to YES. - -DOT_FONTNAME = Helvetica - -# The DOT_FONTSIZE tag can be used to set the size (in points) of the font of -# dot graphs. -# Minimum value: 4, maximum value: 24, default value: 10. -# This tag requires that the tag HAVE_DOT is set to YES. - -DOT_FONTSIZE = 10 - -# By default doxygen will tell dot to use the default font as specified with -# DOT_FONTNAME. 
If you specify a different font using DOT_FONTNAME you can set -# the path where dot can find it using this tag. -# This tag requires that the tag HAVE_DOT is set to YES. - -DOT_FONTPATH = - -# If the CLASS_GRAPH tag is set to YES then doxygen will generate a graph for -# each documented class showing the direct and indirect inheritance relations. -# Setting this tag to YES will force the CLASS_DIAGRAMS tag to NO. -# The default value is: YES. -# This tag requires that the tag HAVE_DOT is set to YES. - -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH tag is set to YES then doxygen will generate a -# graph for each documented class showing the direct and indirect implementation -# dependencies (inheritance, containment, and class references variables) of the -# class with other documented classes. -# The default value is: YES. -# This tag requires that the tag HAVE_DOT is set to YES. - -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS tag is set to YES then doxygen will generate a graph for -# groups, showing the direct groups dependencies. -# The default value is: YES. -# This tag requires that the tag HAVE_DOT is set to YES. - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES, doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. -# The default value is: NO. -# This tag requires that the tag HAVE_DOT is set to YES. - -UML_LOOK = NO - -# If the UML_LOOK tag is enabled, the fields and methods are shown inside the -# class node. If there are many fields or methods and many nodes the graph may -# become too big to be useful. The UML_LIMIT_NUM_FIELDS threshold limits the -# number of items for each type to make the size more manageable. Set this to 0 -# for no limit. Note that the threshold may be exceeded by 50% before the limit -# is enforced. 
So when you set the threshold to 10, up to 15 fields may appear, -# but if the number exceeds 15, the total amount of fields shown is limited to -# 10. -# Minimum value: 0, maximum value: 100, default value: 10. -# This tag requires that the tag HAVE_DOT is set to YES. - -UML_LIMIT_NUM_FIELDS = 10 - -# If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and -# collaboration graphs will show the relations between templates and their -# instances. -# The default value is: NO. -# This tag requires that the tag HAVE_DOT is set to YES. - -TEMPLATE_RELATIONS = NO - -# If the INCLUDE_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are set to -# YES then doxygen will generate a graph for each documented file showing the -# direct and indirect include dependencies of the file with other documented -# files. -# The default value is: YES. -# This tag requires that the tag HAVE_DOT is set to YES. - -INCLUDE_GRAPH = YES - -# If the INCLUDED_BY_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are -# set to YES then doxygen will generate a graph for each documented file showing -# the direct and indirect include dependencies of the file with other documented -# files. -# The default value is: YES. -# This tag requires that the tag HAVE_DOT is set to YES. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH tag is set to YES then doxygen will generate a call -# dependency graph for every global function or class method. -# -# Note that enabling this option will significantly increase the time of a run. -# So in most cases it will be better to enable call graphs for selected -# functions only using the \callgraph command. Disabling a call graph can be -# accomplished by means of the command \hidecallgraph. -# The default value is: NO. -# This tag requires that the tag HAVE_DOT is set to YES. - -CALL_GRAPH = NO - -# If the CALLER_GRAPH tag is set to YES then doxygen will generate a caller -# dependency graph for every global function or class method. 
-# -# Note that enabling this option will significantly increase the time of a run. -# So in most cases it will be better to enable caller graphs for selected -# functions only using the \callergraph command. Disabling a caller graph can be -# accomplished by means of the command \hidecallergraph. -# The default value is: NO. -# This tag requires that the tag HAVE_DOT is set to YES. - -CALLER_GRAPH = NO - -# If the GRAPHICAL_HIERARCHY tag is set to YES then doxygen will graphical -# hierarchy of all classes instead of a textual one. -# The default value is: YES. -# This tag requires that the tag HAVE_DOT is set to YES. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH tag is set to YES then doxygen will show the -# dependencies a directory has on other directories in a graphical way. The -# dependency relations are determined by the #include relations between the -# files in the directories. -# The default value is: YES. -# This tag requires that the tag HAVE_DOT is set to YES. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. For an explanation of the image formats see the section -# output formats in the documentation of the dot tool (Graphviz (see: -# http://www.graphviz.org/)). -# Note: If you choose svg you need to set HTML_FILE_EXTENSION to xhtml in order -# to make the SVG files visible in IE 9+ (other browsers do not have this -# requirement). -# Possible values are: png, jpg, gif, svg, png:gd, png:gd:gd, png:cairo, -# png:cairo:gd, png:cairo:cairo, png:cairo:gdiplus, png:gdiplus and -# png:gdiplus:gdiplus. -# The default value is: png. -# This tag requires that the tag HAVE_DOT is set to YES. - -DOT_IMAGE_FORMAT = png - -# If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to -# enable generation of interactive SVG images that allow zooming and panning. -# -# Note that this requires a modern browser other than Internet Explorer. 
Tested -# and working are Firefox, Chrome, Safari, and Opera. -# Note: For IE 9+ you need to set HTML_FILE_EXTENSION to xhtml in order to make -# the SVG files visible. Older versions of IE do not have SVG support. -# The default value is: NO. -# This tag requires that the tag HAVE_DOT is set to YES. - -INTERACTIVE_SVG = NO - -# The DOT_PATH tag can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. -# This tag requires that the tag HAVE_DOT is set to YES. - -DOT_PATH = - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the \dotfile -# command). -# This tag requires that the tag HAVE_DOT is set to YES. - -DOTFILE_DIRS = - -# The MSCFILE_DIRS tag can be used to specify one or more directories that -# contain msc files that are included in the documentation (see the \mscfile -# command). - -MSCFILE_DIRS = - -# The DIAFILE_DIRS tag can be used to specify one or more directories that -# contain dia files that are included in the documentation (see the \diafile -# command). - -DIAFILE_DIRS = - -# When using plantuml, the PLANTUML_JAR_PATH tag should be used to specify the -# path where java can find the plantuml.jar file. If left blank, it is assumed -# PlantUML is not used or called during a preprocessing step. Doxygen will -# generate a warning when it encounters a \startuml command in this case and -# will not generate output for the diagram. - -PLANTUML_JAR_PATH = - -# When using plantuml, the PLANTUML_CFG_FILE tag can be used to specify a -# configuration file for plantuml. - -PLANTUML_CFG_FILE = - -# When using plantuml, the specified paths are searched for files specified by -# the !include statement in a plantuml block. - -PLANTUML_INCLUDE_PATH = - -# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of nodes -# that will be shown in the graph. 
If the number of nodes in a graph becomes -# larger than this value, doxygen will truncate the graph, which is visualized -# by representing a node as a red box. Note that doxygen if the number of direct -# children of the root node in a graph is already larger than -# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note that -# the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. -# Minimum value: 0, maximum value: 10000, default value: 50. -# This tag requires that the tag HAVE_DOT is set to YES. - -DOT_GRAPH_MAX_NODES = 50 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the graphs -# generated by dot. A depth value of 3 means that only nodes reachable from the -# root by following a path via at most 3 edges will be shown. Nodes that lay -# further from the root node will be omitted. Note that setting this option to 1 -# or 2 may greatly reduce the computation time needed for large code bases. Also -# note that the size of a graph can be further restricted by -# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. -# Minimum value: 0, maximum value: 1000, default value: 0. -# This tag requires that the tag HAVE_DOT is set to YES. - -MAX_DOT_GRAPH_DEPTH = 0 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, because dot on Windows does not seem -# to support this out of the box. -# -# Warning: Depending on the platform used, enabling this option may lead to -# badly anti-aliased labels on the edges of a graph (i.e. they become hard to -# read). -# The default value is: NO. -# This tag requires that the tag HAVE_DOT is set to YES. - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). 
This -# makes dot run faster, but since only newer versions of dot (>1.8.10) support -# this, this feature is disabled by default. -# The default value is: NO. -# This tag requires that the tag HAVE_DOT is set to YES. - -DOT_MULTI_TARGETS = NO - -# If the GENERATE_LEGEND tag is set to YES doxygen will generate a legend page -# explaining the meaning of the various boxes and arrows in the dot generated -# graphs. -# The default value is: YES. -# This tag requires that the tag HAVE_DOT is set to YES. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES, doxygen will remove the intermediate dot -# files that are used to generate the various graphs. -# The default value is: YES. -# This tag requires that the tag HAVE_DOT is set to YES. - -DOT_CLEANUP = YES diff --git a/doxygen/aliases b/doxygen/aliases index dc796238cf1..8e447d1d7b3 100644 --- a/doxygen/aliases +++ b/doxygen/aliases @@ -15,7 +15,7 @@ ALIASES += Code{1}="\1" ALIASES += success{1}="\Bold{Success:} \1" ALIASES += failure{1}="\Bold{Failure:} \1" -ALIASES += herr_t="Returns a non-negative value if successful; otherwise returns a negative value." +ALIASES += herr_t="Returns a non-negative value if successful; otherwise, returns a negative value." ALIASES += herr_t_iter="\li Zero causes the iterator to continue, returning zero when the iteration is complete. \li A positive value causes the iterator to immediately return that positive value, indicating short-circuit success. \li A negative value causes the iterator to immediately return that value, indicating failure." ALIASES += hid_t{1}="Returns a \1 identifier if successful; otherwise returns #H5I_INVALID_HID. " ALIASES += hid_ti{1}="Returns an \1 identifier if successful; otherwise returns #H5I_INVALID_HID. 
" @@ -74,7 +74,7 @@ ALIASES += space_id="\param[in] space_id Dataspace identifier" ALIASES += space_id{1}="\param[in] \1 Dataspace identifier" ################################################################################ -# Dataypes +# Datatypes ################################################################################ ALIASES += type_id="\param[in] type_id Datatype identifier" diff --git a/doxygen/dox/LearnBasics2.dox b/doxygen/dox/LearnBasics2.dox index 6f94c7f7eb5..87bbe87fe47 100644 --- a/doxygen/dox/LearnBasics2.dox +++ b/doxygen/dox/LearnBasics2.dox @@ -788,7 +788,7 @@ The function #H5Tarray_create creates a new array datatype object. Parameters sp \li the dimension permutation of the array, i.e., whether the elements of the array are listed in C or FORTRAN order.

    Working with existing array datatypes

    -When working with existing arrays, one must first determine the the rank, or number of dimensions, of the array. +When working with existing arrays, one must first determine the rank, or number of dimensions, of the array. The function #H5Tget_array_dims returns the rank of a specified array datatype. diff --git a/doxygen/dox/LearnBasics3.dox b/doxygen/dox/LearnBasics3.dox index 2fe0f5249d9..06afacd7d27 100644 --- a/doxygen/dox/LearnBasics3.dox +++ b/doxygen/dox/LearnBasics3.dox @@ -210,7 +210,7 @@ For details on compiling an HDF5 application: an extendible array dataset, pass in #H5P_DATASET_CREATE for the property list class. \li The #H5Pset_chunk call modifies a Dataset Creation Property List instance to store a chunked layout dataset and sets the size of the chunks used. -\li To extend an unlimited dimension dataset use the the #H5Dset_extent call. Please be aware that +\li To extend an unlimited dimension dataset use the #H5Dset_extent call. Please be aware that after this call, the dataset's dataspace must be refreshed with #H5Dget_space before more data can be accessed. \li The #H5Pget_chunk call retrieves the size of chunks for the raw data of a chunked layout dataset. \li Once there is no longer a need for a Property List instance, it should be closed with the #H5Pclose call. 
diff --git a/doxygen/dox/Overview.dox b/doxygen/dox/Overview.dox index eaa942e219c..befbb29d9fe 100644 --- a/doxygen/dox/Overview.dox +++ b/doxygen/dox/Overview.dox @@ -23,8 +23,9 @@ documents cover a mix of tasks, concepts, and reference, to help a specific \par Versions Version-specific documentation (see the version in the title area) can be found here: - - HDF5 1.12 branch (this site) + - HDF5 develop branch - HDF5 1.14.x + - HDF5 1.12.x (this site) - HDF5 1.10.x - HDF5 1.8.x diff --git a/doxygen/dox/ReferenceManual.dox b/doxygen/dox/ReferenceManual.dox index b9bcd498357..40f8b8af183 100644 --- a/doxygen/dox/ReferenceManual.dox +++ b/doxygen/dox/ReferenceManual.dox @@ -158,7 +158,7 @@ Follow these simple rules and stay out of trouble: \li \Bold{Dynamic memory allocation:} The API contains a few functions in which the HDF5 library dynamically allocates memory on the caller's behalf. The caller owns this memory and eventually must free it by calling H5free_memory() and not language-explicit memory functions. -\li \Bold{Be careful with that saw:} Do not modify the underlying collection when an +\li \Bold{Don't modify while iterating:} Do not modify the underlying collection when an iteration is in progress! \li \Bold{Use of locations:} Certain API functions, typically called \Code{H5***_by_name} use a combination of identifiers and path names to refer to HDF5 objects. diff --git a/doxygen/dox/ViewTools.dox b/doxygen/dox/ViewTools.dox index 2212d4ba3f2..66b2def0624 100644 --- a/doxygen/dox/ViewTools.dox +++ b/doxygen/dox/ViewTools.dox @@ -465,7 +465,7 @@ example h5_crtgrpar.c. To disp \endcode \subsubsection subsubsecViewToolsViewDset_h5dumpEx5 Example 5 -The -p option is used to examine the the dataset filters, storage layout, and fill value properties of a dataset. +The -p option is used to examine the dataset filters, storage layout, and fill value properties of a dataset. 
This option can be useful for checking how well compression works, or even for analyzing performance and dataset size issues related to chunking. (The smaller the chunk size, the more chunks that HDF5 diff --git a/doxygen/examples/H5.format.1.0.html b/doxygen/examples/H5.format.1.0.html index 4eb05480239..26d04213d84 100644 --- a/doxygen/examples/H5.format.1.0.html +++ b/doxygen/examples/H5.format.1.0.html @@ -1488,7 +1488,7 @@

    Disk Format: Level 1E - Global Heap

    - Object Size This is the size of the the fields + Object Size This is the size of the fields above plus the object data stored for the object. The actual storage size is rounded up to a multiple of eight. diff --git a/doxygen/examples/H5.format.1.1.html b/doxygen/examples/H5.format.1.1.html index 9d03a766a12..3af50d66194 100644 --- a/doxygen/examples/H5.format.1.1.html +++ b/doxygen/examples/H5.format.1.1.html @@ -6091,7 +6091,7 @@

    Name: Shared Object Message

    Used by the library before version 1.6.1. In this version, the Flags field is used to indicate whether the actual message is stored in the global heap (never implemented). The Pointer field - either contains the the header message address in the global heap + either contains the header message address in the global heap (never implemented) or the address of the shared object header. diff --git a/doxygen/examples/H5Pget_metadata_read_attempts.2.c b/doxygen/examples/H5Pget_metadata_read_attempts.2.c index 2cd12dbca57..44b26e9d8ed 100644 --- a/doxygen/examples/H5Pget_metadata_read_attempts.2.c +++ b/doxygen/examples/H5Pget_metadata_read_attempts.2.c @@ -1,7 +1,7 @@ /* Open the file with SWMR access and default file access property list */ fid = H5Fopen(FILE, (H5F_ACC_RDONLY | H5F_ACC_SWMR_READ), H5P_DEFAULT); -/* Get the file's file access roperty list */ +/* Get the file's file access property list */ file_fapl = H5Fget_access_plist(fid); /* Retrieve the # of read attempts from the file's file access property list */ @@ -26,7 +26,7 @@ H5Pset_metadata_read_attempts(fapl, 20); /* Open the file with SWMR access and the non-default file access property list */ fid = H5Fopen(FILE, (H5F_ACC_RDONLY | H5F_ACC_SWMR_READ), fapl); -/* Get the file's file access roperty list */ +/* Get the file's file access property list */ file_fapl = H5Fget_access_plist(fid); /* Retrieve the # of read attempts from the file's file access property list */ diff --git a/doxygen/examples/H5Pget_metadata_read_attempts.3.c b/doxygen/examples/H5Pget_metadata_read_attempts.3.c index 4b5ea3a6208..8edda9f1383 100644 --- a/doxygen/examples/H5Pget_metadata_read_attempts.3.c +++ b/doxygen/examples/H5Pget_metadata_read_attempts.3.c @@ -1,7 +1,7 @@ /* Open the file with non-SWMR access and default file access property list */ fid = H5Fopen(FILE, H5F_ACC_RDONLY, H5P_DEFAULT); -/* Get the file's file access roperty list */ +/* Get the file's file access property list */ file_fapl = 
H5Fget_access_plist(fid); /* Retrieve the # of read attempts from the file's file access property list */ @@ -26,7 +26,7 @@ H5Pset_metadata_read_attempts(fapl, 20); /* Open the file with non-SWMR access and the non-default file access property list */ fid = H5Fopen(FILE, H5F_ACC_RDONLY, fapl); -/* Get the file's file access roperty list */ +/* Get the file's file access property list */ file_fapl = H5Fget_access_plist(fid); /* Retrieve the # of read attempts from the file's file access property list */ diff --git a/doxygen/examples/H5Pset_metadata_read_attempts.c b/doxygen/examples/H5Pset_metadata_read_attempts.c index 7c2f65d3208..db3573b9315 100644 --- a/doxygen/examples/H5Pset_metadata_read_attempts.c +++ b/doxygen/examples/H5Pset_metadata_read_attempts.c @@ -8,7 +8,7 @@ H5Pset_metadata_read_attempts(fapl, 20); /* Open the file with SWMR access and the non-default file access property list */ fid = H5Fopen(FILE, (H5F_ACC_RDONLY | H5F_ACC_SWMR_READ), fapl); -/* Get the file's file access roperty list */ +/* Get the file's file access property list */ file_fapl = H5Fget_access_plist(fid); /* Retrieve the # of read attempts from the file's file access property list */ @@ -38,7 +38,7 @@ H5Pset_metadata_read_attempts(fapl, 20); /* Open the file with SWMR access and the non-default file access property list */ fid = H5Fopen(FILE, H5F_ACC_RDONLY, fapl); -/* Get the file's file access roperty list */ +/* Get the file's file access property list */ file_fapl = H5Fget_access_plist(fid); /* Retrieve the # of read attempts from the file's file access property list */ diff --git a/doxygen/examples/ImageSpec.html b/doxygen/examples/ImageSpec.html index 1b700ff7a93..130d86ecf6a 100644 --- a/doxygen/examples/ImageSpec.html +++ b/doxygen/examples/ImageSpec.html @@ -851,7 +851,7 @@

    "RGB"
    -Each color index contains a triplet where the the first value defines the +Each color index contains a triplet where the first value defines the red component, second defines the green component, and the third the blue component.
    @@ -859,7 +859,7 @@

    "CMY"
    -Each color index contains a triplet where the the first value defines the +Each color index contains a triplet where the first value defines the cyan component, second defines the magenta component, and the third the yellow component.
    @@ -867,7 +867,7 @@

    "CMYK"
    -Each color index contains a quadruplet where the the first value defines +Each color index contains a quadruplet where the first value defines the cyan component, second defines the magenta component, the third the yellow component, and the forth the black component.
    @@ -875,7 +875,7 @@

    "YCbCr"
    -Class Y encoding model. Each color index contains a triplet where the the +Class Y encoding model. Each color index contains a triplet where the first value defines the luminance, second defines the Cb Chromonance, and the third the Cr Chromonance.
    @@ -884,14 +884,14 @@

    Composite encoding color model. Each color index contains a triplet where -the the first value defines the luminance component, second defines the +the first value defines the luminance component, second defines the chromonance component, and the third the value component.
    "HSV"
    -Each color index contains a triplet where the the first value defines the +Each color index contains a triplet where the first value defines the hue component, second defines the saturation component, and the third the value component. The hue component defines the hue spectrum with a low value representing magenta/red progressing to a high value which would diff --git a/doxygen/examples/tables/propertyLists.dox b/doxygen/examples/tables/propertyLists.dox index e77c2fd4f6e..039ac979b23 100644 --- a/doxygen/examples/tables/propertyLists.dox +++ b/doxygen/examples/tables/propertyLists.dox @@ -245,7 +245,7 @@ file access property list. #H5Pset_page_buffer_size/#H5Pget_page_buffer_size -Set/get the the maximum size for the page buffer. +Set/get the maximum size for the page buffer. #H5Pset_sieve_buf_size/#H5Pget_sieve_buf_size @@ -742,7 +742,7 @@ C function is a macro: \see \ref api-compat-macros. Sets up use of the Fletcher32 checksum filter. -#H5Pset_local_heap_size_hint#H5Pget_local_heap_size_hint/ +#H5Pset_local_heap_size_hint/#H5Pget_local_heap_size_hint Sets/gets the anticipated maximum size of a local heap. 
diff --git a/doxygen/hdf5_navtree_hacks.js b/doxygen/hdf5_navtree_hacks.js index dda89846701..804701f7f6f 100644 --- a/doxygen/hdf5_navtree_hacks.js +++ b/doxygen/hdf5_navtree_hacks.js @@ -141,7 +141,7 @@ function initNavTree(toroot,relpath) $(window).on("load", showRoot); } -// return false if the the node has no children at all, or has only section/subsection children +// return false if the node has no children at all, or has only section/subsection children function checkChildrenData(node) { if (!(typeof(node.childrenData)==='string')) { for (var i in node.childrenData) { From b5b1f4c7dfcc4a40af7c6c8ab1c89e49054f33c6 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Mon, 5 Jun 2023 18:24:59 -0500 Subject: [PATCH 070/108] Update actions and change defaults of compression (fix vars) (#3043) * Update actions and change defaults of compression (fix vars) * disable TS and parallel * remove parallel for TS * Correct list of default options --- .github/workflows/clang-format-check.yml | 2 + .github/workflows/clang-format-fix.yml | 4 + .github/workflows/cmake-ctest.yml | 3 + .github/workflows/codespell.yml | 2 + .github/workflows/daily-build.yml | 4 +- .github/workflows/main.yml | 69 +++++++-- .github/workflows/release.yml | 115 +++++++++++++++ .github/workflows/tarball.yml | 9 ++ CMakeFilters.cmake | 90 ++++++------ CMakeInstallation.cmake | 2 +- CMakeLists.txt | 12 +- CMakePresets.json | 3 - config/cmake/HDF5PluginCache.cmake | 18 +-- config/cmake/HDF5PluginMacros.cmake | 19 +-- config/cmake/LIBAEC/CMakeLists.txt | 10 -- config/cmake/ZLIB/CMakeLists.txt | 10 -- config/cmake/cacheinit.cmake | 21 ++- config/cmake_ext_mod/HDFMacros.cmake | 2 +- configure.ac | 2 +- release_docs/INSTALL_CMake.txt | 171 +++++++++++++---------- 20 files changed, 367 insertions(+), 201 deletions(-) create mode 100644 .github/workflows/release.yml diff --git a/.github/workflows/clang-format-check.yml b/.github/workflows/clang-format-check.yml index 
70809a1156a..56d2b48c3be 100644 --- a/.github/workflows/clang-format-check.yml +++ b/.github/workflows/clang-format-check.yml @@ -1,6 +1,8 @@ name: clang-format Check on: pull_request: +permissions: + contents: read jobs: formatting-check: name: Formatting Check diff --git a/.github/workflows/clang-format-fix.yml b/.github/workflows/clang-format-fix.yml index 00d23529cbd..59811181f9b 100644 --- a/.github/workflows/clang-format-fix.yml +++ b/.github/workflows/clang-format-fix.yml @@ -11,11 +11,15 @@ name: clang-format Commit Changes on: workflow_dispatch: push: +permissions: + contents: read jobs: formatting-check: name: Commit Format Changes runs-on: ubuntu-latest if: "!contains(github.event.head_commit.message, 'skip-ci')" + permissions: + contents: write # In order to allow EndBug/add-and-commit to commit changes steps: - uses: actions/checkout@v3 - name: Fix C and Java formatting issues detected by clang-format diff --git a/.github/workflows/cmake-ctest.yml b/.github/workflows/cmake-ctest.yml index 365039ec290..4363d0f8538 100644 --- a/.github/workflows/cmake-ctest.yml +++ b/.github/workflows/cmake-ctest.yml @@ -9,6 +9,9 @@ on: required: true type: string +permissions: + contents: read + # A workflow run is made up of one or more jobs that can run sequentially or # in parallel jobs: diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml index 3ceda828683..57b8137274e 100644 --- a/.github/workflows/codespell.yml +++ b/.github/workflows/codespell.yml @@ -3,6 +3,8 @@ # https://github.com/codespell-project/actions-codespell name: codespell on: [push, pull_request] +permissions: + contents: read jobs: codespell: name: Check for spelling errors diff --git a/.github/workflows/daily-build.yml b/.github/workflows/daily-build.yml index 63b7dabe76a..25768c2e06c 100644 --- a/.github/workflows/daily-build.yml +++ b/.github/workflows/daily-build.yml @@ -20,9 +20,11 @@ jobs: if: ${{ needs.call-workflow-tarball.outputs.has_changes == 'true' }} 
call-workflow-release: - needs: call-workflow-ctest + needs: [call-workflow-tarball, call-workflow-ctest] uses: ./.github/workflows/release.yml with: file_base: ${{ needs.call-workflow-tarball.outputs.file_base }} + file_branch: ${{ needs.call-workflow-tarball.outputs.file_branch }} + file_sha: ${{ needs.call-workflow-tarball.outputs.file_sha }} if: ${{ needs.call-workflow-tarball.outputs.has_changes == 'true' }} diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 80278ad18c6..635890b621f 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -14,7 +14,15 @@ on: - 'ACKNOWLEDGEMENTS' - 'COPYING**' - '**.md' - + +# Using concurrency to cancel any in-progress job or run +concurrency: + group: ${{ github.workflow }}-${{ github.sha || github.event.pull_request.number }} + cancel-in-progress: true + +permissions: + contents: read + # A workflow run is made up of one or more jobs that can run sequentially or # in parallel. We just have one job, but the matrix items defined below will # run in parallel. 
@@ -74,7 +82,9 @@ jobs: fortran: OFF java: ON libaecfc: ON + localaec: OFF zlibfc: ON + localzlib: OFF parallel: OFF mirror_vfd: OFF direct_vfd: OFF @@ -91,7 +101,9 @@ jobs: fortran: ON java: ON libaecfc: ON + localaec: OFF zlibfc: ON + localzlib: OFF parallel: OFF mirror_vfd: ON direct_vfd: ON @@ -113,6 +125,7 @@ jobs: direct_vfd: enable deprec_sym: enable default_api: v112 + szip: yes toolchain: "" generator: "autogen" flags: "" @@ -133,6 +146,7 @@ jobs: direct_vfd: disable deprec_sym: enable default_api: v112 + szip: yes toolchain: "" generator: "autogen" flags: "CC=mpicc" @@ -148,7 +162,9 @@ jobs: fortran: OFF java: ON libaecfc: ON + localaec: OFF zlibfc: ON + localzlib: OFF parallel: OFF mirror_vfd: ON direct_vfd: OFF @@ -175,6 +191,7 @@ jobs: direct_vfd: enable deprec_sym: enable default_api: v16 + szip: yes toolchain: "" generator: "autogen" flags: "" @@ -197,6 +214,7 @@ jobs: direct_vfd: enable deprec_sym: enable default_api: v18 + szip: yes toolchain: "" generator: "autogen" flags: "" @@ -219,6 +237,7 @@ jobs: direct_vfd: enable deprec_sym: enable default_api: v110 + szip: yes toolchain: "" generator: "autogen" flags: "" @@ -241,6 +260,7 @@ jobs: direct_vfd: enable deprec_sym: enable default_api: v112 + szip: yes toolchain: "" generator: "autogen" flags: "" @@ -263,6 +283,7 @@ jobs: direct_vfd: enable deprec_sym: disable default_api: v112 + szip: yes toolchain: "" generator: "autogen" flags: "" @@ -307,6 +328,7 @@ jobs: echo "CC=gcc-11" >> $GITHUB_ENV echo "CXX=g++-11" >> $GITHUB_ENV echo "FC=gfortran-11" >> $GITHUB_ENV + sudo apt install libaec0 libaec-dev if: (matrix.generator == 'autogen') && (matrix.parallel != 'enable') - name: Install Autotools Dependencies (Linux, parallel) @@ -316,6 +338,7 @@ jobs: sudo apt install openmpi-bin openmpi-common mpi-default-dev echo "CC=mpicc" >> $GITHUB_ENV echo "FC=mpif90" >> $GITHUB_ENV + sudo apt install libaec0 libaec-dev if: (matrix.generator == 'autogen') && (matrix.parallel == 'enable') - name: Install 
Dependencies (Windows) @@ -346,18 +369,18 @@ jobs: sh ./autogen.sh mkdir "${{ runner.workspace }}/build" cd "${{ runner.workspace }}/build" - ${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --${{ matrix.deprec_sym }}-deprecated-symbols --with-default-api-version=${{ matrix.default_api }} --enable-shared --${{ matrix.parallel }}-parallel --${{ matrix.cpp }}-cxx --${{ matrix.fortran }}-fortran --${{ matrix.java }}-java --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd + ${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --${{ matrix.deprec_sym }}-deprecated-symbols --with-default-api-version=${{ matrix.default_api }} --enable-shared --${{ matrix.parallel }}-parallel --${{ matrix.cpp }}-cxx --${{ matrix.fortran }}-fortran --${{ matrix.java }}-java --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd --with-szlib=${{ matrix.szip }} shell: bash - if: (matrix.generator == 'autogen') && (! matrix.thread_safe.enabled) + if: (matrix.generator == 'autogen') && ! 
(matrix.thread_safety.enabled) - name: Autotools Configure (Thread-Safe) run: | sh ./autogen.sh mkdir "${{ runner.workspace }}/build" cd "${{ runner.workspace }}/build" - ${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --enable-shared --enable-threadsafe --disable-hl --${{ matrix.parallel }}-parallel --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd + ${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --enable-shared --enable-threadsafe --disable-hl --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd --with-szlib=${{ matrix.szip }} shell: bash - if: (matrix.generator == 'autogen') && (matrix.thread_safe.enabled) + if: (matrix.generator == 'autogen') && (matrix.thread_safety.enabled) # # CMAKE CONFIGURE @@ -367,44 +390,60 @@ jobs: run: | mkdir "${{ runner.workspace }}/build" cd "${{ runner.workspace }}/build" - cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=${{ matrix.cpp }} -DHDF5_BUILD_FORTRAN=${{ matrix.fortran }} -DHDF5_BUILD_JAVA=${{ matrix.java }} -DBUILD_SZIP_WITH_FETCHCONTENT=${{ matrix.libaecfc }} -DBUILD_ZLIB_WITH_FETCHCONTENT=${{ matrix.zlibfc }} -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} $GITHUB_WORKSPACE + cmake -C $GITHUB_WORKSPACE/config/cmake/cacheinit.cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=${{ matrix.cpp }} -DHDF5_BUILD_FORTRAN=${{ matrix.fortran }} -DHDF5_BUILD_JAVA=${{ matrix.java }} 
-DBUILD_SZIP_WITH_FETCHCONTENT=${{ matrix.libaecfc }} -DLIBAEC_USE_LOCALCONTENT=${{ matrix.localaec }} -DBUILD_ZLIB_WITH_FETCHCONTENT=${{ matrix.zlibfc }} -DZLIB_USE_LOCALCONTENT=${{ matrix.localzlib }} -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} $GITHUB_WORKSPACE shell: bash - if: (matrix.generator != 'autogen') && (! matrix.thread_safe.enabled) + if: (matrix.generator != 'autogen') && ! (matrix.thread_safety.enabled) - name: CMake Configure (Thread-Safe) run: | mkdir "${{ runner.workspace }}/build" cd "${{ runner.workspace }}/build" - cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_HL_LIB:BOOL=OFF -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} $GITHUB_WORKSPACE + cmake -C $GITHUB_WORKSPACE/config/cmake/cacheinit.cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=OFF -DHDF5_BUILD_FORTRAN:BOOL=OFF -DHDF5_BUILD_JAVA:BOOL=OFF -DHDF5_BUILD_HL_LIB:BOOL=OFF -DBUILD_SZIP_WITH_FETCHCONTENT=${{ matrix.libaecfc }} -DLIBAEC_USE_LOCALCONTENT=${{ matrix.localaec }} -DBUILD_ZLIB_WITH_FETCHCONTENT=${{ matrix.zlibfc }} -DZLIB_USE_LOCALCONTENT=${{ matrix.localzlib }} -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} $GITHUB_WORKSPACE shell: bash - if: (matrix.generator != 'autogen') && (matrix.thread_safe.enabled) + if: (matrix.generator != 'autogen') && (matrix.thread_safety.enabled) # # BUILD # - name: Autotools Build - run: make + 
run: make -j3 working-directory: ${{ runner.workspace }}/build if: matrix.generator == 'autogen' - name: CMake Build - run: cmake --build . --config ${{ matrix.build_mode.cmake }} + run: cmake --build . --parallel 3 --config ${{ matrix.build_mode.cmake }} working-directory: ${{ runner.workspace }}/build - if: matrix.generator != 'autogen' + if: (matrix.generator != 'autogen') # # RUN TESTS # + # NORMAL + - name: Autotools Run Tests - run: make check + run: make check -j2 working-directory: ${{ runner.workspace }}/build - if: (matrix.generator == 'autogen') && (matrix.run_tests) + if: (matrix.generator == 'autogen') && (matrix.run_tests) && ! (matrix.thread_safety.enabled) - name: CMake Run Tests - run: ctest --build . -C ${{ matrix.build_mode.cmake }} -V + run: ctest --build . --parallel 2 -C ${{ matrix.build_mode.cmake }} -V working-directory: ${{ runner.workspace }}/build # Skip Debug MSVC while we investigate H5L Java test timeouts if: (matrix.generator != 'autogen') && (matrix.run_tests) && ! ((matrix.name == 'Windows MSVC CMake') && (matrix.build_mode.cmake == 'Debug')) + + # + # INSTALL (note that this runs even when we don't run the tests) + # + + - name: Autotools Install + run: make install + working-directory: ${{ runner.workspace }}/build + if: (matrix.generator == 'autogen') + + - name: Autotools Verify Install + run: make check-install + working-directory: ${{ runner.workspace }}/build + if: (matrix.generator == 'autogen') diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000000..e15c6b358c1 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,115 @@ +name: hdf5 1.12 release + +# Controls when the action will run. 
Triggers the workflow on a schedule +on: + workflow_call: + inputs: + file_base: + description: "The common base name of the source tarballs" + required: true + type: string + file_branch: + description: "The branch name for the source tarballs" + required: true + type: string + file_sha: + description: "The sha for the source tarballs" + required: true + type: string + +# Previous workflows must pass to get here so tag the commit that created the files +jobs: + create-tag: + runs-on: ubuntu-latest + permissions: + contents: write # In order to allow tag creation + steps: + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - name: Get Sources + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - run: | + git checkout ${{ inputs.file_sha }} + + - uses: rickstaa/action-create-tag@v1 + id: "tag_create" + with: + commit_sha: ${{ inputs.file_sha }} + tag: "snapshot" + force_push_tag: true + message: "Latest snapshot" + + # Print result using the action output. 
+ - run: | + echo "Tag already present: ${{ steps.tag_create.outputs.tag_exists }}" + + getfiles: + runs-on: ubuntu-latest + steps: + - name: Set file base name + id: set-file-base + run: | + FILE_NAME_BASE=$(echo "${{ inputs.file_base }}") + echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT + + # Get files created by tarball script + - name: Get tgz-tarball (Linux) + uses: actions/download-artifact@v3 + with: + name: tgz-tarball + path: ${{ github.workspace }} + + - name: Get zip-tarball (Windows) + uses: actions/download-artifact@v3 + with: + name: zip-tarball + path: ${{ github.workspace }} + + # Get files created by cmake-ctest script + - name: Get published binary (Windows) + uses: actions/download-artifact@v3 + with: + name: zip-vs2022-binary + path: ${{ github.workspace }} + + - name: Get published binary (MacOS) + uses: actions/download-artifact@v3 + with: + name: tgz-osx12-binary + path: ${{ github.workspace }} + + - name: Get published binary (Linux) + uses: actions/download-artifact@v3 + with: + name: tgz-ubuntu-2204-binary + path: ${{ github.workspace }} + + # Get files used by release script + + PreRelease: + runs-on: ubuntu-latest + needs: [create-tag, getfiles] + environment: snapshots_1_12 + permissions: + contents: write + steps: + - name: Set file base name + id: get-file-base + run: | + FILE_NAME_BASE=$(echo "${{ inputs.file_base }}") + echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT + + - name: PreRelease tag + uses: softprops/action-gh-release@v1 + with: + tag_name: "snapshot" + prerelease: true + files: | + ${{ steps.get-file-base.outputs.FILE_BASE }}.tar.gz + ${{ steps.get-file-base.outputs.FILE_BASE }}.zip + ${{ steps.get-file-base.outputs.FILE_BASE }}-osx12.tar.gz + ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204.tar.gz + ${{ steps.get-file-base.outputs.FILE_BASE }}-win_vs2022.zip + if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` diff --git a/.github/workflows/tarball.yml 
b/.github/workflows/tarball.yml index 62fd2707cf8..4a9d0d88c5a 100644 --- a/.github/workflows/tarball.yml +++ b/.github/workflows/tarball.yml @@ -10,6 +10,15 @@ on: file_base: description: "The common base name of the source tarballs" value: ${{ jobs.create_tarball.outputs.file_base }} + file_branch: + description: "The branch used for the source tarballs" + value: ${{ jobs.check_commits.outputs.branch_ref }} + file_sha: + description: "The sha used for the source tarballs" + value: ${{ jobs.check_commits.outputs.branch_sha }} + +permissions: + contents: read # A workflow run is made up of one or more jobs that can run sequentially or # in parallel diff --git a/CMakeFilters.cmake b/CMakeFilters.cmake index cac35d42805..b81d6e21a7e 100644 --- a/CMakeFilters.cmake +++ b/CMakeFilters.cmake @@ -61,10 +61,12 @@ if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MAT if (NOT BUILD_ZLIB_WITH_FETCHCONTENT) set (ZLIB_URL ${TGZPATH}/${ZLIB_TGZ_NAME}) endif () - if (NOT EXISTS "${ZLIB_URL}") - set (HDF5_ENABLE_Z_LIB_SUPPORT OFF CACHE BOOL "" FORCE) - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "Filter ZLIB file ${ZLIB_URL} not found") + if (ZLIB_USE_LOCALCONTENT) + if (NOT EXISTS "${ZLIB_URL}") + set (HDF5_ENABLE_Z_LIB_SUPPORT OFF CACHE BOOL "" FORCE) + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "Filter ZLIB file ${ZLIB_URL} not found") + endif () endif () endif () if (NOT BUILD_SZIP_WITH_FETCHCONTENT) @@ -73,10 +75,12 @@ if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MAT set (SZIP_URL ${TGZPATH}/${SZAEC_TGZ_NAME}) endif () endif () - if (NOT EXISTS "${SZIP_URL}") - set (HDF5_ENABLE_SZIP_SUPPORT OFF CACHE BOOL "" FORCE) - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "Filter SZIP file ${SZIP_URL} not found") + if (LIBAEC_USE_LOCALCONTENT) + if (NOT EXISTS "${SZIP_URL}") + set (HDF5_ENABLE_SZIP_SUPPORT OFF CACHE BOOL "" FORCE) + if (CMAKE_VERSION 
VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "Filter SZIP file ${SZIP_URL} not found") + endif () endif () endif () else () @@ -88,7 +92,7 @@ endif () #----------------------------------------------------------------------------- # Option for ZLib support #----------------------------------------------------------------------------- -option (HDF5_ENABLE_Z_LIB_SUPPORT "Enable Zlib Filters" OFF) +option (HDF5_ENABLE_Z_LIB_SUPPORT "Enable Zlib Filters" ON) if (HDF5_ENABLE_Z_LIB_SUPPORT) if (NOT H5_ZLIB_HEADER) if (NOT ZLIB_USE_EXTERNAL) @@ -97,9 +101,6 @@ if (HDF5_ENABLE_Z_LIB_SUPPORT) find_package (ZLIB) # Legacy find endif () if (ZLIB_FOUND) - set (H5_HAVE_FILTER_DEFLATE 1) - set (H5_HAVE_ZLIB_H 1) - set (H5_HAVE_LIBZ 1) set (H5_ZLIB_HEADER "zlib.h") set (ZLIB_INCLUDE_DIR_GEN ${ZLIB_INCLUDE_DIR}) set (ZLIB_INCLUDE_DIRS ${ZLIB_INCLUDE_DIRS} ${ZLIB_INCLUDE_DIR}) @@ -109,24 +110,16 @@ if (HDF5_ENABLE_Z_LIB_SUPPORT) if (BUILD_ZLIB_WITH_FETCHCONTENT) # Only tgz files available ORIGINAL_ZLIB_LIBRARY ("TGZ") - set (H5_HAVE_FILTER_DEFLATE 1) - set (H5_HAVE_ZLIB_H 1) - set (H5_HAVE_LIBZ 1) if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") message (VERBOSE "HDF5_ZLIB is built from fetch content") endif () set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${ZLIB_STATIC_LIBRARY}) elseif (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ") EXTERNAL_ZLIB_LIBRARY (${HDF5_ALLOW_EXTERNAL_SUPPORT}) - set (H5_HAVE_FILTER_DEFLATE 1) - set (H5_HAVE_ZLIB_H 1) - set (H5_HAVE_LIBZ 1) if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") message (VERBOSE "Filter HDF5_ZLIB is built") endif () set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${ZLIB_STATIC_LIBRARY}) - else () - message (FATAL_ERROR " ZLib is Required for ZLib support in HDF5") endif () endif () else () @@ -135,12 +128,19 @@ if (HDF5_ENABLE_Z_LIB_SUPPORT) set (H5_HAVE_ZLIB_H 1) set (H5_HAVE_LIBZ 1) endif () - if (H5_HAVE_FILTER_DEFLATE) - set (EXTERNAL_FILTERS "${EXTERNAL_FILTERS} DEFLATE") - endif () - 
INCLUDE_DIRECTORIES (${ZLIB_INCLUDE_DIRS}) - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "Filter HDF5_ZLIB is ON") + if (ZLIB_FOUND) + set (H5_HAVE_FILTER_DEFLATE 1) + set (H5_HAVE_ZLIB_H 1) + set (H5_HAVE_LIBZ 1) + if (H5_HAVE_FILTER_DEFLATE) + set (EXTERNAL_FILTERS "${EXTERNAL_FILTERS} DEFLATE") + endif () + INCLUDE_DIRECTORIES (${ZLIB_INCLUDE_DIRS}) + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "Filter HDF5_ZLIB is ON") + endif () + else () + message (WARNING " ZLib support in HDF5 was enabled but not found") endif () endif () @@ -149,7 +149,7 @@ endif () #----------------------------------------------------------------------------- option (HDF5_ENABLE_SZIP_SUPPORT "Use SZip Filter" OFF) if (HDF5_ENABLE_SZIP_SUPPORT) - option (HDF5_ENABLE_SZIP_ENCODING "Use SZip Encoding" OFF) + option (HDF5_ENABLE_SZIP_ENCODING "Use SZip Encoding" ON) if (NOT SZIP_USE_EXTERNAL) set(SZIP_FOUND FALSE) if (USE_LIBAEC) @@ -165,9 +165,6 @@ if (HDF5_ENABLE_SZIP_SUPPORT) find_package (SZIP) # Legacy find endif () if (SZIP_FOUND) - set (H5_HAVE_FILTER_SZIP 1) - set (H5_HAVE_SZLIB_H 1) - set (H5_HAVE_LIBSZ 1) set (SZIP_INCLUDE_DIR_GEN ${SZIP_INCLUDE_DIR}) set (SZIP_INCLUDE_DIRS ${SZIP_INCLUDE_DIRS} ${SZIP_INCLUDE_DIR}) set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_LIBRARIES}) @@ -177,9 +174,6 @@ if (HDF5_ENABLE_SZIP_SUPPORT) if (BUILD_SZIP_WITH_FETCHCONTENT) # Only tgz files available ORIGINAL_SZIP_LIBRARY ("TGZ" ${HDF5_ENABLE_SZIP_ENCODING}) - set (H5_HAVE_FILTER_SZIP 1) - set (H5_HAVE_SZLIB_H 1) - set (H5_HAVE_LIBSZ 1) if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") message (VERBOSE "SZIP is built from fetch content") endif () @@ -194,9 +188,6 @@ if (HDF5_ENABLE_SZIP_SUPPORT) set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_STATIC_LIBRARY}) elseif (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ") EXTERNAL_SZIP_LIBRARY (${HDF5_ALLOW_EXTERNAL_SUPPORT} ${HDF5_ENABLE_SZIP_ENCODING}) - set 
(H5_HAVE_FILTER_SZIP 1) - set (H5_HAVE_SZLIB_H 1) - set (H5_HAVE_LIBSZ 1) if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") message (VERBOSE "Filter SZIP is built") endif () @@ -213,15 +204,22 @@ if (HDF5_ENABLE_SZIP_SUPPORT) message (FATAL_ERROR "SZIP is Required for SZIP support in HDF5") endif () endif () - INCLUDE_DIRECTORIES (${SZIP_INCLUDE_DIRS}) - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "Filter SZIP is ON") - endif () - if (H5_HAVE_FILTER_SZIP) - set (EXTERNAL_FILTERS "${EXTERNAL_FILTERS} DECODE") - endif () - if (HDF5_ENABLE_SZIP_ENCODING) - set (H5_HAVE_SZIP_ENCODER 1) - set (EXTERNAL_FILTERS "${EXTERNAL_FILTERS} ENCODE") + if (SZIP_FOUND) + set (H5_HAVE_FILTER_SZIP 1) + set (H5_HAVE_SZLIB_H 1) + set (H5_HAVE_LIBSZ 1) + INCLUDE_DIRECTORIES (${SZIP_INCLUDE_DIRS}) + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "Filter SZIP is ON") + endif () + if (H5_HAVE_FILTER_SZIP) + set (EXTERNAL_FILTERS "${EXTERNAL_FILTERS} DECODE") + endif () + if (HDF5_ENABLE_SZIP_ENCODING) + set (H5_HAVE_SZIP_ENCODER 1) + set (EXTERNAL_FILTERS "${EXTERNAL_FILTERS} ENCODE") + endif () + else () + message (WARNING "SZIP support in HDF5 was enabled but not found") endif () endif () diff --git a/CMakeInstallation.cmake b/CMakeInstallation.cmake index 54034a7373a..216028db997 100644 --- a/CMakeInstallation.cmake +++ b/CMakeInstallation.cmake @@ -52,7 +52,7 @@ endif () # Set includes needed for build #----------------------------------------------------------------------------- set (HDF5_INCLUDES_BUILD_TIME - ${HDF5_SRC_DIR} ${HDF5_CPP_SRC_DIR} ${HDF5_HL_SRC_DIR} + ${HDF5_SRC_INCLUDE_DIRS} ${HDF5_CPP_SRC_DIR} ${HDF5_HL_SRC_DIR} ${HDF5_TOOLS_SRC_DIR} ${HDF5_SRC_BINARY_DIR} ) diff --git a/CMakeLists.txt b/CMakeLists.txt index 1e657fa008e..ca94ce44fbb 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -242,6 +242,8 @@ set (HDF5_JAVA_LOGGING_NOP_JAR ${HDF5_SOURCE_DIR}/java/lib/ext/slf4j-nop-1.7 set (HDF5_JAVA_LOGGING_SIMPLE_JAR 
${HDF5_SOURCE_DIR}/java/lib/ext/slf4j-simple-1.7.33.jar) set (HDF5_DOXYGEN_DIR ${HDF5_SOURCE_DIR}/doxygen) +set (HDF5_SRC_INCLUDE_DIRS ${HDF5_SRC_DIR}) + #----------------------------------------------------------------------------- # parse the full version number from H5public.h and include in H5_VERS_INFO #----------------------------------------------------------------------------- @@ -637,16 +639,6 @@ if (MSVC) set (WIN_LINK_FLAGS "") endif () -set (MAKE_SYSTEM) -if (CMAKE_MAKE_PROGRAM MATCHES "make") - set (MAKE_SYSTEM 1) -endif () - -set (CFG_INIT "/${CMAKE_CFG_INTDIR}") -if (MAKE_SYSTEM) - set (CFG_INIT "") -endif () - # Do not generate test programs by default option (HDF5_BUILD_GENERATORS "Build Test Generators" OFF) diff --git a/CMakePresets.json b/CMakePresets.json index 66f31a4745d..13766ca66f8 100644 --- a/CMakePresets.json +++ b/CMakePresets.json @@ -18,9 +18,6 @@ "hidden": true, "inherits": "ci-base-tgz", "cacheVariables": { - "HDF5_ENABLE_Z_LIB_SUPPORT": "ON", - "HDF5_ENABLE_SZIP_SUPPORT": "ON", - "HDF5_ENABLE_SZIP_ENCODING": "ON", "BUILD_ZLIB_WITH_FETCHCONTENT": "ON", "ZLIB_PACKAGE_NAME": {"type": "STRING", "value": "zlib"}, "ZLIB_TGZ_ORIGPATH": {"type": "STRING", "value": "https://github.com/madler/zlib/releases/download/v1.2.13"}, diff --git a/config/cmake/HDF5PluginCache.cmake b/config/cmake/HDF5PluginCache.cmake index f5334bba740..0d1795de82b 100644 --- a/config/cmake/HDF5PluginCache.cmake +++ b/config/cmake/HDF5PluginCache.cmake @@ -8,15 +8,15 @@ set (H5PL_BUILD_TESTING ON CACHE BOOL "Enable H5PL testing" FORCE) set (BUILD_EXAMPLES ON CACHE BOOL "Build H5PL Examples" FORCE) -set (HDF5_HDF5_HEADER "H5pubconf.h" CACHE STRING "Name of HDF5 header" FORCE) -set (HDF5_LINK_LIBS ${HDF5_LIBSH_TARGET} CACHE STRING "HDF5 target" FORCE) -#set (HDF5_INCLUDE_DIR $ CACHE PATH "HDF5 include dirs" FORCE) -set (HDF5_INCLUDE_DIR "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR}" CACHE PATH "HDF5 include dirs" FORCE) -set (HDF5_INCLUDE_DIRS 
"${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR}" CACHE PATH "HDF5 include dirs" FORCE) -set (HDF5_DIR ${CMAKE_CURRENT_BINARY_DIR} CACHE STRING "HDF5 build folder" FORCE) - -set (HDF5_DUMP_EXECUTABLE $ CACHE STRING "HDF5 h5dump target" FORCE) -set (HDF5_REPACK_EXECUTABLE $ CACHE STRING "HDF5 h5repack target" FORCE) +#preset HDF5 cache vars to this projects libraries instead of searching +set (H5PL_HDF5_HEADER "H5pubconf.h" CACHE STRING "Name of HDF5 header" FORCE) +set (H5PL_HDF5_LINK_LIBS ${HDF5_LIBSH_TARGET} CACHE STRING "HDF5 target" FORCE) +#set (H5PL_HDF5_INCLUDE_DIRS $ CACHE PATH "HDF5 include dirs" FORCE) +set (H5PL_HDF5_INCLUDE_DIRS "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR}" CACHE PATH "HDF5 include dirs" FORCE) +set (H5PL_HDF5_DIR ${CMAKE_CURRENT_BINARY_DIR} CACHE STRING "HDF5 build folder" FORCE) + +set (H5PL_HDF5_DUMP_EXECUTABLE $ CACHE STRING "HDF5 h5dump target" FORCE) +set (H5PL_HDF5_REPACK_EXECUTABLE $ CACHE STRING "HDF5 h5repack target" FORCE) set (H5PL_ALLOW_EXTERNAL_SUPPORT "${HDF5_ALLOW_EXTERNAL_SUPPORT}" CACHE STRING "Allow External Library Building (NO GIT TGZ)" FORCE) diff --git a/config/cmake/HDF5PluginMacros.cmake b/config/cmake/HDF5PluginMacros.cmake index 5cb5f99b057..e2ace24b021 100644 --- a/config/cmake/HDF5PluginMacros.cmake +++ b/config/cmake/HDF5PluginMacros.cmake @@ -25,47 +25,47 @@ macro (EXTERNAL_PLUGIN_LIBRARY compress_type) if (ENABLE_BLOSC) add_dependencies (h5blosc ${HDF5_LIBSH_TARGET}) add_dependencies (h5ex_d_blosc ${HDF5_LIBSH_TARGET}) - target_include_directories (h5ex_d_blosc PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR}") + target_include_directories (h5ex_d_blosc PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR}") endif () if (ENABLE_BSHUF) add_dependencies (h5bshuf ${HDF5_LIBSH_TARGET}) add_dependencies (h5ex_d_bshuf ${HDF5_LIBSH_TARGET}) - target_include_directories (h5ex_d_bshuf PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR}") + target_include_directories (h5ex_d_bshuf PRIVATE 
"${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR}") endif () if (ENABLE_BZIP2) add_dependencies (h5bz2 ${HDF5_LIBSH_TARGET}) add_dependencies (h5ex_d_bzip2 ${HDF5_LIBSH_TARGET}) - target_include_directories (h5ex_d_bzip2 PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR}") + target_include_directories (h5ex_d_bzip2 PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR}") endif () if (ENABLE_JPEG) add_dependencies (h5jpeg ${HDF5_LIBSH_TARGET}) add_dependencies (h5ex_d_jpeg ${HDF5_LIBSH_TARGET}) - target_include_directories (h5ex_d_jpeg PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR}") + target_include_directories (h5ex_d_jpeg PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR}") endif () if (ENABLE_LZ4) add_dependencies (h5lz4 ${HDF5_LIBSH_TARGET}) add_dependencies (h5ex_d_lz4 ${HDF5_LIBSH_TARGET}) - target_include_directories (h5ex_d_lz4 PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR}") + target_include_directories (h5ex_d_lz4 PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR}") endif () if (ENABLE_LZF) add_dependencies (h5lzf ${HDF5_LIBSH_TARGET}) add_dependencies (h5ex_d_lzf ${HDF5_LIBSH_TARGET}) - target_include_directories (h5ex_d_lzf PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR}") + target_include_directories (h5ex_d_lzf PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR}") endif () if (ENABLE_MAFISC) add_dependencies (h5mafisc ${HDF5_LIBSH_TARGET}) add_dependencies (h5ex_d_mafisc ${HDF5_LIBSH_TARGET}) - target_include_directories (h5ex_d_mafisc PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR}") + target_include_directories (h5ex_d_mafisc PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR}") endif () if (ENABLE_SZ) add_dependencies (h5sz ${HDF5_LIBSH_TARGET}) add_dependencies (h5ex_d_sz ${HDF5_LIBSH_TARGET}) - target_include_directories (h5ex_d_sz PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR}") + target_include_directories (h5ex_d_sz PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR}") endif () if (ENABLE_ZFP) add_dependencies 
(h5zfp ${HDF5_LIBSH_TARGET}) add_dependencies (h5ex_d_zfp ${HDF5_LIBSH_TARGET}) - target_include_directories (h5ex_d_zfp PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR}") + target_include_directories (h5ex_d_zfp PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR}") endif () if (ENABLE_ZSTD) add_dependencies (h5zstd ${HDF5_LIBSH_TARGET}) @@ -88,6 +88,7 @@ macro (FILTER_OPTION plname) option (ENABLE_${plname} "Enable Library Building for ${plname} plugin" ON) if (ENABLE_${plname}) option (HDF_${plname}_USE_EXTERNAL "Use External Library Building for ${PLUGIN_NAME} plugin" 0) + mark_as_advanced (HDF_${plname}_USE_EXTERNAL) if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ") set (HDF_${plname}_USE_EXTERNAL 1 CACHE BOOL "Use External Library Building for ${PLUGIN_NAME} plugin" FORCE) if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT") diff --git a/config/cmake/LIBAEC/CMakeLists.txt b/config/cmake/LIBAEC/CMakeLists.txt index fb650ec04b5..53950d8942a 100644 --- a/config/cmake/LIBAEC/CMakeLists.txt +++ b/config/cmake/LIBAEC/CMakeLists.txt @@ -123,16 +123,6 @@ if (MSVC) set (CMAKE_MFC_FLAG 0) endif () -set (MAKE_SYSTEM) -if (CMAKE_BUILD_TOOL MATCHES "make") - set (MAKE_SYSTEM 1) -endif () - -set (CFG_INIT "/${CMAKE_CFG_INTDIR}") -if (MAKE_SYSTEM) - set (CFG_INIT "") -endif () - #----------------------------------------------------------------------------- # Compiler specific flags : Shouldn't there be compiler tests for these #----------------------------------------------------------------------------- diff --git a/config/cmake/ZLIB/CMakeLists.txt b/config/cmake/ZLIB/CMakeLists.txt index 5e42fb2576c..12411ac9e07 100644 --- a/config/cmake/ZLIB/CMakeLists.txt +++ b/config/cmake/ZLIB/CMakeLists.txt @@ -125,16 +125,6 @@ if (MSVC) set (CMAKE_MFC_FLAG 0) endif () -set (MAKE_SYSTEM) -if (CMAKE_BUILD_TOOL MATCHES "make") - set (MAKE_SYSTEM 1) -endif () - -set (CFG_INIT "/${CMAKE_CFG_INTDIR}") -if (MAKE_SYSTEM) - set (CFG_INIT "") -endif () - 
#----------------------------------------------------------------------------- # Compiler specific flags : Shouldn't there be compiler tests for these #----------------------------------------------------------------------------- diff --git a/config/cmake/cacheinit.cmake b/config/cmake/cacheinit.cmake index 7c5cc1e2bb5..3cae5ab89c3 100644 --- a/config/cmake/cacheinit.cmake +++ b/config/cmake/cacheinit.cmake @@ -25,17 +25,13 @@ set (HDF5_BUILD_CPP_LIB ON CACHE BOOL "Build C++ support" FORCE) set (HDF5_BUILD_FORTRAN ON CACHE BOOL "Build FORTRAN support" FORCE) +set (HDF5_BUILD_JAVA ON CACHE BOOL "Build JAVA support" FORCE) + set (HDF5_INSTALL_MOD_FORTRAN "NO" CACHE STRING "Copy FORTRAN mod files to include directory (NO SHARED STATIC)" FORCE) set_property (CACHE HDF5_INSTALL_MOD_FORTRAN PROPERTY STRINGS NO SHARED STATIC) set (HDF5_BUILD_GENERATORS ON CACHE BOOL "Build Test Generators" FORCE) -set (HDF5_ENABLE_Z_LIB_SUPPORT ON CACHE BOOL "Enable Zlib Filters" FORCE) - -set (HDF5_ENABLE_SZIP_SUPPORT ON CACHE BOOL "Use SZip Filter" FORCE) - -set (HDF5_ENABLE_SZIP_ENCODING ON CACHE BOOL "Use SZip Encoding" FORCE) - set (MPIEXEC_MAX_NUMPROCS "4" CACHE STRING "Minimum number of processes for HDF parallel tests" FORCE) set (HDF5_ENABLE_ALL_WARNINGS ON CACHE BOOL "Enable all warnings" FORCE) @@ -44,24 +40,27 @@ set (HDF_TEST_EXPRESS "2" CACHE STRING "Control testing framework (0-3)" FORCE) set (HDF5_MINGW_STATIC_GCC_LIBS ON CACHE BOOL "Statically link libgcc/libstdc++" FORCE) -set (HDF5_ALLOW_EXTERNAL_SUPPORT "NO" CACHE STRING "Allow External Library Building (NO GIT TGZ)" FORCE) +set (HDF5_ALLOW_EXTERNAL_SUPPORT "TGZ" CACHE STRING "Allow External Library Building (NO GIT TGZ)" FORCE) set_property (CACHE HDF5_ALLOW_EXTERNAL_SUPPORT PROPERTY STRINGS NO GIT TGZ) +######################## +# compression options +######################## set (ZLIB_PACKAGE_NAME "zlib" CACHE STRING "Name of ZLIB package" FORCE) set (ZLIB_TGZ_NAME "ZLib.tar.gz" CACHE STRING "Use HDF5_ZLib from 
compressed file" FORCE) set (ZLIB_TGZ_ORIGPATH "https://github.com/madler/zlib/releases/download/v1.2.13" CACHE STRING "Use ZLIB from original location" FORCE) set (ZLIB_TGZ_ORIGNAME "zlib-1.2.13.tar.gz" CACHE STRING "Use ZLIB from original compressed file" FORCE) -set (ZLIB_USE_LOCALCONTENT OFF CACHE BOOL "Use local file for ZLIB FetchContent" FORCE) +set (ZLIB_USE_LOCALCONTENT ON CACHE BOOL "Use local file for ZLIB FetchContent" FORCE) set (SZIP_PACKAGE_NAME "szip" CACHE STRING "Name of SZIP package" FORCE) set (LIBAEC_PACKAGE_NAME "libaec" CACHE STRING "Name of AEC SZIP package" FORCE) set (SZIP_TGZ_NAME "SZip.tar.gz" CACHE STRING "Use SZip from compressed file" FORCE) set (SZAEC_TGZ_NAME "LIBAEC.tar.gz" CACHE STRING "Use SZip AEC from compressed file" FORCE) set (USE_LIBAEC ON CACHE BOOL "Use libaec szip replacement" FORCE) -set (LIBAEC_TGZ_ORIGPATH "https://github.com/MathisRosenhauer/libaec/releases/download/v1.0.6/libaec-1.0.6.tar.gz" CACHE STRING "Use LIBAEC from original location" FORCE) -set (LIBAEC_TGZ_ORIGNAME "libaec-v1.0.6.tar.gz" CACHE STRING "Use LIBAEC from original compressed file" FORCE) -set (LIBAEC_USE_LOCALCONTENT OFF CACHE BOOL "Use local file for LIBAEC FetchContent" FORCE) +set (LIBAEC_TGZ_ORIGPATH "https://github.com/MathisRosenhauer/libaec/releases/download/v1.0.6" CACHE STRING "Use LIBAEC from original location" FORCE) +set (LIBAEC_TGZ_ORIGNAME "libaec-1.0.6.tar.gz" CACHE STRING "Use LIBAEC from original compressed file" FORCE) +set (LIBAEC_USE_LOCALCONTENT ON CACHE BOOL "Use local file for LIBAEC FetchContent" FORCE) ######################## # filter plugin options diff --git a/config/cmake_ext_mod/HDFMacros.cmake b/config/cmake_ext_mod/HDFMacros.cmake index ef910080eb9..6037570af16 100644 --- a/config/cmake_ext_mod/HDFMacros.cmake +++ b/config/cmake_ext_mod/HDFMacros.cmake @@ -21,7 +21,7 @@ macro (SET_HDF_BUILD_TYPE) set (HDF_CFG_NAME ${CMAKE_BUILD_TYPE}) else () # set the default to the MultiConfig variable - set (HDF_CFG_NAME 
${CMAKE_CFG_INTDIR}) + set (HDF_CFG_NAME "$") endif () else () set (HDF_CFG_BUILD_TYPE ".") diff --git a/configure.ac b/configure.ac index e1ef78b44ae..455fff633c6 100644 --- a/configure.ac +++ b/configure.ac @@ -622,7 +622,7 @@ if test "X$HDF_FORTRAN" = "Xyes"; then [fmoddir=$withval], [fmoddir="\${includedir}"]) AC_SUBST([fmoddir], [$fmoddir]) - + ## Change to the Fortran 90 language AC_LANG_PUSH(Fortran) diff --git a/release_docs/INSTALL_CMake.txt b/release_docs/INSTALL_CMake.txt index 361e3e6d415..6d6660f6f93 100644 --- a/release_docs/INSTALL_CMake.txt +++ b/release_docs/INSTALL_CMake.txt @@ -308,7 +308,7 @@ IV. Further considerations CMake options: HDF5_ALLOW_EXTERNAL_SUPPORT:STRING="TGZ" ZLIB_TGZ_NAME:STRING="zlib_src.ext" - SZIP_TGZ_NAME:STRING="szip_src.ext" + SZAEC_TGZ_NAME:STRING="szaec_src.ext" TGZPATH:STRING="some_location" where "some_location" is the URL or full path to the compressed file and ext is the type of compression file. Also set CMAKE_BUILD_TYPE @@ -326,17 +326,17 @@ IV. Further considerations ZLIB_TGZ_ORIGPATH:STRING="some_location" HDF5_ALLOW_EXTERNAL_SUPPORT:STRING="TGZ" - where "some_location" is the URL or full path to the compressed - file and ext is the type of compression file. The individual filters are - enabled by setting the BUILD__WITH_FETCHCONTENT CMake variable to ON. + where "some_location" is the URL and by setting + ZLIB_USE_LOCALCONTENT:BOOL=OFF + LIBAEC_USE_LOCALCONTENT:BOOL=OFF + or full path to the compressed file and ext is the type of compression file. + The individual filters are enabled by setting the BUILD__WITH_FETCHCONTENT + CMake variable to ON. Also set CMAKE_BUILD_TYPE to the configuration type during configuration. See the settings in the config/cmake/cacheinit.cmake file HDF uses for testing. The files can also be retrieved from a local path if necessary TGZPATH:STRING="some_location" - by setting - ZLIB_USE_LOCALCONTENT:BOOL=ON - LIBAEC_USE_LOCALCONTENT:BOOL=ON 3. If you plan to use compression plugins: A. 
Use source packages from an GIT server by adding the following CMake @@ -514,28 +514,32 @@ These five steps are described in detail below. ######################## set (CMAKE_INSTALL_FRAMEWORK_PREFIX "Library/Frameworks" CACHE STRING "Frameworks installation directory" FORCE) set (HDF_PACKAGE_EXT "" CACHE STRING "Name of HDF package extension" FORCE) + set (HDF_PACKAGE_NAMESPACE "hdf5::" CACHE STRING "Name for HDF package namespace (can be empty)" FORCE) + set (HDF5_BUILD_CPP_LIB ON CACHE BOOL "Build C++ support" FORCE) set (HDF5_BUILD_FORTRAN ON CACHE BOOL "Build FORTRAN support" FORCE) - set (HDF5_ENABLE_Z_LIB_SUPPORT ON CACHE BOOL "Enable Zlib Filters" FORCE) - set (HDF5_ENABLE_SZIP_SUPPORT ON CACHE BOOL "Use SZip Filter" FORCE) - set (HDF5_ENABLE_SZIP_ENCODING ON CACHE BOOL "Use SZip Encoding" FORCE) - set (MPIEXEC_MAX_NUMPROCS "3" CACHE STRING "Minimum number of processes for HDF parallel tests" FORCE) + set (HDF5_BUILD_JAVA ON CACHE BOOL "Build JAVA support" FORCE) + set (HDF5_INSTALL_MOD_FORTRAN "NO" CACHE STRING "Copy FORTRAN mod files to include directory (NO SHARED STATIC)" FORCE) + set_property (CACHE HDF5_INSTALL_MOD_FORTRAN PROPERTY STRINGS NO SHARED STATIC) + set (HDF5_BUILD_GENERATORS ON CACHE BOOL "Build Test Generators" FORCE) + set (MPIEXEC_MAX_NUMPROCS "4" CACHE STRING "Minimum number of processes for HDF parallel tests" FORCE) set (HDF5_ENABLE_ALL_WARNINGS ON CACHE BOOL "Enable all warnings" FORCE) set (HDF_TEST_EXPRESS "2" CACHE STRING "Control testing framework (0-3)" FORCE) - set (HDF5_ALLOW_EXTERNAL_SUPPORT "NO" CACHE STRING "Allow External Library Building (NO GIT TGZ)" FORCE) + set (HDF5_MINGW_STATIC_GCC_LIBS ON CACHE BOOL "Statically link libgcc/libstdc++" FORCE) + set (HDF5_ALLOW_EXTERNAL_SUPPORT "TGZ" CACHE STRING "Allow External Library Building (NO GIT TGZ)" FORCE) set_property (CACHE HDF5_ALLOW_EXTERNAL_SUPPORT PROPERTY STRINGS NO GIT TGZ) set (ZLIB_PACKAGE_NAME "zlib" CACHE STRING "Name of ZLIB package" FORCE) - set (ZLIB_TGZ_NAME 
"ZLib.tar.gz" CACHE STRING "Use ZLib from compressed file" FORCE) + set (ZLIB_TGZ_NAME "ZLib.tar.gz" CACHE STRING "Use HDF5_ZLib from compressed file" FORCE) set (ZLIB_TGZ_ORIGPATH "https://github.com/madler/zlib/releases/download/v1.2.13" CACHE STRING "Use ZLIB from original location" FORCE) set (ZLIB_TGZ_ORIGNAME "zlib-1.2.13.tar.gz" CACHE STRING "Use ZLIB from original compressed file" FORCE) - set (ZLIB_USE_LOCALCONTENT OFF CACHE BOOL "Use local file for ZLIB FetchContent" FORCE) + set (ZLIB_USE_LOCALCONTENT ON CACHE BOOL "Use local file for ZLIB FetchContent" FORCE) set (SZIP_PACKAGE_NAME "szip" CACHE STRING "Name of SZIP package" FORCE) set (LIBAEC_PACKAGE_NAME "libaec" CACHE STRING "Name of AEC SZIP package" FORCE) set (SZIP_TGZ_NAME "SZip.tar.gz" CACHE STRING "Use SZip from compressed file" FORCE) set (SZAEC_TGZ_NAME "LIBAEC.tar.gz" CACHE STRING "Use SZip AEC from compressed file" FORCE) set (USE_LIBAEC ON CACHE BOOL "Use libaec szip replacement" FORCE) - set (LIBAEC_TGZ_ORIGPATH "https://github.com/MathisRosenhauer/libaec/releases/download/v1.0.6/libaec-1.0.6.tar.gz" CACHE STRING "Use LIBAEC from original location" FORCE) - set (LIBAEC_TGZ_ORIGNAME "libaec-v1.0.6.tar.gz" CACHE STRING "Use LIBAEC from original compressed file" FORCE) - set (LIBAEC_USE_LOCALCONTENT OFF CACHE BOOL "Use local file for LIBAEC FetchContent" FORCE) + set (LIBAEC_TGZ_ORIGPATH "https://github.com/MathisRosenhauer/libaec/releases/download/v1.0.6" CACHE STRING "Use LIBAEC from original location" FORCE) + set (LIBAEC_TGZ_ORIGNAME "libaec-1.0.6.tar.gz" CACHE STRING "Use LIBAEC from original compressed file" FORCE) + set (LIBAEC_USE_LOCALCONTENT ON CACHE BOOL "Use local file for LIBAEC FetchContent" FORCE) ####################### # filter plugin options ####################### @@ -616,6 +620,13 @@ These five steps are described in detail below. 
set (ZFP_GIT_BRANCH "master" CACHE STRING "" FORCE) set (ZFP_TGZ_NAME "zfp.tar.gz" CACHE STRING "Use ZFP from compressed file" FORCE) set (ZFP_PACKAGE_NAME "zfp" CACHE STRING "Name of ZFP package" FORCE) + ###### + # zstd + ###### + set (ZSTD_GIT_URL "https://github.com/facebook/zstd" CACHE STRING "Use ZSTD from repository" FORCE) + set (ZSTD_GIT_BRANCH "dev" CACHE STRING "" FORCE) + set (ZSTD_TGZ_NAME "zstd.tar.gz" CACHE STRING "Use ZSTD from compressed file" FORCE) + set (ZSTD_PACKAGE_NAME "zstd" CACHE STRING "Name of ZSTD package" FORCE) 2. Configure the cache settings @@ -766,13 +777,13 @@ BUILD_STATIC_EXECS "Build Static Executables" OFF BUILD_TESTING "Build HDF5 Unit Testing" ON ---------------- HDF5 Build Options --------------------- -HDF5_BUILD_CPP_LIB "Build HDF5 C++ Library" OFF -HDF5_BUILD_EXAMPLES "Build HDF5 Library Examples" ON -HDF5_BUILD_FORTRAN "Build FORTRAN support" OFF -HDF5_BUILD_JAVA "Build JAVA support" OFF -HDF5_BUILD_HL_LIB "Build HIGH Level HDF5 Library" ON -HDF5_BUILD_TOOLS "Build HDF5 Tools" ON -HDF5_BUILD_HL_TOOLS "Build HIGH Level HDF5 Tools" ON +HDF5_BUILD_CPP_LIB "Build HDF5 C++ Library" OFF +HDF5_BUILD_EXAMPLES "Build HDF5 Library Examples" ON +HDF5_BUILD_FORTRAN "Build FORTRAN support" OFF +HDF5_BUILD_JAVA "Build JAVA support" OFF +HDF5_BUILD_HL_LIB "Build HIGH Level HDF5 Library" ON +HDF5_BUILD_TOOLS "Build HDF5 Tools" ON +HDF5_BUILD_HL_TOOLS "Build HIGH Level HDF5 Tools" ON ---------------- HDF5 Folder Build Options --------------------- Defaults relative to $ @@ -798,72 +809,84 @@ HDF5_INSTALL_DOC_DIR "HDF5_INSTALL_DATA_DIR/doc/hdf5" ---------------- HDF5 Advanced Options --------------------- HDF5_USE_GNU_DIRS "TRUE to use GNU Coding Standard install directory variables, - FALSE to use historical settings" FALSE -ONLY_SHARED_LIBS "Only Build Shared Libraries" OFF -ALLOW_UNSUPPORTED "Allow unsupported combinations of configure options" OFF -HDF5_EXTERNAL_LIB_PREFIX "Use prefix for custom library naming." 
"" -HDF5_DISABLE_COMPILER_WARNINGS "Disable compiler warnings" OFF -HDF5_ENABLE_ALL_WARNINGS "Enable all warnings" OFF -HDF5_ENABLE_CODESTACK "Enable the function stack tracing (for developer debugging)." OFF -HDF5_ENABLE_COVERAGE "Enable code coverage for Libraries and Programs" OFF -HDF5_ENABLE_DEBUG_APIS "Turn on extra debug output in all packages" OFF -HDF5_ENABLE_DEPRECATED_SYMBOLS "Enable deprecated public API symbols" ON -HDF5_ENABLE_DIRECT_VFD "Build the Direct I/O Virtual File Driver" OFF -HDF5_ENABLE_EMBEDDED_LIBINFO "embed library info into executables" ON -HDF5_ENABLE_PARALLEL "Enable parallel build (requires MPI)" OFF + FALSE to use historical settings" FALSE +ONLY_SHARED_LIBS "Only Build Shared Libraries" OFF +ALLOW_UNSUPPORTED "Allow unsupported combinations of configure options" OFF +HDF5_EXTERNAL_LIB_PREFIX "Use prefix for custom library naming." "" +HDF5_DISABLE_COMPILER_WARNINGS "Disable compiler warnings" OFF +HDF5_ENABLE_ALL_WARNINGS "Enable all warnings" OFF +HDF5_ENABLE_CODESTACK "Enable the function stack tracing (for developer debugging)." 
OFF +HDF5_ENABLE_COVERAGE "Enable code coverage for Libraries and Programs" OFF +HDF5_ENABLE_DEBUG_APIS "Turn on extra debug output in all packages" OFF +HDF5_ENABLE_DEPRECATED_SYMBOLS "Enable deprecated public API symbols" ON +HDF5_ENABLE_DIRECT_VFD "Build the Direct I/O Virtual File Driver" OFF +HDF5_ENABLE_EMBEDDED_LIBINFO "embed library info into executables" ON +HDF5_ENABLE_PARALLEL "Enable parallel build (requires MPI)" OFF HDF5_ENABLE_PREADWRITE "Use pread/pwrite in sec2/log/core VFDs in place of read/write (when available)" ON -HDF5_ENABLE_TRACE "Enable API tracing capability" OFF -HDF5_ENABLE_USING_MEMCHECKER "Indicate that a memory checker is used" OFF -HDF5_GENERATE_HEADERS "Rebuild Generated Files" ON -HDF5_BUILD_GENERATORS "Build Test Generators" OFF -HDF5_JAVA_PACK_JRE "Package a JRE installer directory" OFF -HDF5_MEMORY_ALLOC_SANITY_CHECK "Indicate that internal memory allocation sanity checks are enabled" OFF -HDF5_NO_PACKAGES "Do not include CPack Packaging" OFF -HDF5_PACK_EXAMPLES "Package the HDF5 Library Examples Compressed File" OFF -HDF5_PACK_MACOSX_FRAMEWORK "Package the HDF5 Library in a Frameworks" OFF +HDF5_ENABLE_TRACE "Enable API tracing capability" OFF +HDF5_ENABLE_USING_MEMCHECKER "Indicate that a memory checker is used" OFF +HDF5_GENERATE_HEADERS "Rebuild Generated Files" ON +HDF5_BUILD_GENERATORS "Build Test Generators" OFF +HDF5_JAVA_PACK_JRE "Package a JRE installer directory" OFF +HDF5_NO_PACKAGES "Do not include CPack Packaging" OFF +HDF5_PACK_EXAMPLES "Package the HDF5 Library Examples Compressed File" OFF +HDF5_PACK_MACOSX_FRAMEWORK "Package the HDF5 Library in a Frameworks" OFF HDF5_BUILD_FRAMEWORKS "TRUE to build as frameworks libraries, - FALSE to build according to BUILD_SHARED_LIBS" FALSE -HDF5_PACKAGE_EXTLIBS "CPACK - include external libraries" OFF -HDF5_STRICT_FORMAT_CHECKS "Whether to perform strict file format checks" OFF -HDF_TEST_EXPRESS "Control testing framework (0-3)" "3" -HDF5_TEST_VFD "Execute tests with 
different VFDs" OFF -HDF5_TEST_PASSTHROUGH_VOL "Execute tests with different passthrough VOL connectors" OFF -DEFAULT_API_VERSION "Enable default API (v16, v18, v110, v112)" "v112" -HDF5_USE_FOLDERS "Enable folder grouping of projects in IDEs." ON -HDF5_WANT_DATA_ACCURACY "IF data accuracy is guaranteed during data conversions" ON + FALSE to build according to BUILD_SHARED_LIBS" FALSE +HDF5_PACKAGE_EXTLIBS "CPACK - include external libraries" OFF +HDF5_STRICT_FORMAT_CHECKS "Whether to perform strict file format checks" OFF +DEFAULT_API_VERSION "Enable default API (v16, v18, v110, v112)" "v112" +HDF5_USE_FOLDERS "Enable folder grouping of projects in IDEs." ON +HDF5_WANT_DATA_ACCURACY "IF data accuracy is guaranteed during data conversions" ON HDF5_WANT_DCONV_EXCEPTION "exception handling functions is checked during data conversions" ON -HDF5_ENABLE_THREADSAFE "Enable Threadsafety" OFF -HDF5_MSVC_NAMING_CONVENTION "Use MSVC Naming conventions for Shared Libraries" OFF -HDF5_MINGW_STATIC_GCC_LIBS "Statically link libgcc/libstdc++" OFF +HDF5_ENABLE_THREADSAFE "Enable Threadsafety" OFF +HDF5_MSVC_NAMING_CONVENTION "Use MSVC Naming conventions for Shared Libraries" OFF +HDF5_MINGW_STATIC_GCC_LIBS "Statically link libgcc/libstdc++" OFF if (APPLE) HDF5_BUILD_WITH_INSTALL_NAME "Build with library install_name set to the installation path" OFF if (CMAKE_BUILD_TYPE MATCHES Debug) HDF5_ENABLE_INSTRUMENT "Instrument The library" OFF -if (HDF5_TEST_VFD) - HDF5_TEST_FHEAP_VFD "Execute fheap test with different VFDs" ON if (HDF5_BUILD_FORTRAN) - HDF5_INSTALL_MOD_FORTRAN "Copy FORTRAN mod files to include directory (NO SHARED STATIC)" "XX" - if (BUILD_SHARED_LIBS AND BUILD_STATIC_LIBS) default HDF5_INSTALL_MOD_FORTRAN is SHARED - if (BUILD_SHARED_LIBS AND NOT BUILD_STATIC_LIBS) default HDF5_INSTALL_MOD_FORTRAN is SHARED - if (NOT BUILD_SHARED_LIBS AND BUILD_STATIC_LIBS) default HDF5_INSTALL_MOD_FORTRAN is STATIC + HDF5_INSTALL_MOD_FORTRAN "Copy FORTRAN mod files to include 
directory (NO SHARED STATIC)" SHARED + if (BUILD_SHARED_LIBS AND BUILD_STATIC_LIBS) default HDF5_INSTALL_MOD_FORTRAN is SHARED + if (BUILD_SHARED_LIBS AND NOT BUILD_STATIC_LIBS) default HDF5_INSTALL_MOD_FORTRAN is SHARED + if (NOT BUILD_SHARED_LIBS AND BUILD_STATIC_LIBS) default HDF5_INSTALL_MOD_FORTRAN is STATIC if (NOT BUILD_SHARED_LIBS AND NOT BUILD_STATIC_LIBS) default HDF5_INSTALL_MOD_FORTRAN is SHARED -HDF5_BUILD_DOC "Build documentation" OFF -HDF5_ENABLE_ANALYZER_TOOLS "enable the use of Clang tools" OFF -HDF5_ENABLE_SANITIZERS "execute the Clang sanitizer" OFF -HDF5_ENABLE_FORMATTERS "format source files" OFF -TEST_SHELL_SCRIPTS "Enable shell script tests" ON +HDF5_BUILD_DOC "Build documentation" OFF +HDF5_ENABLE_ANALYZER_TOOLS "enable the use of Clang tools" OFF +HDF5_ENABLE_SANITIZERS "execute the Clang sanitizer" OFF +HDF5_ENABLE_FORMATTERS "format source files" OFF + +---------------- HDF5 Advanced Test Options --------------------- +if (BUILD_TESTING) + HDF5_TEST_SERIAL "Execute non-parallel tests" ON + HDF5_TEST_TOOLS "Execute tools tests" ON + HDF5_TEST_EXAMPLES "Execute tests on examples" ON + HDF5_TEST_SWMR "Execute SWMR tests" ON + HDF5_TEST_PARALLEL "Execute parallel tests" ON + HDF5_TEST_FORTRAN "Execute fortran tests" ON + HDF5_TEST_CPP "Execute cpp tests" ON + HDF5_TEST_JAVA "Execute java tests" ON + HDF_TEST_EXPRESS "Control testing framework (0-3)" "3" + HDF5_TEST_PASSTHROUGH_VOL "Execute tests with different passthrough VOL connectors" OFF + if (HDF5_TEST_PASSTHROUGH_VOL) + HDF5_TEST_FHEAP_PASSTHROUGH_VOL "Execute fheap test with different passthrough VOL connectors" ON + HDF5_TEST_VFD "Execute tests with different VFDs" OFF + if (HDF5_TEST_VFD) + HDF5_TEST_FHEAP_VFD "Execute fheap test with different VFDs" ON + TEST_SHELL_SCRIPTS "Enable shell script tests" ON ---------------- External Library Options --------------------- HDF5_ALLOW_EXTERNAL_SUPPORT "Allow External Library Building (NO GIT TGZ)" "NO" HDF5_ENABLE_PLUGIN_SUPPORT "Enable 
PLUGIN Filters" OFF HDF5_ENABLE_SZIP_SUPPORT "Use SZip Filter" OFF -HDF5_ENABLE_Z_LIB_SUPPORT "Enable Zlib Filters" OFF +HDF5_ENABLE_Z_LIB_SUPPORT "Enable Zlib Filters" ON PLUGIN_USE_EXTERNAL "Use External Library Building for PLUGINS" 0 ZLIB_USE_EXTERNAL "Use External Library Building for ZLIB" 0 SZIP_USE_EXTERNAL "Use External Library Building for SZIP" 0 if (HDF5_ENABLE_SZIP_SUPPORT) - HDF5_ENABLE_SZIP_ENCODING "Use SZip Encoding" OFF - USE_LIBAEC "Use libaec szip replacement" OFF + HDF5_ENABLE_SZIP_ENCODING "Use SZip Encoding" ON + USE_LIBAEC "Use libaec szip replacement" ON if (WINDOWS) H5_DEFAULT_PLUGINDIR "%ALLUSERSPROFILE%/hdf5/lib/plugin" else () @@ -872,11 +895,11 @@ endif () if (BUILD_SZIP_WITH_FETCHCONTENT) LIBAEC_TGZ_ORIGPATH "Use LIBAEC from original location" "https://github.com/MathisRosenhauer/libaec/releases/download/v1.0.6/libaec-1.0.6.tar.gz" LIBAEC_TGZ_ORIGNAME "Use LIBAEC from original compressed file" "libaec-v1.0.6.tar.gz" - LIBAEC_USE_LOCALCONTENT "Use local file for LIBAEC FetchContent" OFF + LIBAEC_USE_LOCALCONTENT "Use local file for LIBAEC FetchContent" ON if (BUILD_ZLIB_WITH_FETCHCONTENT) ZLIB_TGZ_ORIGPATH "Use ZLIB from original location" "https://github.com/madler/zlib/releases/download/v1.2.13" ZLIB_TGZ_ORIGNAME "Use ZLIB from original compressed file" "zlib-1.2.13.tar.gz" - ZLIB_USE_LOCALCONTENT "Use local file for ZLIB FetchContent" OFF + ZLIB_USE_LOCALCONTENT "Use local file for ZLIB FetchContent" ON NOTE: The BUILD_STATIC_EXECS ("Build Static Executables") option is only valid From 4c57a5b06417e823eef153ae306a6ce06bd907ee Mon Sep 17 00:00:00 2001 From: Dana Robinson <43805+derobins@users.noreply.github.com> Date: Tue, 6 Jun 2023 06:44:33 -0700 Subject: [PATCH 071/108] Bring key changesets from develop (#3052) * Bump GitHub action macOS version to 13 (#2999, #3009) * Bump GitHub action gcc/g++/gfortran version to 12 (#3015) * Bump Autoconf version to 2.71 (#2944) * Fix missing h5_reset() calls in accum test (#3001) * Only run 
ttsafe in GitHub thread-safe actions (#2777) * Fix Java debug asserts on Windows (#3012) * Fix long double dt_arith bug on macOS (#3038) --- .github/workflows/main.yml | 31 ++++++++++++++++++++--------- config/cmake/HDF5_Examples.cmake.in | 5 +++++ config/cmake/jrunTest.cmake | 2 ++ config/cmake_ext_mod/grepTest.cmake | 2 ++ config/cmake_ext_mod/runTest.cmake | 2 ++ configure.ac | 2 +- java/src/jni/h5lImp.c | 8 ++++---- release_docs/RELEASE.txt | 8 ++++++++ src/H5FDtest.c | 6 ------ src/H5Tconv.c | 24 ++++++++++++++++++++-- test/accum_swmr_reader.c | 12 +++++------ test/swmr.c | 22 ++++++++++---------- 12 files changed, 85 insertions(+), 39 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 635890b621f..3a8854d0936 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -157,7 +157,7 @@ jobs: # We could also build with the Autotools via brew installing them, # but that seems unnecessary - name: "MacOS Clang CMake" - os: macos-11 + os: macos-13 cpp: ON fortran: OFF java: ON @@ -324,10 +324,10 @@ jobs: run: | sudo apt update sudo apt install automake autoconf libtool libtool-bin - sudo apt install gcc-11 g++-11 gfortran-11 - echo "CC=gcc-11" >> $GITHUB_ENV - echo "CXX=g++-11" >> $GITHUB_ENV - echo "FC=gfortran-11" >> $GITHUB_ENV + sudo apt install gcc-12 g++-12 gfortran-12 + echo "CC=gcc-12" >> $GITHUB_ENV + echo "CXX=g++-12" >> $GITHUB_ENV + echo "FC=gfortran-12" >> $GITHUB_ENV sudo apt install libaec0 libaec-dev if: (matrix.generator == 'autogen') && (matrix.parallel != 'enable') @@ -347,7 +347,7 @@ jobs: - name: Install Dependencies (macOS) run: brew install ninja - if: matrix.os == 'macos-11' + if: matrix.os == 'macos-13' - name: Set environment for MSVC (Windows) run: | @@ -429,10 +429,23 @@ jobs: if: (matrix.generator == 'autogen') && (matrix.run_tests) && ! (matrix.thread_safety.enabled) - name: CMake Run Tests - run: ctest --build . --parallel 2 -C ${{ matrix.build_mode.cmake }} -V + run: ctest . 
--parallel 2 -C ${{ matrix.build_mode.cmake }} -V working-directory: ${{ runner.workspace }}/build - # Skip Debug MSVC while we investigate H5L Java test timeouts - if: (matrix.generator != 'autogen') && (matrix.run_tests) && ! ((matrix.name == 'Windows MSVC CMake') && (matrix.build_mode.cmake == 'Debug')) + if: (matrix.generator != 'autogen') && (matrix.run_tests) && ! (matrix.thread_safety.enabled) + + # THREAD-SAFE + + - name: Autotools Run Thread-Safe Tests + run: | + cd test + ./ttsafe + working-directory: ${{ runner.workspace }}/build + if: (matrix.generator == 'autogen') && (matrix.run_tests) && (matrix.thread_safety.enabled) + + - name: CMake Run Thread-Safe Tests + run: ctest . --parallel 2 -C ${{ matrix.build_mode.cmake }} -V -R ttsafe + working-directory: ${{ runner.workspace }}/build + if: (matrix.generator != 'autogen') && (matrix.run_tests) && (matrix.thread_safety.enabled) # # INSTALL (note that this runs even when we don't run the tests) diff --git a/config/cmake/HDF5_Examples.cmake.in b/config/cmake/HDF5_Examples.cmake.in index 21a1dbb87e2..9f3db03fb80 100644 --- a/config/cmake/HDF5_Examples.cmake.in +++ b/config/cmake/HDF5_Examples.cmake.in @@ -87,6 +87,11 @@ if(WIN32) set(CTEST_BINARY_NAME ${CTEST_SOURCE_NAME}\\build) set(CTEST_SOURCE_DIRECTORY "${CTEST_DASHBOARD_ROOT}\\${CTEST_SOURCE_NAME}") set(CTEST_BINARY_DIRECTORY "${CTEST_DASHBOARD_ROOT}\\${CTEST_BINARY_NAME}") +else(APPLE) + set(ENV{DYLD_LIBRARY_PATH} "${INSTALLDIR}/lib") + set(CTEST_BINARY_NAME ${CTEST_SOURCE_NAME}/build) + set(CTEST_SOURCE_DIRECTORY "${CTEST_DASHBOARD_ROOT}/${CTEST_SOURCE_NAME}") + set(CTEST_BINARY_DIRECTORY "${CTEST_DASHBOARD_ROOT}/${CTEST_BINARY_NAME}") else() set(ENV{LD_LIBRARY_PATH} "${INSTALLDIR}/lib") set(CTEST_BINARY_NAME ${CTEST_SOURCE_NAME}/build) diff --git a/config/cmake/jrunTest.cmake b/config/cmake/jrunTest.cmake index 13e50dd8cd7..f6fa3b3fe9b 100644 --- a/config/cmake/jrunTest.cmake +++ b/config/cmake/jrunTest.cmake @@ -56,6 +56,8 @@ endif () if (WIN32) set 
(ENV{PATH} "$ENV{PATH}\\;${TEST_LIBRARY_DIRECTORY}") +elseif (APPLE) + set (ENV{DYLD_LIBRARY_PATH} "$ENV{DYLD_LIBRARY_PATH}:${TEST_LIBRARY_DIRECTORY}") else () set (ENV{LD_LIBRARY_PATH} "$ENV{LD_LIBRARY_PATH}:${TEST_LIBRARY_DIRECTORY}") endif () diff --git a/config/cmake_ext_mod/grepTest.cmake b/config/cmake_ext_mod/grepTest.cmake index 631fca6fdfc..c4a6eb7e98c 100644 --- a/config/cmake_ext_mod/grepTest.cmake +++ b/config/cmake_ext_mod/grepTest.cmake @@ -42,6 +42,8 @@ message (STATUS "COMMAND: ${TEST_EMULATOR} ${TEST_PROGRAM} ${TEST_ARGS}") if (TEST_LIBRARY_DIRECTORY) if (WIN32) set (ENV{PATH} "$ENV{PATH};${TEST_LIBRARY_DIRECTORY}") + elseif (APPLE) + set (ENV{DYLD_LIBRARY_PATH} "$ENV{DYLD_LIBRARY_PATH}:${TEST_LIBRARY_DIRECTORY}") else () set (ENV{LD_LIBRARY_PATH} "$ENV{LD_LIBRARY_PATH}:${TEST_LIBRARY_DIRECTORY}") endif () diff --git a/config/cmake_ext_mod/runTest.cmake b/config/cmake_ext_mod/runTest.cmake index b373fe4e6ea..d1c454f0ed4 100644 --- a/config/cmake_ext_mod/runTest.cmake +++ b/config/cmake_ext_mod/runTest.cmake @@ -40,6 +40,8 @@ message (STATUS "COMMAND: ${TEST_EMULATOR} ${TEST_PROGRAM} ${TEST_ARGS}") if (TEST_LIBRARY_DIRECTORY) if (WIN32) set (ENV{PATH} "$ENV{PATH};${TEST_LIBRARY_DIRECTORY}") + elseif (APPLE) + set (ENV{DYLD_LIBRARY_PATH} "$ENV{DYLD_LIBRARY_PATH}:${TEST_LIBRARY_DIRECTORY}") else () set (ENV{LD_LIBRARY_PATH} "$ENV{LD_LIBRARY_PATH}:${TEST_LIBRARY_DIRECTORY}") endif () diff --git a/configure.ac b/configure.ac index 455fff633c6..6d4f6d7fc96 100644 --- a/configure.ac +++ b/configure.ac @@ -13,7 +13,7 @@ ## ---------------------------------------------------------------------- ## Initialize configure. ## -AC_PREREQ([2.69]) +AC_PREREQ([2.71]) ## AC_INIT takes the name of the package, the version number, and an ## email address to report bugs. 
AC_CONFIG_SRCDIR takes a unique file diff --git a/java/src/jni/h5lImp.c b/java/src/jni/h5lImp.c index 1f2e8d8079b..5389600a04f 100644 --- a/java/src/jni/h5lImp.c +++ b/java/src/jni/h5lImp.c @@ -277,7 +277,7 @@ Java_hdf_hdf5lib_H5_H5Lexists(JNIEnv *env, jclass clss, jlong loc_id, jstring na JNIEXPORT jobject JNICALL Java_hdf_hdf5lib_H5_H5Lget_1info(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jlong access_id) { - H5L_info2_t infobuf; + H5L_info2_t infobuf = {H5L_TYPE_ERROR, FALSE, -1, H5T_CSET_ERROR, 0}; const char *linkName = NULL; jvalue args[5]; herr_t status = FAIL; @@ -340,7 +340,7 @@ JNIEXPORT jobject JNICALL Java_hdf_hdf5lib_H5_H5Lget_1info_1by_1idx(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jint index_field, jint order, jlong link_n, jlong access_id) { - H5L_info2_t infobuf; + H5L_info2_t infobuf = {H5L_TYPE_ERROR, FALSE, -1, H5T_CSET_ERROR, 0}; const char *groupName = NULL; jvalue args[5]; herr_t status = FAIL; @@ -454,7 +454,7 @@ JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Lget_1value(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jobjectArray link_value, jlong access_id) { - H5L_info2_t infobuf; + H5L_info2_t infobuf = {H5L_TYPE_ERROR, FALSE, -1, H5T_CSET_ERROR, 0}; const char *file_name = NULL; const char *obj_name = NULL; const char *linkName = NULL; @@ -544,7 +544,7 @@ Java_hdf_hdf5lib_H5_H5Lget_1value_1by_1idx(JNIEnv *env, jclass clss, jlong loc_i jint index_field, jint order, jlong link_n, jobjectArray link_value, jlong access_id) { - H5L_info2_t infobuf; + H5L_info2_t infobuf = {H5L_TYPE_ERROR, FALSE, -1, H5T_CSET_ERROR, 0}; const char *file_name = NULL; const char *obj_name = NULL; const char *grpName = NULL; diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 7f44441442c..9e9dde4b6ff 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -402,6 +402,14 @@ Bug Fixes since HDF5-1.12.2 release Configuration ------------- + - The accum test now passes on macOS 12+ (Monterey) w/ CMake + + 
Due to changes in the way macOS handles LD_LIBRARY_PATH, the accum test + started failing on macOS 12+ when building with CMake. CMake has been + updated to set DYLD_LIBRARY_PATH on macOS and the test now passes. + + Fixes GitHub #2994, #2261, and #1289 + - Fixed syntax of generator expressions used by CMake Adding quotes around the generator expression should allow CMake to diff --git a/src/H5FDtest.c b/src/H5FDtest.c index dc9ad9378f8..87d24b64f75 100644 --- a/src/H5FDtest.c +++ b/src/H5FDtest.c @@ -71,8 +71,6 @@ * VFD feature flags (which do not exist until the driver * is instantiated). * - * See test/Makefile.am for a list of the VFD strings. - * * This function is only intended for use in the test code. * * Return: TRUE (1) if the VFD supports SWMR I/O or vfd_name is @@ -82,10 +80,6 @@ * * This function cannot fail at this time so there is no * error return value. - * - * Programmer: Dana Robinson - * Fall 2014 - * *------------------------------------------------------------------------- */ hbool_t diff --git a/src/H5Tconv.c b/src/H5Tconv.c index 5deae85076b..f6ab0d7b183 100644 --- a/src/H5Tconv.c +++ b/src/H5Tconv.c @@ -887,7 +887,17 @@ done: /* Macro defining action on source data which needs to be aligned (before main action) */ #define H5T_CONV_LOOP_PRE_SALIGN(ST) \ { \ - H5MM_memcpy(&src_aligned, src, sizeof(ST)); \ + /* The uint8_t * cast is required to avoid tripping over undefined behavior. \ + * \ + * The typed pointer arrives via a void pointer, which may have any alignment. \ + * We then cast it to a pointer to a type that is assumed to be aligned, which \ + * is undefined behavior (section 6.3.2.3 paragraph 7 of the C99 standard). \ + * In the past this hasn't caused many problems, but in some cases (e.g. \ + * converting long doubles on macOS), an optimizing compiler might do the \ + * wrong thing (in the macOS case, the conversion uses SSE, which has stricter \ + * requirements about alignment). 
\ + */ \ + H5MM_memcpy(&src_aligned, (const uint8_t *)src, sizeof(ST)); \ } /* Macro defining action on source data which doesn't need to be aligned (before main action) */ @@ -919,7 +929,17 @@ done: /* Macro defining action on destination data which needs to be aligned (after main action) */ #define H5T_CONV_LOOP_POST_DALIGN(DT) \ { \ - H5MM_memcpy(dst, &dst_aligned, sizeof(DT)); \ + /* The uint8_t * cast is required to avoid tripping over undefined behavior. \ + * \ + * The typed pointer arrives via a void pointer, which may have any alignment. \ + * We then cast it to a pointer to a type that is assumed to be aligned, which \ + * is undefined behavior (section 6.3.2.3 paragraph 7 of the C99 standard). \ + * In the past this hasn't caused many problems, but in some cases (e.g. \ + * converting long doubles on macOS), an optimizing compiler might do the \ + * wrong thing (in the macOS case, the conversion uses SSE, which has stricter \ + * requirements about alignment). \ + */ \ + H5MM_memcpy((uint8_t *)dst, &dst_aligned, sizeof(DT)); \ } /* Macro defining action on destination data which doesn't need to be aligned (after main action) */ diff --git a/test/accum_swmr_reader.c b/test/accum_swmr_reader.c index aca9db46f65..a7898c4a8f1 100644 --- a/test/accum_swmr_reader.c +++ b/test/accum_swmr_reader.c @@ -34,17 +34,14 @@ const char *FILENAME[] = {"accum", "accum_swmr_big", NULL}; * * Return: Success: EXIT_SUCCESS * Failure: EXIT_FAILURE - * - * Programmer: Vailin Choi; June 2013 - * *------------------------------------------------------------------------- */ int main(void) { - hid_t fid = -1; /* File ID */ - hid_t fapl = -1; /* file access property list ID */ - H5F_t *f = NULL; /* File pointer */ + hid_t fid = H5I_INVALID_HID; /* File ID */ + hid_t fapl = H5I_INVALID_HID; /* file access property list ID */ + H5F_t *f = NULL; /* File pointer */ char filename[1024]; unsigned u; /* Local index variable */ uint8_t rbuf[1024]; /* Buffer for reading */ @@ -52,6 +49,9 @@ 
main(void) char *driver = NULL; /* VFD string (from env variable) */ hbool_t api_ctx_pushed = FALSE; /* Whether API context pushed */ + /* Testing setup */ + h5_reset(); + /* Skip this test if SWMR I/O is not supported for the VFD specified * by the environment variable. */ diff --git a/test/swmr.c b/test/swmr.c index acac2aeb156..bbd55f01842 100644 --- a/test/swmr.c +++ b/test/swmr.c @@ -7728,12 +7728,15 @@ test_multiple_same(hid_t in_fapl, hbool_t new_format) int main(void) { - int nerrors = 0; /* The # of errors */ - hid_t fapl = -1; /* File access property list ID */ - char *driver = NULL; /* VFD string (from env variable) */ - char *lock_env_var = NULL; /* file locking env var pointer */ - hbool_t use_file_locking; /* read from env var */ - hbool_t file_locking_enabled = FALSE; /* Checks if the file system supports locks */ + int nerrors = 0; /* The # of errors */ + hid_t fapl = H5I_INVALID_HID; /* File access property list ID */ + char *driver = NULL; /* VFD string (from env variable) */ + char *lock_env_var = NULL; /* file locking env var pointer */ + hbool_t use_file_locking; /* read from env var */ + hbool_t file_locking_enabled = FALSE; /* Checks if the file system supports locks */ + + /* Testing setup */ + h5_reset(); /* Skip this test if SWMR I/O is not supported for the VFD specified * by the environment variable. @@ -7742,7 +7745,7 @@ main(void) if (!H5FD__supports_swmr_test(driver)) { HDprintf("This VFD does not support SWMR I/O\n"); return EXIT_SUCCESS; - } /* end if */ + } /* Check the environment variable that determines if we care * about file locking. File locking should be used unless explicitly @@ -7761,9 +7764,6 @@ main(void) return EXIT_FAILURE; } - /* Set up */ - h5_reset(); - /* Get file access property list */ fapl = h5_fileaccess(); @@ -7822,7 +7822,7 @@ main(void) */ nerrors += test_file_lock_swmr_same(fapl); nerrors += test_file_lock_swmr_concur(fapl); - } /* end if */ + } /* Tests SWMR VFD compatibility flag. 
* Only needs to run when the VFD is the default (sec2). From 2ea39d926a481769ee54a561bf6b754adb8fa0a6 Mon Sep 17 00:00:00 2001 From: Dana Robinson <43805+derobins@users.noreply.github.com> Date: Wed, 7 Jun 2023 05:50:59 -0700 Subject: [PATCH 072/108] Move config/cmake_ext_mod content to config/cmake (#3062) --- CMakeInstallation.cmake | 16 +- CMakeLists.txt | 9 +- c++/examples/CMakeTests.cmake | 4 +- c++/test/CMakeTests.cmake | 2 +- config/cmake/CTestCustom.cmake | 8 + config/cmake/ConfigureChecks.cmake | 684 ++++++++++++++++- .../{cmake_ext_mod => cmake}/FindSZIP.cmake | 0 .../GetTimeOfDayTest.cpp | 0 .../HDFLibMacros.cmake | 0 .../{cmake_ext_mod => cmake}/HDFMacros.cmake | 0 config/{cmake_ext_mod => cmake}/HDFTests.c | 0 .../HDFUseFortran.cmake | 0 .../NSIS.InstallOptions.ini.in | 0 .../{cmake_ext_mod => cmake}/NSIS.template.in | 0 .../{cmake_ext_mod => cmake}/grepTest.cmake | 0 config/{cmake_ext_mod => cmake}/hdf.bmp | Bin config/{cmake_ext_mod => cmake}/hdf.icns | Bin config/{cmake_ext_mod => cmake}/hdf.ico | Bin config/{cmake_ext_mod => cmake}/runTest.cmake | 0 .../{cmake_ext_mod => cmake}/version.plist.in | 0 config/cmake_ext_mod/CTestCustom.cmake | 17 - config/cmake_ext_mod/ConfigureChecks.cmake | 693 ------------------ examples/CMakeTests.cmake | 4 +- fortran/examples/CMakeTests.cmake | 4 +- fortran/test/CMakeTests.cmake | 8 +- hl/c++/examples/CMakeTests.cmake | 2 +- hl/c++/test/CMakeTests.cmake | 2 +- hl/examples/CMakeTests.cmake | 2 +- hl/fortran/examples/CMakeTests.cmake | 2 +- hl/fortran/test/CMakeTests.cmake | 2 +- hl/test/CMakeTests.cmake | 2 +- hl/tools/h5watch/CMakeTests.cmake | 6 +- java/CMakeLists.txt | 2 +- test/CMakeTests.cmake | 22 +- tools/test/h5copy/CMakeTests.cmake | 16 +- tools/test/h5diff/CMakeTests.cmake | 8 +- tools/test/h5dump/CMakeTests.cmake | 22 +- tools/test/h5dump/CMakeTestsPBITS.cmake | 2 +- tools/test/h5dump/CMakeTestsVDS.cmake | 6 +- tools/test/h5dump/CMakeTestsXML.cmake | 2 +- tools/test/h5format_convert/CMakeTests.cmake | 16 
+- tools/test/h5import/CMakeTests.cmake | 18 +- tools/test/h5jam/CMakeTests.cmake | 10 +- tools/test/h5ls/CMakeTests.cmake | 6 +- tools/test/h5ls/CMakeTestsVDS.cmake | 4 +- tools/test/h5repack/CMakeTests.cmake | 28 +- tools/test/h5stat/CMakeTests.cmake | 4 +- tools/test/misc/CMakeTestsClear.cmake | 16 +- tools/test/misc/CMakeTestsMkgrp.cmake | 4 +- tools/test/perform/CMakeTests.cmake | 14 +- 50 files changed, 821 insertions(+), 846 deletions(-) rename config/{cmake_ext_mod => cmake}/FindSZIP.cmake (100%) rename config/{cmake_ext_mod => cmake}/GetTimeOfDayTest.cpp (100%) rename config/{cmake_ext_mod => cmake}/HDFLibMacros.cmake (100%) rename config/{cmake_ext_mod => cmake}/HDFMacros.cmake (100%) rename config/{cmake_ext_mod => cmake}/HDFTests.c (100%) rename config/{cmake_ext_mod => cmake}/HDFUseFortran.cmake (100%) rename config/{cmake_ext_mod => cmake}/NSIS.InstallOptions.ini.in (100%) rename config/{cmake_ext_mod => cmake}/NSIS.template.in (100%) rename config/{cmake_ext_mod => cmake}/grepTest.cmake (100%) rename config/{cmake_ext_mod => cmake}/hdf.bmp (100%) rename config/{cmake_ext_mod => cmake}/hdf.icns (100%) rename config/{cmake_ext_mod => cmake}/hdf.ico (100%) rename config/{cmake_ext_mod => cmake}/runTest.cmake (100%) rename config/{cmake_ext_mod => cmake}/version.plist.in (100%) delete mode 100644 config/cmake_ext_mod/CTestCustom.cmake delete mode 100644 config/cmake_ext_mod/ConfigureChecks.cmake diff --git a/CMakeInstallation.cmake b/CMakeInstallation.cmake index 216028db997..67fd493e9f4 100644 --- a/CMakeInstallation.cmake +++ b/CMakeInstallation.cmake @@ -308,7 +308,7 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES) else () set (CPACK_PACKAGE_INSTALL_DIRECTORY "${CPACK_PACKAGE_VENDOR}/${CPACK_PACKAGE_NAME}/${CPACK_PACKAGE_VERSION}") endif () - set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_EXT_DIR}/hdf.bmp") + set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_DIR}/hdf.bmp") set (CPACK_GENERATOR "TGZ") if (WIN32) @@ -331,10 +331,10 @@ if (NOT 
HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES) endif () # set the install/unistall icon used for the installer itself # There is a bug in NSI that does not handle full unix paths properly. - set (CPACK_NSIS_MUI_ICON "${HDF_RESOURCES_EXT_DIR}\\\\hdf.ico") - set (CPACK_NSIS_MUI_UNIICON "${HDF_RESOURCES_EXT_DIR}\\\\hdf.ico") + set (CPACK_NSIS_MUI_ICON "${HDF_RESOURCES_DIR}\\\\hdf.ico") + set (CPACK_NSIS_MUI_UNIICON "${HDF_RESOURCES_DIR}\\\\hdf.ico") # set the package header icon for MUI - set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_EXT_DIR}\\\\hdf.bmp") + set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_DIR}\\\\hdf.bmp") set (CPACK_NSIS_DISPLAY_NAME "${CPACK_NSIS_PACKAGE_NAME}") if (OVERRIDE_INSTALL_VERSION) set (CPACK_PACKAGE_INSTALL_DIRECTORY "${CPACK_PACKAGE_VENDOR}\\\\${CPACK_PACKAGE_NAME}\\\\${OVERRIDE_INSTALL_VERSION}") @@ -364,7 +364,7 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES) set (CPACK_RESOURCE_FILE_LICENSE "${HDF5_BINARY_DIR}/COPYING.txt") # .. variable:: CPACK_WIX_PRODUCT_ICON # The Icon shown next to the program name in Add/Remove programs. - set(CPACK_WIX_PRODUCT_ICON "${HDF_RESOURCES_EXT_DIR}\\\\hdf.ico") + set(CPACK_WIX_PRODUCT_ICON "${HDF_RESOURCES_DIR}\\\\hdf.ico") # # .. 
variable:: CPACK_WIX_UI_BANNER # @@ -395,14 +395,14 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES) list (APPEND CPACK_GENERATOR "DragNDrop") set (CPACK_COMPONENTS_ALL_IN_ONE_PACKAGE ON) set (CPACK_PACKAGING_INSTALL_PREFIX "/${CPACK_PACKAGE_INSTALL_DIRECTORY}") - set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_EXT_DIR}/hdf.icns") + set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_DIR}/hdf.icns") option (HDF5_PACK_MACOSX_FRAMEWORK "Package the HDF5 Library in a Frameworks" OFF) if (HDF5_PACK_MACOSX_FRAMEWORK AND HDF5_BUILD_FRAMEWORKS) set (CPACK_BUNDLE_NAME "${HDF5_PACKAGE_STRING}") set (CPACK_BUNDLE_LOCATION "/") # make sure CMAKE_INSTALL_PREFIX ends in / set (CMAKE_INSTALL_PREFIX "/${CPACK_BUNDLE_NAME}.framework/Versions/${CPACK_PACKAGE_VERSION}/${CPACK_PACKAGE_NAME}/") - set (CPACK_BUNDLE_ICON "${HDF_RESOURCES_EXT_DIR}/hdf.icns") + set (CPACK_BUNDLE_ICON "${HDF_RESOURCES_DIR}/hdf.icns") set (CPACK_BUNDLE_PLIST "${HDF5_BINARY_DIR}/CMakeFiles/Info.plist") set (CPACK_SHORT_VERSION_STRING "${CPACK_PACKAGE_VERSION}") #----------------------------------------------------------------------------- @@ -417,7 +417,7 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES) ${HDF5_BINARY_DIR}/CMakeFiles/PkgInfo @ONLY ) configure_file ( - ${HDF_RESOURCES_EXT_DIR}/version.plist.in + ${HDF_RESOURCES_DIR}/version.plist.in ${HDF5_BINARY_DIR}/CMakeFiles/version.plist @ONLY ) install ( diff --git a/CMakeLists.txt b/CMakeLists.txt index ca94ce44fbb..2b51a31364f 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -220,7 +220,6 @@ set (HDF5_HL_F90_C_LIBSH_TARGET "${HDF5_HL_F90_C_LIB_CORENAME}-shared") #----------------------------------------------------------------------------- set (HDF_CONFIG_DIR ${HDF5_SOURCE_DIR}/config) set (HDF_RESOURCES_DIR ${HDF5_SOURCE_DIR}/config/cmake) -set (HDF_RESOURCES_EXT_DIR ${HDF5_SOURCE_DIR}/config/cmake_ext_mod) set (HDF5_SRC_DIR ${HDF5_SOURCE_DIR}/src) set (HDF5_TEST_SRC_DIR ${HDF5_SOURCE_DIR}/test) set (HDF5_CPP_SRC_DIR 
${HDF5_SOURCE_DIR}/c++) @@ -440,11 +439,11 @@ set (HDF5_PACKAGE_BUGREPORT "help@hdfgroup.org") #----------------------------------------------------------------------------- # Include some macros for reusable code #----------------------------------------------------------------------------- -include (${HDF_RESOURCES_EXT_DIR}/HDFMacros.cmake) +include (${HDF_RESOURCES_DIR}/HDFMacros.cmake) HDF_DIR_PATHS(${HDF5_PACKAGE_NAME}) -include (${HDF_RESOURCES_EXT_DIR}/HDFLibMacros.cmake) +include (${HDF_RESOURCES_DIR}/HDFLibMacros.cmake) include (${HDF_RESOURCES_DIR}/HDF5PluginMacros.cmake) include (${HDF_RESOURCES_DIR}/HDF5Macros.cmake) @@ -675,7 +674,7 @@ else () endif () include (${HDF_RESOURCES_DIR}/HDFCompilerFlags.cmake) -set (CMAKE_MODULE_PATH ${HDF_RESOURCES_DIR} ${HDF_RESOURCES_EXT_DIR} ${CMAKE_MODULE_PATH}) +set (CMAKE_MODULE_PATH ${HDF_RESOURCES_DIR} ${CMAKE_MODULE_PATH}) #----------------------------------------------------------------------------- # Option to Enable HDFS @@ -1065,7 +1064,7 @@ set (H5_FC_FUNC_ "H5_FC_FUNC_(name,NAME) name ## _") if (EXISTS "${HDF5_SOURCE_DIR}/fortran" AND IS_DIRECTORY "${HDF5_SOURCE_DIR}/fortran") option (HDF5_BUILD_FORTRAN "Build FORTRAN support" OFF) if (HDF5_BUILD_FORTRAN) - include (${HDF_RESOURCES_EXT_DIR}/HDFUseFortran.cmake) + include (${HDF_RESOURCES_DIR}/HDFUseFortran.cmake) if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") message (VERBOSE "Fortran compiler ID is ${CMAKE_Fortran_COMPILER_ID}") diff --git a/c++/examples/CMakeTests.cmake b/c++/examples/CMakeTests.cmake index a7fc9b8c86d..5af0b2a8546 100644 --- a/c++/examples/CMakeTests.cmake +++ b/c++/examples/CMakeTests.cmake @@ -41,7 +41,7 @@ foreach (example ${examples}) -D "TEST_OUTPUT=cpp_ex_${example}.txt" #-D "TEST_REFERENCE=cpp_ex_${example}.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (CPP_ex_${example} PROPERTIES FIXTURES_REQUIRED clear_cppex) 
@@ -80,7 +80,7 @@ foreach (example ${tutr_examples}) -D "TEST_OUTPUT=cpp_ex_${example}.txt" #-D "TEST_REFERENCE=cpp_ex_${example}.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (CPP_ex_${example} PROPERTIES FIXTURES_REQUIRED clear_cppex_tutr) diff --git a/c++/test/CMakeTests.cmake b/c++/test/CMakeTests.cmake index 2a0a6be4da5..e34f62970c8 100644 --- a/c++/test/CMakeTests.cmake +++ b/c++/test/CMakeTests.cmake @@ -43,7 +43,7 @@ else () -D "TEST_OUTPUT=cpp_testhdf5.txt" #-D "TEST_REFERENCE=cpp_testhdf5.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (CPP_testhdf5 PROPERTIES DEPENDS CPP_testhdf5-clear-objects) diff --git a/config/cmake/CTestCustom.cmake b/config/cmake/CTestCustom.cmake index f0ecde1fb71..94a6481694b 100644 --- a/config/cmake/CTestCustom.cmake +++ b/config/cmake/CTestCustom.cmake @@ -33,6 +33,14 @@ set (CTEST_CUSTOM_WARNING_EXCEPTION # "note: expanded from macro" # HDDFFV-11074 "This directive is not standard" + ".*note.*expected.*void.*but argument is of type.*volatile.*" + ".*src.SZIP.*:[ \t]*warning.*" + ".*src.ZLIB.*:[ \t]*warning.*" + ".*src.JPEG.*:[ \t]*warning.*" + ".*POSIX name for this item is deprecated.*" + ".*disabling jobserver mode.*" + ".*warning.*implicit declaration of function.*" + ".*note: expanded from macro.*" ) set (CTEST_CUSTOM_MEMCHECK_IGNORE diff --git a/config/cmake/ConfigureChecks.cmake b/config/cmake/ConfigureChecks.cmake index 27eb93f8ff7..c72f415fc40 100644 --- a/config/cmake/ConfigureChecks.cmake +++ b/config/cmake/ConfigureChecks.cmake @@ -12,8 +12,687 @@ #----------------------------------------------------------------------------- # Include all the necessary files for macros #----------------------------------------------------------------------------- +include (CheckFunctionExists) +include 
(CheckIncludeFile) +include (CheckIncludeFiles) +include (CheckLibraryExists) +include (CheckSymbolExists) +include (CheckTypeSize) +include (CheckVariableExists) +include (TestBigEndian) +include (CheckStructHasMember) + set (HDF_PREFIX "H5") -include (${HDF_RESOURCES_EXT_DIR}/ConfigureChecks.cmake) + +# Check for Darwin (not just Apple - we also want to catch OpenDarwin) +if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin") + set (${HDF_PREFIX}_HAVE_DARWIN 1) +endif () + +# Check for Solaris +if (${CMAKE_SYSTEM_NAME} MATCHES "SunOS") + set (${HDF_PREFIX}_HAVE_SOLARIS 1) +endif () + +#----------------------------------------------------------------------------- +# This MACRO checks IF the symbol exists in the library and IF it +# does, it appends library to the list. +#----------------------------------------------------------------------------- +set (LINK_LIBS "") +macro (CHECK_LIBRARY_EXISTS_CONCAT LIBRARY SYMBOL VARIABLE) + CHECK_LIBRARY_EXISTS ("${LIBRARY};${LINK_LIBS}" ${SYMBOL} "" ${VARIABLE}) + if (${VARIABLE}) + set (LINK_LIBS ${LINK_LIBS} ${LIBRARY}) + endif () +endmacro () + +# ---------------------------------------------------------------------- +# WINDOWS Hard code Values +# ---------------------------------------------------------------------- +set (WINDOWS) + +if (MINGW) + set (${HDF_PREFIX}_HAVE_MINGW 1) + set (WINDOWS 1) # MinGW tries to imitate Windows + set (CMAKE_REQUIRED_FLAGS "-DWIN32_LEAN_AND_MEAN=1 -DNOGDI=1") + set (${HDF_PREFIX}_HAVE_WINSOCK2_H 1) + set (__USE_MINGW_ANSI_STDIO 1) +endif () + +if (WIN32 AND NOT MINGW) + if (NOT UNIX) + set (WINDOWS 1) + set (CMAKE_REQUIRED_FLAGS "/DWIN32_LEAN_AND_MEAN=1 /DNOGDI=1") + if (MSVC) + set (${HDF_PREFIX}_HAVE_VISUAL_STUDIO 1) + endif () + endif () +endif () + +if (WINDOWS) + set (HDF5_REQUIRED_LIBRARIES "ws2_32.lib;wsock32.lib") + set (${HDF_PREFIX}_HAVE_WIN32_API 1) + set (${HDF_PREFIX}_HAVE_LIBM 1) + set (${HDF_PREFIX}_HAVE_STRDUP 1) + set (${HDF_PREFIX}_HAVE_SYSTEM 1) + set (${HDF_PREFIX}_HAVE_LONGJMP 
1) + if (NOT MINGW) + set (${HDF_PREFIX}_HAVE_GETHOSTNAME 1) + set (${HDF_PREFIX}_HAVE_FUNCTION 1) + endif () + if (NOT UNIX AND NOT CYGWIN) + set (${HDF_PREFIX}_HAVE_GETCONSOLESCREENBUFFERINFO 1) + set (${HDF_PREFIX}_GETTIMEOFDAY_GIVES_TZ 1) + set (${HDF_PREFIX}_HAVE_TIMEZONE 1) + set (${HDF_PREFIX}_HAVE_GETTIMEOFDAY 1) + set (${HDF_PREFIX}_HAVE_LIBWS2_32 1) + set (${HDF_PREFIX}_HAVE_LIBWSOCK32 1) + endif () +endif () + +# ---------------------------------------------------------------------- +# END of WINDOWS Hard code Values +# ---------------------------------------------------------------------- + +if (NOT WINDOWS) + TEST_BIG_ENDIAN (${HDF_PREFIX}_WORDS_BIGENDIAN) +endif () + +#----------------------------------------------------------------------------- +# Check IF header file exists and add it to the list. +#----------------------------------------------------------------------------- +macro (CHECK_INCLUDE_FILE_CONCAT FILE VARIABLE) + CHECK_INCLUDE_FILES ("${USE_INCLUDES};${FILE}" ${VARIABLE}) + if (${VARIABLE}) + set (USE_INCLUDES ${USE_INCLUDES} ${FILE}) + endif () +endmacro () + +#----------------------------------------------------------------------------- +# Check for the existence of certain header files +#----------------------------------------------------------------------------- +CHECK_INCLUDE_FILE_CONCAT ("sys/file.h" ${HDF_PREFIX}_HAVE_SYS_FILE_H) +CHECK_INCLUDE_FILE_CONCAT ("sys/ioctl.h" ${HDF_PREFIX}_HAVE_SYS_IOCTL_H) +CHECK_INCLUDE_FILE_CONCAT ("sys/resource.h" ${HDF_PREFIX}_HAVE_SYS_RESOURCE_H) +CHECK_INCLUDE_FILE_CONCAT ("sys/socket.h" ${HDF_PREFIX}_HAVE_SYS_SOCKET_H) +CHECK_INCLUDE_FILE_CONCAT ("sys/stat.h" ${HDF_PREFIX}_HAVE_SYS_STAT_H) +CHECK_INCLUDE_FILE_CONCAT ("sys/time.h" ${HDF_PREFIX}_HAVE_SYS_TIME_H) +CHECK_INCLUDE_FILE_CONCAT ("sys/types.h" ${HDF_PREFIX}_HAVE_SYS_TYPES_H) +CHECK_INCLUDE_FILE_CONCAT ("features.h" ${HDF_PREFIX}_HAVE_FEATURES_H) +CHECK_INCLUDE_FILE_CONCAT ("dirent.h" ${HDF_PREFIX}_HAVE_DIRENT_H) 
+CHECK_INCLUDE_FILE_CONCAT ("setjmp.h" ${HDF_PREFIX}_HAVE_SETJMP_H) +CHECK_INCLUDE_FILE_CONCAT ("stddef.h" ${HDF_PREFIX}_HAVE_STDDEF_H) +CHECK_INCLUDE_FILE_CONCAT ("stdint.h" ${HDF_PREFIX}_HAVE_STDINT_H) +CHECK_INCLUDE_FILE_CONCAT ("unistd.h" ${HDF_PREFIX}_HAVE_UNISTD_H) +CHECK_INCLUDE_FILE_CONCAT ("pwd.h" ${HDF_PREFIX}_HAVE_PWD_H) + +# Windows +CHECK_INCLUDE_FILE_CONCAT ("io.h" ${HDF_PREFIX}_HAVE_IO_H) +if (NOT CYGWIN) + CHECK_INCLUDE_FILE_CONCAT ("winsock2.h" ${HDF_PREFIX}_HAVE_WINSOCK2_H) +endif () + +if (CMAKE_SYSTEM_NAME MATCHES "OSF") + CHECK_INCLUDE_FILE_CONCAT ("sys/sysinfo.h" ${HDF_PREFIX}_HAVE_SYS_SYSINFO_H) + CHECK_INCLUDE_FILE_CONCAT ("sys/proc.h" ${HDF_PREFIX}_HAVE_SYS_PROC_H) +else () + set (${HDF_PREFIX}_HAVE_SYS_SYSINFO_H "" CACHE INTERNAL "" FORCE) + set (${HDF_PREFIX}_HAVE_SYS_PROC_H "" CACHE INTERNAL "" FORCE) +endif () + +CHECK_INCLUDE_FILE_CONCAT ("globus/common.h" ${HDF_PREFIX}_HAVE_GLOBUS_COMMON_H) +CHECK_INCLUDE_FILE_CONCAT ("pdb.h" ${HDF_PREFIX}_HAVE_PDB_H) +CHECK_INCLUDE_FILE_CONCAT ("pthread.h" ${HDF_PREFIX}_HAVE_PTHREAD_H) +CHECK_INCLUDE_FILE_CONCAT ("srbclient.h" ${HDF_PREFIX}_HAVE_SRBCLIENT_H) +CHECK_INCLUDE_FILE_CONCAT ("string.h" ${HDF_PREFIX}_HAVE_STRING_H) +CHECK_INCLUDE_FILE_CONCAT ("strings.h" ${HDF_PREFIX}_HAVE_STRINGS_H) +CHECK_INCLUDE_FILE_CONCAT ("stdlib.h" ${HDF_PREFIX}_HAVE_STDLIB_H) +CHECK_INCLUDE_FILE_CONCAT ("memory.h" ${HDF_PREFIX}_HAVE_MEMORY_H) +CHECK_INCLUDE_FILE_CONCAT ("dlfcn.h" ${HDF_PREFIX}_HAVE_DLFCN_H) +CHECK_INCLUDE_FILE_CONCAT ("inttypes.h" ${HDF_PREFIX}_HAVE_INTTYPES_H) +CHECK_INCLUDE_FILE_CONCAT ("netinet/in.h" ${HDF_PREFIX}_HAVE_NETINET_IN_H) +CHECK_INCLUDE_FILE_CONCAT ("netdb.h" ${HDF_PREFIX}_HAVE_NETDB_H) +CHECK_INCLUDE_FILE_CONCAT ("arpa/inet.h" ${HDF_PREFIX}_HAVE_ARPA_INET_H) +# _Bool type support +CHECK_INCLUDE_FILE_CONCAT (stdbool.h ${HDF_PREFIX}_HAVE_STDBOOL_H) + +## Check for non-standard extension quadmath.h + +CHECK_INCLUDE_FILES(quadmath.h C_HAVE_QUADMATH) +if (${C_HAVE_QUADMATH}) + 
set(${HDF_PREFIX}_HAVE_QUADMATH_H 1) +else () + set(${HDF_PREFIX}_HAVE_QUADMATH_H 0) +endif () + +if (CYGWIN) + set (${HDF_PREFIX}_HAVE_LSEEK64 0) +endif () + +#----------------------------------------------------------------------------- +# Check for the math library "m" +#----------------------------------------------------------------------------- +if (MINGW OR NOT WINDOWS) + CHECK_LIBRARY_EXISTS_CONCAT ("m" ceil ${HDF_PREFIX}_HAVE_LIBM) + CHECK_LIBRARY_EXISTS_CONCAT ("dl" dlopen ${HDF_PREFIX}_HAVE_LIBDL) + CHECK_LIBRARY_EXISTS_CONCAT ("ws2_32" WSAStartup ${HDF_PREFIX}_HAVE_LIBWS2_32) + CHECK_LIBRARY_EXISTS_CONCAT ("wsock32" gethostbyname ${HDF_PREFIX}_HAVE_LIBWSOCK32) +endif () + +# UCB (BSD) compatibility library +CHECK_LIBRARY_EXISTS_CONCAT ("ucb" gethostname ${HDF_PREFIX}_HAVE_LIBUCB) + +# For other tests to use the same libraries +set (HDF5_REQUIRED_LIBRARIES ${HDF5_REQUIRED_LIBRARIES} ${LINK_LIBS}) + +set (USE_INCLUDES "") +if (WINDOWS) + set (USE_INCLUDES ${USE_INCLUDES} "windows.h") +endif () + +# For other specific tests, use this MACRO. 
+macro (HDF_FUNCTION_TEST OTHER_TEST) + if (NOT DEFINED ${HDF_PREFIX}_${OTHER_TEST}) + set (MACRO_CHECK_FUNCTION_DEFINITIONS "-D${OTHER_TEST} ${CMAKE_REQUIRED_FLAGS}") + + foreach (def + HAVE_SYS_TIME_H + HAVE_UNISTD_H + HAVE_SYS_TYPES_H + HAVE_SYS_SOCKET_H + ) + if ("${${HDF_PREFIX}_${def}}") + set (MACRO_CHECK_FUNCTION_DEFINITIONS "${MACRO_CHECK_FUNCTION_DEFINITIONS} -D${def}") + endif () + endforeach () + + if (LARGEFILE) + set (MACRO_CHECK_FUNCTION_DEFINITIONS + "${MACRO_CHECK_FUNCTION_DEFINITIONS} -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -D_LARGEFILE_SOURCE" + ) + endif () + + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (TRACE "Performing ${OTHER_TEST}") + endif () + try_compile (${OTHER_TEST} + ${CMAKE_BINARY_DIR} + ${HDF_RESOURCES_DIR}/HDFTests.c + COMPILE_DEFINITIONS "${MACRO_CHECK_FUNCTION_DEFINITIONS}" + LINK_LIBRARIES "${HDF5_REQUIRED_LIBRARIES}" + OUTPUT_VARIABLE OUTPUT + ) + if (${OTHER_TEST}) + set (${HDF_PREFIX}_${OTHER_TEST} 1 CACHE INTERNAL "Other test ${FUNCTION}") + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "Performing Other Test ${OTHER_TEST} - Success") + endif () + else () + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "Performing Other Test ${OTHER_TEST} - Failed") + endif () + set (${HDF_PREFIX}_${OTHER_TEST} "" CACHE INTERNAL "Other test ${FUNCTION}") + file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log + "Performing Other Test ${OTHER_TEST} failed with the following output:\n" + "${OUTPUT}\n" + ) + endif () + endif () +endmacro () + +#----------------------------------------------------------------------------- +# Check for these functions before the time headers are checked +#----------------------------------------------------------------------------- +HDF_FUNCTION_TEST (STDC_HEADERS) + +#----------------------------------------------------------------------------- +# Check for large file support 
+#----------------------------------------------------------------------------- + +# The linux-lfs option is deprecated. +set (LINUX_LFS 0) + +set (HDF_EXTRA_C_FLAGS) +set (HDF_EXTRA_FLAGS) +if (MINGW OR NOT WINDOWS) + if (CMAKE_SYSTEM_NAME MATCHES "Linux") + # Linux Specific flags + # This was originally defined as _POSIX_SOURCE which was updated to + # _POSIX_C_SOURCE=199506L to expose a greater amount of POSIX + # functionality so clock_gettime and CLOCK_MONOTONIC are defined + # correctly. This was later updated to 200112L so that + # posix_memalign() is visible for the direct VFD code on Linux + # systems. + # POSIX feature information can be found in the gcc manual at: + # http://www.gnu.org/s/libc/manual/html_node/Feature-Test-Macros.html + set (HDF_EXTRA_C_FLAGS -D_POSIX_C_SOURCE=200809L) + + # Need to add this so that O_DIRECT is visible for the direct + # VFD on Linux systems. + set (HDF_EXTRA_C_FLAGS ${HDF_EXTRA_C_FLAGS} -D_GNU_SOURCE) + + option (HDF_ENABLE_LARGE_FILE "Enable support for large (64-bit) files on Linux." ON) + if (HDF_ENABLE_LARGE_FILE AND NOT DEFINED TEST_LFS_WORKS_RUN) + set (msg "Performing TEST_LFS_WORKS") + try_run (TEST_LFS_WORKS_RUN TEST_LFS_WORKS_COMPILE + ${CMAKE_BINARY_DIR} + ${HDF_RESOURCES_DIR}/HDFTests.c + COMPILE_DEFINITIONS "-DTEST_LFS_WORKS" + ) + + # The LARGEFILE definitions were from the transition period + # and are probably no longer needed. The FILE_OFFSET_BITS + # check should be generalized for all POSIX systems as it + # is in the Autotools. + if (TEST_LFS_WORKS_COMPILE) + if (TEST_LFS_WORKS_RUN MATCHES 0) + set (TEST_LFS_WORKS 1 CACHE INTERNAL ${msg}) + set (LARGEFILE 1) + set (HDF_EXTRA_FLAGS ${HDF_EXTRA_FLAGS} -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -D_LARGEFILE_SOURCE) + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "${msg}... yes") + endif () + else () + set (TEST_LFS_WORKS "" CACHE INTERNAL ${msg}) + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "${msg}... 
no") + endif () + file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log + "Test TEST_LFS_WORKS Run failed with the following exit code:\n ${TEST_LFS_WORKS_RUN}\n" + ) + endif () + else () + set (TEST_LFS_WORKS "" CACHE INTERNAL ${msg}) + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "${msg}... no") + endif () + file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log + "Test TEST_LFS_WORKS Compile failed\n" + ) + endif () + endif () + set (CMAKE_REQUIRED_DEFINITIONS ${CMAKE_REQUIRED_DEFINITIONS} ${HDF_EXTRA_FLAGS}) + endif () +endif () + +#----------------------------------------------------------------------------- +# Check for HAVE_OFF64_T functionality +#----------------------------------------------------------------------------- +if (MINGW OR NOT WINDOWS) + HDF_FUNCTION_TEST (HAVE_OFF64_T) + if (${HDF_PREFIX}_HAVE_OFF64_T) + CHECK_FUNCTION_EXISTS (lseek64 ${HDF_PREFIX}_HAVE_LSEEK64) + endif () + + CHECK_FUNCTION_EXISTS (fseeko ${HDF_PREFIX}_HAVE_FSEEKO) + + CHECK_STRUCT_HAS_MEMBER("struct stat64" st_blocks "sys/types.h;sys/stat.h" HAVE_STAT64_STRUCT) + if (HAVE_STAT64_STRUCT) + CHECK_FUNCTION_EXISTS (stat64 ${HDF_PREFIX}_HAVE_STAT64) + endif () +endif () + +#----------------------------------------------------------------------------- +# Check the size in bytes of all the int and float types +#----------------------------------------------------------------------------- +macro (HDF_CHECK_TYPE_SIZE type var) + set (aType ${type}) + set (aVar ${var}) + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (TRACE "Checking size of ${aType} and storing into ${aVar}") + endif () + CHECK_TYPE_SIZE (${aType} ${aVar}) + if (NOT ${aVar}) + set (${aVar} 0 CACHE INTERNAL "SizeOf for ${aType}") + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (TRACE "Size of ${aType} was NOT Found") + endif () + endif () +endmacro () + +HDF_CHECK_TYPE_SIZE (char ${HDF_PREFIX}_SIZEOF_CHAR) +HDF_CHECK_TYPE_SIZE 
(short ${HDF_PREFIX}_SIZEOF_SHORT) +HDF_CHECK_TYPE_SIZE (int ${HDF_PREFIX}_SIZEOF_INT) +HDF_CHECK_TYPE_SIZE (unsigned ${HDF_PREFIX}_SIZEOF_UNSIGNED) +if (NOT APPLE) + HDF_CHECK_TYPE_SIZE (long ${HDF_PREFIX}_SIZEOF_LONG) +endif () +HDF_CHECK_TYPE_SIZE ("long long" ${HDF_PREFIX}_SIZEOF_LONG_LONG) +HDF_CHECK_TYPE_SIZE (__int64 ${HDF_PREFIX}_SIZEOF___INT64) +if (NOT ${HDF_PREFIX}_SIZEOF___INT64) + set (${HDF_PREFIX}_SIZEOF___INT64 0) +endif () + +HDF_CHECK_TYPE_SIZE (float ${HDF_PREFIX}_SIZEOF_FLOAT) +HDF_CHECK_TYPE_SIZE (double ${HDF_PREFIX}_SIZEOF_DOUBLE) +HDF_CHECK_TYPE_SIZE ("long double" ${HDF_PREFIX}_SIZEOF_LONG_DOUBLE) + +HDF_CHECK_TYPE_SIZE (int8_t ${HDF_PREFIX}_SIZEOF_INT8_T) +HDF_CHECK_TYPE_SIZE (uint8_t ${HDF_PREFIX}_SIZEOF_UINT8_T) +HDF_CHECK_TYPE_SIZE (int_least8_t ${HDF_PREFIX}_SIZEOF_INT_LEAST8_T) +HDF_CHECK_TYPE_SIZE (uint_least8_t ${HDF_PREFIX}_SIZEOF_UINT_LEAST8_T) +HDF_CHECK_TYPE_SIZE (int_fast8_t ${HDF_PREFIX}_SIZEOF_INT_FAST8_T) +HDF_CHECK_TYPE_SIZE (uint_fast8_t ${HDF_PREFIX}_SIZEOF_UINT_FAST8_T) + +HDF_CHECK_TYPE_SIZE (int16_t ${HDF_PREFIX}_SIZEOF_INT16_T) +HDF_CHECK_TYPE_SIZE (uint16_t ${HDF_PREFIX}_SIZEOF_UINT16_T) +HDF_CHECK_TYPE_SIZE (int_least16_t ${HDF_PREFIX}_SIZEOF_INT_LEAST16_T) +HDF_CHECK_TYPE_SIZE (uint_least16_t ${HDF_PREFIX}_SIZEOF_UINT_LEAST16_T) +HDF_CHECK_TYPE_SIZE (int_fast16_t ${HDF_PREFIX}_SIZEOF_INT_FAST16_T) +HDF_CHECK_TYPE_SIZE (uint_fast16_t ${HDF_PREFIX}_SIZEOF_UINT_FAST16_T) + +HDF_CHECK_TYPE_SIZE (int32_t ${HDF_PREFIX}_SIZEOF_INT32_T) +HDF_CHECK_TYPE_SIZE (uint32_t ${HDF_PREFIX}_SIZEOF_UINT32_T) +HDF_CHECK_TYPE_SIZE (int_least32_t ${HDF_PREFIX}_SIZEOF_INT_LEAST32_T) +HDF_CHECK_TYPE_SIZE (uint_least32_t ${HDF_PREFIX}_SIZEOF_UINT_LEAST32_T) +HDF_CHECK_TYPE_SIZE (int_fast32_t ${HDF_PREFIX}_SIZEOF_INT_FAST32_T) +HDF_CHECK_TYPE_SIZE (uint_fast32_t ${HDF_PREFIX}_SIZEOF_UINT_FAST32_T) + +HDF_CHECK_TYPE_SIZE (int64_t ${HDF_PREFIX}_SIZEOF_INT64_T) +HDF_CHECK_TYPE_SIZE (uint64_t ${HDF_PREFIX}_SIZEOF_UINT64_T) +HDF_CHECK_TYPE_SIZE 
(int_least64_t ${HDF_PREFIX}_SIZEOF_INT_LEAST64_T) +HDF_CHECK_TYPE_SIZE (uint_least64_t ${HDF_PREFIX}_SIZEOF_UINT_LEAST64_T) +HDF_CHECK_TYPE_SIZE (int_fast64_t ${HDF_PREFIX}_SIZEOF_INT_FAST64_T) +HDF_CHECK_TYPE_SIZE (uint_fast64_t ${HDF_PREFIX}_SIZEOF_UINT_FAST64_T) + +HDF_CHECK_TYPE_SIZE (size_t ${HDF_PREFIX}_SIZEOF_SIZE_T) +HDF_CHECK_TYPE_SIZE (ssize_t ${HDF_PREFIX}_SIZEOF_SSIZE_T) +if (NOT ${HDF_PREFIX}_SIZEOF_SSIZE_T) + set (${HDF_PREFIX}_SIZEOF_SSIZE_T 0) +endif () +if (MINGW OR NOT WINDOWS) + HDF_CHECK_TYPE_SIZE (ptrdiff_t ${HDF_PREFIX}_SIZEOF_PTRDIFF_T) +endif () + +HDF_CHECK_TYPE_SIZE (off_t ${HDF_PREFIX}_SIZEOF_OFF_T) +HDF_CHECK_TYPE_SIZE (off64_t ${HDF_PREFIX}_SIZEOF_OFF64_T) +if (NOT ${HDF_PREFIX}_SIZEOF_OFF64_T) + set (${HDF_PREFIX}_SIZEOF_OFF64_T 0) +endif () +HDF_CHECK_TYPE_SIZE (time_t ${HDF_PREFIX}_SIZEOF_TIME_T) + +#----------------------------------------------------------------------------- +# Extra C99 types +#----------------------------------------------------------------------------- + +# _Bool type support +if (HAVE_STDBOOL_H) + set (CMAKE_EXTRA_INCLUDE_FILES stdbool.h) + HDF_CHECK_TYPE_SIZE (bool ${HDF_PREFIX}_SIZEOF_BOOL) +else () + HDF_CHECK_TYPE_SIZE (_Bool ${HDF_PREFIX}_SIZEOF_BOOL) +endif () + +if (MINGW OR NOT WINDOWS) + #----------------------------------------------------------------------------- + # Check if the dev_t type is a scalar type + #----------------------------------------------------------------------------- + HDF_FUNCTION_TEST (DEV_T_IS_SCALAR) + + # ---------------------------------------------------------------------- + # Check for MONOTONIC_TIMER support (used in clock_gettime). This has + # to be done after any POSIX/BSD defines to ensure that the test gets + # the correct POSIX level on linux. 
+ CHECK_VARIABLE_EXISTS (CLOCK_MONOTONIC HAVE_CLOCK_MONOTONIC) + + #----------------------------------------------------------------------------- + # Check a bunch of time functions + #----------------------------------------------------------------------------- + CHECK_STRUCT_HAS_MEMBER("struct tm" tm_gmtoff "time.h" ${HDF_PREFIX}_HAVE_TM_GMTOFF) + CHECK_STRUCT_HAS_MEMBER("struct tm" __tm_gmtoff "time.h" ${HDF_PREFIX}_HAVE___TM_GMTOFF) + CHECK_STRUCT_HAS_MEMBER("struct tm" tm_sec "sys/types.h;sys/time.h;time.h" ${HDF_PREFIX}_TIME_WITH_SYS_TIME) + if (${HDF_PREFIX}_HAVE_SYS_TIME_H) + CHECK_STRUCT_HAS_MEMBER("struct tm" tz_minuteswest "sys/types.h;sys/time.h;time.h" ${HDF_PREFIX}_HAVE_STRUCT_TIMEZONE) + else () + CHECK_STRUCT_HAS_MEMBER("struct tm" tz_minuteswest "sys/types.h;time.h" ${HDF_PREFIX}_HAVE_STRUCT_TIMEZONE) + endif () + CHECK_FUNCTION_EXISTS (gettimeofday ${HDF_PREFIX}_HAVE_GETTIMEOFDAY) + foreach (time_test +# HAVE_TIMEZONE + GETTIMEOFDAY_GIVES_TZ + HAVE_TM_ZONE + HAVE_STRUCT_TM_TM_ZONE + ) + HDF_FUNCTION_TEST (${time_test}) + endforeach () + if (NOT CYGWIN AND NOT MINGW) + HDF_FUNCTION_TEST (HAVE_TIMEZONE) + endif () + + # ---------------------------------------------------------------------- + # Does the struct stat have the st_blocks field? This field is not POSIX. + # + CHECK_STRUCT_HAS_MEMBER("struct stat" st_blocks "sys/types.h;sys/stat.h" ${HDF_PREFIX}_HAVE_STAT_ST_BLOCKS) + + # ---------------------------------------------------------------------- + # How do we figure out the width of a tty in characters? 
+ # + CHECK_FUNCTION_EXISTS (ioctl ${HDF_PREFIX}_HAVE_IOCTL) + CHECK_STRUCT_HAS_MEMBER ("struct videoconfig" numtextcols "" ${HDF_PREFIX}_HAVE_STRUCT_VIDEOCONFIG) + CHECK_STRUCT_HAS_MEMBER ("struct text_info" screenwidth "" ${HDF_PREFIX}_HAVE_STRUCT_TEXT_INFO) + CHECK_FUNCTION_EXISTS (_getvideoconfig ${HDF_PREFIX}_HAVE__GETVIDEOCONFIG) + CHECK_FUNCTION_EXISTS (gettextinfo ${HDF_PREFIX}_HAVE_GETTEXTINFO) + CHECK_FUNCTION_EXISTS (_scrsize ${HDF_PREFIX}_HAVE__SCRSIZE) + if (NOT CYGWIN) + CHECK_FUNCTION_EXISTS (GetConsoleScreenBufferInfo ${HDF_PREFIX}_HAVE_GETCONSOLESCREENBUFFERINFO) + endif () + CHECK_SYMBOL_EXISTS (TIOCGWINSZ "sys/ioctl.h" ${HDF_PREFIX}_HAVE_TIOCGWINSZ) + CHECK_SYMBOL_EXISTS (TIOCGETD "sys/ioctl.h" ${HDF_PREFIX}_HAVE_TIOCGETD) + + # ---------------------------------------------------------------------- + # cygwin user credentials are different then on linux + # + if (NOT CYGWIN AND NOT MINGW) + CHECK_FUNCTION_EXISTS (getpwuid ${HDF_PREFIX}_HAVE_GETPWUID) + endif () +endif () + +#----------------------------------------------------------------------------- +# Check for some functions that are used +# +CHECK_FUNCTION_EXISTS (alarm ${HDF_PREFIX}_HAVE_ALARM) +CHECK_FUNCTION_EXISTS (fcntl ${HDF_PREFIX}_HAVE_FCNTL) +CHECK_FUNCTION_EXISTS (flock ${HDF_PREFIX}_HAVE_FLOCK) +CHECK_FUNCTION_EXISTS (fork ${HDF_PREFIX}_HAVE_FORK) +CHECK_FUNCTION_EXISTS (frexpf ${HDF_PREFIX}_HAVE_FREXPF) +CHECK_FUNCTION_EXISTS (frexpl ${HDF_PREFIX}_HAVE_FREXPL) + +CHECK_FUNCTION_EXISTS (gethostname ${HDF_PREFIX}_HAVE_GETHOSTNAME) +CHECK_FUNCTION_EXISTS (getrusage ${HDF_PREFIX}_HAVE_GETRUSAGE) +CHECK_FUNCTION_EXISTS (llround ${HDF_PREFIX}_HAVE_LLROUND) +CHECK_FUNCTION_EXISTS (llroundf ${HDF_PREFIX}_HAVE_LLROUNDF) +CHECK_FUNCTION_EXISTS (lround ${HDF_PREFIX}_HAVE_LROUND) +CHECK_FUNCTION_EXISTS (lroundf ${HDF_PREFIX}_HAVE_LROUNDF) +CHECK_FUNCTION_EXISTS (lstat ${HDF_PREFIX}_HAVE_LSTAT) + +CHECK_FUNCTION_EXISTS (pread ${HDF_PREFIX}_HAVE_PREAD) +CHECK_FUNCTION_EXISTS (pwrite 
${HDF_PREFIX}_HAVE_PWRITE) +CHECK_FUNCTION_EXISTS (rand_r ${HDF_PREFIX}_HAVE_RAND_R) +CHECK_FUNCTION_EXISTS (random ${HDF_PREFIX}_HAVE_RANDOM) +CHECK_FUNCTION_EXISTS (round ${HDF_PREFIX}_HAVE_ROUND) +CHECK_FUNCTION_EXISTS (roundf ${HDF_PREFIX}_HAVE_ROUNDF) +CHECK_FUNCTION_EXISTS (setsysinfo ${HDF_PREFIX}_HAVE_SETSYSINFO) + +CHECK_FUNCTION_EXISTS (signal ${HDF_PREFIX}_HAVE_SIGNAL) +CHECK_FUNCTION_EXISTS (longjmp ${HDF_PREFIX}_HAVE_LONGJMP) +CHECK_FUNCTION_EXISTS (setjmp ${HDF_PREFIX}_HAVE_SETJMP) +CHECK_FUNCTION_EXISTS (siglongjmp ${HDF_PREFIX}_HAVE_SIGLONGJMP) +CHECK_FUNCTION_EXISTS (sigsetjmp ${HDF_PREFIX}_HAVE_SIGSETJMP) +CHECK_FUNCTION_EXISTS (sigprocmask ${HDF_PREFIX}_HAVE_SIGPROCMASK) + +CHECK_FUNCTION_EXISTS (snprintf ${HDF_PREFIX}_HAVE_SNPRINTF) +CHECK_FUNCTION_EXISTS (srandom ${HDF_PREFIX}_HAVE_SRANDOM) +CHECK_FUNCTION_EXISTS (strdup ${HDF_PREFIX}_HAVE_STRDUP) +CHECK_FUNCTION_EXISTS (strtoll ${HDF_PREFIX}_HAVE_STRTOLL) +CHECK_FUNCTION_EXISTS (strtoull ${HDF_PREFIX}_HAVE_STRTOULL) +CHECK_FUNCTION_EXISTS (symlink ${HDF_PREFIX}_HAVE_SYMLINK) +CHECK_FUNCTION_EXISTS (system ${HDF_PREFIX}_HAVE_SYSTEM) + +CHECK_FUNCTION_EXISTS (tmpfile ${HDF_PREFIX}_HAVE_TMPFILE) +CHECK_FUNCTION_EXISTS (asprintf ${HDF_PREFIX}_HAVE_ASPRINTF) +CHECK_FUNCTION_EXISTS (vasprintf ${HDF_PREFIX}_HAVE_VASPRINTF) +CHECK_FUNCTION_EXISTS (waitpid ${HDF_PREFIX}_HAVE_WAITPID) + +CHECK_FUNCTION_EXISTS (vsnprintf ${HDF_PREFIX}_HAVE_VSNPRINTF) +if (MINGW OR NOT WINDOWS) + if (${HDF_PREFIX}_HAVE_VSNPRINTF) + HDF_FUNCTION_TEST (VSNPRINTF_WORKS) + endif () +endif () + +#----------------------------------------------------------------------------- +# sigsetjmp is special; may actually be a macro +#----------------------------------------------------------------------------- +if (NOT ${HDF_PREFIX}_HAVE_SIGSETJMP) + if (${HDF_PREFIX}_HAVE_SETJMP_H) + CHECK_SYMBOL_EXISTS (sigsetjmp "setjmp.h" ${HDF_PREFIX}_HAVE_MACRO_SIGSETJMP) + if (${HDF_PREFIX}_HAVE_MACRO_SIGSETJMP) + set (${HDF_PREFIX}_HAVE_SIGSETJMP 
1) + endif () + endif () +endif () + +#----------------------------------------------------------------------------- +# Check a bunch of other functions +#----------------------------------------------------------------------------- +if (MINGW OR NOT WINDOWS) + foreach (other_test + HAVE_ATTRIBUTE + HAVE_C99_FUNC + HAVE_FUNCTION + HAVE_C99_DESIGNATED_INITIALIZER + SYSTEM_SCOPE_THREADS + HAVE_SOCKLEN_T + ) + HDF_FUNCTION_TEST (${other_test}) + endforeach () +endif () + +#----------------------------------------------------------------------------- +# Check if InitOnceExecuteOnce is available +#----------------------------------------------------------------------------- +if (WINDOWS) + if (NOT HDF_NO_IOEO_TEST) + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "Checking for InitOnceExecuteOnce:") + endif () + if (NOT DEFINED ${HDF_PREFIX}_HAVE_IOEO) + if (LARGEFILE) + set (CMAKE_REQUIRED_DEFINITIONS + "${CURRENT_TEST_DEFINITIONS} -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -D_LARGEFILE_SOURCE" + ) + endif () + set (MACRO_CHECK_FUNCTION_DEFINITIONS "-DHAVE_IOEO ${CMAKE_REQUIRED_FLAGS}") + if (CMAKE_REQUIRED_INCLUDES) + set (CHECK_C_SOURCE_COMPILES_ADD_INCLUDES "-DINCLUDE_DIRECTORIES:STRING=${CMAKE_REQUIRED_INCLUDES}") + else () + set (CHECK_C_SOURCE_COMPILES_ADD_INCLUDES) + endif () + + TRY_RUN(HAVE_IOEO_EXITCODE HAVE_IOEO_COMPILED + ${CMAKE_BINARY_DIR} + ${HDF_RESOURCES_DIR}/HDFTests.c + COMPILE_DEFINITIONS "${CMAKE_REQUIRED_DEFINITIONS} ${MACRO_CHECK_FUNCTION_DEFINITIONS}" + LINK_LIBRARIES "${HDF5_REQUIRED_LIBRARIES}" + CMAKE_FLAGS "${CHECK_C_SOURCE_COMPILES_ADD_INCLUDES} -DCMAKE_SKIP_RPATH:BOOL=${CMAKE_SKIP_RPATH}" + COMPILE_OUTPUT_VARIABLE OUTPUT + ) + # if it did not compile make the return value fail code of 1 + if (NOT HAVE_IOEO_COMPILED) + set (HAVE_IOEO_EXITCODE 1) + endif () + # if the return value was 0 then it worked + if ("${HAVE_IOEO_EXITCODE}" EQUAL 0) + set (${HDF_PREFIX}_HAVE_IOEO 1 CACHE INTERNAL "Test InitOnceExecuteOnce") + 
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "Performing Test InitOnceExecuteOnce - Success") + endif () + file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeOutput.log + "Performing C SOURCE FILE Test InitOnceExecuteOnce succeeded with the following output:\n" + "${OUTPUT}\n" + "Return value: ${HAVE_IOEO}\n") + else () + if (CMAKE_CROSSCOMPILING AND "${HAVE_IOEO_EXITCODE}" MATCHES "FAILED_TO_RUN") + set (${HDF_PREFIX}_HAVE_IOEO "${HAVE_IOEO_EXITCODE}") + else () + set (${HDF_PREFIX}_HAVE_IOEO "" CACHE INTERNAL "Test InitOnceExecuteOnce") + endif () + + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "Performing Test InitOnceExecuteOnce - Failed") + endif () + file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log + "Performing InitOnceExecuteOnce Test failed with the following output:\n" + "${OUTPUT}\n" + "Return value: ${HAVE_IOEO_EXITCODE}\n") + endif () + endif () + endif () +endif () + +#----------------------------------------------------------------------------- +# Determine how 'inline' is used +#----------------------------------------------------------------------------- +foreach (inline_test inline __inline__ __inline) + string (TOUPPER ${inline_test} INLINE_TEST_MACRO) + HDF_FUNCTION_TEST (HAVE_${INLINE_TEST_MACRO}) +endforeach () + +#----------------------------------------------------------------------------- +# Check how to print a Long Long integer +#----------------------------------------------------------------------------- +if (NOT ${HDF_PREFIX}_PRINTF_LL_WIDTH OR ${HDF_PREFIX}_PRINTF_LL_WIDTH MATCHES "unknown") + set (PRINT_LL_FOUND 0) + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "Checking for appropriate format for 64 bit long:") + endif () + set (CURRENT_TEST_DEFINITIONS "-DPRINTF_LL_WIDTH") + if (${HDF_PREFIX}_SIZEOF_LONG_LONG) + set (CURRENT_TEST_DEFINITIONS "${CURRENT_TEST_DEFINITIONS} -DHAVE_LONG_LONG") + endif () + TRY_RUN 
(${HDF_PREFIX}_PRINTF_LL_TEST_RUN ${HDF_PREFIX}_PRINTF_LL_TEST_COMPILE + ${CMAKE_BINARY_DIR} + ${HDF_RESOURCES_DIR}/HDFTests.c + COMPILE_DEFINITIONS "${CURRENT_TEST_DEFINITIONS}" + RUN_OUTPUT_VARIABLE OUTPUT + ) + if (${HDF_PREFIX}_PRINTF_LL_TEST_COMPILE) + if (${HDF_PREFIX}_PRINTF_LL_TEST_RUN MATCHES 0) + string(REGEX REPLACE ".*PRINTF_LL_WIDTH=\\[(.*)\\].*" "\\1" ${HDF_PREFIX}_PRINTF_LL "${OUTPUT}") + set (${HDF_PREFIX}_PRINTF_LL_WIDTH "\"${${HDF_PREFIX}_PRINTF_LL}\"" CACHE INTERNAL "Width for printf for type `long long' or `__int64', us. `ll") + set (PRINT_LL_FOUND 1) + else () + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "Width test failed with result: ${${HDF_PREFIX}_PRINTF_LL_TEST_RUN}") + endif () + endif () + else () + file (APPEND ${CMAKE_BINARY_DIR}/CMakeFiles/CMakeError.log + "Test ${HDF_PREFIX}_PRINTF_LL_WIDTH failed\n" + ) + endif () + + if (PRINT_LL_FOUND) + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "Checking for appropriate format for 64 bit long: found ${${HDF_PREFIX}_PRINTF_LL_WIDTH}") + endif () + else () + if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") + message (VERBOSE "Checking for appropriate format for 64 bit long: not found") + endif () + set (${HDF_PREFIX}_PRINTF_LL_WIDTH "\"unknown\"" CACHE INTERNAL + "Width for printf for type `long long' or `__int64', us. `ll" + ) + endif () +endif () + +# ---------------------------------------------------------------------- +# Set the flag to indicate that the machine can handle converting +# denormalized floating-point values. 
+# (This flag should be set for all machines, except for the Crays, where +# the cache value is set in its config file) +#----------------------------------------------------------------------------- +set (${HDF_PREFIX}_CONVERT_DENORMAL_FLOAT 1) if (HDF5_ENABLE_USING_MEMCHECKER) set (${HDF_PREFIX}_USING_MEMCHECKER 1) @@ -146,7 +825,7 @@ if (NOT WINDOWS) set (MACRO_CHECK_FUNCTION_DEFINITIONS "-DTEST_DIRECT_VFD_WORKS -D_GNU_SOURCE ${CMAKE_REQUIRED_FLAGS}") TRY_RUN (TEST_DIRECT_VFD_WORKS_RUN TEST_DIRECT_VFD_WORKS_COMPILE ${CMAKE_BINARY_DIR} - ${HDF_RESOURCES_EXT_DIR}/HDFTests.c + ${HDF_RESOURCES_DIR}/HDFTests.c CMAKE_FLAGS -DCOMPILE_DEFINITIONS:STRING=${MACRO_CHECK_FUNCTION_DEFINITIONS} OUTPUT_VARIABLE OUTPUT ) @@ -194,7 +873,6 @@ endif () # ---------------------------------------------------------------------- # Check whether we can build the Mirror VFD -# Header-check flags set in config/cmake_ext_mod/ConfigureChecks.cmake # ---------------------------------------------------------------------- option (HDF5_ENABLE_MIRROR_VFD "Build the Mirror Virtual File Driver" OFF) if (HDF5_ENABLE_MIRROR_VFD) diff --git a/config/cmake_ext_mod/FindSZIP.cmake b/config/cmake/FindSZIP.cmake similarity index 100% rename from config/cmake_ext_mod/FindSZIP.cmake rename to config/cmake/FindSZIP.cmake diff --git a/config/cmake_ext_mod/GetTimeOfDayTest.cpp b/config/cmake/GetTimeOfDayTest.cpp similarity index 100% rename from config/cmake_ext_mod/GetTimeOfDayTest.cpp rename to config/cmake/GetTimeOfDayTest.cpp diff --git a/config/cmake_ext_mod/HDFLibMacros.cmake b/config/cmake/HDFLibMacros.cmake similarity index 100% rename from config/cmake_ext_mod/HDFLibMacros.cmake rename to config/cmake/HDFLibMacros.cmake diff --git a/config/cmake_ext_mod/HDFMacros.cmake b/config/cmake/HDFMacros.cmake similarity index 100% rename from config/cmake_ext_mod/HDFMacros.cmake rename to config/cmake/HDFMacros.cmake diff --git a/config/cmake_ext_mod/HDFTests.c b/config/cmake/HDFTests.c similarity index 100% 
rename from config/cmake_ext_mod/HDFTests.c rename to config/cmake/HDFTests.c diff --git a/config/cmake_ext_mod/HDFUseFortran.cmake b/config/cmake/HDFUseFortran.cmake similarity index 100% rename from config/cmake_ext_mod/HDFUseFortran.cmake rename to config/cmake/HDFUseFortran.cmake diff --git a/config/cmake_ext_mod/NSIS.InstallOptions.ini.in b/config/cmake/NSIS.InstallOptions.ini.in similarity index 100% rename from config/cmake_ext_mod/NSIS.InstallOptions.ini.in rename to config/cmake/NSIS.InstallOptions.ini.in diff --git a/config/cmake_ext_mod/NSIS.template.in b/config/cmake/NSIS.template.in similarity index 100% rename from config/cmake_ext_mod/NSIS.template.in rename to config/cmake/NSIS.template.in diff --git a/config/cmake_ext_mod/grepTest.cmake b/config/cmake/grepTest.cmake similarity index 100% rename from config/cmake_ext_mod/grepTest.cmake rename to config/cmake/grepTest.cmake diff --git a/config/cmake_ext_mod/hdf.bmp b/config/cmake/hdf.bmp similarity index 100% rename from config/cmake_ext_mod/hdf.bmp rename to config/cmake/hdf.bmp diff --git a/config/cmake_ext_mod/hdf.icns b/config/cmake/hdf.icns similarity index 100% rename from config/cmake_ext_mod/hdf.icns rename to config/cmake/hdf.icns diff --git a/config/cmake_ext_mod/hdf.ico b/config/cmake/hdf.ico similarity index 100% rename from config/cmake_ext_mod/hdf.ico rename to config/cmake/hdf.ico diff --git a/config/cmake_ext_mod/runTest.cmake b/config/cmake/runTest.cmake similarity index 100% rename from config/cmake_ext_mod/runTest.cmake rename to config/cmake/runTest.cmake diff --git a/config/cmake_ext_mod/version.plist.in b/config/cmake/version.plist.in similarity index 100% rename from config/cmake_ext_mod/version.plist.in rename to config/cmake/version.plist.in diff --git a/config/cmake_ext_mod/CTestCustom.cmake b/config/cmake_ext_mod/CTestCustom.cmake deleted file mode 100644 index 2d72e8d6ad9..00000000000 --- a/config/cmake_ext_mod/CTestCustom.cmake +++ /dev/null @@ -1,17 +0,0 @@ -set 
(CTEST_CUSTOM_MAXIMUM_NUMBER_OF_WARNINGS 3000) - -set (CTEST_CUSTOM_WARNING_EXCEPTION - ${CTEST_CUSTOM_WARNING_EXCEPTION} - ".*note.*expected.*void.*but argument is of type.*volatile.*" - ".*src.SZIP.*:[ \t]*warning.*" - ".*src.ZLIB.*:[ \t]*warning.*" - ".*src.JPEG.*:[ \t]*warning.*" - ".*POSIX name for this item is deprecated.*" - ".*disabling jobserver mode.*" - ".*warning.*implicit declaration of function.*" - ".*note: expanded from macro.*" -) - -set (CTEST_CUSTOM_MEMCHECK_IGNORE - ${CTEST_CUSTOM_MEMCHECK_IGNORE} -) diff --git a/config/cmake_ext_mod/ConfigureChecks.cmake b/config/cmake_ext_mod/ConfigureChecks.cmake deleted file mode 100644 index 41f53e1a09b..00000000000 --- a/config/cmake_ext_mod/ConfigureChecks.cmake +++ /dev/null @@ -1,693 +0,0 @@ -# -# Copyright by The HDF Group. -# All rights reserved. -# -# This file is part of HDF5. The full HDF5 copyright notice, including -# terms governing use, modification, and redistribution, is contained in -# the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://www.hdfgroup.org/licenses. -# If you do not have access to either file, you may request a copy from -# help@hdfgroup.org. 
-# -#----------------------------------------------------------------------------- -# Include all the necessary files for macros -#----------------------------------------------------------------------------- -include (CheckFunctionExists) -include (CheckIncludeFile) -include (CheckIncludeFiles) -include (CheckLibraryExists) -include (CheckSymbolExists) -include (CheckTypeSize) -include (CheckVariableExists) -include (TestBigEndian) -include (CheckStructHasMember) - -# Check for Darwin (not just Apple - we also want to catch OpenDarwin) -if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin") - set (${HDF_PREFIX}_HAVE_DARWIN 1) -endif () - -# Check for Solaris -if (${CMAKE_SYSTEM_NAME} MATCHES "SunOS") - set (${HDF_PREFIX}_HAVE_SOLARIS 1) -endif () - -#----------------------------------------------------------------------------- -# This MACRO checks IF the symbol exists in the library and IF it -# does, it appends library to the list. -#----------------------------------------------------------------------------- -set (LINK_LIBS "") -macro (CHECK_LIBRARY_EXISTS_CONCAT LIBRARY SYMBOL VARIABLE) - CHECK_LIBRARY_EXISTS ("${LIBRARY};${LINK_LIBS}" ${SYMBOL} "" ${VARIABLE}) - if (${VARIABLE}) - set (LINK_LIBS ${LINK_LIBS} ${LIBRARY}) - endif () -endmacro () - -# ---------------------------------------------------------------------- -# WINDOWS Hard code Values -# ---------------------------------------------------------------------- -set (WINDOWS) - -if (MINGW) - set (${HDF_PREFIX}_HAVE_MINGW 1) - set (WINDOWS 1) # MinGW tries to imitate Windows - set (CMAKE_REQUIRED_FLAGS "-DWIN32_LEAN_AND_MEAN=1 -DNOGDI=1") - set (${HDF_PREFIX}_HAVE_WINSOCK2_H 1) - set (__USE_MINGW_ANSI_STDIO 1) -endif () - -if (WIN32 AND NOT MINGW) - if (NOT UNIX) - set (WINDOWS 1) - set (CMAKE_REQUIRED_FLAGS "/DWIN32_LEAN_AND_MEAN=1 /DNOGDI=1") - if (MSVC) - set (${HDF_PREFIX}_HAVE_VISUAL_STUDIO 1) - endif () - endif () -endif () - -if (WINDOWS) - set (HDF5_REQUIRED_LIBRARIES "ws2_32.lib;wsock32.lib") - set 
(${HDF_PREFIX}_HAVE_WIN32_API 1) - set (${HDF_PREFIX}_HAVE_LIBM 1) - set (${HDF_PREFIX}_HAVE_STRDUP 1) - set (${HDF_PREFIX}_HAVE_SYSTEM 1) - set (${HDF_PREFIX}_HAVE_LONGJMP 1) - if (NOT MINGW) - set (${HDF_PREFIX}_HAVE_GETHOSTNAME 1) - set (${HDF_PREFIX}_HAVE_FUNCTION 1) - endif () - if (NOT UNIX AND NOT CYGWIN) - set (${HDF_PREFIX}_HAVE_GETCONSOLESCREENBUFFERINFO 1) - set (${HDF_PREFIX}_GETTIMEOFDAY_GIVES_TZ 1) - set (${HDF_PREFIX}_HAVE_TIMEZONE 1) - set (${HDF_PREFIX}_HAVE_GETTIMEOFDAY 1) - set (${HDF_PREFIX}_HAVE_LIBWS2_32 1) - set (${HDF_PREFIX}_HAVE_LIBWSOCK32 1) - endif () -endif () - -# ---------------------------------------------------------------------- -# END of WINDOWS Hard code Values -# ---------------------------------------------------------------------- - -if (NOT WINDOWS) - TEST_BIG_ENDIAN (${HDF_PREFIX}_WORDS_BIGENDIAN) -endif () - -#----------------------------------------------------------------------------- -# Check IF header file exists and add it to the list. -#----------------------------------------------------------------------------- -macro (CHECK_INCLUDE_FILE_CONCAT FILE VARIABLE) - CHECK_INCLUDE_FILES ("${USE_INCLUDES};${FILE}" ${VARIABLE}) - if (${VARIABLE}) - set (USE_INCLUDES ${USE_INCLUDES} ${FILE}) - endif () -endmacro () - -#----------------------------------------------------------------------------- -# Check for the existence of certain header files -#----------------------------------------------------------------------------- -CHECK_INCLUDE_FILE_CONCAT ("sys/file.h" ${HDF_PREFIX}_HAVE_SYS_FILE_H) -CHECK_INCLUDE_FILE_CONCAT ("sys/ioctl.h" ${HDF_PREFIX}_HAVE_SYS_IOCTL_H) -CHECK_INCLUDE_FILE_CONCAT ("sys/resource.h" ${HDF_PREFIX}_HAVE_SYS_RESOURCE_H) -CHECK_INCLUDE_FILE_CONCAT ("sys/socket.h" ${HDF_PREFIX}_HAVE_SYS_SOCKET_H) -CHECK_INCLUDE_FILE_CONCAT ("sys/stat.h" ${HDF_PREFIX}_HAVE_SYS_STAT_H) -CHECK_INCLUDE_FILE_CONCAT ("sys/time.h" ${HDF_PREFIX}_HAVE_SYS_TIME_H) -CHECK_INCLUDE_FILE_CONCAT ("sys/types.h" 
${HDF_PREFIX}_HAVE_SYS_TYPES_H) -CHECK_INCLUDE_FILE_CONCAT ("features.h" ${HDF_PREFIX}_HAVE_FEATURES_H) -CHECK_INCLUDE_FILE_CONCAT ("dirent.h" ${HDF_PREFIX}_HAVE_DIRENT_H) -CHECK_INCLUDE_FILE_CONCAT ("setjmp.h" ${HDF_PREFIX}_HAVE_SETJMP_H) -CHECK_INCLUDE_FILE_CONCAT ("stddef.h" ${HDF_PREFIX}_HAVE_STDDEF_H) -CHECK_INCLUDE_FILE_CONCAT ("stdint.h" ${HDF_PREFIX}_HAVE_STDINT_H) -CHECK_INCLUDE_FILE_CONCAT ("unistd.h" ${HDF_PREFIX}_HAVE_UNISTD_H) -CHECK_INCLUDE_FILE_CONCAT ("pwd.h" ${HDF_PREFIX}_HAVE_PWD_H) - -# Windows -CHECK_INCLUDE_FILE_CONCAT ("io.h" ${HDF_PREFIX}_HAVE_IO_H) -if (NOT CYGWIN) - CHECK_INCLUDE_FILE_CONCAT ("winsock2.h" ${HDF_PREFIX}_HAVE_WINSOCK2_H) -endif () - -if (CMAKE_SYSTEM_NAME MATCHES "OSF") - CHECK_INCLUDE_FILE_CONCAT ("sys/sysinfo.h" ${HDF_PREFIX}_HAVE_SYS_SYSINFO_H) - CHECK_INCLUDE_FILE_CONCAT ("sys/proc.h" ${HDF_PREFIX}_HAVE_SYS_PROC_H) -else () - set (${HDF_PREFIX}_HAVE_SYS_SYSINFO_H "" CACHE INTERNAL "" FORCE) - set (${HDF_PREFIX}_HAVE_SYS_PROC_H "" CACHE INTERNAL "" FORCE) -endif () - -CHECK_INCLUDE_FILE_CONCAT ("globus/common.h" ${HDF_PREFIX}_HAVE_GLOBUS_COMMON_H) -CHECK_INCLUDE_FILE_CONCAT ("pdb.h" ${HDF_PREFIX}_HAVE_PDB_H) -CHECK_INCLUDE_FILE_CONCAT ("pthread.h" ${HDF_PREFIX}_HAVE_PTHREAD_H) -CHECK_INCLUDE_FILE_CONCAT ("srbclient.h" ${HDF_PREFIX}_HAVE_SRBCLIENT_H) -CHECK_INCLUDE_FILE_CONCAT ("string.h" ${HDF_PREFIX}_HAVE_STRING_H) -CHECK_INCLUDE_FILE_CONCAT ("strings.h" ${HDF_PREFIX}_HAVE_STRINGS_H) -CHECK_INCLUDE_FILE_CONCAT ("stdlib.h" ${HDF_PREFIX}_HAVE_STDLIB_H) -CHECK_INCLUDE_FILE_CONCAT ("memory.h" ${HDF_PREFIX}_HAVE_MEMORY_H) -CHECK_INCLUDE_FILE_CONCAT ("dlfcn.h" ${HDF_PREFIX}_HAVE_DLFCN_H) -CHECK_INCLUDE_FILE_CONCAT ("inttypes.h" ${HDF_PREFIX}_HAVE_INTTYPES_H) -CHECK_INCLUDE_FILE_CONCAT ("netinet/in.h" ${HDF_PREFIX}_HAVE_NETINET_IN_H) -CHECK_INCLUDE_FILE_CONCAT ("netdb.h" ${HDF_PREFIX}_HAVE_NETDB_H) -CHECK_INCLUDE_FILE_CONCAT ("arpa/inet.h" ${HDF_PREFIX}_HAVE_ARPA_INET_H) -# _Bool type support -CHECK_INCLUDE_FILE_CONCAT 
(stdbool.h ${HDF_PREFIX}_HAVE_STDBOOL_H) - -## Check for non-standard extension quadmath.h - -CHECK_INCLUDE_FILES(quadmath.h C_HAVE_QUADMATH) -if (${C_HAVE_QUADMATH}) - set(${HDF_PREFIX}_HAVE_QUADMATH_H 1) -else () - set(${HDF_PREFIX}_HAVE_QUADMATH_H 0) -endif () - -if (CYGWIN) - set (${HDF_PREFIX}_HAVE_LSEEK64 0) -endif () - -#----------------------------------------------------------------------------- -# Check for the math library "m" -#----------------------------------------------------------------------------- -if (MINGW OR NOT WINDOWS) - CHECK_LIBRARY_EXISTS_CONCAT ("m" ceil ${HDF_PREFIX}_HAVE_LIBM) - CHECK_LIBRARY_EXISTS_CONCAT ("dl" dlopen ${HDF_PREFIX}_HAVE_LIBDL) - CHECK_LIBRARY_EXISTS_CONCAT ("ws2_32" WSAStartup ${HDF_PREFIX}_HAVE_LIBWS2_32) - CHECK_LIBRARY_EXISTS_CONCAT ("wsock32" gethostbyname ${HDF_PREFIX}_HAVE_LIBWSOCK32) -endif () - -# UCB (BSD) compatibility library -CHECK_LIBRARY_EXISTS_CONCAT ("ucb" gethostname ${HDF_PREFIX}_HAVE_LIBUCB) - -# For other tests to use the same libraries -set (HDF5_REQUIRED_LIBRARIES ${HDF5_REQUIRED_LIBRARIES} ${LINK_LIBS}) - -set (USE_INCLUDES "") -if (WINDOWS) - set (USE_INCLUDES ${USE_INCLUDES} "windows.h") -endif () - -# For other specific tests, use this MACRO. 
-macro (HDF_FUNCTION_TEST OTHER_TEST) - if (NOT DEFINED ${HDF_PREFIX}_${OTHER_TEST}) - set (MACRO_CHECK_FUNCTION_DEFINITIONS "-D${OTHER_TEST} ${CMAKE_REQUIRED_FLAGS}") - - foreach (def - HAVE_SYS_TIME_H - HAVE_UNISTD_H - HAVE_SYS_TYPES_H - HAVE_SYS_SOCKET_H - ) - if ("${${HDF_PREFIX}_${def}}") - set (MACRO_CHECK_FUNCTION_DEFINITIONS "${MACRO_CHECK_FUNCTION_DEFINITIONS} -D${def}") - endif () - endforeach () - - if (LARGEFILE) - set (MACRO_CHECK_FUNCTION_DEFINITIONS - "${MACRO_CHECK_FUNCTION_DEFINITIONS} -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -D_LARGEFILE_SOURCE" - ) - endif () - - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (TRACE "Performing ${OTHER_TEST}") - endif () - try_compile (${OTHER_TEST} - ${CMAKE_BINARY_DIR} - ${HDF_RESOURCES_EXT_DIR}/HDFTests.c - COMPILE_DEFINITIONS "${MACRO_CHECK_FUNCTION_DEFINITIONS}" - LINK_LIBRARIES "${HDF5_REQUIRED_LIBRARIES}" - OUTPUT_VARIABLE OUTPUT - ) - if (${OTHER_TEST}) - set (${HDF_PREFIX}_${OTHER_TEST} 1 CACHE INTERNAL "Other test ${FUNCTION}") - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "Performing Other Test ${OTHER_TEST} - Success") - endif () - else () - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "Performing Other Test ${OTHER_TEST} - Failed") - endif () - set (${HDF_PREFIX}_${OTHER_TEST} "" CACHE INTERNAL "Other test ${FUNCTION}") - file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log - "Performing Other Test ${OTHER_TEST} failed with the following output:\n" - "${OUTPUT}\n" - ) - endif () - endif () -endmacro () - -#----------------------------------------------------------------------------- -# Check for these functions before the time headers are checked -#----------------------------------------------------------------------------- -HDF_FUNCTION_TEST (STDC_HEADERS) - -#----------------------------------------------------------------------------- -# Check for large file support 
-#----------------------------------------------------------------------------- - -# The linux-lfs option is deprecated. -set (LINUX_LFS 0) - -set (HDF_EXTRA_C_FLAGS) -set (HDF_EXTRA_FLAGS) -if (MINGW OR NOT WINDOWS) - if (CMAKE_SYSTEM_NAME MATCHES "Linux") - # Linux Specific flags - # This was originally defined as _POSIX_SOURCE which was updated to - # _POSIX_C_SOURCE=199506L to expose a greater amount of POSIX - # functionality so clock_gettime and CLOCK_MONOTONIC are defined - # correctly. This was later updated to 200112L so that - # posix_memalign() is visible for the direct VFD code on Linux - # systems. - # POSIX feature information can be found in the gcc manual at: - # http://www.gnu.org/s/libc/manual/html_node/Feature-Test-Macros.html - set (HDF_EXTRA_C_FLAGS -D_POSIX_C_SOURCE=200809L) - - # Need to add this so that O_DIRECT is visible for the direct - # VFD on Linux systems. - set (HDF_EXTRA_C_FLAGS ${HDF_EXTRA_C_FLAGS} -D_GNU_SOURCE) - - option (HDF_ENABLE_LARGE_FILE "Enable support for large (64-bit) files on Linux." ON) - if (HDF_ENABLE_LARGE_FILE AND NOT DEFINED TEST_LFS_WORKS_RUN) - set (msg "Performing TEST_LFS_WORKS") - try_run (TEST_LFS_WORKS_RUN TEST_LFS_WORKS_COMPILE - ${CMAKE_BINARY_DIR} - ${HDF_RESOURCES_EXT_DIR}/HDFTests.c - COMPILE_DEFINITIONS "-DTEST_LFS_WORKS" - ) - - # The LARGEFILE definitions were from the transition period - # and are probably no longer needed. The FILE_OFFSET_BITS - # check should be generalized for all POSIX systems as it - # is in the Autotools. - if (TEST_LFS_WORKS_COMPILE) - if (TEST_LFS_WORKS_RUN MATCHES 0) - set (TEST_LFS_WORKS 1 CACHE INTERNAL ${msg}) - set (LARGEFILE 1) - set (HDF_EXTRA_FLAGS ${HDF_EXTRA_FLAGS} -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -D_LARGEFILE_SOURCE) - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "${msg}... 
yes") - endif () - else () - set (TEST_LFS_WORKS "" CACHE INTERNAL ${msg}) - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "${msg}... no") - endif () - file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log - "Test TEST_LFS_WORKS Run failed with the following exit code:\n ${TEST_LFS_WORKS_RUN}\n" - ) - endif () - else () - set (TEST_LFS_WORKS "" CACHE INTERNAL ${msg}) - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "${msg}... no") - endif () - file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log - "Test TEST_LFS_WORKS Compile failed\n" - ) - endif () - endif () - set (CMAKE_REQUIRED_DEFINITIONS ${CMAKE_REQUIRED_DEFINITIONS} ${HDF_EXTRA_FLAGS}) - endif () -endif () - -#----------------------------------------------------------------------------- -# Check for HAVE_OFF64_T functionality -#----------------------------------------------------------------------------- -if (MINGW OR NOT WINDOWS) - HDF_FUNCTION_TEST (HAVE_OFF64_T) - if (${HDF_PREFIX}_HAVE_OFF64_T) - CHECK_FUNCTION_EXISTS (lseek64 ${HDF_PREFIX}_HAVE_LSEEK64) - endif () - - CHECK_FUNCTION_EXISTS (fseeko ${HDF_PREFIX}_HAVE_FSEEKO) - - CHECK_STRUCT_HAS_MEMBER("struct stat64" st_blocks "sys/types.h;sys/stat.h" HAVE_STAT64_STRUCT) - if (HAVE_STAT64_STRUCT) - CHECK_FUNCTION_EXISTS (stat64 ${HDF_PREFIX}_HAVE_STAT64) - endif () -endif () - -#----------------------------------------------------------------------------- -# Check the size in bytes of all the int and float types -#----------------------------------------------------------------------------- -macro (HDF_CHECK_TYPE_SIZE type var) - set (aType ${type}) - set (aVar ${var}) - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (TRACE "Checking size of ${aType} and storing into ${aVar}") - endif () - CHECK_TYPE_SIZE (${aType} ${aVar}) - if (NOT ${aVar}) - set (${aVar} 0 CACHE INTERNAL "SizeOf for ${aType}") - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message 
(TRACE "Size of ${aType} was NOT Found") - endif () - endif () -endmacro () - -HDF_CHECK_TYPE_SIZE (char ${HDF_PREFIX}_SIZEOF_CHAR) -HDF_CHECK_TYPE_SIZE (short ${HDF_PREFIX}_SIZEOF_SHORT) -HDF_CHECK_TYPE_SIZE (int ${HDF_PREFIX}_SIZEOF_INT) -HDF_CHECK_TYPE_SIZE (unsigned ${HDF_PREFIX}_SIZEOF_UNSIGNED) -if (NOT APPLE) - HDF_CHECK_TYPE_SIZE (long ${HDF_PREFIX}_SIZEOF_LONG) -endif () -HDF_CHECK_TYPE_SIZE ("long long" ${HDF_PREFIX}_SIZEOF_LONG_LONG) -HDF_CHECK_TYPE_SIZE (__int64 ${HDF_PREFIX}_SIZEOF___INT64) -if (NOT ${HDF_PREFIX}_SIZEOF___INT64) - set (${HDF_PREFIX}_SIZEOF___INT64 0) -endif () - -HDF_CHECK_TYPE_SIZE (float ${HDF_PREFIX}_SIZEOF_FLOAT) -HDF_CHECK_TYPE_SIZE (double ${HDF_PREFIX}_SIZEOF_DOUBLE) -HDF_CHECK_TYPE_SIZE ("long double" ${HDF_PREFIX}_SIZEOF_LONG_DOUBLE) - -HDF_CHECK_TYPE_SIZE (int8_t ${HDF_PREFIX}_SIZEOF_INT8_T) -HDF_CHECK_TYPE_SIZE (uint8_t ${HDF_PREFIX}_SIZEOF_UINT8_T) -HDF_CHECK_TYPE_SIZE (int_least8_t ${HDF_PREFIX}_SIZEOF_INT_LEAST8_T) -HDF_CHECK_TYPE_SIZE (uint_least8_t ${HDF_PREFIX}_SIZEOF_UINT_LEAST8_T) -HDF_CHECK_TYPE_SIZE (int_fast8_t ${HDF_PREFIX}_SIZEOF_INT_FAST8_T) -HDF_CHECK_TYPE_SIZE (uint_fast8_t ${HDF_PREFIX}_SIZEOF_UINT_FAST8_T) - -HDF_CHECK_TYPE_SIZE (int16_t ${HDF_PREFIX}_SIZEOF_INT16_T) -HDF_CHECK_TYPE_SIZE (uint16_t ${HDF_PREFIX}_SIZEOF_UINT16_T) -HDF_CHECK_TYPE_SIZE (int_least16_t ${HDF_PREFIX}_SIZEOF_INT_LEAST16_T) -HDF_CHECK_TYPE_SIZE (uint_least16_t ${HDF_PREFIX}_SIZEOF_UINT_LEAST16_T) -HDF_CHECK_TYPE_SIZE (int_fast16_t ${HDF_PREFIX}_SIZEOF_INT_FAST16_T) -HDF_CHECK_TYPE_SIZE (uint_fast16_t ${HDF_PREFIX}_SIZEOF_UINT_FAST16_T) - -HDF_CHECK_TYPE_SIZE (int32_t ${HDF_PREFIX}_SIZEOF_INT32_T) -HDF_CHECK_TYPE_SIZE (uint32_t ${HDF_PREFIX}_SIZEOF_UINT32_T) -HDF_CHECK_TYPE_SIZE (int_least32_t ${HDF_PREFIX}_SIZEOF_INT_LEAST32_T) -HDF_CHECK_TYPE_SIZE (uint_least32_t ${HDF_PREFIX}_SIZEOF_UINT_LEAST32_T) -HDF_CHECK_TYPE_SIZE (int_fast32_t ${HDF_PREFIX}_SIZEOF_INT_FAST32_T) -HDF_CHECK_TYPE_SIZE (uint_fast32_t 
${HDF_PREFIX}_SIZEOF_UINT_FAST32_T) - -HDF_CHECK_TYPE_SIZE (int64_t ${HDF_PREFIX}_SIZEOF_INT64_T) -HDF_CHECK_TYPE_SIZE (uint64_t ${HDF_PREFIX}_SIZEOF_UINT64_T) -HDF_CHECK_TYPE_SIZE (int_least64_t ${HDF_PREFIX}_SIZEOF_INT_LEAST64_T) -HDF_CHECK_TYPE_SIZE (uint_least64_t ${HDF_PREFIX}_SIZEOF_UINT_LEAST64_T) -HDF_CHECK_TYPE_SIZE (int_fast64_t ${HDF_PREFIX}_SIZEOF_INT_FAST64_T) -HDF_CHECK_TYPE_SIZE (uint_fast64_t ${HDF_PREFIX}_SIZEOF_UINT_FAST64_T) - -HDF_CHECK_TYPE_SIZE (size_t ${HDF_PREFIX}_SIZEOF_SIZE_T) -HDF_CHECK_TYPE_SIZE (ssize_t ${HDF_PREFIX}_SIZEOF_SSIZE_T) -if (NOT ${HDF_PREFIX}_SIZEOF_SSIZE_T) - set (${HDF_PREFIX}_SIZEOF_SSIZE_T 0) -endif () -if (MINGW OR NOT WINDOWS) - HDF_CHECK_TYPE_SIZE (ptrdiff_t ${HDF_PREFIX}_SIZEOF_PTRDIFF_T) -endif () - -HDF_CHECK_TYPE_SIZE (off_t ${HDF_PREFIX}_SIZEOF_OFF_T) -HDF_CHECK_TYPE_SIZE (off64_t ${HDF_PREFIX}_SIZEOF_OFF64_T) -if (NOT ${HDF_PREFIX}_SIZEOF_OFF64_T) - set (${HDF_PREFIX}_SIZEOF_OFF64_T 0) -endif () -HDF_CHECK_TYPE_SIZE (time_t ${HDF_PREFIX}_SIZEOF_TIME_T) - -#----------------------------------------------------------------------------- -# Extra C99 types -#----------------------------------------------------------------------------- - -# _Bool type support -if (HAVE_STDBOOL_H) - set (CMAKE_EXTRA_INCLUDE_FILES stdbool.h) - HDF_CHECK_TYPE_SIZE (bool ${HDF_PREFIX}_SIZEOF_BOOL) -else () - HDF_CHECK_TYPE_SIZE (_Bool ${HDF_PREFIX}_SIZEOF_BOOL) -endif () - -if (MINGW OR NOT WINDOWS) - #----------------------------------------------------------------------------- - # Check if the dev_t type is a scalar type - #----------------------------------------------------------------------------- - HDF_FUNCTION_TEST (DEV_T_IS_SCALAR) - - # ---------------------------------------------------------------------- - # Check for MONOTONIC_TIMER support (used in clock_gettime). This has - # to be done after any POSIX/BSD defines to ensure that the test gets - # the correct POSIX level on linux. 
- CHECK_VARIABLE_EXISTS (CLOCK_MONOTONIC HAVE_CLOCK_MONOTONIC) - - #----------------------------------------------------------------------------- - # Check a bunch of time functions - #----------------------------------------------------------------------------- - CHECK_STRUCT_HAS_MEMBER("struct tm" tm_gmtoff "time.h" ${HDF_PREFIX}_HAVE_TM_GMTOFF) - CHECK_STRUCT_HAS_MEMBER("struct tm" __tm_gmtoff "time.h" ${HDF_PREFIX}_HAVE___TM_GMTOFF) - CHECK_STRUCT_HAS_MEMBER("struct tm" tm_sec "sys/types.h;sys/time.h;time.h" ${HDF_PREFIX}_TIME_WITH_SYS_TIME) - if (${HDF_PREFIX}_HAVE_SYS_TIME_H) - CHECK_STRUCT_HAS_MEMBER("struct tm" tz_minuteswest "sys/types.h;sys/time.h;time.h" ${HDF_PREFIX}_HAVE_STRUCT_TIMEZONE) - else () - CHECK_STRUCT_HAS_MEMBER("struct tm" tz_minuteswest "sys/types.h;time.h" ${HDF_PREFIX}_HAVE_STRUCT_TIMEZONE) - endif () - CHECK_FUNCTION_EXISTS (gettimeofday ${HDF_PREFIX}_HAVE_GETTIMEOFDAY) - foreach (time_test -# HAVE_TIMEZONE - GETTIMEOFDAY_GIVES_TZ - HAVE_TM_ZONE - HAVE_STRUCT_TM_TM_ZONE - ) - HDF_FUNCTION_TEST (${time_test}) - endforeach () - if (NOT CYGWIN AND NOT MINGW) - HDF_FUNCTION_TEST (HAVE_TIMEZONE) - endif () - - # ---------------------------------------------------------------------- - # Does the struct stat have the st_blocks field? This field is not Posix. - # - CHECK_STRUCT_HAS_MEMBER("struct stat" st_blocks "sys/types.h;sys/stat.h" ${HDF_PREFIX}_HAVE_STAT_ST_BLOCKS) - - # ---------------------------------------------------------------------- - # How do we figure out the width of a tty in characters? 
- # - CHECK_FUNCTION_EXISTS (ioctl ${HDF_PREFIX}_HAVE_IOCTL) - CHECK_STRUCT_HAS_MEMBER ("struct videoconfig" numtextcols "" ${HDF_PREFIX}_HAVE_STRUCT_VIDEOCONFIG) - CHECK_STRUCT_HAS_MEMBER ("struct text_info" screenwidth "" ${HDF_PREFIX}_HAVE_STRUCT_TEXT_INFO) - CHECK_FUNCTION_EXISTS (_getvideoconfig ${HDF_PREFIX}_HAVE__GETVIDEOCONFIG) - CHECK_FUNCTION_EXISTS (gettextinfo ${HDF_PREFIX}_HAVE_GETTEXTINFO) - CHECK_FUNCTION_EXISTS (_scrsize ${HDF_PREFIX}_HAVE__SCRSIZE) - if (NOT CYGWIN) - CHECK_FUNCTION_EXISTS (GetConsoleScreenBufferInfo ${HDF_PREFIX}_HAVE_GETCONSOLESCREENBUFFERINFO) - endif () - CHECK_SYMBOL_EXISTS (TIOCGWINSZ "sys/ioctl.h" ${HDF_PREFIX}_HAVE_TIOCGWINSZ) - CHECK_SYMBOL_EXISTS (TIOCGETD "sys/ioctl.h" ${HDF_PREFIX}_HAVE_TIOCGETD) - - # ---------------------------------------------------------------------- - # cygwin user credentials are different then on linux - # - if (NOT CYGWIN AND NOT MINGW) - CHECK_FUNCTION_EXISTS (getpwuid ${HDF_PREFIX}_HAVE_GETPWUID) - endif () -endif () - -#----------------------------------------------------------------------------- -# Check for some functions that are used -# -CHECK_FUNCTION_EXISTS (alarm ${HDF_PREFIX}_HAVE_ALARM) -CHECK_FUNCTION_EXISTS (fcntl ${HDF_PREFIX}_HAVE_FCNTL) -CHECK_FUNCTION_EXISTS (flock ${HDF_PREFIX}_HAVE_FLOCK) -CHECK_FUNCTION_EXISTS (fork ${HDF_PREFIX}_HAVE_FORK) -CHECK_FUNCTION_EXISTS (frexpf ${HDF_PREFIX}_HAVE_FREXPF) -CHECK_FUNCTION_EXISTS (frexpl ${HDF_PREFIX}_HAVE_FREXPL) - -CHECK_FUNCTION_EXISTS (gethostname ${HDF_PREFIX}_HAVE_GETHOSTNAME) -CHECK_FUNCTION_EXISTS (getrusage ${HDF_PREFIX}_HAVE_GETRUSAGE) -CHECK_FUNCTION_EXISTS (llround ${HDF_PREFIX}_HAVE_LLROUND) -CHECK_FUNCTION_EXISTS (llroundf ${HDF_PREFIX}_HAVE_LLROUNDF) -CHECK_FUNCTION_EXISTS (lround ${HDF_PREFIX}_HAVE_LROUND) -CHECK_FUNCTION_EXISTS (lroundf ${HDF_PREFIX}_HAVE_LROUNDF) -CHECK_FUNCTION_EXISTS (lstat ${HDF_PREFIX}_HAVE_LSTAT) - -CHECK_FUNCTION_EXISTS (pread ${HDF_PREFIX}_HAVE_PREAD) -CHECK_FUNCTION_EXISTS (pwrite 
${HDF_PREFIX}_HAVE_PWRITE) -CHECK_FUNCTION_EXISTS (rand_r ${HDF_PREFIX}_HAVE_RAND_R) -CHECK_FUNCTION_EXISTS (random ${HDF_PREFIX}_HAVE_RANDOM) -CHECK_FUNCTION_EXISTS (round ${HDF_PREFIX}_HAVE_ROUND) -CHECK_FUNCTION_EXISTS (roundf ${HDF_PREFIX}_HAVE_ROUNDF) -CHECK_FUNCTION_EXISTS (setsysinfo ${HDF_PREFIX}_HAVE_SETSYSINFO) - -CHECK_FUNCTION_EXISTS (signal ${HDF_PREFIX}_HAVE_SIGNAL) -CHECK_FUNCTION_EXISTS (longjmp ${HDF_PREFIX}_HAVE_LONGJMP) -CHECK_FUNCTION_EXISTS (setjmp ${HDF_PREFIX}_HAVE_SETJMP) -CHECK_FUNCTION_EXISTS (siglongjmp ${HDF_PREFIX}_HAVE_SIGLONGJMP) -CHECK_FUNCTION_EXISTS (sigsetjmp ${HDF_PREFIX}_HAVE_SIGSETJMP) -CHECK_FUNCTION_EXISTS (sigprocmask ${HDF_PREFIX}_HAVE_SIGPROCMASK) - -CHECK_FUNCTION_EXISTS (snprintf ${HDF_PREFIX}_HAVE_SNPRINTF) -CHECK_FUNCTION_EXISTS (srandom ${HDF_PREFIX}_HAVE_SRANDOM) -CHECK_FUNCTION_EXISTS (strdup ${HDF_PREFIX}_HAVE_STRDUP) -CHECK_FUNCTION_EXISTS (strtoll ${HDF_PREFIX}_HAVE_STRTOLL) -CHECK_FUNCTION_EXISTS (strtoull ${HDF_PREFIX}_HAVE_STRTOULL) -CHECK_FUNCTION_EXISTS (symlink ${HDF_PREFIX}_HAVE_SYMLINK) -CHECK_FUNCTION_EXISTS (system ${HDF_PREFIX}_HAVE_SYSTEM) - -CHECK_FUNCTION_EXISTS (tmpfile ${HDF_PREFIX}_HAVE_TMPFILE) -CHECK_FUNCTION_EXISTS (asprintf ${HDF_PREFIX}_HAVE_ASPRINTF) -CHECK_FUNCTION_EXISTS (vasprintf ${HDF_PREFIX}_HAVE_VASPRINTF) -CHECK_FUNCTION_EXISTS (waitpid ${HDF_PREFIX}_HAVE_WAITPID) - -CHECK_FUNCTION_EXISTS (vsnprintf ${HDF_PREFIX}_HAVE_VSNPRINTF) -if (MINGW OR NOT WINDOWS) - if (${HDF_PREFIX}_HAVE_VSNPRINTF) - HDF_FUNCTION_TEST (VSNPRINTF_WORKS) - endif () -endif () - -#----------------------------------------------------------------------------- -# sigsetjmp is special; may actually be a macro -#----------------------------------------------------------------------------- -if (NOT ${HDF_PREFIX}_HAVE_SIGSETJMP) - if (${HDF_PREFIX}_HAVE_SETJMP_H) - CHECK_SYMBOL_EXISTS (sigsetjmp "setjmp.h" ${HDF_PREFIX}_HAVE_MACRO_SIGSETJMP) - if (${HDF_PREFIX}_HAVE_MACRO_SIGSETJMP) - set (${HDF_PREFIX}_HAVE_SIGSETJMP 
1) - endif () - endif () -endif () - -#----------------------------------------------------------------------------- -# Check a bunch of other functions -#----------------------------------------------------------------------------- -if (MINGW OR NOT WINDOWS) - foreach (other_test - HAVE_ATTRIBUTE - HAVE_C99_FUNC - HAVE_FUNCTION - HAVE_C99_DESIGNATED_INITIALIZER - SYSTEM_SCOPE_THREADS - HAVE_SOCKLEN_T - ) - HDF_FUNCTION_TEST (${other_test}) - endforeach () -endif () - -#----------------------------------------------------------------------------- -# Check if InitOnceExecuteOnce is available -#----------------------------------------------------------------------------- -if (WINDOWS) - if (NOT HDF_NO_IOEO_TEST) - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "Checking for InitOnceExecuteOnce:") - endif () - if (NOT DEFINED ${HDF_PREFIX}_HAVE_IOEO) - if (LARGEFILE) - set (CMAKE_REQUIRED_DEFINITIONS - "${CURRENT_TEST_DEFINITIONS} -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -D_LARGEFILE_SOURCE" - ) - endif () - set (MACRO_CHECK_FUNCTION_DEFINITIONS "-DHAVE_IOEO ${CMAKE_REQUIRED_FLAGS}") - if (CMAKE_REQUIRED_INCLUDES) - set (CHECK_C_SOURCE_COMPILES_ADD_INCLUDES "-DINCLUDE_DIRECTORIES:STRING=${CMAKE_REQUIRED_INCLUDES}") - else () - set (CHECK_C_SOURCE_COMPILES_ADD_INCLUDES) - endif () - - TRY_RUN(HAVE_IOEO_EXITCODE HAVE_IOEO_COMPILED - ${CMAKE_BINARY_DIR} - ${HDF_RESOURCES_EXT_DIR}/HDFTests.c - COMPILE_DEFINITIONS "${CMAKE_REQUIRED_DEFINITIONS} ${MACRO_CHECK_FUNCTION_DEFINITIONS}" - LINK_LIBRARIES "${HDF5_REQUIRED_LIBRARIES}" - CMAKE_FLAGS "${CHECK_C_SOURCE_COMPILES_ADD_INCLUDES} -DCMAKE_SKIP_RPATH:BOOL=${CMAKE_SKIP_RPATH}" - COMPILE_OUTPUT_VARIABLE OUTPUT - ) - # if it did not compile make the return value fail code of 1 - if (NOT HAVE_IOEO_COMPILED) - set (HAVE_IOEO_EXITCODE 1) - endif () - # if the return value was 0 then it worked - if ("${HAVE_IOEO_EXITCODE}" EQUAL 0) - set (${HDF_PREFIX}_HAVE_IOEO 1 CACHE INTERNAL "Test InitOnceExecuteOnce") 
- if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "Performing Test InitOnceExecuteOnce - Success") - endif () - file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeOutput.log - "Performing C SOURCE FILE Test InitOnceExecuteOnce succeeded with the following output:\n" - "${OUTPUT}\n" - "Return value: ${HAVE_IOEO}\n") - else () - if (CMAKE_CROSSCOMPILING AND "${HAVE_IOEO_EXITCODE}" MATCHES "FAILED_TO_RUN") - set (${HDF_PREFIX}_HAVE_IOEO "${HAVE_IOEO_EXITCODE}") - else () - set (${HDF_PREFIX}_HAVE_IOEO "" CACHE INTERNAL "Test InitOnceExecuteOnce") - endif () - - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "Performing Test InitOnceExecuteOnce - Failed") - endif () - file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log - "Performing InitOnceExecuteOnce Test failed with the following output:\n" - "${OUTPUT}\n" - "Return value: ${HAVE_IOEO_EXITCODE}\n") - endif () - endif () - endif () -endif () - -#----------------------------------------------------------------------------- -# Determine how 'inline' is used -#----------------------------------------------------------------------------- -foreach (inline_test inline __inline__ __inline) - string (TOUPPER ${inline_test} INLINE_TEST_MACRO) - HDF_FUNCTION_TEST (HAVE_${INLINE_TEST_MACRO}) -endforeach () - -#----------------------------------------------------------------------------- -# Check how to print a Long Long integer -#----------------------------------------------------------------------------- -if (NOT ${HDF_PREFIX}_PRINTF_LL_WIDTH OR ${HDF_PREFIX}_PRINTF_LL_WIDTH MATCHES "unknown") - set (PRINT_LL_FOUND 0) - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "Checking for appropriate format for 64 bit long:") - endif () - set (CURRENT_TEST_DEFINITIONS "-DPRINTF_LL_WIDTH") - if (${HDF_PREFIX}_SIZEOF_LONG_LONG) - set (CURRENT_TEST_DEFINITIONS "${CURRENT_TEST_DEFINITIONS} -DHAVE_LONG_LONG") - endif () - TRY_RUN 
(${HDF_PREFIX}_PRINTF_LL_TEST_RUN ${HDF_PREFIX}_PRINTF_LL_TEST_COMPILE - ${CMAKE_BINARY_DIR} - ${HDF_RESOURCES_EXT_DIR}/HDFTests.c - COMPILE_DEFINITIONS "${CURRENT_TEST_DEFINITIONS}" - RUN_OUTPUT_VARIABLE OUTPUT - ) - if (${HDF_PREFIX}_PRINTF_LL_TEST_COMPILE) - if (${HDF_PREFIX}_PRINTF_LL_TEST_RUN MATCHES 0) - string(REGEX REPLACE ".*PRINTF_LL_WIDTH=\\[(.*)\\].*" "\\1" ${HDF_PREFIX}_PRINTF_LL "${OUTPUT}") - set (${HDF_PREFIX}_PRINTF_LL_WIDTH "\"${${HDF_PREFIX}_PRINTF_LL}\"" CACHE INTERNAL "Width for printf for type `long long' or `__int64', us. `ll") - set (PRINT_LL_FOUND 1) - else () - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "Width test failed with result: ${${HDF_PREFIX}_PRINTF_LL_TEST_RUN}") - endif () - endif () - else () - file (APPEND ${CMAKE_BINARY_DIR}/CMakeFiles/CMakeError.log - "Test ${HDF_PREFIX}_PRINTF_LL_WIDTH failed\n" - ) - endif () - - if (PRINT_LL_FOUND) - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "Checking for appropriate format for 64 bit long: found ${${HDF_PREFIX}_PRINTF_LL_WIDTH}") - endif () - else () - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "Checking for appropriate format for 64 bit long: not found") - endif () - set (${HDF_PREFIX}_PRINTF_LL_WIDTH "\"unknown\"" CACHE INTERNAL - "Width for printf for type `long long' or `__int64', us. `ll" - ) - endif () -endif () - -# ---------------------------------------------------------------------- -# Set the flag to indicate that the machine can handle converting -# denormalized floating-point values. 
-# (This flag should be set for all machines, except for the Crays, where -# the cache value is set in it's config file) -# -set (${HDF_PREFIX}_CONVERT_DENORMAL_FLOAT 1) diff --git a/examples/CMakeTests.cmake b/examples/CMakeTests.cmake index 3e24ba01ad0..6c84d1d0971 100644 --- a/examples/CMakeTests.cmake +++ b/examples/CMakeTests.cmake @@ -85,7 +85,7 @@ if (HDF5_TEST_SERIAL) -D "TEST_OUTPUT=${example}.txt" #-D "TEST_REFERENCE=${example}.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (EXAMPLES-${example} PROPERTIES FIXTURES_REQUIRED clear_EXAMPLES) @@ -115,7 +115,7 @@ if (H5_HAVE_PARALLEL AND HDF5_TEST_PARALLEL AND NOT WIN32) -D "TEST_REFERENCE:STRING=PHDF5 example finished with no errors" #-D "TEST_FILTER:STRING=PHDF5 tests finished with no errors" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/grepTest.cmake" + -P "${HDF_RESOURCES_DIR}/grepTest.cmake" ) endif () if (last_test) diff --git a/fortran/examples/CMakeTests.cmake b/fortran/examples/CMakeTests.cmake index e59d6f88a44..09b3fa0d08a 100644 --- a/fortran/examples/CMakeTests.cmake +++ b/fortran/examples/CMakeTests.cmake @@ -55,7 +55,7 @@ if (HDF5_TEST_SERIAL) -D "TEST_OUTPUT=f90_ex_${example}.txt" #-D "TEST_REFERENCE=f90_ex_${example}.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (f90_ex_${example} PROPERTIES FIXTURES_REQUIRED clear_f90_ex) @@ -78,7 +78,7 @@ if (HDF5_TEST_SERIAL) -D "TEST_OUTPUT=f03_ex_${example}.txt" #-D "TEST_REFERENCE=f03_ex_${example}.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (f03_ex_${example} PROPERTIES FIXTURES_REQUIRED clear_f90_ex) diff --git a/fortran/test/CMakeTests.cmake b/fortran/test/CMakeTests.cmake index 
0588416072f..68d8a5ad8c1 100644 --- a/fortran/test/CMakeTests.cmake +++ b/fortran/test/CMakeTests.cmake @@ -73,7 +73,7 @@ else () -D "TEST_OUTPUT=testhdf5_fortran.txt" #-D "TEST_REFERENCE=testhdf5_fortran.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () #set_tests_properties (FORTRAN_testhdf5_fortran PROPERTIES PASS_REGULAR_EXPRESSION "[ ]*0 error.s") @@ -96,7 +96,7 @@ else () -D "TEST_OUTPUT=testhdf5_fortran_1_8.txt" #-D "TEST_REFERENCE=testhdf5_fortran_1_8.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () #set_tests_properties (FORTRAN_testhdf5_fortran_1_8 PROPERTIES PASS_REGULAR_EXPRESSION "[ ]*0 error.s") @@ -120,7 +120,7 @@ else () -D "TEST_OUTPUT=fortranlib_test_F03.txt" #-D "TEST_REFERENCE=fortranlib_test_F03.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () # set_tests_properties (FORTRAN_fortranlib_test_F03 PROPERTIES PASS_REGULAR_EXPRESSION "[ ]*0 error.s") @@ -144,7 +144,7 @@ else () -D "TEST_OUTPUT=vol_connector.txt" #-D "TEST_REFERENCE=vol_connector.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () diff --git a/hl/c++/examples/CMakeTests.cmake b/hl/c++/examples/CMakeTests.cmake index 82b32cfbc1e..7564fd7274a 100644 --- a/hl/c++/examples/CMakeTests.cmake +++ b/hl/c++/examples/CMakeTests.cmake @@ -36,7 +36,7 @@ else () -D "TEST_OUTPUT=ptExampleFL.txt" #-D "TEST_REFERENCE=ptExampleFL.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (HL_CPP_ex_ptExampleFL PROPERTIES DEPENDS HL_CPP_ex_ptExampleFL-clear-objects) diff --git a/hl/c++/test/CMakeTests.cmake 
b/hl/c++/test/CMakeTests.cmake index 28ee5df9aa1..5abbd4793ca 100644 --- a/hl/c++/test/CMakeTests.cmake +++ b/hl/c++/test/CMakeTests.cmake @@ -33,7 +33,7 @@ else () -D "TEST_OUTPUT=hl_ptableTest.txt" #-D "TEST_REFERENCE=hl_ptableTest.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (HL_CPP_ptableTest PROPERTIES DEPENDS HL_CPP_ptableTest-clear-objects) diff --git a/hl/examples/CMakeTests.cmake b/hl/examples/CMakeTests.cmake index b99709620d3..6014f6c77d3 100644 --- a/hl/examples/CMakeTests.cmake +++ b/hl/examples/CMakeTests.cmake @@ -66,7 +66,7 @@ foreach (example ${examples}) -D "TEST_OUTPUT=hl_ex_${example}.txt" #-D "TEST_REFERENCE=hl_ex_${example}.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () if (last_test) diff --git a/hl/fortran/examples/CMakeTests.cmake b/hl/fortran/examples/CMakeTests.cmake index 2e73ad9885e..6389fbe8506 100644 --- a/hl/fortran/examples/CMakeTests.cmake +++ b/hl/fortran/examples/CMakeTests.cmake @@ -39,7 +39,7 @@ foreach (example ${examples}) -D "TEST_OUTPUT=hl_f90_ex_${example}.txt" #-D "TEST_REFERENCE=hl_f90_ex_${example}.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (HL_FORTRAN_f90_ex_${example} PROPERTIES diff --git a/hl/fortran/test/CMakeTests.cmake b/hl/fortran/test/CMakeTests.cmake index bceb6eede4d..e082f4c8162 100644 --- a/hl/fortran/test/CMakeTests.cmake +++ b/hl/fortran/test/CMakeTests.cmake @@ -49,7 +49,7 @@ macro (ADD_H5_FORTRAN_TEST file) -D "TEST_OUTPUT=hl_f90_${file}.txt" #-D "TEST_REFERENCE=hl_f90_${file}.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (HL_FORTRAN_f90_${file} PROPERTIES 
diff --git a/hl/test/CMakeTests.cmake b/hl/test/CMakeTests.cmake index e532253f18b..cc4786e5806 100644 --- a/hl/test/CMakeTests.cmake +++ b/hl/test/CMakeTests.cmake @@ -99,7 +99,7 @@ macro (HL_ADD_TEST hl_name) -D "TEST_OUTPUT=hl_${hl_name}.txt" #-D "TEST_REFERENCE=hl_${hl_name}.out" -D "TEST_FOLDER=${HDF5_HL_TEST_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (HL_${hl_name} PROPERTIES diff --git a/hl/tools/h5watch/CMakeTests.cmake b/hl/tools/h5watch/CMakeTests.cmake index b2f689bfeeb..32b2b95c4da 100644 --- a/hl/tools/h5watch/CMakeTests.cmake +++ b/hl/tools/h5watch/CMakeTests.cmake @@ -80,7 +80,7 @@ add_custom_target(H5WATCH_files ALL COMMENT "Copying files needed by H5WATCH tes -D "TEST_OUTPUT=${resultfile}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}.ddl" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5WATCH_ARGS-h5watch-${resultfile} PROPERTIES DEPENDS ${last_test} @@ -103,7 +103,7 @@ add_custom_target(H5WATCH_files ALL COMMENT "Copying files needed by H5WATCH tes -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}.mty" -D "TEST_ERRREF=${resultfile}.err" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5WATCH_ARGS-h5watch-${resultfile} PROPERTIES DEPENDS ${last_test} @@ -131,7 +131,7 @@ add_custom_target(H5WATCH_files ALL COMMENT "Copying files needed by H5WATCH tes -D "TEST_OUTPUT=${resultfile}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}.txt" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5WATCH-${resultfile} PROPERTIES DEPENDS H5WATCH-${resultfile}-clear-objects diff --git a/java/CMakeLists.txt b/java/CMakeLists.txt index 5c79bd3b700..cc22d9e4bbc 100644 --- a/java/CMakeLists.txt +++ b/java/CMakeLists.txt @@ -1,7 +1,7 @@ 
cmake_minimum_required (VERSION 3.12) project (HDF5_JAVA C Java) -set (CMAKE_MODULE_PATH "${HDF_RESOURCES_DIR};${HDF_RESOURCES_EXT_DIR}") +set (CMAKE_MODULE_PATH "${HDF_RESOURCES_DIR}") find_package (Java) #----------------------------------------------------------------------------- diff --git a/test/CMakeTests.cmake b/test/CMakeTests.cmake index a2eeabfbeea..5f4f89af986 100644 --- a/test/CMakeTests.cmake +++ b/test/CMakeTests.cmake @@ -440,7 +440,7 @@ foreach (h5_test ${H5_TESTS}) -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}" #-D "TEST_REFERENCE=${test}.out" -D "TEST_FOLDER=${HDF5_TEST_BINARY_DIR}/H5TEST" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (H5TEST-${h5_test} PROPERTIES @@ -477,7 +477,7 @@ if (NOT CYGWIN) -D "TEST_OUTPUT=cache.txt" #-D "TEST_REFERENCE=cache.out" -D "TEST_FOLDER=${HDF5_TEST_BINARY_DIR}/H5TEST" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (H5TEST-cache PROPERTIES @@ -534,7 +534,7 @@ else () -D "TEST_OUTPUT=external_env.txt" #-D "TEST_REFERENCE=external_env.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/H5TEST" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (H5TEST-external_env PROPERTIES @@ -568,7 +568,7 @@ else () -D "TEST_OUTPUT=vds_env.txt" #-D "TEST_REFERENCE=vds_env.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/H5TEST" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (H5TEST-vds_env PROPERTIES @@ -603,7 +603,7 @@ else () -D "TEST_SKIP_COMPARE=TRUE" -D "TEST_OUTPUT=flush1.txt" -D "TEST_FOLDER=${HDF5_TEST_BINARY_DIR}/H5TEST" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (H5TEST-flush1 PROPERTIES @@ -622,7 +622,7 @@ -D 
"TEST_SKIP_COMPARE=TRUE" -D "TEST_OUTPUT=flush2.txt" -D "TEST_FOLDER=${HDF5_TEST_BINARY_DIR}/H5TEST" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (H5TEST-flush2 PROPERTIES @@ -719,7 +719,7 @@ if (HDF5_ENABLE_DEPRECATED_SYMBOLS AND NOT MINGW) -D "TEST_OUTPUT=err_compat.txt" -D "TEST_REFERENCE=err_compat_1" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/H5TEST" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5TEST-err_compat PROPERTIES ENVIRONMENT "srcdir=${HDF5_TEST_BINARY_DIR}/H5TEST" @@ -736,7 +736,7 @@ else () -D "TEST_OUTPUT=err_compat.txt" -D "TEST_REFERENCE=err_compat_2" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/H5TEST" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5TEST-err_compat PROPERTIES ENVIRONMENT "srcdir=${HDF5_TEST_BINARY_DIR}/H5TEST" @@ -756,7 +756,7 @@ if (DEFAULT_API_VERSION MATCHES "v16" OR MINGW) -D "TEST_OUTPUT=error_test.txt" -D "TEST_REFERENCE=error_test_2" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/H5TEST" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5TEST-error_test PROPERTIES ENVIRONMENT "srcdir=${HDF5_TEST_BINARY_DIR}/H5TEST;HDF5_PLUGIN_PRELOAD=::" @@ -773,7 +773,7 @@ else () -D "TEST_OUTPUT=error_test.txt" -D "TEST_REFERENCE=error_test_1" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/H5TEST" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5TEST-error_test PROPERTIES ENVIRONMENT "srcdir=${HDF5_TEST_BINARY_DIR}/H5TEST;HDF5_PLUGIN_PRELOAD=::" @@ -804,7 +804,7 @@ else () -D "TEST_OUTPUT=links_env.txt" -D "TEST_REFERENCE=links_env.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/H5TEST" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (H5TEST-links_env PROPERTIES diff 
--git a/tools/test/h5copy/CMakeTests.cmake b/tools/test/h5copy/CMakeTests.cmake index e00d305e5ff..9310ec0252c 100644 --- a/tools/test/h5copy/CMakeTests.cmake +++ b/tools/test/h5copy/CMakeTests.cmake @@ -244,7 +244,7 @@ -D "TEST_REFERENCE=./testfiles/${testname}.out" -D "TEST_ERRREF=./testfiles/${testname}.err" -D "TEST_MASK=true" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5COPY-CMP-${testname} PROPERTIES DEPENDS H5COPY-CMP-${testname}-clear-objects) endif () @@ -272,7 +272,7 @@ -D "TEST_ENV_VAR=HDF5_PLUGIN_PATH" -D "TEST_ENV_VALUE=${CMAKE_BINARY_DIR}" -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) else () add_test ( @@ -289,7 +289,7 @@ -D "TEST_ENV_VAR=HDF5_PLUGIN_PATH" -D "TEST_ENV_VALUE=${CMAKE_BINARY_DIR}/plugins" -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (H5COPY_UD-${testname} PROPERTIES DEPENDS H5COPY_UD-${testname}-clear-objects) @@ -307,7 +307,7 @@ -D "TEST_ENV_VAR=HDF5_PLUGIN_PATH" -D "TEST_ENV_VALUE=${CMAKE_BINARY_DIR}/plugins" -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5COPY_UD-${testname}-DIFF PROPERTIES DEPENDS H5COPY_UD-${testname}) endif () @@ -336,7 +336,7 @@ -D "TEST_ENV_VAR=HDF5_PLUGIN_PATH" -D "TEST_ENV_VALUE=${CMAKE_BINARY_DIR}" -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) else () add_test ( @@ -354,7 +354,7 @@ -D "TEST_ENV_VAR=HDF5_PLUGIN_PATH" -D "TEST_ENV_VALUE=${CMAKE_BINARY_DIR}/plugins" -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P 
"${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (H5COPY_UD_ERR-${testname} PROPERTIES DEPENDS H5COPY_UD_ERR-${testname}-clearall-objects) @@ -372,7 +372,7 @@ -D "TEST_ENV_VAR=HDF5_PLUGIN_PATH" -D "TEST_ENV_VALUE=${CMAKE_BINARY_DIR}/plugins" -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5COPY_UD_ERR-${testname}-DIFF PROPERTIES DEPENDS H5COPY_UD_ERR-${testname}) endif () @@ -396,7 +396,7 @@ -D "TEST_OUTPUT=./testfiles/${resultfile}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=./testfiles/${resultfile}.ddl" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (H5COPY-${resultfile} PROPERTIES diff --git a/tools/test/h5diff/CMakeTests.cmake b/tools/test/h5diff/CMakeTests.cmake index 8e016059a0d..d2f0617b922 100644 --- a/tools/test/h5diff/CMakeTests.cmake +++ b/tools/test/h5diff/CMakeTests.cmake @@ -406,7 +406,7 @@ -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}.txt" -D "TEST_APPEND=EXIT CODE:" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) if (last_test) set_tests_properties (H5DIFF-${resultfile} PROPERTIES DEPENDS ${last_test}) @@ -439,7 +439,7 @@ -D "TEST_REF_APPEND=EXIT CODE: [0-9]" -D "TEST_REF_FILTER=EXIT CODE: 0" -D "TEST_SORT_COMPARE=TRUE" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) if (last_test) set_tests_properties (MPI_TEST_H5DIFF-${resultfile} PROPERTIES DEPENDS ${last_test}) @@ -465,7 +465,7 @@ -D "TEST_ENV_VAR=HDF5_PLUGIN_PATH" -D "TEST_ENV_VALUE=${CMAKE_BINARY_DIR}" -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) else () add_test ( @@ -482,7 +482,7 @@ -D "TEST_ENV_VAR=HDF5_PLUGIN_PATH" -D "TEST_ENV_VALUE=${CMAKE_BINARY_DIR}/plugins" 
-D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () if (last_test) diff --git a/tools/test/h5dump/CMakeTests.cmake b/tools/test/h5dump/CMakeTests.cmake index be1a414fce7..2cba7b7f73e 100644 --- a/tools/test/h5dump/CMakeTests.cmake +++ b/tools/test/h5dump/CMakeTests.cmake @@ -450,7 +450,7 @@ -D "TEST_OUTPUT=h5dump-${testname}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=h5dump-${testname}.txt" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) if (last_test) set_tests_properties (H5DUMP-${testname} PROPERTIES DEPENDS ${last_test}) @@ -499,7 +499,7 @@ -D "TEST_OUTPUT=${resultfile}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}.ddl" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5DUMP-${resultfile} PROPERTIES DEPENDS "H5DUMP-${resultfile}-clear-objects") endif () @@ -532,7 +532,7 @@ -D "TEST_OUTPUT=${resultfile}-N.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}-N.ddl" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5DUMP-N-${resultfile} PROPERTIES DEPENDS "H5DUMP-N-${resultfile}-clear-objects") endif () @@ -565,7 +565,7 @@ -D "TEST_OUTPUT=${resultfile}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}.ddl" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5DUMP-${resultfile} PROPERTIES DEPENDS "H5DUMP-${resultfile}-clear-objects") add_test ( @@ -606,7 +606,7 @@ -D "TEST_OUTPUT=${resultfile}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}.ddl" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5DUMP-${resultfile} PROPERTIES DEPENDS "H5DUMP-${resultfile}-clear-objects") add_test ( @@ 
-659,7 +659,7 @@ -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}.ddl" -D "TEST_MASK_ERROR=true" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () endmacro () @@ -676,7 +676,7 @@ -D "TEST_OUTPUT=${resultfile}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${result_check}" - -P "${HDF_RESOURCES_EXT_DIR}/grepTest.cmake" + -P "${HDF_RESOURCES_DIR}/grepTest.cmake" ) endif () endmacro () @@ -694,7 +694,7 @@ -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}.ddl" -D "TEST_ERRREF=${result_errcheck}" - -P "${HDF_RESOURCES_EXT_DIR}/grepTest.cmake" + -P "${HDF_RESOURCES_DIR}/grepTest.cmake" ) endif () endmacro () @@ -714,7 +714,7 @@ -D "TEST_ERRREF=${result_errcheck}" -D "TEST_ENV_VAR:STRING=${envvar}" -D "TEST_ENV_VALUE:STRING=${envval}" - -P "${HDF_RESOURCES_EXT_DIR}/grepTest.cmake" + -P "${HDF_RESOURCES_DIR}/grepTest.cmake" ) endif () endmacro () @@ -739,7 +739,7 @@ -D "TEST_OUTPUT=${conffile}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${conffile}.ddl" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5DUMP-IMPORT-${resultfile} PROPERTIES DEPENDS "H5DUMP-IMPORT-${resultfile}-clear-objects") add_test (NAME H5DUMP-IMPORT-h5import-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${resultfile}.bin -c ${conffile}.out -o ${resultfile}.h5) @@ -766,7 +766,7 @@ -D "TEST_ENV_VAR=HDF5_PLUGIN_PATH" -D "TEST_ENV_VALUE=${CMAKE_BINARY_DIR}/plugins" -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () endmacro () diff --git a/tools/test/h5dump/CMakeTestsPBITS.cmake b/tools/test/h5dump/CMakeTestsPBITS.cmake index 0c3297f9740..3c188c41328 100644 --- a/tools/test/h5dump/CMakeTestsPBITS.cmake +++ b/tools/test/h5dump/CMakeTestsPBITS.cmake @@ -141,7 +141,7 @@ -D "TEST_OUTPUT=${resultfile}.out" -D "TEST_EXPECT=${resultcode}" 
-D "TEST_REFERENCE=${resultfile}.ddl" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () endmacro () diff --git a/tools/test/h5dump/CMakeTestsVDS.cmake b/tools/test/h5dump/CMakeTestsVDS.cmake index 88df9ad2ec5..31624e608aa 100644 --- a/tools/test/h5dump/CMakeTestsVDS.cmake +++ b/tools/test/h5dump/CMakeTestsVDS.cmake @@ -137,7 +137,7 @@ -D "TEST_OUTPUT=${resultfile}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}.ddl" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () endmacro () @@ -169,7 +169,7 @@ -D "TEST_REFERENCE=${resultfile}.ddl" -D "TEST_ENV_VAR=HDF5_VDS_PREFIX" -D "TEST_ENV_VALUE=${PROJECT_BINARY_DIR}/testfiles/vds/" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () endmacro () @@ -196,7 +196,7 @@ -D "TEST_OUTPUT=${resultfile}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}.ddl" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () endmacro () diff --git a/tools/test/h5dump/CMakeTestsXML.cmake b/tools/test/h5dump/CMakeTestsXML.cmake index d73525748e9..a8083fbcbfc 100644 --- a/tools/test/h5dump/CMakeTestsXML.cmake +++ b/tools/test/h5dump/CMakeTestsXML.cmake @@ -195,7 +195,7 @@ -D "TEST_OUTPUT=${resultfile}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}.xml" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () endmacro () diff --git a/tools/test/h5format_convert/CMakeTests.cmake b/tools/test/h5format_convert/CMakeTests.cmake index 00a28bb78e4..5b8c51fca64 100644 --- a/tools/test/h5format_convert/CMakeTests.cmake +++ b/tools/test/h5format_convert/CMakeTests.cmake @@ -124,7 +124,7 @@ -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}" -D "TEST_ERRREF=${resultfile}.err" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P 
"${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5FC-${testname}-${testfile} PROPERTIES DEPENDS "H5FC-${testname}-${testfile}-tmpfile") set (last_test "H5FC-${testname}-${testfile}") @@ -139,7 +139,7 @@ -D "TEST_OUTPUT=${testname}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5FC-${testname}-NA PROPERTIES DEPENDS "H5FC-${testname}-clear-objects") set (last_test "H5FC-${testname}-NA") @@ -172,7 +172,7 @@ -D "TEST_OUTPUT=${testname}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5FC-${testname}-${testfile} PROPERTIES DEPENDS "H5FC-${testname}-${testfile}-tmpfile") set (last_test "H5FC-${testname}-${testfile}") @@ -205,7 +205,7 @@ -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}" -D "TEST_ERRREF=${result_errcheck}" - -P "${HDF_RESOURCES_EXT_DIR}/grepTest.cmake" + -P "${HDF_RESOURCES_DIR}/grepTest.cmake" ) set_tests_properties (H5FC-${testname}-${testfile} PROPERTIES DEPENDS "H5FC-${testname}-${testfile}-tmpfile") set (last_test "H5FC-${testname}-${testfile}") @@ -237,7 +237,7 @@ -D "TEST_OUTPUT=testfiles/${testname}.out" -D "TEST_SKIP_COMPARE=TRUE" -D "TEST_EXPECT=${resultcode}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5FC-${testname} PROPERTIES DEPENDS "H5FC-${testname}-tmpfile") set (last_test "H5FC-${testname}") @@ -280,7 +280,7 @@ -D "TEST_OUTPUT=testfiles/${testname}.out" -D "TEST_SKIP_COMPARE=TRUE" -D "TEST_EXPECT=${resultcode}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5FC-${testname} PROPERTIES DEPENDS "H5FC-${testname}-tmpfile") add_test ( @@ -317,7 +317,7 @@ -D "TEST_OUTPUT=testfiles/${testname}.out" -D 
"TEST_SKIP_COMPARE=TRUE" -D "TEST_EXPECT=0" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5FC-${testname} PROPERTIES DEPENDS "H5FC-${testname}-tmpfile") add_test ( @@ -330,7 +330,7 @@ -D "TEST_OUTPUT=testfiles/${testname}_chk.out" -D "TEST_EXPECT=0" -D "TEST_REFERENCE=testfiles/${testname}.ddl" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5FC_CHECK_DUMP-${testname} PROPERTIES DEPENDS "H5FC-${testname}") set (last_test "H5FC_CHECK_DUMP-${testname}") diff --git a/tools/test/h5import/CMakeTests.cmake b/tools/test/h5import/CMakeTests.cmake index ee1d401ad5d..81ebc4aeefb 100644 --- a/tools/test/h5import/CMakeTests.cmake +++ b/tools/test/h5import/CMakeTests.cmake @@ -128,7 +128,7 @@ -D "TEST_EXPECT=0" -D "TEST_FILTER=(^(HDF5)[^\n]*)" -D "TEST_SKIP_COMPARE=TRUE" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5IMPORT-${testname}-H5DMP PROPERTIES DEPENDS H5IMPORT-${testname} @@ -144,7 +144,7 @@ -D "TEST_EXPECT=0" -D "TEST_FILTER=(^(HDF5)[^\n]*)" -D "TEST_REFERENCE=${testfile}.new" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5IMPORT-${testname}-H5DMP_CMP PROPERTIES DEPENDS H5IMPORT-${testname}-H5DMP @@ -176,7 +176,7 @@ -D "TEST_OUTPUT=d${testfile}.dmp" -D "TEST_EXPECT=0" -D "TEST_SKIP_COMPARE=TRUE" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) else () add_test ( @@ -189,7 +189,7 @@ -D "TEST_OUTPUT=d${testfile}.dmp" -D "TEST_EXPECT=0" -D "TEST_SKIP_COMPARE=TRUE" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (H5IMPORT-DUMP-${testname}-H5DMP PROPERTIES @@ -206,7 +206,7 @@ -D "TEST_OUTPUT=d${testfile}.imp" -D "TEST_EXPECT=0" -D "TEST_SKIP_COMPARE=TRUE" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" 
+ -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5IMPORT-DUMP-${testname} PROPERTIES DEPENDS "H5IMPORT-DUMP-${testname}-H5DMP" @@ -223,7 +223,7 @@ -D "TEST_EXPECT=0" -D "TEST_FILTER=(^(Warning)[^\n]*)" -D "TEST_REFERENCE=testfiles/d${testfile}.txt" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5IMPORT-DUMP-${testname}-H5DFF PROPERTIES DEPENDS "H5IMPORT-DUMP-${testname}" @@ -254,7 +254,7 @@ -D "TEST_OUTPUT=${testname}.dmp" -D "TEST_EXPECT=0" -D "TEST_SKIP_COMPARE=TRUE" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5IMPORT_SUB-DUMP-${testname}-H5DMP PROPERTIES DEPENDS "H5IMPORT_SUB-DUMP-${testname}-clear-objects" @@ -270,7 +270,7 @@ -D "TEST_OUTPUT=${testname}.imp" -D "TEST_EXPECT=0" -D "TEST_SKIP_COMPARE=TRUE" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5IMPORT_SUB-DUMP-${testname}-H5IMP PROPERTIES DEPENDS "H5IMPORT_SUB-DUMP-${testname}-H5DMP" @@ -285,7 +285,7 @@ -D "TEST_OUTPUT=d-${testname}.dmp" -D "TEST_EXPECT=0" -D "TEST_REFERENCE=testfiles/${testname}.ddl" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5IMPORT_SUB-DUMP-${testname}-CMP PROPERTIES DEPENDS "H5IMPORT_SUB-DUMP-${testname}-H5IMP" diff --git a/tools/test/h5jam/CMakeTests.cmake b/tools/test/h5jam/CMakeTests.cmake index 82d99b73abc..e19ec867fe0 100644 --- a/tools/test/h5jam/CMakeTests.cmake +++ b/tools/test/h5jam/CMakeTests.cmake @@ -71,7 +71,7 @@ -D "TEST_ERRREF=testfiles/${expectfile}.txt" -D "TEST_SKIP_COMPARE=1" -D "TEST_REFERENCE=testfiles/${expectfile}.txt" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () endmacro () @@ -98,7 +98,7 @@ -D "TEST_OUTPUT=${expectfile}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=testfiles/${expectfile}.txt" - -P 
"${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () endmacro () @@ -117,7 +117,7 @@ -D "TEST_EXPECT=0" -D "TEST_FILTER=(^(HDF5)[^\n]*)" -D "TEST_SKIP_COMPARE=TRUE" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5JAM-${testname}-CHECKFILE-H5DMP PROPERTIES DEPENDS ${testdepends}) add_test ( @@ -131,7 +131,7 @@ -D "TEST_EXPECT=0" -D "TEST_FILTER=(^(HDF5)[^\n]*)" -D "TEST_REFERENCE=${actual}.new" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5JAM-${testname}-CHECKFILE-H5DMP_CMP PROPERTIES DEPENDS H5JAM-${testname}-CHECKFILE-H5DMP) endif () @@ -174,7 +174,7 @@ -D "TEST_OUTPUT=${outfile}.ufile.txt" -D "TEST_EXPECT=0" -D "TEST_SKIP_COMPARE=TRUE" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5JAM-${testname}-UNJAM PROPERTIES DEPENDS H5JAM-${testname}-UNJAM-clear-objects) set (compare_test "${outfile}.ufile.txt") diff --git a/tools/test/h5ls/CMakeTests.cmake b/tools/test/h5ls/CMakeTests.cmake index cd2f764b236..34f8e6fd210 100644 --- a/tools/test/h5ls/CMakeTests.cmake +++ b/tools/test/h5ls/CMakeTests.cmake @@ -154,7 +154,7 @@ -D "TEST_OUTPUT=${resultfile}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}.ls" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () endmacro () @@ -179,7 +179,7 @@ -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}.ls" -D "TEST_ERRREF=${resultfile}.err" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () endmacro () @@ -199,7 +199,7 @@ -D "TEST_ENV_VAR=HDF5_PLUGIN_PATH" -D "TEST_ENV_VALUE=${CMAKE_BINARY_DIR}/plugins" -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () endmacro () 
diff --git a/tools/test/h5ls/CMakeTestsVDS.cmake b/tools/test/h5ls/CMakeTestsVDS.cmake index e93e7e7b278..5ef21abbb29 100644 --- a/tools/test/h5ls/CMakeTestsVDS.cmake +++ b/tools/test/h5ls/CMakeTestsVDS.cmake @@ -99,7 +99,7 @@ -D "TEST_OUTPUT=${resultfile}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}.ls" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () endmacro () @@ -128,7 +128,7 @@ -D "TEST_REFERENCE=vds/prefix/${resultfile}.ls" -D "TEST_ENV_VAR=HDF5_VDS_PREFIX" -D "TEST_ENV_VALUE=\${ORIGIN}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () endmacro () diff --git a/tools/test/h5repack/CMakeTests.cmake b/tools/test/h5repack/CMakeTests.cmake index 09648ffd7c9..936ee3e08b7 100644 --- a/tools/test/h5repack/CMakeTests.cmake +++ b/tools/test/h5repack/CMakeTests.cmake @@ -236,7 +236,7 @@ -D "TEST_OUTPUT=h5repack-${testname}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=h5repack-${testname}.txt" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (H5REPACK-h5repack-${testname} PROPERTIES @@ -347,7 +347,7 @@ -D "TEST_EXPECT=${resultcode}" -D "TEST_FILTER:STRING=${testfilter}" -D "TEST_REFERENCE=${resultfile}-${testname}.tst" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5REPACK_CMP-${testname} PROPERTIES DEPENDS H5REPACK_CMP-${testname}-clear-objects @@ -395,7 +395,7 @@ -D "TEST_SKIP_COMPARE=true" -D "TEST_REFERENCE=${resultfile}.mty" -D "TEST_ERRREF=${result_errcheck}" - -P "${HDF_RESOURCES_EXT_DIR}/grepTest.cmake" + -P "${HDF_RESOURCES_DIR}/grepTest.cmake" ) set_tests_properties (H5REPACK_MASK-${testname} PROPERTIES DEPENDS H5REPACK_MASK-${testname}-clear-objects) endif () @@ -437,7 +437,7 @@ -D "TEST_OUTPUT=${resultfile}-${testname}.out" -D "TEST_EXPECT=${resultcode}" -D 
"TEST_REFERENCE=${testname}.${resultfile}.ddl" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5REPACK_DMP-h5dump-${testname} PROPERTIES DEPENDS "H5REPACK_DMP-${testname}" @@ -480,7 +480,7 @@ -D "TEST_OUTPUT=out-${testname}.${testfile}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${testname}.${testfile}.tst" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5REPACK_DIFF-${testname}_DFF PROPERTIES DEPENDS H5REPACK_DIFF-${testname} @@ -523,7 +523,7 @@ -D "TEST_OUTPUT=${resultfile}-${testname}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${statarg}.${resultfile}.ddl" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5REPACK_STAT-h5stat-${testname} PROPERTIES DEPENDS "H5REPACK_STAT-${testname}" @@ -576,7 +576,7 @@ -D "TEST_EXPECT=${resultcode}" -D "TEST_FILTER:STRING=${testfilter}" -D "TEST_REFERENCE=${testfilter}" - -P "${HDF_RESOURCES_EXT_DIR}/grepTest.cmake" + -P "${HDF_RESOURCES_DIR}/grepTest.cmake" ) set_tests_properties (H5REPACK_VERIFY_LAYOUT-${testname}_DMP PROPERTIES DEPENDS H5REPACK_VERIFY_LAYOUT-${testname}_DFF @@ -602,7 +602,7 @@ -D "TEST_EXPECT=${resultcode}" -D "TEST_FILTER:STRING=${nottestfilter}" -D "TEST_REFERENCE=${testfilter}" - -P "${HDF_RESOURCES_EXT_DIR}/grepTest.cmake" + -P "${HDF_RESOURCES_DIR}/grepTest.cmake" ) set_tests_properties (H5REPACK_VERIFY_LAYOUT-${testname}_DMP PROPERTIES DEPENDS H5REPACK_VERIFY_LAYOUT-${testname}_DFF @@ -649,7 +649,7 @@ -D "TEST_OUTPUT=${testfile}-${testname}-v.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${testfile}-${testname}-v.ddl" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5REPACK_VERIFY_LAYOUT_VDS-${testname}_DMP PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles" @@ -687,7 +687,7 @@ -D "TEST_EXPECT=${resultcode}" 
-D "TEST_FILTER:STRING=SUPERBLOCK_VERSION ${superblock}" -D "TEST_REFERENCE=SUPERBLOCK_VERSION ${superblock}" - -P "${HDF_RESOURCES_EXT_DIR}/grepTest.cmake" + -P "${HDF_RESOURCES_DIR}/grepTest.cmake" ) set_tests_properties (H5REPACK_VERIFY_SUPERBLOCK-${testname}_DMP PROPERTIES DEPENDS H5REPACK_VERIFY_SUPERBLOCK-${testname} @@ -745,7 +745,7 @@ -D "TEST_OUTPUT=out-${testname}_N.${testname}.out" -D "TEST_EXPECT=0" -D "TEST_REFERENCE=out-${testname}_N.${testname}.txt" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5REPACK_META-${testname}_N_DFF PROPERTIES DEPENDS H5REPACK_META-${testname}_N @@ -767,7 +767,7 @@ -D "TEST_OUTPUT=out-${testname}_M.${testname}.out" -D "TEST_EXPECT=0" -D "TEST_REFERENCE=out-${testname}_M.${testname}.txt" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5REPACK_META-${testname}_M_DFF PROPERTIES DEPENDS H5REPACK_META-${testname}_M @@ -814,7 +814,7 @@ -D "TEST_ENV_VAR=HDF5_PLUGIN_PATH" -D "TEST_ENV_VALUE=${CMAKE_BINARY_DIR}/plugins" -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5REPACK_UD-${testname} PROPERTIES DEPENDS H5REPACK_UD-${testname}-clear-objects @@ -832,7 +832,7 @@ -D "TEST_ENV_VAR=HDF5_PLUGIN_PATH" -D "TEST_ENV_VALUE=${CMAKE_BINARY_DIR}/plugins" -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5REPACK_UD-${testname}-h5dump PROPERTIES DEPENDS "H5REPACK_UD-${testname}" diff --git a/tools/test/h5stat/CMakeTests.cmake b/tools/test/h5stat/CMakeTests.cmake index d4238b547e1..c79c0a17bca 100644 --- a/tools/test/h5stat/CMakeTests.cmake +++ b/tools/test/h5stat/CMakeTests.cmake @@ -111,7 +111,7 @@ -D "TEST_OUTPUT=${resultfile}.out" -D "TEST_EXPECT=${resultcode}" -D 
"TEST_REFERENCE=${resultfile}.ddl" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () endmacro () @@ -135,7 +135,7 @@ -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}.mty" -D "TEST_ERRREF=${resultfile}.err" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () endmacro () diff --git a/tools/test/misc/CMakeTestsClear.cmake b/tools/test/misc/CMakeTestsClear.cmake index 198a3637155..5e307aa3fcc 100644 --- a/tools/test/misc/CMakeTestsClear.cmake +++ b/tools/test/misc/CMakeTestsClear.cmake @@ -97,7 +97,7 @@ -D "TEST_OUTPUT=${testname}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}.ddl" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () endmacro () @@ -115,7 +115,7 @@ -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}.mty" -D "TEST_ERRREF=${resultfile}.err" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () endmacro () @@ -144,7 +144,7 @@ -D "TEST_OUTPUT=${testname}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}.ddl" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5CLEAR_CMP-${testname} PROPERTIES DEPENDS H5CLEAR_CMP-copy_${testname} @@ -177,7 +177,7 @@ -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}.mty" -D "TEST_ERRREF=${resultfile}.err" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5CLEAR_CMP-${testname} PROPERTIES DEPENDS H5CLEAR_CMP-copy_${testname} @@ -222,7 +222,7 @@ -D "TEST_OUTPUT=${testname}_before_size.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}_before_size.ddl" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5CLEAR_FILESIZE_CMP-${testname}_before_size PROPERTIES DEPENDS 
H5CLEAR_FILESIZE_TEST-copy_${testname} @@ -253,7 +253,7 @@ -D "TEST_OUTPUT=${testname}_after_size.out" -D "TEST_EXPECT=0" -D "TEST_REFERENCE=${resultfile}_after_size.ddl" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5CLEAR_FILESIZE_CMP-${testname}_after_size PROPERTIES DEPENDS H5CLEAR_FILESIZE_INCR-${testname} @@ -285,7 +285,7 @@ -D "TEST_OUTPUT=${testname}_before_size.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}.mty" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5CLEAR_FILESIZE_FAIL_CMP-${testname}_before_size PROPERTIES DEPENDS H5CLEAR_FILESIZE_FAIL_TEST-copy_${testname} @@ -315,7 +315,7 @@ -D "TEST_OUTPUT=${testname}_after_size.out" -D "TEST_EXPECT=0" -D "TEST_REFERENCE=${resultfile}_after_size.ddl" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5CLEAR_FILESIZE_FAIL_CMP-${testname}_after_size PROPERTIES DEPENDS H5CLEAR_FILESIZE_FAIL_INCR-${testname} diff --git a/tools/test/misc/CMakeTestsMkgrp.cmake b/tools/test/misc/CMakeTestsMkgrp.cmake index 11c9fdf0184..b4d1a563a30 100644 --- a/tools/test/misc/CMakeTestsMkgrp.cmake +++ b/tools/test/misc/CMakeTestsMkgrp.cmake @@ -87,7 +87,7 @@ -D "TEST_EXPECT=${resultcode}" -D "TEST_MASK_MOD=true" -D "TEST_REFERENCE=${resultfile}.ls" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5MKGRP-${resultfile}-h5ls PROPERTIES DEPENDS H5MKGRP-${resultfile}) endif () @@ -114,7 +114,7 @@ -D "TEST_OUTPUT=${resultfile}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}.txt" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5MKGRP_CMP-${resultfile} PROPERTIES DEPENDS H5MKGRP_CMP-${resultfile}-clear-objects diff --git a/tools/test/perform/CMakeTests.cmake 
b/tools/test/perform/CMakeTests.cmake index 73b22604d1b..17f4b48f4e7 100644 --- a/tools/test/perform/CMakeTests.cmake +++ b/tools/test/perform/CMakeTests.cmake @@ -64,7 +64,7 @@ if (HDF5_TEST_SERIAL) -D "TEST_OUTPUT=h5perf_serial.txt" #-D "TEST_REFERENCE=h5perf_serial.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (PERFORM_h5perf_serial PROPERTIES @@ -88,7 +88,7 @@ if (HDF5_TEST_SERIAL) -D "TEST_OUTPUT=chunk.txt" #-D "TEST_REFERENCE=chunk.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (PERFORM_chunk PROPERTIES @@ -107,7 +107,7 @@ if (HDF5_TEST_SERIAL) -D "TEST_OUTPUT=iopipe.txt" #-D "TEST_REFERENCE=iopipe.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (PERFORM_iopipe PROPERTIES @@ -126,7 +126,7 @@ if (HDF5_TEST_SERIAL) -D "TEST_OUTPUT=overhead.txt" #-D "TEST_REFERENCE=overhead.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (PERFORM_overhead PROPERTIES @@ -145,7 +145,7 @@ if (HDF5_TEST_SERIAL) -D "TEST_OUTPUT=perf_meta.txt" #-D "TEST_REFERENCE=perf_meta.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (PERFORM_perf_meta PROPERTIES @@ -164,7 +164,7 @@ if (HDF5_TEST_SERIAL) -D "TEST_OUTPUT=zip_perf-h.txt" #-D "TEST_REFERENCE=zip_perf-h.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (PERFORM_zip_perf_help PROPERTIES @@ -183,7 +183,7 @@ if (HDF5_TEST_SERIAL) -D 
"TEST_OUTPUT=zip_perf.txt" #-D "TEST_REFERENCE=zip_perf.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () set_tests_properties (PERFORM_zip_perf PROPERTIES From 2ceccedc54ed9f822fd0da71f2f7c5c73bcb2728 Mon Sep 17 00:00:00 2001 From: Dana Robinson <43805+derobins@users.noreply.github.com> Date: Wed, 7 Jun 2023 05:51:19 -0700 Subject: [PATCH 073/108] Bring gcc/clang attribute and warning macros over (#3063) --- src/H5private.h | 102 ++++++++++++++++++++++++++++-------------------- src/H5public.h | 34 ++++++++++++++-- 2 files changed, 89 insertions(+), 47 deletions(-) diff --git a/src/H5private.h b/src/H5private.h index 36a40adb3a2..ff23370a05c 100644 --- a/src/H5private.h +++ b/src/H5private.h @@ -276,52 +276,68 @@ * gcc warnings (it has to use the public API and can't include this * file). Be sure to update that file if the #ifdefs change here. */ +/* clang-format off */ #if defined(H5_HAVE_ATTRIBUTE) && !defined(__SUNPRO_C) -#define H5_ATTR_FORMAT(X, Y, Z) __attribute__((format(X, Y, Z))) -#define H5_ATTR_UNUSED __attribute__((unused)) -#ifdef H5_HAVE_PARALLEL -#define H5_ATTR_PARALLEL_UNUSED __attribute__((unused)) -#define H5_ATTR_PARALLEL_USED /*void*/ -#else -#define H5_ATTR_PARALLEL_UNUSED /*void*/ -#define H5_ATTR_PARALLEL_USED __attribute__((unused)) -#endif -#ifdef H5_NO_DEPRECATED_SYMBOLS -#define H5_ATTR_DEPRECATED_USED H5_ATTR_UNUSED -#else /* H5_NO_DEPRECATED_SYMBOLS */ -#define H5_ATTR_DEPRECATED_USED /*void*/ -#endif /* H5_NO_DEPRECATED_SYMBOLS */ -#ifdef H5_DEBUG_API -#define H5_ATTR_DEBUG_API_USED /*void*/ -#else /* H5_DEBUG_API */ -#define H5_ATTR_DEBUG_API_USED H5_ATTR_UNUSED -#endif /* H5_DEBUG_API */ -#ifndef NDEBUG -#define H5_ATTR_NDEBUG_UNUSED /*void*/ -#else /* NDEBUG */ -#define H5_ATTR_NDEBUG_UNUSED H5_ATTR_UNUSED -#endif /* NDEBUG */ -#define H5_ATTR_NORETURN __attribute__((noreturn)) -#define H5_ATTR_CONST __attribute__((const)) -#define 
H5_ATTR_PURE __attribute__((pure)) -#if defined(__clang__) || defined(__GNUC__) && __GNUC__ >= 7 && !defined(__INTEL_COMPILER) -#define H5_ATTR_FALLTHROUGH __attribute__((fallthrough)); -#else -#define H5_ATTR_FALLTHROUGH /* FALLTHROUGH */ -#endif +# define H5_ATTR_FORMAT(X, Y, Z) __attribute__((format(X, Y, Z))) +# define H5_ATTR_UNUSED __attribute__((unused)) + +# ifdef H5_HAVE_PARALLEL +# define H5_ATTR_PARALLEL_UNUSED __attribute__((unused)) +# define H5_ATTR_PARALLEL_USED /*void*/ +# else +# define H5_ATTR_PARALLEL_UNUSED /*void*/ +# define H5_ATTR_PARALLEL_USED __attribute__((unused)) +# endif + +# ifdef H5_NO_DEPRECATED_SYMBOLS +# define H5_ATTR_DEPRECATED_USED H5_ATTR_UNUSED +# else +# define H5_ATTR_DEPRECATED_USED /*void*/ +# endif + +# ifdef H5_DEBUG_API +# define H5_ATTR_DEBUG_API_USED /*void*/ +# else +# define H5_ATTR_DEBUG_API_USED H5_ATTR_UNUSED +# endif + +# ifndef NDEBUG +# define H5_ATTR_NDEBUG_UNUSED /*void*/ +# else +# define H5_ATTR_NDEBUG_UNUSED H5_ATTR_UNUSED +# endif + +# define H5_ATTR_NORETURN __attribute__((noreturn)) +# define H5_ATTR_CONST __attribute__((const)) +# define H5_ATTR_PURE __attribute__((pure)) + +# if defined(__clang__) || defined(__GNUC__) && __GNUC__ >= 7 && !defined(__INTEL_COMPILER) +# define H5_ATTR_FALLTHROUGH __attribute__((fallthrough)); +# else +# define H5_ATTR_FALLTHROUGH /* FALLTHROUGH */ +# endif + +# if defined(__GNUC__) && !defined(__INTEL_COMPILER) +# define H5_ATTR_MALLOC __attribute__((malloc)) +# else +# define H5_ATTR_MALLOC /*void*/ +# endif + #else -#define H5_ATTR_FORMAT(X, Y, Z) /*void*/ -#define H5_ATTR_UNUSED /*void*/ -#define H5_ATTR_NDEBUG_UNUSED /*void*/ -#define H5_ATTR_DEBUG_API_USED /*void*/ -#define H5_ATTR_DEPRECATED_USED /*void*/ -#define H5_ATTR_PARALLEL_UNUSED /*void*/ -#define H5_ATTR_PARALLEL_USED /*void*/ -#define H5_ATTR_NORETURN /*void*/ -#define H5_ATTR_CONST /*void*/ -#define H5_ATTR_PURE /*void*/ -#define H5_ATTR_FALLTHROUGH /*void*/ +# define H5_ATTR_FORMAT(X, Y, Z) /*void*/ +# 
define H5_ATTR_UNUSED /*void*/ +# define H5_ATTR_NDEBUG_UNUSED /*void*/ +# define H5_ATTR_DEBUG_API_USED /*void*/ +# define H5_ATTR_DEPRECATED_USED /*void*/ +# define H5_ATTR_PARALLEL_UNUSED /*void*/ +# define H5_ATTR_PARALLEL_USED /*void*/ +# define H5_ATTR_NORETURN /*void*/ +# define H5_ATTR_CONST /*void*/ +# define H5_ATTR_PURE /*void*/ +# define H5_ATTR_FALLTHROUGH /*void*/ +# define H5_ATTR_MALLOC /*void*/ #endif +/* clang-format on */ /* * Networking headers used by the mirror VFD and related tests and utilities. diff --git a/src/H5public.h b/src/H5public.h index 303a13357fd..7fc36f5b0ab 100644 --- a/src/H5public.h +++ b/src/H5public.h @@ -75,19 +75,45 @@ * http://www.dbp-consulting.com/tutorials/SuppressingGCCWarnings.html * http://gcc.gnu.org/onlinedocs/gcc/Diagnostic-Pragmas.html#Diagnostic-Pragmas */ -/* These pragmas are only implemented usefully in gcc 4.6+ */ -#if ((__GNUC__ * 100) + __GNUC_MINOR__) >= 406 #define H5_GCC_DIAG_JOINSTR(x, y) x y #define H5_GCC_DIAG_DO_PRAGMA(x) _Pragma(#x) #define H5_GCC_DIAG_PRAGMA(x) H5_GCC_DIAG_DO_PRAGMA(GCC diagnostic x) -#define H5_GCC_DIAG_OFF(x) H5_GCC_DIAG_PRAGMA(push) H5_GCC_DIAG_PRAGMA(ignored H5_GCC_DIAG_JOINSTR("-W", x)) -#define H5_GCC_DIAG_ON(x) H5_GCC_DIAG_PRAGMA(pop) +#define H5_DIAG_OFF(x) H5_GCC_DIAG_PRAGMA(push) H5_GCC_DIAG_PRAGMA(ignored H5_GCC_DIAG_JOINSTR("-W", x)) +#define H5_DIAG_ON(x) H5_GCC_DIAG_PRAGMA(pop) + +/* Macros for enabling/disabling particular GCC-only warnings. + * These pragmas are only implemented usefully in gcc 4.6+ + */ +#if (((__GNUC__ * 100) + __GNUC_MINOR__) >= 406) +#define H5_GCC_DIAG_OFF(x) H5_DIAG_OFF(x) +#define H5_GCC_DIAG_ON(x) H5_DIAG_ON(x) #else #define H5_GCC_DIAG_OFF(x) #define H5_GCC_DIAG_ON(x) #endif +/* Macros for enabling/disabling particular clang-only warnings. 
+ */ +#if defined(__clang__) +#define H5_CLANG_DIAG_OFF(x) H5_DIAG_OFF(x) +#define H5_CLANG_DIAG_ON(x) H5_DIAG_ON(x) +#else +#define H5_CLANG_DIAG_OFF(x) +#define H5_CLANG_DIAG_ON(x) +#endif + +/* Macros for enabling/disabling particular GCC / clang warnings. + * These macros should be used for warnings supported by both gcc and clang. + */ +#if (((__GNUC__ * 100) + __GNUC_MINOR__) >= 406) || defined(__clang__) +#define H5_GCC_CLANG_DIAG_OFF(x) H5_DIAG_OFF(x) +#define H5_GCC_CLANG_DIAG_ON(x) H5_DIAG_ON(x) +#else +#define H5_GCC_CLANG_DIAG_OFF(x) +#define H5_GCC_CLANG_DIAG_ON(x) +#endif + /* Version numbers */ /** * For major interface/format changes From 8304ce006ed06807545a15998bbe782c663afd2f Mon Sep 17 00:00:00 2001 From: Dana Robinson <43805+derobins@users.noreply.github.com> Date: Wed, 7 Jun 2023 05:52:12 -0700 Subject: [PATCH 074/108] Bring HDF-EOS5 and netCDF actions from develop (#3064) --- .github/workflows/hdfeos5.yml | 50 +++++++++++++++++++++++++++++++++ .github/workflows/netcdf.yml | 53 +++++++++++++++++++++++++++++++++++ 2 files changed, 103 insertions(+) create mode 100644 .github/workflows/hdfeos5.yml create mode 100644 .github/workflows/netcdf.yml diff --git a/.github/workflows/hdfeos5.yml b/.github/workflows/hdfeos5.yml new file mode 100644 index 00000000000..0deadc62546 --- /dev/null +++ b/.github/workflows/hdfeos5.yml @@ -0,0 +1,50 @@ +name: hdfeos5 + +on: + workflow_dispatch: + push: + pull_request: + branches: [ hdf5_1_12 ] + paths-ignore: + - '.github/CODEOWNERS' + - '.github/FUNDING.yml' + - 'doc/**' + - 'release_docs/**' + - 'ACKNOWLEDGEMENTS' + - 'COPYING**' + - '**.md' + +# Using concurrency to cancel any in-progress job or run +concurrency: + group: ${{ github.workflow }}-${{ github.sha || github.event.pull_request.number }} + cancel-in-progress: true + +permissions: + contents: read + +jobs: + build: + name: Build hdfeos5 + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - name: Install Autotools Dependencies (Linux) + 
run: | + sudo apt update + sudo apt install automake autoconf libtool libtool-bin + - name: Install HDF5 + run: | + ./autogen.sh + ./configure --prefix=/usr/local --with-default-api-version=v16 + make + sudo make install + - name: Install HDF-EOS5 + run: | + wget -O HDF-EOS5.2.0.tar.gz "https://git.earthdata.nasa.gov/projects/DAS/repos/hdfeos5/raw/hdf-eos5-2.0-src.tar.gz?at=refs%2Fheads%2FHDFEOS5_2.0" + tar zxvf HDF-EOS5.2.0.tar.gz + cd hdf-eos5-2.0 + ./configure CC=/usr/local/bin/h5cc --prefix=/usr/local/ --enable-install-include + make + make check + sudo make install diff --git a/.github/workflows/netcdf.yml b/.github/workflows/netcdf.yml new file mode 100644 index 00000000000..0ec7541ee80 --- /dev/null +++ b/.github/workflows/netcdf.yml @@ -0,0 +1,53 @@ +name: netCDF + +on: + workflow_dispatch: + push: + pull_request: + branches: [ hdf5_1_12 ] + paths-ignore: + - '.github/CODEOWNERS' + - '.github/FUNDING.yml' + - 'doc/**' + - 'release_docs/**' + - 'ACKNOWLEDGEMENTS' + - 'COPYING**' + - '**.md' + +# Using concurrency to cancel any in-progress job or run +concurrency: + group: ${{ github.workflow }}-${{ github.sha || github.event.pull_request.number }} + cancel-in-progress: true + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Install System dependencies + run: | + sudo apt update + sudo apt install -y libaec-dev zlib1g-dev automake autoconf libcurl4-openssl-dev libjpeg-dev wget curl bzip2 m4 flex bison cmake libzip-dev doxygen openssl libtool libtool-bin + - name: Checkout HDF5 + uses: actions/checkout@v3 + - name: Install HDF5 + run: | + ./autogen.sh + ./configure --prefix=/usr/local --disable-static --enable-shared --enable-hl --with-szlib + make -j + sudo make install -j + - name: Checkout netCDF + uses: actions/checkout@v3 + with: + repository: unidata/netcdf-c + path: netcdf-c + - name: Test netCDF + run: | + cd netcdf-c + autoreconf -if + CFLAGS=${CFLAGS} LDFLAGS=${LDFLAGS} LD_LIBRARY_PATH=${LD_LIBRARY_PATH} ./configure --enable-hdf5 
--enable-dap --disable-dap-remote-tests --enable-doxygen --enable-external-server-tests + cat config.log + cat libnetcdf.settings + CFLAGS=${CFLAGS} LDFLAGS=${LDFLAGS} LD_LIBRARY_PATH=${LD_LIBRARY_PATH} make -j + CFLAGS=${CFLAGS} LDFLAGS=${LDFLAGS} LD_LIBRARY_PATH=${LD_LIBRARY_PATH} make check TESTS="" -j + LD_LIBRARY_PATH="/home/runner/work/hdf5/hdf5/netcdf-c/liblib/.libs:/usr/local/lib:${LD_LIBRARY_PATH}" + CFLAGS=${CFLAGS} LDFLAGS=${LDFLAGS} LD_LIBRARY_PATH=${LD_LIBRARY_PATH} make check -j From 37efa90085f6c34db6d5947f3320c3d519ee3653 Mon Sep 17 00:00:00 2001 From: Dana Robinson <43805+derobins@users.noreply.github.com> Date: Wed, 7 Jun 2023 08:08:33 -0700 Subject: [PATCH 075/108] Remove unused cruft from bin dir (#3071) --- bin/chkconfigure | 82 ------------------------ bin/dependencies | 50 --------------- bin/deploy | 57 ----------------- bin/distdep | 26 -------- bin/errors | 138 ---------------------------------------- bin/format_source | 4 +- bin/format_source_patch | 34 ---------- bin/gcov_script | 50 --------------- bin/mkdirs | 32 ---------- bin/newer | 42 ------------ bin/yodconfigure | 75 ---------------------- config/conclude.am | 4 +- config/examples.am | 4 +- 13 files changed, 5 insertions(+), 593 deletions(-) delete mode 100755 bin/chkconfigure delete mode 100755 bin/dependencies delete mode 100755 bin/deploy delete mode 100755 bin/distdep delete mode 100755 bin/errors delete mode 100755 bin/format_source_patch delete mode 100755 bin/gcov_script delete mode 100755 bin/mkdirs delete mode 100755 bin/newer delete mode 100755 bin/yodconfigure diff --git a/bin/chkconfigure b/bin/chkconfigure deleted file mode 100755 index db4010cc3fa..00000000000 --- a/bin/chkconfigure +++ /dev/null @@ -1,82 +0,0 @@ -#!/bin/sh -## -## Copyright by The HDF Group. -## All rights reserved. -## -## This file is part of HDF5. 
The full HDF5 copyright notice, including -## terms governing use, modification, and redistribution, is contained in -## the COPYING file, which can be found at the root of the source code -## distribution tree, or in https://www.hdfgroup.org/licenses. -## If you do not have access to either file, you may request a copy from -## help@hdfgroup.org. -## -# Check that all the configure files are properly generated. -# -# Programmer: Albert Cheng -# Created Date: 2004/12/07 - -# -# Configure: should be generated by autoconf version 2.69. -# autoconf: should be of version 2.69. - -# variable initialization -nerrors=0 -AUTOCONFVERSION=2.69 -AUTOCONFVERSIONLEAD='Generated by GNU Autoconf' -CONFIGUREFILES="configure" - - -# Function definitions -# -# PRINTMSG -# Print a one line message left justified in a field of 70 characters -# without newline. More output for this line later. -# -PRINTMSG() { - SPACES=" " - echo "$* $SPACES" | cut -c1-70 | tr -d '\012' -} - -# print result passed. -PASSED() { - echo " PASSED" -} - -# print result failed. -FAILED() { - echo "*FAILED*" -} - - -# Main body - -# Check configure files -# The autoconf version should be among the first 5 lines. -echo "Check autoconf version. Should be version $AUTOCONFVERSION" -for xf in $CONFIGUREFILES; do - PRINTMSG $xf - if [ ! -f $xf ]; then - FAILED - echo File not found - nerrors=`expr $nerrors + 1` - continue - fi - autoconf_version=`head -5 $xf | grep "$AUTOCONFVERSIONLEAD"` - echo $autoconf_version | grep "$AUTOCONFVERSIONLEAD $AUTOCONFVERSION" > /dev/null 2>&1 - if [ $? 
-eq 0 ]; then - PASSED - else - FAILED - echo "Expected: $AUTOCONFVERSIONLEAD $AUTOCONFVERSION" - echo "Got: $autoconf_version" - nerrors=`expr $nerrors + 1` - fi -done - - -# Summary -echo $0 found $nerrors errors -if [ $nerrors != 0 ]; then - exit 1 -fi -exit 0 diff --git a/bin/dependencies b/bin/dependencies deleted file mode 100755 index b2f23958c34..00000000000 --- a/bin/dependencies +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env perl -# -# Copyright by The HDF Group. -# All rights reserved. -# -# This file is part of HDF5. The full HDF5 copyright notice, including -# terms governing use, modification, and redistribution, is contained in -# the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://www.hdfgroup.org/licenses. -# If you do not have access to either file, you may request a copy from -# help@hdfgroup.org. -# -use warnings; - -my $depend_file; -my $new_depend_file; -my $srcdir; -my $top_srcdir; -my $top_builddir; - -while ($_ = shift @ARGV) { - if (/^--top_srcdir=([^ \t\n]*)/) { - $top_srcdir = $1; - $top_srcdir =~ s/\+/\\\+/g; - $top_srcdir =~ s/\./\\\./g; - } elsif (/^--top_builddir=([^ \t\n]*)/) { - $top_builddir = $1; - $top_builddir =~ s/\+/\\\+/g; - $top_builddir =~ s/\./\\\./g; - } else { - $depend_file = $_; - $new_depend_file = "$_.new"; - last; - } -} - -open(DEPEND, "<$depend_file") || die "cannot open file $depend_file: $!\n"; -open(NEW, ">$new_depend_file") || die "cannot open file $new_depend_file: $!\n"; - -while () { - s/\.o(\b)/\.lo$1/g; - s/ $top_srcdir/ \$\(top_srcdir\)/g; - s/ $top_builddir/ \$\(top_builddir\)/g; - print NEW $_; -} - -close(DEPEND); -close(NEW); - -`mv $new_depend_file $depend_file`; diff --git a/bin/deploy b/bin/deploy deleted file mode 100755 index 818fa0722e6..00000000000 --- a/bin/deploy +++ /dev/null @@ -1,57 +0,0 @@ -#!/bin/sh -# -# Copyright by The HDF Group. -# All rights reserved. -# -# This file is part of HDF5. 
The full HDF5 copyright notice, including -# terms governing use, modification, and redistribution, is contained in -# the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://www.hdfgroup.org/licenses. -# If you do not have access to either file, you may request a copy from -# help@hdfgroup.org. -# -# Deploy the HDF5 binary. -# -# Programmer: Albert Cheng -# Created Date: 2004/12/15 -# -# Modifications - -# Function definitions -# -# Print Usage page -USAGE() -{ -cat << EOF -Usage: $0 - Install the binary to directory - -Examples: - - $ bin/deploy /usr/local/hdf5 - .... - -EOF - -} - - -# Variables - -if [ $# != 1 ]; then - USAGE - exit 1 -fi - -installdir=$1 -# create installdir if it does not exist yet. -if [ -d $installdir ] || mkdir $installdir ; then - ${MAKE:-gmake} install prefix=$installdir && \ - ( cd $installdir/bin; ./h5redeploy -force) - exit $? -else - echo $installdir is not a valid directory - USAGE - exit 1 -fi - diff --git a/bin/distdep b/bin/distdep deleted file mode 100755 index fcda2170f2d..00000000000 --- a/bin/distdep +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/sh -#! -*-perl-*- -eval 'exec perl -p -x -S $0 ${1+"$@"}' - if 0; -# -# Copyright by The HDF Group. -# All rights reserved. -# -# This file is part of HDF5. The full HDF5 copyright notice, including -# terms governing use, modification, and redistribution, is contained in -# the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://www.hdfgroup.org/licenses. -# If you do not have access to either file, you may request a copy from -# help@hdfgroup.org. -# - -# Usage: $0 [<].depend - -# Takes dependency info and generates on stdout dependencies suitable for -# distribution by removing all the system include files from the list and -# removing all but the base name of other include files (since the Makefiles -# contain the logic for searching). - -($h,$_)=/\s*\\/?$h.$`:("",$h.$_); -s|( +/\S*)*( *)|$2?" 
\\\n ":""|eg; -#s|(([-\w\.]+)/)+([-\w\.]+)|\3|g; diff --git a/bin/errors b/bin/errors deleted file mode 100755 index 9473636907b..00000000000 --- a/bin/errors +++ /dev/null @@ -1,138 +0,0 @@ -#!/usr/bin/env perl -require 5.003; -use warnings; -use Text::Tabs; - -# NOTE: THE FORMAT OF HRETURN_ERROR AND HGOTO_ERROR MACROS HAS -# CHANGED. THIS SCRIPT NO LONGER WORKS! --rpm - -# Copyright by The HDF Group. -# All rights reserved. -# -# This file is part of HDF5. The full HDF5 copyright notice, including -# terms governing use, modification, and redistribution, is contained in -# the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://www.hdfgroup.org/licenses. -# If you do not have access to either file, you may request a copy from -# help@hdfgroup.org. -# -# Robb Matzke -# 30 Aug 1997 -# -# Purpose: This script will read standard input which should be a -# function prologue followed by a C function and will emit -# on standard output the same source code with the function -# prologue containing documentation for the various errors -# that occur in the function. -# -# Errors are raised by calling HGOTO_ERROR() or -# HRETURN_ERROR(). The reason for the error message is a -# comment which appears immediately after the error macro -# call and is contained entirely on one line: -# -# HRETURN_ERROR (...); /*entry not found*/ -# -# If such a comment doesn't exist, then the previous comment -# is used, subject to the constraint that raising an error -# clears the previous comment. -# -# /* Entry not found */ -# HGOTO_ERROR (...); -# -# Emacs users can use this script interactively with the -# c-mark-function and shell-command-on-region functions which -# are normally bound to M-C-h and M-|. - - -# Split STDIN into the prolog and the function body. Preserve leading -# white space. 
-$_ = join "", ; -my ($head, $prolog, $body) = (/^(\s*)(\/\*(.*?)\*\/)?(.*)/s)[0,2,3]; -$prolog = "" unless $prolog; - -# Find each error and the comment that goes with it. -for ($_=$body,$comment=""; /\/\*|H(RETURN|GOTO)_ERROR/s;) { - $_ = $&.$'; - - if (/^H(RETURN|GOTO)_ERROR\s*\(\s*H5E_(\w+)\s*,\s*H5E_(\w+)\s*,/s) { - ($major, $minor, $_) = ($2, $3, $'); - $comment=$1 if /^.*?\)\s*;\s*\/\*\s*(.*?)\s*\*\//; - $comment =~ s/^\s*\*+\s*/ /mg; # leading asterisks. - $comment =~ s/^\s+//s; # leading white space. - $comment =~ s/\s+$//s; # trailing white space. - $comment =~ s/(\w)$/$1./s; # punctuation. - $comment ||= "***NO COMMENT***"; - $errors{"$major\000$minor\000\u$comment"} = 1; - $comment = ""; - - } else { - ($comment) = /^\/\*\s*(.*?)\s*\*\//s; - $_ = $'; - } -} - - -# Format an error so it isn't too wide. -sub fmt_error ($) { - local ($_) = @_; - - my ($prefix,$space,$err) = /^((.*?)([A-Z_0-9]+\s+[A-Z_0-9]+\s+))/; - $_ = $'; - tr/\n / /s; - my $w = 70 - length expand $prefix; - s/(.{$w}\S+)\s+(\S)/$1."\n".$space.' 'x(length $err).$2/eg; - return $prefix . $_."\n"; -} - - - -# Sort the errors by major, then minor, then comment. Duplicate -# triplets have already been removed. -sub by_triplet { - my ($a_maj, $a_min, $a_com) = split /\000/, $a; - my ($b_maj, $b_min, $b_com) = split /\000/, $b; - $a_maj cmp $b_maj || $a_min cmp $b_min || $a_com cmp $b_com; -} -@errors = map {sprintf "%-9s %-13s %s\n", split /\000/} - sort by_triplet keys %errors; - - - -# Add the list of errors to the prologue depending on the type of -# prolog. 
-if (($front, $back) = $prolog=~/^(.*?Errors:\s*?(?=\n)).*?\n\s*\*\s*\n(.*)/s) { - #| * Errors: |# - #| * __list_of_error_messages__ (zero or more lines) |# - #| * |# - print $head, "/*", $front, "\n"; - map {print fmt_error " *\t\t".$_} @errors; - print " *\n", $back, "*/", $body; - -} elsif (($front,$back) = $prolog =~ - /(.*?\n\s*ERRORS:?\s*?(?=\n)).*?\n\s*\n(.*)/s) { - #| ERRORS |# - #| __list_of_error_messages__ (zero or more lines) |# - #| |# - print $head, "/*", $front, "\n"; - map {print fmt_error " ".$_} @errors; - print "\n", $back, "*/", $body; - -} elsif ($prolog eq "") { - # No prolog present. - print $head; - print " \n/*", "-"x73, "\n * Function:\t\n *\n * Purpose:\t\n *\n"; - print " * Errors:\n"; - map {print fmt_error " *\t\t".$_} @errors; - print " *\n * Return:\tSuccess:\t\n *\n *\t\tFailure:\t\n *\n"; - print " * Programmer:\t\n *\n * Modifications:\n *\n *", '-'x73, "\n"; - print " */\n", $body; - -} else { - # Prolog format not recognized. - print $head, "/*", $prolog, "*/\n\n"; - print "/*\n * Errors returned by this function...\n"; - map {print fmt_error " *\t".$_} @errors; - print " */\n", $body; -} - - diff --git a/bin/format_source b/bin/format_source index 227d22ab420..fb0264cc3c6 100755 --- a/bin/format_source +++ b/bin/format_source @@ -5,8 +5,6 @@ # # Note that any files or directories that are excluded here should also be # added to the 'exclude' list in .github/workflows/clang-format-check.yml -# -# (Remember to update both bin/format_source and bin/format_source_patch) find . \( -type d -path ./config -prune -and -not -path ./config \) \ -or \( \( \! \( \ @@ -21,6 +19,6 @@ find . 
\( -type d -path ./config -prune -and -not -path ./config \) \ -or -name H5overflow.h \ \) \) \ -and \( -iname *.h -or -iname *.c -or -iname *.cpp -or -iname *.hpp -or -iname *.java \) \) \ - | xargs clang-format -style=file -i -fallback-style=none + | xargs -P0 -n1 clang-format -style=file -i -fallback-style=none exit 0 diff --git a/bin/format_source_patch b/bin/format_source_patch deleted file mode 100755 index 2e01455dc4e..00000000000 --- a/bin/format_source_patch +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash -# -# Recursively format all C & C++ sources and header files, except those in the -# 'config' directory and generated files, such as H5LTanalyze.c, etc. -# -# Note that any files or directories that are excluded here should also be -# added to the 'exclude' list in .github/workflows/clang-format-check.yml -# -# (Remember to update both bin/format_source and bin/format_source_patch) - -find . \( -type d -path ./config -prune -and -not -path ./config \) \ - -or \( \( \! \( \ - -name H5LTanalyze.c \ - -or -name H5LTparse.c \ - -or -name H5LTparse.h \ - -or -name H5Epubgen.h \ - -or -name H5Einit.h \ - -or -name H5Eterm.h \ - -or -name H5Edefin.h \ - -or -name H5version.h \ - -or -name H5overflow.h \ - \) \) \ - -and \( -iname *.h -or -iname *.c -or -iname *.cpp -or -iname *.hpp -or -iname *.java \) \) \ - | xargs clang-format -style=file -i -fallback-style=none - -git diff > clang_format.patch - -# Delete if 0 size -if [ ! -s clang_format.patch ] -then - rm clang_format.patch -fi - -exit 0 diff --git a/bin/gcov_script b/bin/gcov_script deleted file mode 100755 index 06b2ad1b442..00000000000 --- a/bin/gcov_script +++ /dev/null @@ -1,50 +0,0 @@ -#! /bin/sh -# -# Copyright by The HDF Group. -# All rights reserved. -# -# This file is part of HDF5. 
The full HDF5 copyright notice, including -# terms governing use, modification, and redistribution, is contained in -# the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://www.hdfgroup.org/licenses. -# If you do not have access to either file, you may request a copy from -# help@hdfgroup.org. - -# A script to generate coverage files for HDF5 using gcov. -# Configures, builds, and runs tests in-place; the output files will be placed -# in a directory called gcov_logs. -# Must be invoked from the root hdf5 directory. -# This script has been tested on kagiso. - -CFLAGS="$CFLAGS -ftest-coverage -fprofile-arcs" -export CFLAGS -LDFLAGS="$LDFLAGS -lgcov" -export LDFLAGS -CC=gcc -export CC -./configure -make -make check -mkdir gcov_logs -cd src -for j in *.h *.c - do - ln -s ../$j .libs/$j - done -cd .libs -for j in *.gcda - do - gcov -b $j >> gcov.log 2>&1 - done -for j in *.gcov - do - mv $j ../../gcov_logs - done -mv gcov.log ../../gcov_logs -for j in *.c *.h - do - rm $j - done -cd ../.. - - diff --git a/bin/mkdirs b/bin/mkdirs deleted file mode 100755 index 4e66eb5f0e2..00000000000 --- a/bin/mkdirs +++ /dev/null @@ -1,32 +0,0 @@ -#! /bin/sh -# -# Copyright by The HDF Group. -# All rights reserved. -# -# This file is part of HDF5. The full HDF5 copyright notice, including -# terms governing use, modification, and redistribution, is contained in -# the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://www.hdfgroup.org/licenses. -# If you do not have access to either file, you may request a copy from -# help@hdfgroup.org. -# -# This is a small program which will create directories n-levels deep. -# You just call it with something like: -# -# mkdirs /tmp/foo/bar/baz -# -# and it will create all the directories from /tmp down to baz which -# don't exist. -# -chmodprog="${CHMODPROG-chmod}" -mkdirprog="${MKDIRPROG-mkdir}" - -make_dir () { - if test ! 
-d $1; then - make_dir `echo $1 | sed -e 's#/[^/]*$##'` - $mkdirprog $1 - $chmodprog 755 $1 - fi -} - -make_dir `echo $1 | sed -e 's#/$##'` diff --git a/bin/newer b/bin/newer deleted file mode 100755 index c36df0353af..00000000000 --- a/bin/newer +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/sh -# -# Copyright by The HDF Group. -# All rights reserved. -# -# This file is part of HDF5. The full HDF5 copyright notice, including -# terms governing use, modification, and redistribution, is contained in -# the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://www.hdfgroup.org/licenses. -# If you do not have access to either file, you may request a copy from -# help@hdfgroup.org. -# -# Compare the modification time of file argument 1 against other file arguments. -# Return true (0) if argument 1 is newer than all others, otherwise return -# false (1). If any of the argument is not a file, return false (1). -# -# Programmer: Albert Cheng -# Created Date: 2005/07/06 -# Modification: -# Albert Cheng 2005/8/30 -# Changed from two arguments to multiple arguments. - -if test $# -lt 2; then - exit 1 -fi -if test ! -f $1; then - exit 1 -fi -f1=$1 -shift - -for f in $*; do - if test ! -f $f; then - exit 1 - fi - if test X = X`find $f1 -newer $f -print`; then - exit 1 - fi -done - -# passed all tests. Must be a file newer than all others. -exit 0 diff --git a/bin/yodconfigure b/bin/yodconfigure deleted file mode 100755 index 76f45a8dfe8..00000000000 --- a/bin/yodconfigure +++ /dev/null @@ -1,75 +0,0 @@ -#!/bin/sh -# -# Copyright by The HDF Group. -# All rights reserved. -# -# This file is part of HDF5. The full HDF5 copyright notice, including -# terms governing use, modification, and redistribution, is contained in -# the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://www.hdfgroup.org/licenses. 
-# If you do not have access to either file, you may request a copy from -# help@hdfgroup.org. -# -# Fix configure file so that it can launch configure testing executable -# via the proper launching command, e.g., yod. (Thus the name yodconfigure -# is used.) -# -# Author: Albert Cheng - -if [ "$1" = -f ]; then - FORCEMODE=yes - echo turn FORCEMODE to $FORCEMODE - shift -fi - -if [ $# -ne 1 ]; then - echo "Usage: $0 [-f] " - echo " -f apply the change even if it has been applied already." - exit 1 -fi -confile=$1 - -if [ ! -w $confile ]; then - echo "$0: $confile is not writable" - exit 1 -fi - -ACTRY_NAME="ACTRY()" -if grep ^"$ACTRY_NAME"$ $confile >/dev/null 2>&1 && [ "$FORCEMODE" != yes ]; then - echo "$0: $confile is already yodconfigure ready. Use -f to force yodconfigure again." - exit 1 -fi - -# Insert the ACTRY function after the 1st line which is the #!/bin/sh. -# Change all "eval $ac_try" commands to call ACTRY. -# auto-configure have changed the ac_try syntax from 'eval $ac_try' to -# 'eval "$ac_try"'. Thus requiring two very similar global-substitute. -# The single quotes around EOF tell shell NOT to expand or do substitution in -# the body of ed input. -# -ed - $confile <<'EOF' -1a -# ===inserted by yodconfigure ==== -# ACTRY will figure out when it is approprirate to run the command by the -# $RUNSERIAL launcher (e.g., yod -sz 1) and when to just run it as is. -# So far, ./a.out and ./conftest are names of real executable that should -# be run by $RUNSERIAL. -# -# (uncomment the echo line if you want to see what is going on.) -ACTRY() -{ -#echo ACTRY: args are: $* > /dev/tty -if [ "$1" = ./a.out -o "$1" = ./conftest ]; then -# echo $RUNSERIAL $* > /dev/tty - $RUNSERIAL $* -else - $* -fi -} -# === end of ACTRY inserted by yodconfigure ==== -. 
-g/eval $ac_try/s/eval/eval ACTRY/ -g/eval "$ac_try"/s/eval/eval ACTRY/ -w -q -EOF diff --git a/config/conclude.am b/config/conclude.am index a74c83a9775..bda1a191ab2 100644 --- a/config/conclude.am +++ b/config/conclude.am @@ -121,7 +121,7 @@ $(TEST_PROG_CHKEXE) $(TEST_PROG_PARA_CHKEXE) dummy.chkexe_: tname=$(@:.chkexe_=)$(EXEEXT);\ log=$(@:.chkexe_=.chklog); \ echo "============================"; \ - if $(top_srcdir)/bin/newer $(@:.chkexe_=.chkexe) $${tname}; then \ + if [ $(@:.chkexe_=.chkexe) -nt $${tname} ]; then \ echo "No need to test $${tname} again."; \ else \ if test -n "$(REALTIMEOUTPUT)"; then \ @@ -209,7 +209,7 @@ $(TEST_SCRIPT_CHKSH) $(TEST_SCRIPT_PARA_CHKSH) dummysh.chkexe_: chkname=`basename $(@:.chkexe_=.chkexe)`;\ log=`basename $(@:.chkexe_=.chklog)`; \ echo "============================"; \ - if $(top_srcdir)/bin/newer $${chkname} $$cmd $(SCRIPT_DEPEND); then \ + if [ $${chkname} -nt $$cmd ] && [ $${chkname} -nt $(SCRIPT_DEPEND) ]; then \ echo "No need to test $${tname} again."; \ else \ echo "============================" > $${log}; \ diff --git a/config/examples.am b/config/examples.am index 06d8d4ea049..09a9a244c9b 100644 --- a/config/examples.am +++ b/config/examples.am @@ -50,9 +50,9 @@ CLEANFILES=$(EXAMPLE_PROG) $(EXAMPLE_PROG_PARA) # How to create EXAMPLEDIR if it doesn't already exist $(EXAMPLEDIR): - -$(top_srcdir)/bin/mkdirs $@ + mkdir -p -m 755 $@ $(EXAMPLETOPDIR): - -$(top_srcdir)/bin/mkdirs $@ + mkdir -p -m 755 $@ # Install and uninstall rules. We install the source files, not the # example programs themselves. 
From bbfecfe08adba3c0a2ebd589aea8bbfce8bc4011 Mon Sep 17 00:00:00 2001 From: Dana Robinson <43805+derobins@users.noreply.github.com> Date: Wed, 7 Jun 2023 08:59:54 -0700 Subject: [PATCH 076/108] Update uthash and move to H5private.h (#3074) --- src/H5Dmpio.c | 9 ---- src/H5Ipkg.h | 9 ---- src/H5private.h | 10 +++++ src/uthash.h | 115 ++++++++++++++++++++---------------------------- 4 files changed, 57 insertions(+), 86 deletions(-) diff --git a/src/H5Dmpio.c b/src/H5Dmpio.c index cc89e443ebe..db4fd625087 100644 --- a/src/H5Dmpio.c +++ b/src/H5Dmpio.c @@ -43,15 +43,6 @@ #include "H5Sprivate.h" /* Dataspaces */ #include "H5VMprivate.h" /* Vector */ -/* uthash is an external, header-only hash table implementation. - * - * We include the file directly in src/ and #define a few functions - * to use our internal memory calls. - */ -#define uthash_malloc(sz) H5MM_malloc(sz) -#define uthash_free(ptr, sz) H5MM_free(ptr) /* Ignoring sz is intentional */ -#include "uthash.h" - #ifdef H5_HAVE_PARALLEL /****************/ diff --git a/src/H5Ipkg.h b/src/H5Ipkg.h index 2d1002c8b67..24879c1917e 100644 --- a/src/H5Ipkg.h +++ b/src/H5Ipkg.h @@ -28,15 +28,6 @@ /* Get package's private header */ #include "H5Iprivate.h" -/* uthash is an external, header-only hash table implementation. - * - * We include the file directly in src/ and #define a few functions - * to use our internal memory calls. - */ -#define uthash_malloc(sz) H5MM_malloc(sz) -#define uthash_free(ptr, sz) H5MM_free(ptr) /* Ignoring sz is intentional */ -#include "uthash.h" - /**************************/ /* Package Private Macros */ /**************************/ diff --git a/src/H5private.h b/src/H5private.h index ff23370a05c..747c77d771d 100644 --- a/src/H5private.h +++ b/src/H5private.h @@ -251,6 +251,16 @@ #include "dmalloc.h" #endif /* H5_HAVE_DMALLOC_H */ +/* uthash is an external, header-only hash table implementation. 
+ * + * We include the file directly in src/ and #define a few functions + * to use our internal memory calls. + */ +#define uthash_malloc(sz) H5MM_malloc(sz) +#define uthash_free(ptr, sz) H5MM_free(ptr) /* Ignoring sz is intentional */ +#define HASH_NONFATAL_OOM 1 /* Don't abort() on out-of-memory */ +#include "uthash.h" + /* * NT doesn't define SIGBUS, but since NT only runs on processors * that do not have alignment constraints a SIGBUS would never be diff --git a/src/uthash.h b/src/uthash.h index b738b770995..b1e5cbb9007 100644 --- a/src/uthash.h +++ b/src/uthash.h @@ -1,5 +1,5 @@ /* -Copyright (c) 2003-2018, Troy D. Hanson http://troydhanson.github.com/uthash/ +Copyright (c) 2003-2022, Troy D. Hanson https://troydhanson.github.io/uthash/ All rights reserved. Redistribution and use in source and binary forms, with or without @@ -24,12 +24,22 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #ifndef UTHASH_H #define UTHASH_H -#define UTHASH_VERSION 2.1.0 +#define UTHASH_VERSION 2.3.0 #include /* memcmp, memset, strlen */ #include /* ptrdiff_t */ #include /* exit */ +#if defined(HASH_DEFINE_OWN_STDINT) && HASH_DEFINE_OWN_STDINT +/* This codepath is provided for backward compatibility, but I plan to remove it. */ +#warning "HASH_DEFINE_OWN_STDINT is deprecated; please use HASH_NO_STDINT instead" +typedef unsigned int uint32_t; +typedef unsigned char uint8_t; +#elif defined(HASH_NO_STDINT) && HASH_NO_STDINT +#else +#include /* uint8_t, uint32_t */ +#endif + /* These macros use decltype or the earlier __typeof GNU extension. As decltype is only available in newer compilers (VS2010 or gcc 4.3+ when compiling c++ source) this code uses whatever method is needed @@ -62,23 +72,6 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
} while (0) #endif -/* a number of the hash function use uint32_t which isn't defined on Pre VS2010 */ -#if defined(_WIN32) -#if defined(_MSC_VER) && _MSC_VER >= 1600 -#include -#elif defined(__WATCOMC__) || defined(__MINGW32__) || defined(__CYGWIN__) -#include -#else -typedef unsigned int uint32_t; -typedef unsigned char uint8_t; -#endif -#elif defined(__GNUC__) && !defined(__VXWORKS__) -#include -#else -typedef unsigned int uint32_t; -typedef unsigned char uint8_t; -#endif - #ifndef uthash_malloc #define uthash_malloc(sz) malloc(sz) /* malloc fcn */ #endif @@ -92,15 +85,12 @@ typedef unsigned char uint8_t; #define uthash_strlen(s) strlen(s) #endif -#ifdef uthash_memcmp -/* This warning will not catch programs that define uthash_memcmp AFTER including uthash.h. */ -#warning "uthash_memcmp is deprecated; please use HASH_KEYCMP instead" -#else -#define uthash_memcmp(a, b, n) memcmp(a, b, n) +#ifndef HASH_FUNCTION +#define HASH_FUNCTION(keyptr, keylen, hashv) HASH_JEN(keyptr, keylen, hashv) #endif #ifndef HASH_KEYCMP -#define HASH_KEYCMP(a, b, n) uthash_memcmp(a, b, n) +#define HASH_KEYCMP(a, b, n) memcmp(a, b, n) #endif #ifndef uthash_noexpand_fyi @@ -163,7 +153,7 @@ typedef unsigned char uint8_t; #define HASH_VALUE(keyptr, keylen, hashv) \ do { \ - HASH_FCN(keyptr, keylen, hashv); \ + HASH_FUNCTION(keyptr, keylen, hashv); \ } while (0) #define HASH_FIND_BYHASHVALUE(hh, head, keyptr, keylen, hashval, out) \ @@ -420,7 +410,7 @@ typedef unsigned char uint8_t; do { \ IF_HASH_NONFATAL_OOM(int _ha_oomed = 0;) \ (add)->hh.hashv = (hashval); \ - (add)->hh.key = (char *)(keyptr); \ + (add)->hh.key = (const void *)(keyptr); \ (add)->hh.keylen = (unsigned)(keylen_in); \ if (!(head)) { \ (add)->hh.next = NULL; \ @@ -604,13 +594,6 @@ typedef unsigned char uint8_t; #define HASH_EMIT_KEY(hh, head, keyptr, fieldlen) #endif -/* default to Jenkin's hash unless overridden e.g. 
DHASH_FUNCTION=HASH_SAX */ -#ifdef HASH_FUNCTION -#define HASH_FCN HASH_FUNCTION -#else -#define HASH_FCN HASH_JEN -#endif - /* The Bernstein hash function, used in Perl prior to v5.6. Note (x<<5+x)=x*33. */ #define HASH_BER(key, keylen, hashv) \ do { \ @@ -623,7 +606,9 @@ typedef unsigned char uint8_t; } while (0) /* SAX/FNV/OAT/JEN hash functions are macro variants of those listed at - * http://eternallyconfuzzled.com/tuts/algorithms/jsw_tut_hashing.aspx */ + * http://eternallyconfuzzled.com/tuts/algorithms/jsw_tut_hashing.aspx + * (archive link: https://archive.is/Ivcan ) + */ #define HASH_SAX(key, keylen, hashv) \ do { \ unsigned _sx_i; \ @@ -715,36 +700,28 @@ typedef unsigned char uint8_t; switch (_hj_k) { \ case 11: \ hashv += ((unsigned)_hj_key[10] << 24); \ - H5_ATTR_FALLTHROUGH \ - case 10: \ - hashv += ((unsigned)_hj_key[9] << 16); \ - H5_ATTR_FALLTHROUGH \ - case 9: \ - hashv += ((unsigned)_hj_key[8] << 8); \ - H5_ATTR_FALLTHROUGH \ - case 8: \ - _hj_j += ((unsigned)_hj_key[7] << 24); \ - H5_ATTR_FALLTHROUGH \ - case 7: \ - _hj_j += ((unsigned)_hj_key[6] << 16); \ - H5_ATTR_FALLTHROUGH \ - case 6: \ - _hj_j += ((unsigned)_hj_key[5] << 8); \ - H5_ATTR_FALLTHROUGH \ - case 5: \ - _hj_j += _hj_key[4]; \ - H5_ATTR_FALLTHROUGH \ - case 4: \ - _hj_i += ((unsigned)_hj_key[3] << 24); \ - H5_ATTR_FALLTHROUGH \ - case 3: \ - _hj_i += ((unsigned)_hj_key[2] << 16); \ - H5_ATTR_FALLTHROUGH \ - case 2: \ - _hj_i += ((unsigned)_hj_key[1] << 8); \ - H5_ATTR_FALLTHROUGH \ - case 1: \ - _hj_i += _hj_key[0]; \ + H5_ATTR_FALLTHROUGH /* FALLTHROUGH */ \ + case 10 : hashv += ((unsigned)_hj_key[9] << 16); \ + H5_ATTR_FALLTHROUGH /* FALLTHROUGH */ \ + case 9 : hashv += ((unsigned)_hj_key[8] << 8); \ + H5_ATTR_FALLTHROUGH /* FALLTHROUGH */ \ + case 8 : _hj_j += ((unsigned)_hj_key[7] << 24); \ + H5_ATTR_FALLTHROUGH /* FALLTHROUGH */ \ + case 7 : _hj_j += ((unsigned)_hj_key[6] << 16); \ + H5_ATTR_FALLTHROUGH /* FALLTHROUGH */ \ + case 6 : _hj_j += ((unsigned)_hj_key[5] << 8); \ + 
H5_ATTR_FALLTHROUGH /* FALLTHROUGH */ \ + case 5 : _hj_j += _hj_key[4]; \ + H5_ATTR_FALLTHROUGH /* FALLTHROUGH */ \ + case 4 : _hj_i += ((unsigned)_hj_key[3] << 24); \ + H5_ATTR_FALLTHROUGH /* FALLTHROUGH */ \ + case 3 : _hj_i += ((unsigned)_hj_key[2] << 16); \ + H5_ATTR_FALLTHROUGH /* FALLTHROUGH */ \ + case 2 : _hj_i += ((unsigned)_hj_key[1] << 8); \ + H5_ATTR_FALLTHROUGH /* FALLTHROUGH */ \ + case 1 : _hj_i += _hj_key[0]; \ + H5_ATTR_FALLTHROUGH /* FALLTHROUGH */ \ + default:; \ } \ HASH_JEN_MIX(_hj_i, _hj_j, hashv); \ } while (0) @@ -794,6 +771,8 @@ typedef unsigned char uint8_t; hashv += *_sfh_key; \ hashv ^= hashv << 10; \ hashv += hashv >> 1; \ + break; \ + default:; \ } \ \ /* Force "avalanching" of final 127 bits */ \ @@ -899,12 +878,12 @@ typedef unsigned char uint8_t; struct UT_hash_handle *_he_thh, *_he_hh_nxt; \ UT_hash_bucket *_he_new_buckets, *_he_newbkt; \ _he_new_buckets = \ - (UT_hash_bucket *)uthash_malloc(2UL * (tbl)->num_buckets * sizeof(struct UT_hash_bucket)); \ + (UT_hash_bucket *)uthash_malloc(sizeof(struct UT_hash_bucket) * (tbl)->num_buckets * 2U); \ if (!_he_new_buckets) { \ HASH_RECORD_OOM(oomed); \ } \ else { \ - uthash_bzero(_he_new_buckets, 2UL * (tbl)->num_buckets * sizeof(struct UT_hash_bucket)); \ + uthash_bzero(_he_new_buckets, sizeof(struct UT_hash_bucket) * (tbl)->num_buckets * 2U); \ (tbl)->ideal_chain_maxlen = \ ((tbl)->num_items >> ((tbl)->log2_num_buckets + 1U)) + \ ((((tbl)->num_items & (((tbl)->num_buckets * 2U) - 1U)) != 0U) ? 
1U : 0U); \ @@ -1186,7 +1165,7 @@ typedef struct UT_hash_handle { void *next; /* next element in app order */ struct UT_hash_handle *hh_prev; /* previous hh in bucket order */ struct UT_hash_handle *hh_next; /* next hh in bucket order */ - void *key; /* ptr to enclosing struct's key */ + const void *key; /* ptr to enclosing struct's key */ unsigned keylen; /* enclosing struct's key len */ unsigned hashv; /* result of hash-fcn(key) */ } UT_hash_handle; From 65da430c1129f03db2db70350ca7348bd6f1be76 Mon Sep 17 00:00:00 2001 From: Larry Knox Date: Wed, 7 Jun 2023 12:04:37 -0500 Subject: [PATCH 077/108] Add autotools generated files to codespell (#3075) Add autotools files for codespell to skip in order to avoid codespell failing for misspelled words in files generated by autotools when running autogen.sh. --- .github/workflows/codespell.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml index 57b8137274e..98526acb332 100644 --- a/.github/workflows/codespell.yml +++ b/.github/workflows/codespell.yml @@ -13,5 +13,5 @@ jobs: - uses: actions/checkout@v3 - uses: codespell-project/actions-codespell@master with: - skip: ./bin/trace,./hl/tools/h5watch/h5watch.c,./tools/test/h5jam/tellub.c,./config/sanitizer/LICENSE,./config/sanitizer/sanitizers.cmake,./tools/test/h5repack/testfiles/*.dat - ignore_words_list: isnt,inout,nd,parms,parm,ba,offsetP,ser,ois,had,fiter,fo,clude,refere,minnum,offsetp,creat,ans:,eiter,lastr,ans,isn't,ifset,sur,trun,dne,tthe,hda,filname,te,htmp,ake,gord,numer,ro,oce,ot,msdos + skip: ./.github/workflows/codespell.yml,./bin/trace,./hl/tools/h5watch/h5watch.c,./tools/test/h5jam/tellub.c,./config/sanitizer/LICENSE,./config/sanitizer/sanitizers.cmake,./tools/test/h5repack/testfiles/*.dat,./configure,./bin/ltmain.sh,./bin/depcomp,./bin/config.guess,./bin/config.sub,./autom4te.cache,./m4/libtool.m4,./c++/src/*.html + ignore_words_list: 
ot,isnt,inout,nd,parms,parm,ba,offsetP,ser,ois,had,fiter,fo,clude,refere,minnum,offsetp,creat,ans:,eiter,lastr,ans,isn't,ifset,sur,trun,dne,tthe,hda,filname,te,htmp,ake,gord,numer,ro,oce,ot,msdos From e1e2ec3093c653858f02298b5a9c7d5cfbc9f6e4 Mon Sep 17 00:00:00 2001 From: Dana Robinson <43805+derobins@users.noreply.github.com> Date: Wed, 7 Jun 2023 10:04:50 -0700 Subject: [PATCH 078/108] Minor normalization w/ develop (#3073) Mostly bin directory * bin/release uses sha256 * bin/h5vers whitespace changes * bin/genparser warning fixes + HL changes * Add bin directory for make distclean --- Makefile.am | 2 +- autogen.sh | 10 +++++----- bin/checkposix | 4 ++-- bin/chkcopyright | 39 ++++++++++++++++++++++----------------- bin/genparser | 3 ++- bin/h5vers | 4 ++-- bin/release | 26 +++++++++++++------------- bin/restore.sh | 10 +++++----- bin/warnhist | 15 +++++++++++++++ configure.ac | 14 ++++++-------- hl/src/H5LTanalyze.c | 1 + hl/src/H5LTparse.c | 1 + 12 files changed, 75 insertions(+), 54 deletions(-) diff --git a/Makefile.am b/Makefile.am index 72d49597b4e..0f2a2472c8d 100644 --- a/Makefile.am +++ b/Makefile.am @@ -89,7 +89,7 @@ endif SUBDIRS = src $(TESTSERIAL_DIR) $(TESTPARALLEL_DIR) bin $(TOOLS_DIR) utils . \ $(CXX_DIR) $(FORTRAN_DIR) $(JAVA_DIR) $(HDF5_HL_DIR) -DIST_SUBDIRS = src test testpar tools utils . c++ fortran hl examples java +DIST_SUBDIRS = src test testpar bin tools utils . c++ fortran hl examples java # Some files generated during configure that should be cleaned DISTCLEANFILES=config/stamp1 config/stamp2 diff --git a/autogen.sh b/autogen.sh index 7c3cbcf5ac0..74c6c4516a5 100755 --- a/autogen.sh +++ b/autogen.sh @@ -1,10 +1,10 @@ #!/bin/sh # -# Copyright by The HDF Group. -# All rights reserved. -# -# This file is part of HDF5. The full HDF5 copyright notice, including -# terms governing use, modification, and redistribution, is contained in +# Copyright by The HDF Group. +# All rights reserved. +# +# This file is part of HDF5. 
The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code # distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from diff --git a/bin/checkposix b/bin/checkposix index 8aed831e0bc..d72a4103d9b 100755 --- a/bin/checkposix +++ b/bin/checkposix @@ -103,7 +103,7 @@ foreach $arg (@ARGV) { # Now find all function calls on this line which don't start with 'H' while (($name)=/\b([a-z_A-GI-Z]\w*)\s*\(/) { $_ = $'; - + # Ignore C statements that look sort of like function # calls. next if $name =~ /^(if|for|offsetof|return|sizeof|switch|while|void)$/; @@ -186,7 +186,7 @@ foreach $arg (@ARGV) { } # TESTING (not comprehensive - just noise reduction) - + # Test macros and functions (testhdf5.h) next if $name =~ /^(AddTest|TestErrPrintf|TestSummary|TestCleanup|TestShutdown)$/; next if $name =~ /^(CHECK|CHECK_PTR|CHECK_PTR_NULL|CHECK_PTR_EQ|CHECK_I)$/; diff --git a/bin/chkcopyright b/bin/chkcopyright index 83b36489cd3..756afe8ee6d 100755 --- a/bin/chkcopyright +++ b/bin/chkcopyright @@ -9,7 +9,6 @@ # distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. -# # Check Copyright notice. # Check that all the files have the proper copyright notice. @@ -39,6 +38,7 @@ NFIXFAILEDFILES=0 # Number of files fix failed. NUMBEGINLINES=60 # Copyright notice should be located within the # this number of lines at the beginning of the file. THGCOPYRIGHTSTR="Copyright by The HDF Group." +UICOPYRIGHTSTR="Copyright by the Board of Trustees of the University of Illinois" PASSEDLOG=/tmp/h5chkright_passed.$$ SKIPPEDLOG=/tmp/h5chkright_skipped.$$ @@ -113,10 +113,10 @@ BUILDCOPYRIGHT() * * * This file is part of HDF5. 
The full HDF5 copyright notice, including * * terms governing use, modification, and redistribution, is contained in * - * the COPYING file, which can be found at the root of the source code - * distribution tree, or in https://www.hdfgroup.org/licenses. - * If you do not have access to either file, you may request a copy from - * help@hdfgroup.org. + * the COPYING file, which can be found at the root of the source code * + * distribution tree, or in https://www.hdfgroup.org/licenses. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * EOF # Fortran9X source Copyright notice @@ -126,10 +126,10 @@ EOF ! * ! This file is part of HDF5. The full HDF5 copyright notice, including * ! terms governing use, modification, and redistribution, is contained in * -! the COPYING file, which can be found at the root of the source code -! distribution tree, or in https://www.hdfgroup.org/licenses. -! If you do not have access to either file, you may request a copy from -! help@hdfgroup.org. +! the COPYING file, which can be found at the root of the source code * +! distribution tree, or in https://www.hdfgroup.org/licenses. * +! If you do not have access to either file, you may request a copy from * +! help@hdfgroup.org. * EOF # HTML file Copyright notice @@ -139,10 +139,10 @@ EOF * * * This file is part of HDF5. The full HDF5 copyright notice, including * * terms governing use, modification, and redistribution, is contained in * - * the COPYING file, which can be found at the root of the source code - * distribution tree, or in https://www.hdfgroup.org/licenses. - * If you do not have access to either file, you may request a copy from - * help@hdfgroup.org. + * the COPYING file, which can be found at the root of the source code * + * distribution tree, or in https://www.hdfgroup.org/licenses. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. 
* EOF # Shell style Copyright notice @@ -341,7 +341,7 @@ FindLineInFile() # $1 file which contains the expected copyright notice. # $2 file in which to look for the copyright notice. # Copyright notice must be found within the beginning $NUMBEGINLINES of lines. -# Hunt for the particular string $UICOPYRIGHTSTR which signifies the beginning +# Hunt for the particular string $THGCOPYRIGHTSTR which signifies the beginning # of the copyright notice. # MATCH_COPYRIGHT() @@ -356,7 +356,7 @@ MATCH_COPYRIGHT() nlines=`wc -l ${COPYRIGHTFILE} | cut -f1 -d' '` # Find a line that contains the copyright string and its line number in # the file. - begin=`FindLineInFile "${UICOPYRIGHTSTR}" $f` + begin=`FindLineInFile "${THGCOPYRIGHTSTR}" $f` if [ "$begin" -le 0 ] ; then # Not found, generate an empty dummy file cp /dev/null ${EXTRACTEDFILE} @@ -381,7 +381,7 @@ MATCH_COPYRIGHT() # $1 file which contains the expected copyright notice. # $2 file in which to look for the copyright notice. # Copyright notice must be found within the beginning $NUMBEGINLINES of lines. -# Hunt for the particular string $UICOPYRIGHTSTR which signifies the beginning +# Hunt for the particular string $THGCOPYRIGHTSTR which signifies the beginning # of the copyright notice. # FIX_COPYRIGHT() @@ -404,7 +404,12 @@ FIX_COPYRIGHT() # the file. insertbegin=`FindLineInFile "${THGCOPYRIGHTSTR}" $f` if [ $insertbegin -gt 0 ]; then - insertend=`expr $insertbegin + $nlines` # no need to -1. See below. + insertUIbegin=`FindLineInFile "${UICOPYRIGHTSTR}" $f` + if [ $insertUIbegin -gt 0 ]; then + insertend=`expr $insertbegin + $nlines + 1` + else + insertend=`expr $insertbegin + $nlines` + fi else insertbegin=`FindLineInFile "${UICOPYRIGHTSTR}" $f` if [ $insertbegin -gt 0 ]; then diff --git a/bin/genparser b/bin/genparser index bb44a91e650..619dbfaa3ed 100755 --- a/bin/genparser +++ b/bin/genparser @@ -3,7 +3,7 @@ # Copyright by The HDF Group. # All rights reserved. # -# This file is part of HDF5. 
The full HDF5 copyright notice, including +# This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code # distribution tree, or in https://www.hdfgroup.org/licenses. @@ -236,6 +236,7 @@ do echo '#pragma GCC diagnostic ignored "-Wsign-conversion" ' >> tmp.out echo '#pragma GCC diagnostic ignored "-Wstrict-overflow" ' >> tmp.out echo '#pragma GCC diagnostic ignored "-Wstrict-prototypes" ' >> tmp.out + echo '#pragma GCC diagnostic ignored "-Wimplicit-fallthrough" ' >> tmp.out echo '#if !defined (__clang__) ' >> tmp.out echo '#pragma GCC diagnostic ignored "-Wlarger-than=" ' >> tmp.out echo '#pragma GCC diagnostic ignored "-Wsuggest-attribute=const" ' >> tmp.out diff --git a/bin/h5vers b/bin/h5vers index a78c24d71a5..6716794098f 100755 --- a/bin/h5vers +++ b/bin/h5vers @@ -376,7 +376,7 @@ if ($H5_JAVA) { my $version_string2 = sprintf("%d, %d, %d", @newver[0,1,2]); $data =~ s/\@version HDF5 .*
    /\@version HDF5 $version_string1
    /; - $data =~ s/ public final static int LIB_VERSION\[\] = \{ \d*, \d*, \d* \};/ public final static int LIB_VERSION[] = \{ $version_string2 \};/; + $data =~ s/ public final static int LIB_VERSION\[\] = \{\d*,.\d*,.\d*\};/ public final static int LIB_VERSION[] = \{$version_string2\};/; write_file($H5_JAVA, $data); } @@ -393,7 +393,7 @@ if ($TESTH5_JAVA) { my $version_string1 = sprintf("%d, %d, %d", @newver[0,1,2]); my $version_string2 = sprintf("int majnum = %d, minnum = %d, relnum = %d", @newver[0,1,2]); - $data =~ s/ int libversion\[\] = \{ .* \};/ int libversion\[\] = \{ $version_string1 \};/; + $data =~ s/ int libversion\[\] = \{.*\};/ int libversion\[\] = \{$version_string1\};/; $data =~ s/ int majnum = \d*, minnum = \d*, relnum = \d*;/ $version_string2;/; write_file($TESTH5_JAVA, $data); diff --git a/bin/release b/bin/release index 359d7c0aff4..9435657d09d 100755 --- a/bin/release +++ b/bin/release @@ -52,23 +52,23 @@ for compressing the resulting tar archive (if none are given then information is available in the README_HPC file. doc -- produce the latest doc tree in addition to the archive. -An md5 checksum is produced for each archive created and stored in the md5 file. +A sha256 checksum is produced for each archive created and stored in the sha256 file. Examples: $ bin/release -d /tmp /tmp/hdf5-1.8.13-RELEASE.txt - /tmp/hdf5-1.8.13.md5 + /tmp/hdf5-1.8.13.sha256 /tmp/hdf5-1.8.13.tar $ bin/release -d /tmp gzip /tmp/hdf5-1.8.13-RELEASE.txt - /tmp/hdf5-1.8.13.md5 + /tmp/hdf5-1.8.13.sha256 /tmp/hdf5-1.8.13.tar.gz $ bin/release -d /tmp tar gzip zip /tmp/hdf5-1.8.13-RELEASE.txt - /tmp/hdf5-1.8.13.md5 + /tmp/hdf5-1.8.13.sha256 /tmp/hdf5-1.8.13.tar /tmp/hdf5-1.8.13.tar.gz /tmp/hdf5-1.8.13.tar.zip @@ -531,43 +531,43 @@ test "$verbose" && echo " Running tar..." 
1>&2 (cd "$tmpdir" && exec tar -ch --exclude-vcs -f "$HDF5_VERS.tar" "./$HDF5_VERS" || exit 1 ) # Compress -MD5file=$HDF5_VERS.md5 -cp /dev/null $DEST/$MD5file +SHA256=$HDF5_VERS.sha256 +cp /dev/null $DEST/$SHA256 for comp in $methods; do case $comp in tar) cp -p $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.tar - (cd $DEST; md5sum $HDF5_VERS.tar >> $MD5file) + (cd $DEST; sha256sum $HDF5_VERS.tar >> $SHA256) ;; gzip) test "$verbose" && echo " Running gzip..." 1>&2 gzip -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.gz - (cd $DEST; md5sum $HDF5_VERS.tar.gz >> $MD5file) + (cd $DEST; sha256sum $HDF5_VERS.tar.gz >> $SHA256) ;; cmake-tgz) test "$verbose" && echo " Creating CMake tar.gz file..." 1>&2 tar2cmaketgz $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/CMake-$HDF5_VERS.tar.gz 1>&2 - (cd $DEST; md5sum CMake-$HDF5_VERS.tar.gz >> $MD5file) + (cd $DEST; sha256sum CMake-$HDF5_VERS.tar.gz >> $SHA256) ;; hpc-cmake-tgz) test "$verbose" && echo " Creating HPC-CMake tar.gz file..." 1>&2 tar2hpccmaketgz $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/HPC-CMake-$HDF5_VERS.tar.gz 1>&2 - (cd $DEST; md5sum HPC-CMake-$HDF5_VERS.tar.gz >> $MD5file) + (cd $DEST; sha256sum HPC-CMake-$HDF5_VERS.tar.gz >> $SHA256) ;; bzip2) test "$verbose" && echo " Running bzip2..." 1>&2 bzip2 -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.bz2 - (cd $DEST; md5sum $HDF5_VERS.tar.bz2 >> $MD5file) + (cd $DEST; sha256sum $HDF5_VERS.tar.bz2 >> $SHA256) ;; zip) test "$verbose" && echo " Creating zip ball..." 1>&2 tar2zip $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.zip 1>&2 - (cd $DEST; md5sum $HDF5_VERS.zip >> $MD5file) + (cd $DEST; sha256sum $HDF5_VERS.zip >> $SHA256) ;; cmake-zip) test "$verbose" && echo " Creating CMake-zip ball..." 
1>&2 tar2cmakezip $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/CMake-$HDF5_VERS.zip 1>&2 - (cd $DEST; md5sum CMake-$HDF5_VERS.zip >> $MD5file) + (cd $DEST; sha256sum CMake-$HDF5_VERS.zip >> $SHA256) ;; doc) if [ "${DOCVERSION}" = "" ]; then diff --git a/bin/restore.sh b/bin/restore.sh index 059757276cf..85ebd1c77a7 100755 --- a/bin/restore.sh +++ b/bin/restore.sh @@ -1,10 +1,10 @@ #!/bin/sh # -# Copyright by The HDF Group. -# All rights reserved. -# -# This file is part of HDF5. The full HDF5 copyright notice, including -# terms governing use, modification, and redistribution, is contained in +# Copyright by The HDF Group. +# All rights reserved. +# +# This file is part of HDF5. The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code # distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from diff --git a/bin/warnhist b/bin/warnhist index 680a0e88305..fc4220930e6 100755 --- a/bin/warnhist +++ b/bin/warnhist @@ -227,6 +227,9 @@ while (<>) { # Skip variables with the word 'warning' in them next if $_ =~ /_warning_/; + # Skip AMD Optimizing Compiler (aocc) lines "<#> warning(s) generated." + next if $_ =~ / warnings? generated\./; + # "Hide" the C++ '::' symbol until we've parsed out the parts of the line while($_ =~ /\:\:/) { $_ =~ s/\:\:/@@@@/g; @@ -242,6 +245,18 @@ while (<>) { } elsif($_ =~ /^\s*[Ww]arning:.*/) { $name = $last_c_name; ($toss, $warning, $extra, $extra2) = split /\:/, $_; + # Check for file-scope gcc Fortran warning output + } elsif($_ =~ /f\d\d\d: Warning:/) { + # These are interspersed with the "compiling a file" output + # when compiling with `make -j` and thus difficult to tie to + # any particular file. They are due to things like inappropriate + # build options and don't have a line number. 
+ # + # They start with f, as in f951 + $name = "(generic)"; + $line = int(rand(1000000)); # Hack to avoid counting as duplictates + + ($warning) = $_ =~ /\[(.*)\]/x; # Check for FORTRAN warning output } elsif($_ =~ /^Warning:.*/) { $name = $last_fort_name; diff --git a/configure.ac b/configure.ac index 6d4f6d7fc96..403a82b3879 100644 --- a/configure.ac +++ b/configure.ac @@ -559,7 +559,7 @@ AC_DEFINE_UNQUOTED([PAC_C_MAX_REAL_PRECISION], $PAC_C_MAX_REAL_PRECISION, [Deter AC_MSG_RESULT([$PAC_C_MAX_REAL_PRECISION]) ## ---------------------------------------------------------------------- -## Check if they would like the Fortran interface compiled +## Check if the Fortran interface should be enabled ## ## This needs to be exposed for the library info file even if Fortran is disabled. @@ -797,7 +797,7 @@ HDF_CXX=no ## AC_PROG_CXX defines some macros that Automake 1.9.x uses and will ## miss even if c++ is not enabled. AC_PROG_CXX -AC_PROG_CXXCPP ## this is checked for when AC_HEADER_STDC is done +AC_PROG_CXXCPP AC_MSG_CHECKING([if c++ interface enabled]) @@ -922,7 +922,7 @@ esac ## ---------------------------------------------------------------------- ## Check which archiving tool to use. This needs to be done before -## the AM_PROG_LIBTOOL macro. +## the LT_INIT macro. ## if test -z "$AR"; then AC_CHECK_PROGS([AR], [ar xar], [:], [$PATH]) @@ -1169,7 +1169,7 @@ AC_SUBST([HDF5_TOOLS]) ## Default is to build tests and tools HDF5_TOOLS=yes -AC_MSG_CHECKING([if building tools is disabled]) +AC_MSG_CHECKING([if building tools is enabled]) AC_ARG_ENABLE([tools], [AS_HELP_STRING([--enable-tools], @@ -2050,10 +2050,8 @@ if test "X$THREADSAFE" = "Xyes"; then AC_CACHE_VAL([hdf5_cv_system_scope_threads], [AC_RUN_IFELSE( [AC_LANG_PROGRAM([ - #if STDC_HEADERS #include #include - #endif ],[ pthread_attr_t attribute; int ret; @@ -2706,8 +2704,8 @@ AC_SUBST([INTERNAL_DEBUG_OUTPUT]) ## too specialized or have huge performance hits. These ## are not listed in the "all" packages list. 
## -## all_packages="AC,B,B2,D,F,FA,FL,FS,HL,I,O,S,ST,T,Z" -all_packages="AC,B2,CX,D,F,HL,I,O,S,ST,T,Z" +## all_packages="AC,B,B2,D,F,FA,FL,FS,HL,I,O,S,T,Z" +all_packages="AC,B2,CX,D,F,HL,I,O,S,T,Z" case "X-$INTERNAL_DEBUG_OUTPUT" in X-yes|X-all) diff --git a/hl/src/H5LTanalyze.c b/hl/src/H5LTanalyze.c index ec7247d3fc3..55505c57de1 100644 --- a/hl/src/H5LTanalyze.c +++ b/hl/src/H5LTanalyze.c @@ -10,6 +10,7 @@ #pragma GCC diagnostic ignored "-Wsign-conversion" #pragma GCC diagnostic ignored "-Wstrict-overflow" #pragma GCC diagnostic ignored "-Wstrict-prototypes" +#pragma GCC diagnostic ignored "-Wimplicit-fallthrough" #if !defined (__clang__) #pragma GCC diagnostic ignored "-Wlarger-than=" #pragma GCC diagnostic ignored "-Wsuggest-attribute=const" diff --git a/hl/src/H5LTparse.c b/hl/src/H5LTparse.c index 7f552d8cb00..fe9854ee868 100644 --- a/hl/src/H5LTparse.c +++ b/hl/src/H5LTparse.c @@ -10,6 +10,7 @@ #pragma GCC diagnostic ignored "-Wsign-conversion" #pragma GCC diagnostic ignored "-Wstrict-overflow" #pragma GCC diagnostic ignored "-Wstrict-prototypes" +#pragma GCC diagnostic ignored "-Wimplicit-fallthrough" #if !defined (__clang__) #pragma GCC diagnostic ignored "-Wlarger-than=" #pragma GCC diagnostic ignored "-Wsuggest-attribute=const" From 9741c88bf368fb269626d2e3ac2609b592d010c4 Mon Sep 17 00:00:00 2001 From: Dana Robinson <43805+derobins@users.noreply.github.com> Date: Sat, 10 Jun 2023 12:19:03 -0700 Subject: [PATCH 079/108] Adds a release note for Autoconf 2.71 bump (#3092) --- release_docs/RELEASE.txt | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 9e9dde4b6ff..06d1c34a579 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -47,6 +47,18 @@ New Features Configuration: ------------- + - The minimum required version of Autoconf has been bumped to 2.71 + + This fixes a problem with the Intel oneAPI Fortran compiler's -loopopt + option 
being interpreted as a linker option (see bug fixes, below). + + This should only affect building the library from a maintenance branch + using the Autotools, when autogen.sh is used to generate the Autotools + files. + + It does NOT affect the binaries or building from the source tarballs + that we distribute at release as Autoconf does not need to be re-run. + - Added support for CMake presets file. CMake supports two main files, CMakePresets.json and CMakeUserPresets.json, @@ -243,7 +255,7 @@ Bug Fixes since HDF5-1.12.2 release checkings will remove the potential invalid read of any of these values that could be triggered by a malformed file. - (GH-2603 - 2023/04/16) + Fixes GitHub issue #2603 - Fixed potential buffer overrun issues in some object header decode routines @@ -402,6 +414,17 @@ Bug Fixes since HDF5-1.12.2 release Configuration ------------- + - Fix Intel oneAPI -loopopt Fortran option being detected as a linker flag + + Intel's new oneAPI Fortran compiler takes a -loopopt flag that is + interpreted as a linker flag by Autoconf 2.69 and earlier. This bug + only affects the Autotools when building with Intel's oneAPI when + Fortran is enabled. + + This was fixed by changing the required minimum version of Autoconf + to 2.71 in configure.ac. The release source code will be processed + with Autoconf 2.71 or later. 
+ - The accum test now passes on macOS 12+ (Monterey) w/ CMake Due to changes in the way macOS handles LD_LIBRARY_PATH, the accum test From dee5daa7a1a6e52e4fe54e18ea31b44ce5b42f88 Mon Sep 17 00:00:00 2001 From: Dana Robinson <43805+derobins@users.noreply.github.com> Date: Mon, 12 Jun 2023 12:16:59 -0700 Subject: [PATCH 080/108] Bring MD cache uthash tag list from develop (#3099) --- src/H5C.c | 213 +++++++++------------------------------------------ src/H5Cpkg.h | 33 ++++---- src/H5Ctag.c | 45 +++++------ 3 files changed, 75 insertions(+), 216 deletions(-) diff --git a/src/H5C.c b/src/H5C.c index cbe8b631b16..b57cbd15bb4 100644 --- a/src/H5C.c +++ b/src/H5C.c @@ -182,88 +182,11 @@ H5FL_SEQ_DEFINE_STATIC(H5C_cache_entry_ptr_t); * flag to determine whether writes are permitted. * * Return: Success: Pointer to the new instance. - * * Failure: NULL * * Programmer: John Mainzer * 6/2/04 * - * Modifications: - * - * JRM -- 7/20/04 - * Updated for the addition of the hash table. - * - * JRM -- 10/5/04 - * Added call to H5C_reset_cache_hit_rate_stats(). Also - * added initialization for cache_is_full flag and for - * resize_ctl. - * - * JRM -- 11/12/04 - * Added initialization for the new size_decreased field. - * - * JRM -- 11/17/04 - * Added/updated initialization for the automatic cache - * size control data structures. - * - * JRM -- 6/24/05 - * Added support for the new write_permitted field of - * the H5C_t structure. - * - * JRM -- 7/5/05 - * Added the new log_flush parameter and supporting code. - * - * JRM -- 9/21/05 - * Added the new aux_ptr parameter and supporting code. - * - * JRM -- 1/20/06 - * Added initialization of the new prefix field in H5C_t. - * - * JRM -- 3/16/06 - * Added initialization for the pinned entry related fields. - * - * JRM -- 5/31/06 - * Added initialization for the trace_file_ptr field. - * - * JRM -- 8/19/06 - * Added initialization for the flush_in_progress field. 
- * - * JRM -- 8/25/06 - * Added initialization for the slist_len_increase and - * slist_size_increase fields. These fields are used - * for sanity checking in the flush process, and are not - * compiled in unless H5C_DO_SANITY_CHECKS is TRUE. - * - * JRM -- 3/28/07 - * Added initialization for the new is_read_only and - * ro_ref_count fields. - * - * JRM -- 7/27/07 - * Added initialization for the new evictions_enabled - * field of H5C_t. - * - * JRM -- 12/31/07 - * Added initialization for the new flash cache size increase - * related fields of H5C_t. - * - * JRM -- 11/5/08 - * Added initialization for the new clean_index_size and - * dirty_index_size fields of H5C_t. - * - * - * Missing entries? - * - * - * JRM -- 4/20/20 - * Added initialization for the slist_enabled field. Recall - * that the slist is used to flush metadata cache entries - * in (roughly) increasing address order. While this is - * needed at flush and close, it is not used elsewhere. - * The slist_enabled field exists to allow us to construct - * the slist when needed, and leave it empty otherwise -- thus - * avoiding the overhead of maintaining it. - * - * JRM -- 4/29/20 - * *------------------------------------------------------------------------- */ H5C_t * @@ -296,8 +219,7 @@ H5C_create(size_t max_cache_size, size_t min_clean_size, int max_type_id, if (NULL == (cache_ptr->slist_ptr = H5SL_create(H5SL_TYPE_HADDR, NULL))) HGOTO_ERROR(H5E_CACHE, H5E_CANTCREATE, NULL, "can't create skip list") - if (NULL == (cache_ptr->tag_list = H5SL_create(H5SL_TYPE_HADDR, NULL))) - HGOTO_ERROR(H5E_CACHE, H5E_CANTCREATE, NULL, "can't create skip list for tagged entry addresses") + cache_ptr->tag_list = NULL; /* If we get this far, we should succeed. Go ahead and initialize all * the fields. 
@@ -504,7 +426,7 @@ H5C_create(size_t max_cache_size, size_t min_clean_size, int max_type_id, #ifndef NDEBUG cache_ptr->get_entry_ptr_from_addr_counter = 0; -#endif /* NDEBUG */ +#endif /* Set return value */ ret_value = cache_ptr; @@ -515,16 +437,16 @@ H5C_create(size_t max_cache_size, size_t min_clean_size, int max_type_id, if (cache_ptr->slist_ptr != NULL) H5SL_close(cache_ptr->slist_ptr); - if (cache_ptr->tag_list != NULL) - H5SL_close(cache_ptr->tag_list); + HASH_CLEAR(hh, cache_ptr->tag_list); + cache_ptr->tag_list = NULL; if (cache_ptr->log_info != NULL) H5MM_xfree(cache_ptr->log_info); cache_ptr->magic = 0; cache_ptr = H5FL_FREE(H5C_t, cache_ptr); - } /* end if */ - } /* end if */ + } + } FUNC_LEAVE_NOAPI(ret_value) } /* H5C_create() */ @@ -663,33 +585,6 @@ H5C_def_auto_resize_rpt_fcn(H5C_t *cache_ptr, } } /* H5C_def_auto_resize_rpt_fcn() */ -/*------------------------------------------------------------------------- - * Function: H5C__free_tag_list_cb - * - * Purpose: Callback function to free tag nodes from the skip list. - * - * Return: Non-negative on success/Negative on failure - * - * Programmer: Vailin Choi - * January 2014 - * - *------------------------------------------------------------------------- - */ -static herr_t -H5C__free_tag_list_cb(void *_item, void H5_ATTR_UNUSED *key, void H5_ATTR_UNUSED *op_data) -{ - H5C_tag_info_t *tag_info = (H5C_tag_info_t *)_item; - - FUNC_ENTER_STATIC_NOERR - - HDassert(tag_info); - - /* Release the item */ - tag_info = H5FL_FREE(H5C_tag_info_t, tag_info); - - FUNC_LEAVE_NOAPI(0) -} /* H5C__free_tag_list_cb() */ - /*------------------------------------------------------------------------- * * Function: H5C_prep_for_file_close @@ -722,10 +617,7 @@ H5C_prep_for_file_close(H5F_t *f) HDassert(cache_ptr); HDassert(cache_ptr->magic == H5C__H5C_T_MAGIC); - /* For now at least, it is possible to receive the - * close warning more than once -- the following - * if statement handles this. 
- */ + /* It is possible to receive the close warning more than once */ if (cache_ptr->close_warning_received) HGOTO_DONE(SUCCEED) cache_ptr->close_warning_received = TRUE; @@ -797,27 +689,15 @@ H5C_prep_for_file_close(H5F_t *f) * Programmer: John Mainzer * 6/2/04 * - * Modifications: - * - * JRM -- 5/15/20 - * - * Updated the function to enable the slist prior to the - * call to H5C__flush_invalidate_cache(). - * - * Arguably, it shouldn't be necessary to re-enable the - * slist after the call to H5C__flush_invalidate_cache(), as - * the metadata cache should be discarded. However, in the - * test code, we make multiple calls to H5C_dest(). Thus - * we re-enable the slist on failure if it and the cache - * still exist. - * *------------------------------------------------------------------------- */ herr_t H5C_dest(H5F_t *f) { - H5C_t *cache_ptr = f->shared->cache; - herr_t ret_value = SUCCEED; /* Return value */ + H5C_t *cache_ptr = f->shared->cache; + H5C_tag_info_t *item = NULL; + H5C_tag_info_t *tmp = NULL; + herr_t ret_value = SUCCEED; /* Return value */ FUNC_ENTER_NOAPI(FAIL) @@ -833,57 +713,42 @@ H5C_dest(H5F_t *f) /* Enable the slist, as it is needed in the flush */ if (H5C_set_slist_enabled(f->shared->cache, TRUE, FALSE) < 0) - HGOTO_ERROR(H5E_CACHE, H5E_SYSTEM, FAIL, "set slist enabled failed") /* Flush and invalidate all cache entries */ if (H5C__flush_invalidate_cache(f, H5C__NO_FLAGS_SET) < 0) - HGOTO_ERROR(H5E_CACHE, H5E_CANTFLUSH, FAIL, "unable to flush cache") /* Generate & write cache image if requested */ - if (cache_ptr->image_ctl.generate_image) { - + if (cache_ptr->image_ctl.generate_image) if (H5C__generate_cache_image(f, cache_ptr) < 0) - HGOTO_ERROR(H5E_CACHE, H5E_CANTCREATE, FAIL, "Can't generate metadata cache image") - } /* Question: Is it possible for cache_ptr->slist be non-null at this * point? If no, shouldn't this if statement be an assert? 
*/ if (cache_ptr->slist_ptr != NULL) { - HDassert(cache_ptr->slist_len == 0); HDassert(cache_ptr->slist_size == 0); H5SL_close(cache_ptr->slist_ptr); - cache_ptr->slist_ptr = NULL; + } - } /* end if */ - - if (cache_ptr->tag_list != NULL) { - - H5SL_destroy(cache_ptr->tag_list, H5C__free_tag_list_cb, NULL); - - cache_ptr->tag_list = NULL; - - } /* end if */ - - if (cache_ptr->log_info != NULL) { + HASH_ITER(hh, cache_ptr->tag_list, item, tmp) + { + HASH_DELETE(hh, cache_ptr->tag_list, item); + item = H5FL_FREE(H5C_tag_info_t, item); + } + if (cache_ptr->log_info != NULL) H5MM_xfree(cache_ptr->log_info); - } #ifndef NDEBUG #if H5C_DO_SANITY_CHECKS - - if (cache_ptr->get_entry_ptr_from_addr_counter > 0) { - + if (cache_ptr->get_entry_ptr_from_addr_counter > 0) HDfprintf(stdout, "*** %" PRId64 " calls to H5C_get_entry_ptr_from_add(). ***\n", cache_ptr->get_entry_ptr_from_addr_counter); - } #endif /* H5C_DO_SANITY_CHECKS */ cache_ptr->magic = 0; @@ -892,18 +757,12 @@ H5C_dest(H5F_t *f) cache_ptr = H5FL_FREE(H5C_t, cache_ptr); done: - - if ((ret_value < 0) && (cache_ptr) && (cache_ptr->slist_ptr)) { - + if (ret_value < 0 && cache_ptr && cache_ptr->slist_ptr) /* need this for test code -- see change note for details */ - if (H5C_set_slist_enabled(f->shared->cache, FALSE, FALSE) < 0) - HDONE_ERROR(H5E_CACHE, H5E_SYSTEM, FAIL, "disable slist on flush dest failure failed") - } FUNC_LEAVE_NOAPI(ret_value) - } /* H5C_dest() */ /*------------------------------------------------------------------------- @@ -8107,7 +7966,7 @@ H5C__flush_marked_entries(H5F_t *f) herr_t H5C_cork(H5C_t *cache_ptr, haddr_t obj_addr, unsigned action, hbool_t *corked) { - H5C_tag_info_t *tag_info; /* Points to a tag info struct */ + H5C_tag_info_t *tag_info = NULL; herr_t ret_value = SUCCEED; FUNC_ENTER_NOAPI_NOINIT @@ -8118,7 +7977,7 @@ H5C_cork(H5C_t *cache_ptr, haddr_t obj_addr, unsigned action, hbool_t *corked) HDassert(action == H5C__SET_CORK || action == H5C__UNCORK || action == 
H5C__GET_CORKED); /* Search the list of corked object addresses in the cache */ - tag_info = (H5C_tag_info_t *)H5SL_search(cache_ptr->tag_list, &obj_addr); + HASH_FIND(hh, cache_ptr->tag_list, &obj_addr, sizeof(haddr_t), tag_info); if (H5C__GET_CORKED == action) { HDassert(corked); @@ -8126,7 +7985,7 @@ H5C_cork(H5C_t *cache_ptr, haddr_t obj_addr, unsigned action, hbool_t *corked) *corked = TRUE; else *corked = FALSE; - } /* end if */ + } else { /* Sanity check */ HDassert(H5C__SET_CORK == action || H5C__UNCORK == action); @@ -8142,25 +8001,24 @@ H5C_cork(H5C_t *cache_ptr, haddr_t obj_addr, unsigned action, hbool_t *corked) /* Set the tag for all entries */ tag_info->tag = obj_addr; - /* Insert tag info into skip list */ - if (H5SL_insert(cache_ptr->tag_list, tag_info, &(tag_info->tag)) < 0) - HGOTO_ERROR(H5E_CACHE, H5E_CANTINSERT, FAIL, "can't insert tag info in skip list") - } /* end if */ + /* Insert tag info into hash table */ + HASH_ADD(hh, cache_ptr->tag_list, tag, sizeof(haddr_t), tag_info); + } else { /* Check for object already corked */ if (tag_info->corked) HGOTO_ERROR(H5E_CACHE, H5E_CANTCORK, FAIL, "object already corked") HDassert(tag_info->entry_cnt > 0 && tag_info->head); - } /* end else */ + } /* Set the corked status for the entire object */ tag_info->corked = TRUE; cache_ptr->num_objs_corked++; - - } /* end if */ + } else { /* Sanity check */ - HDassert(tag_info); + if (NULL == tag_info) + HGOTO_ERROR(H5E_CACHE, H5E_CANTUNCORK, FAIL, "tag info pointer is NULL") /* Check for already uncorked */ if (!tag_info->corked) @@ -8175,16 +8033,15 @@ H5C_cork(H5C_t *cache_ptr, haddr_t obj_addr, unsigned action, hbool_t *corked) /* Sanity check */ HDassert(NULL == tag_info->head); - if (H5SL_remove(cache_ptr->tag_list, &(tag_info->tag)) != tag_info) - HGOTO_ERROR(H5E_CACHE, H5E_CANTREMOVE, FAIL, "can't remove tag info from list") + HASH_DELETE(hh, cache_ptr->tag_list, tag_info); /* Release the tag info */ tag_info = H5FL_FREE(H5C_tag_info_t, tag_info); - } /* 
end if */ + } else HDassert(NULL != tag_info->head); - } /* end else */ - } /* end else */ + } + } done: FUNC_LEAVE_NOAPI(ret_value) diff --git a/src/H5Cpkg.h b/src/H5Cpkg.h index 91bc94e9439..64b0c5df9c6 100644 --- a/src/H5Cpkg.h +++ b/src/H5Cpkg.h @@ -3565,15 +3565,17 @@ if ( ( (entry_ptr) == NULL ) || \ * * The fields of this structure are discussed individually below: * - * tag: Address (i.e. "tag") of the object header for all the entries + * tag: Address (i.e. "tag") of the object header for all the entries * corresponding to parts of that object. * - * head: Head of doubly-linked list of all entries belonging to the tag. + * head: Head of doubly-linked list of all entries belonging to the tag. * - * entry_cnt: Number of entries on linked list of entries for this tag. + * entry_cnt: Number of entries on linked list of entries for this tag. * - * corked: Boolean flag indicating whether entries for this object can be - * evicted. + * corked: Boolean flag indicating whether entries for this object can be + * evicted. + * + * hh: uthash hash table handle (must be last) * ****************************************************************************/ typedef struct H5C_tag_info_t { @@ -3581,6 +3583,9 @@ typedef struct H5C_tag_info_t { H5C_cache_entry_t *head; /* Head of the list of entries for this tag */ size_t entry_cnt; /* Number of entries on list */ hbool_t corked; /* Whether this object is corked */ + + /* Hash table fields */ + UT_hash_handle hh; /* Hash table handle (must be LAST) */ } H5C_tag_info_t; @@ -3973,15 +3978,15 @@ typedef struct H5C_tag_info_t { * * The following fields are maintained to facilitate this. * - * tag_list: A skip list to track entries that belong to an object. - * Each H5C_tag_info_t struct on the tag list corresponds to - * a particular object in the file. Tagged entries can be - * flushed or evicted as a group, or corked to prevent entries - * from being evicted from the cache. 
+ * tag_list: A collection to track entries that belong to an object. + * Each H5C_tag_info_t struct on the tag list corresponds to a + * particular object in the file. Tagged entries can be flushed + * or evicted as a group, or corked to prevent entries from being + * evicted from the cache. * - * "Global" entries, like the superblock and the file's - * freelist, as well as shared entries like global - * heaps and shared object header messages, are not tagged. + * "Global" entries, like the superblock and the file's freelist, + * as well as shared entries like global heaps and shared object + * header messages, are not tagged. * * ignore_tags: Boolean flag to disable tag validation during entry insertion. * @@ -4862,7 +4867,7 @@ struct H5C_t { #endif /* H5C_DO_SANITY_CHECKS */ /* Fields for maintaining list of tagged entries */ - H5SL_t * tag_list; + H5C_tag_info_t * tag_list; hbool_t ignore_tags; uint32_t num_objs_corked; diff --git a/src/H5Ctag.c b/src/H5Ctag.c index 01d9a7376a6..e4b738f8f3e 100644 --- a/src/H5Ctag.c +++ b/src/H5Ctag.c @@ -239,7 +239,7 @@ H5C__tag_entry(H5C_t *cache, H5C_cache_entry_t *entry) #endif /* Search the list of tagged object addresses in the cache */ - tag_info = (H5C_tag_info_t *)H5SL_search(cache->tag_list, &tag); + HASH_FIND(hh, cache->tag_list, &tag, sizeof(haddr_t), tag_info); /* Check if this is the first entry for this tagged object */ if (NULL == tag_info) { @@ -250,10 +250,9 @@ H5C__tag_entry(H5C_t *cache, H5C_cache_entry_t *entry) /* Set the tag for all entries */ tag_info->tag = tag; - /* Insert tag info into skip list */ - if (H5SL_insert(cache->tag_list, tag_info, &(tag_info->tag)) < 0) - HGOTO_ERROR(H5E_CACHE, H5E_CANTINSERT, FAIL, "can't insert tag info in skip list") - } /* end if */ + /* Insert tag info into the hash table */ + HASH_ADD(hh, cache->tag_list, tag, sizeof(haddr_t), tag_info); + } else HDassert(tag_info->corked || (tag_info->entry_cnt > 0 && tag_info->head)); @@ -294,7 +293,7 @@ H5C__untag_entry(H5C_t 
*cache, H5C_cache_entry_t *entry) H5C_tag_info_t *tag_info; /* Points to a tag info struct */ herr_t ret_value = SUCCEED; /* Return value */ - FUNC_ENTER_PACKAGE + FUNC_ENTER_PACKAGE_NOERR /* Assertions */ HDassert(cache != NULL); @@ -322,17 +321,14 @@ H5C__untag_entry(H5C_t *cache, H5C_cache_entry_t *entry) /* Sanity check */ HDassert(NULL == tag_info->head); - if (H5SL_remove(cache->tag_list, &(tag_info->tag)) != tag_info) - HGOTO_ERROR(H5E_CACHE, H5E_CANTREMOVE, FAIL, "can't remove tag info from list") - /* Release the tag info */ + HASH_DELETE(hh, cache->tag_list, tag_info); tag_info = H5FL_FREE(H5C_tag_info_t, tag_info); - } /* end if */ + } else HDassert(tag_info->corked || NULL != tag_info->head); - } /* end if */ + } -done: FUNC_LEAVE_NOAPI(ret_value) } /* H5C__untag_entry */ @@ -363,7 +359,7 @@ H5C__iter_tagged_entries_real(H5C_t *cache, haddr_t tag, H5C_tag_iter_cb_t cb, v HDassert(cache->magic == H5C__H5C_T_MAGIC); /* Search the list of tagged object addresses in the cache */ - tag_info = (H5C_tag_info_t *)H5SL_search(cache->tag_list, &tag); + HASH_FIND(hh, cache->tag_list, &tag, sizeof(haddr_t), tag_info); /* If there's any entries for this tag, iterate over them */ if (tag_info) { @@ -755,7 +751,7 @@ H5C_flush_tagged_entries(H5F_t *f, haddr_t tag) * value specified by src_tag and changes it to the value * specified by dest_tag. * - * Return: SUCCEED or FAIL. 
+ * Return: SUCCEED/FAIL * * Programmer: Mike McGreevy * March 17, 2010 @@ -765,27 +761,28 @@ H5C_flush_tagged_entries(H5F_t *f, haddr_t tag) herr_t H5C_retag_entries(H5C_t *cache, haddr_t src_tag, haddr_t dest_tag) { - H5C_tag_info_t *tag_info; /* Points to a tag info struct */ - herr_t ret_value = SUCCEED; /* Return value */ + H5C_tag_info_t *tag_info = NULL; /* Function enter macro */ - FUNC_ENTER_NOAPI(FAIL) + FUNC_ENTER_NOAPI_NOERR /* Sanity check */ HDassert(cache); /* Remove tag info from tag list */ - if (NULL != (tag_info = (H5C_tag_info_t *)H5SL_remove(cache->tag_list, &src_tag))) { + HASH_FIND(hh, cache->tag_list, &src_tag, sizeof(haddr_t), tag_info); + if (NULL != tag_info) { + /* Remove info with old tag */ + HASH_DELETE(hh, cache->tag_list, tag_info); + /* Change to new tag */ tag_info->tag = dest_tag; - /* Re-insert tag info into skip list */ - if (H5SL_insert(cache->tag_list, tag_info, &(tag_info->tag)) < 0) - HGOTO_ERROR(H5E_CACHE, H5E_CANTINSERT, FAIL, "can't insert tag info in skip list") - } /* end if */ + /* Re-insert tag info into tag list */ + HASH_ADD(hh, cache->tag_list, tag, sizeof(haddr_t), tag_info); + } -done: - FUNC_LEAVE_NOAPI(ret_value) + FUNC_LEAVE_NOAPI(SUCCEED) } /* H5C_retag_entries() */ /*------------------------------------------------------------------------- From 4a6872cbf62380ec0ee3394d5e0710de2216b21d Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Thu, 15 Jun 2023 09:50:02 -0500 Subject: [PATCH 081/108] Merges from develop/1.14 (#3118) * Fix release action that allows files to be uploaded. Add autotools h5copy help test. Update cmake tools.cmake file for clang-tidy. CMake build is missing H5FDsubfiling.h macros improved for configure output capture. 
Remove obsolete files * Revert parallel config with TS * Fix doxygen warnings --- .github/workflows/cmake-ctest.yml | 12 +- .github/workflows/main.yml | 33 ++- .github/workflows/release.yml | 34 ++-- config/cmake/ConfigureChecks.cmake | 5 + config/cmake/FindSZIP.cmake | 129 ------------ config/cmake/HDF5Macros.cmake | 12 +- config/cmake/HDF5UseFortran.cmake | 29 +-- config/cmake/HDFCompilerFlags.cmake | 5 +- config/cmake/HDFLibMacros.cmake | 4 +- config/cmake/UserMacros/Windows_MT.cmake | 1 + config/sanitizer/tools.cmake | 135 ++++++------- doxygen/Doxyfile.in | 2 +- doxygen/dox/LearnBasics3.dox | 2 +- doxygen/dox/ReferenceManual.dox | 4 - doxygen/examples/menus/core_menu.md | 4 - doxygen/examples/tables/propertyLists.dox | 28 --- java/CMakeLists.txt | 2 +- java/examples/datasets/CMakeLists.txt | 26 ++- java/examples/datatypes/CMakeLists.txt | 17 +- java/examples/groups/CMakeLists.txt | 30 ++- java/examples/intro/CMakeLists.txt | 17 +- java/src/Makefile.am | 9 +- java/src/hdf/hdf5lib/CMakeLists.txt | 13 -- java/src/hdf/hdf5lib/H5.java | 12 +- java/src/hdf/hdf5lib/HDF5GroupInfo.java | 188 ------------------ java/src/hdf/hdf5lib/HDFArray.java | 2 + .../hdf/hdf5lib/callbacks/package-info.java | 1 - .../hdf/hdf5lib/exceptions/package-info.java | 3 +- java/src/hdf/hdf5lib/package-info.java | 1 - .../hdf/hdf5lib/structs/H5FD_hdfs_fapl_t.java | 1 - .../hdf/hdf5lib/structs/H5FD_ros3_fapl_t.java | 1 - .../src/hdf/hdf5lib/structs/package-info.java | 1 - java/src/jni/h5fImp.c | 8 +- java/src/jni/h5lImp.c | 8 +- java/src/jni/h5pDAPLImp.c | 2 + java/src/jni/h5pFAPLImp.c | 2 + java/src/jni/h5tImp.c | 2 +- m4/aclocal_fc.f90 | 20 +- release_docs/RELEASE.txt | 10 +- src/H5Dpublic.h | 42 ++-- src/H5Fpublic.h | 16 +- src/H5Rpublic.h | 14 +- 42 files changed, 320 insertions(+), 567 deletions(-) delete mode 100644 config/cmake/FindSZIP.cmake delete mode 100644 java/src/hdf/hdf5lib/HDF5GroupInfo.java diff --git a/.github/workflows/cmake-ctest.yml b/.github/workflows/cmake-ctest.yml 
index 4363d0f8538..ffd56e2f82e 100644 --- a/.github/workflows/cmake-ctest.yml +++ b/.github/workflows/cmake-ctest.yml @@ -96,7 +96,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Install CMake Dependencies (Linux) - run: sudo apt-get install ninja-build + run: sudo apt-get install ninja-build doxygen graphviz - name: Set file base name (Linux) id: set-file-base @@ -151,6 +151,14 @@ jobs: path: ${{ runner.workspace }}/build/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204.tar.gz if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` + # Save doxygen files created by ctest script + - name: Save published doxygen (Linux) + uses: actions/upload-artifact@v3 + with: + name: docs-doxygen + path: ${{ runner.workspace }}/hdf5/build/ci-StdShar-GNUC/hdf5lib_docs/html + if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` + build_and_test_mac: # MacOS w/ Clang + CMake # @@ -158,7 +166,7 @@ jobs: runs-on: macos-11 steps: - name: Install Dependencies (MacOS) - run: brew install ninja + run: brew install ninja doxygen - name: Set file base name (MacOS) id: set-file-base diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 3a8854d0936..68c8d9e31c4 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -81,6 +81,7 @@ jobs: cpp: ON fortran: OFF java: ON + docs: OFF libaecfc: ON localaec: OFF zlibfc: ON @@ -88,6 +89,7 @@ jobs: parallel: OFF mirror_vfd: OFF direct_vfd: OFF + ros3_vfd: OFF generator: "-G \"Visual Studio 17 2022\" -A x64" run_tests: true @@ -100,6 +102,7 @@ jobs: cpp: ON fortran: ON java: ON + docs: ON libaecfc: ON localaec: OFF zlibfc: ON @@ -107,6 +110,7 @@ jobs: parallel: OFF mirror_vfd: ON direct_vfd: ON + ros3_vfd: OFF toolchain: "config/toolchain/gcc.cmake" generator: "-G Ninja" run_tests: true @@ -120,9 +124,11 @@ jobs: cpp: enable fortran: enable java: enable + docs: disable parallel: disable mirror_vfd: enable direct_vfd: enable + ros3_vfd: enable 
deprec_sym: enable default_api: v112 szip: yes @@ -141,9 +147,11 @@ jobs: cpp: disable fortran: enable java: disable + docs: disable parallel: enable mirror_vfd: disable direct_vfd: disable + ros3_vfd: disable deprec_sym: enable default_api: v112 szip: yes @@ -161,6 +169,7 @@ jobs: cpp: ON fortran: OFF java: ON + docs: OFF libaecfc: ON localaec: OFF zlibfc: ON @@ -168,6 +177,7 @@ jobs: parallel: OFF mirror_vfd: ON direct_vfd: OFF + ros3_vfd: OFF toolchain: "config/toolchain/clang.cmake" generator: "-G Ninja" run_tests: true @@ -186,9 +196,11 @@ jobs: cpp: enable fortran: enable java: enable + docs: disable parallel: disable mirror_vfd: enable direct_vfd: enable + ros3_vfd: enable deprec_sym: enable default_api: v16 szip: yes @@ -209,9 +221,11 @@ jobs: cpp: enable fortran: enable java: enable + docs: disable parallel: disable mirror_vfd: enable direct_vfd: enable + ros3_vfd: enable deprec_sym: enable default_api: v18 szip: yes @@ -232,9 +246,11 @@ jobs: cpp: enable fortran: enable java: enable + docs: disable parallel: disable mirror_vfd: enable direct_vfd: enable + ros3_vfd: enable deprec_sym: enable default_api: v110 szip: yes @@ -255,9 +271,11 @@ jobs: cpp: enable fortran: enable java: enable + docs: disable parallel: disable mirror_vfd: enable direct_vfd: enable + ros3_vfd: enable deprec_sym: enable default_api: v112 szip: yes @@ -278,9 +296,11 @@ jobs: cpp: enable fortran: enable java: enable + docs: disable parallel: disable mirror_vfd: enable direct_vfd: enable + ros3_vfd: enable deprec_sym: disable default_api: v112 szip: yes @@ -317,7 +337,7 @@ jobs: run: echo '${{ toJSON(matrix) }}' - name: Install CMake Dependencies (Linux) - run: sudo apt-get install ninja-build + run: sudo apt-get install ninja-build doxygen graphviz if: matrix.os == 'ubuntu-latest' - name: Install Autotools Dependencies (Linux, serial) @@ -325,6 +345,7 @@ jobs: sudo apt update sudo apt install automake autoconf libtool libtool-bin sudo apt install gcc-12 g++-12 gfortran-12 + sudo apt 
install libssl3 libssl-dev libcurl4 libcurl4-openssl-dev echo "CC=gcc-12" >> $GITHUB_ENV echo "CXX=g++-12" >> $GITHUB_ENV echo "FC=gfortran-12" >> $GITHUB_ENV @@ -346,7 +367,7 @@ jobs: if: matrix.os == 'windows-latest' - name: Install Dependencies (macOS) - run: brew install ninja + run: brew install ninja doxygen if: matrix.os == 'macos-13' - name: Set environment for MSVC (Windows) @@ -369,7 +390,7 @@ jobs: sh ./autogen.sh mkdir "${{ runner.workspace }}/build" cd "${{ runner.workspace }}/build" - ${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --${{ matrix.deprec_sym }}-deprecated-symbols --with-default-api-version=${{ matrix.default_api }} --enable-shared --${{ matrix.parallel }}-parallel --${{ matrix.cpp }}-cxx --${{ matrix.fortran }}-fortran --${{ matrix.java }}-java --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd --with-szlib=${{ matrix.szip }} + ${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --${{ matrix.deprec_sym }}-deprecated-symbols --with-default-api-version=${{ matrix.default_api }} --enable-shared --${{ matrix.parallel }}-parallel --${{ matrix.cpp }}-cxx --${{ matrix.fortran }}-fortran --${{ matrix.java }}-java --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd --${{ matrix.ros3_vfd }}-ros3-vfd --with-szlib=${{ matrix.szip }} shell: bash if: (matrix.generator == 'autogen') && ! 
(matrix.thread_safety.enabled) @@ -378,7 +399,7 @@ jobs: sh ./autogen.sh mkdir "${{ runner.workspace }}/build" cd "${{ runner.workspace }}/build" - ${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --enable-shared --enable-threadsafe --disable-hl --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd --with-szlib=${{ matrix.szip }} + ${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --enable-shared --enable-threadsafe --disable-hl --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd --${{ matrix.ros3_vfd }}-ros3-vfd --with-szlib=${{ matrix.szip }} shell: bash if: (matrix.generator == 'autogen') && (matrix.thread_safety.enabled) @@ -390,7 +411,7 @@ jobs: run: | mkdir "${{ runner.workspace }}/build" cd "${{ runner.workspace }}/build" - cmake -C $GITHUB_WORKSPACE/config/cmake/cacheinit.cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=${{ matrix.cpp }} -DHDF5_BUILD_FORTRAN=${{ matrix.fortran }} -DHDF5_BUILD_JAVA=${{ matrix.java }} -DBUILD_SZIP_WITH_FETCHCONTENT=${{ matrix.libaecfc }} -DLIBAEC_USE_LOCALCONTENT=${{ matrix.localaec }} -DBUILD_ZLIB_WITH_FETCHCONTENT=${{ matrix.zlibfc }} -DZLIB_USE_LOCALCONTENT=${{ matrix.localzlib }} -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} $GITHUB_WORKSPACE + cmake -C $GITHUB_WORKSPACE/config/cmake/cacheinit.cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=${{ matrix.cpp }} -DHDF5_BUILD_FORTRAN=${{ matrix.fortran }} 
-DHDF5_BUILD_JAVA=${{ matrix.java }} -DHDF5_BUILD_DOC=${{ matrix.docs }} -DBUILD_SZIP_WITH_FETCHCONTENT=${{ matrix.libaecfc }} -DLIBAEC_USE_LOCALCONTENT=${{ matrix.localaec }} -DBUILD_ZLIB_WITH_FETCHCONTENT=${{ matrix.zlibfc }} -DZLIB_USE_LOCALCONTENT=${{ matrix.localzlib }} -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} -DHDF5_ENABLE_ROS3_VFD:BOOL=${{ matrix.ros3_vfd }} $GITHUB_WORKSPACE shell: bash if: (matrix.generator != 'autogen') && ! (matrix.thread_safety.enabled) @@ -399,7 +420,7 @@ jobs: run: | mkdir "${{ runner.workspace }}/build" cd "${{ runner.workspace }}/build" - cmake -C $GITHUB_WORKSPACE/config/cmake/cacheinit.cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=OFF -DHDF5_BUILD_FORTRAN:BOOL=OFF -DHDF5_BUILD_JAVA:BOOL=OFF -DHDF5_BUILD_HL_LIB:BOOL=OFF -DBUILD_SZIP_WITH_FETCHCONTENT=${{ matrix.libaecfc }} -DLIBAEC_USE_LOCALCONTENT=${{ matrix.localaec }} -DBUILD_ZLIB_WITH_FETCHCONTENT=${{ matrix.zlibfc }} -DZLIB_USE_LOCALCONTENT=${{ matrix.localzlib }} -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} $GITHUB_WORKSPACE + cmake -C $GITHUB_WORKSPACE/config/cmake/cacheinit.cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=OFF -DHDF5_BUILD_FORTRAN:BOOL=OFF -DHDF5_BUILD_JAVA:BOOL=OFF -DHDF5_BUILD_HL_LIB:BOOL=OFF -DHDF5_BUILD_DOC=OFF -DBUILD_SZIP_WITH_FETCHCONTENT=${{ matrix.libaecfc }} -DLIBAEC_USE_LOCALCONTENT=${{ matrix.localaec }} -DBUILD_ZLIB_WITH_FETCHCONTENT=${{ 
matrix.zlibfc }} -DZLIB_USE_LOCALCONTENT=${{ matrix.localzlib }} -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} -DHDF5_ENABLE_ROS3_VFD:BOOL=${{ matrix.ros3_vfd }} $GITHUB_WORKSPACE shell: bash if: (matrix.generator != 'autogen') && (matrix.thread_safety.enabled) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e15c6b358c1..ab2efb7ef6f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -45,16 +45,26 @@ jobs: - run: | echo "Tag already present: ${{ steps.tag_create.outputs.tag_exists }}" - getfiles: + PreRelease-getfiles: runs-on: ubuntu-latest + needs: create-tag + environment: snapshots_1_12 + permissions: + contents: write steps: - - name: Set file base name - id: set-file-base + - name: Get file base name + id: get-file-base run: | FILE_NAME_BASE=$(echo "${{ inputs.file_base }}") echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT # Get files created by tarball script + - name: Get doxygen (Linux) + uses: actions/download-artifact@v3 + with: + name: docs-doxygen + path: ${{ github.workspace }}/${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen + - name: Get tgz-tarball (Linux) uses: actions/download-artifact@v3 with: @@ -86,27 +96,13 @@ jobs: name: tgz-ubuntu-2204-binary path: ${{ github.workspace }} - # Get files used by release script - - PreRelease: - runs-on: ubuntu-latest - needs: [create-tag, getfiles] - environment: snapshots_1_12 - permissions: - contents: write - steps: - - name: Set file base name - id: get-file-base - run: | - FILE_NAME_BASE=$(echo "${{ inputs.file_base }}") - echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT - - name: PreRelease tag uses: softprops/action-gh-release@v1 with: - tag_name: "snapshot" + tag_name: "snapshot_1_12" prerelease: true files: | + ${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen ${{ steps.get-file-base.outputs.FILE_BASE }}.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}.zip ${{ 
steps.get-file-base.outputs.FILE_BASE }}-osx12.tar.gz diff --git a/config/cmake/ConfigureChecks.cmake b/config/cmake/ConfigureChecks.cmake index c72f415fc40..8900d4c6d1c 100644 --- a/config/cmake/ConfigureChecks.cmake +++ b/config/cmake/ConfigureChecks.cmake @@ -272,6 +272,7 @@ if (MINGW OR NOT WINDOWS) set (HDF_EXTRA_C_FLAGS ${HDF_EXTRA_C_FLAGS} -D_GNU_SOURCE) option (HDF_ENABLE_LARGE_FILE "Enable support for large (64-bit) files on Linux." ON) + mark_as_advanced (HDF_ENABLE_LARGE_FILE) if (HDF_ENABLE_LARGE_FILE AND NOT DEFINED TEST_LFS_WORKS_RUN) set (msg "Performing TEST_LFS_WORKS") try_run (TEST_LFS_WORKS_RUN TEST_LFS_WORKS_COMPILE @@ -702,6 +703,7 @@ endif () # Option for --enable-strict-format-checks #----------------------------------------------------------------------------- option (HDF5_STRICT_FORMAT_CHECKS "Whether to perform strict file format checks" OFF) +mark_as_advanced (HDF5_STRICT_FORMAT_CHECKS) if (HDF5_STRICT_FORMAT_CHECKS) set (${HDF_PREFIX}_STRICT_FORMAT_CHECKS 1) endif () @@ -714,6 +716,7 @@ MARK_AS_ADVANCED (HDF5_STRICT_FORMAT_CHECKS) # support denormalized floating values) to maximize speed. #----------------------------------------------------------------------------- option (HDF5_WANT_DATA_ACCURACY "IF data accuracy is guaranteed during data conversions" ON) +mark_as_advanced (HDF5_WANT_DATA_ACCURACY) if (HDF5_WANT_DATA_ACCURACY) set (${HDF_PREFIX}_WANT_DATA_ACCURACY 1) endif () @@ -726,6 +729,7 @@ MARK_AS_ADVANCED (HDF5_WANT_DATA_ACCURACY) # actually benefit little. 
#----------------------------------------------------------------------------- option (HDF5_WANT_DCONV_EXCEPTION "exception handling functions is checked during data conversions" ON) +mark_as_advanced (HDF5_WANT_DCONV_EXCEPTION) if (HDF5_WANT_DCONV_EXCEPTION) set (${HDF_PREFIX}_WANT_DCONV_EXCEPTION 1) endif () @@ -735,6 +739,7 @@ MARK_AS_ADVANCED (HDF5_WANT_DCONV_EXCEPTION) # Check if they would like the function stack support compiled in #----------------------------------------------------------------------------- option (HDF5_ENABLE_CODESTACK "Enable the function stack tracing (for developer debugging)." OFF) +mark_as_advanced (HDF5_ENABLE_CODESTACK) if (HDF5_ENABLE_CODESTACK) set (${HDF_PREFIX}_HAVE_CODESTACK 1) endif () diff --git a/config/cmake/FindSZIP.cmake b/config/cmake/FindSZIP.cmake deleted file mode 100644 index 846a3d11e63..00000000000 --- a/config/cmake/FindSZIP.cmake +++ /dev/null @@ -1,129 +0,0 @@ -# -# Copyright by The HDF Group. -# All rights reserved. -# -# This file is part of HDF5. The full HDF5 copyright notice, including -# terms governing use, modification, and redistribution, is contained in -# the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://www.hdfgroup.org/licenses. -# If you do not have access to either file, you may request a copy from -# help@hdfgroup.org. -# -######################################################################### - -# - Derived from the FindTiff.cmake and FindJPEG.cmake that is included with cmake -# FindSZIP - -# Find the native SZIP includes and library - -# Imported targets -################## - -# This module defines the following :prop_tgt:`IMPORTED` targets: -# -# SZIP::SZIP -# The SZIP library, if found. -# -# Result variables -################### - -# This module will set the following variables in your project: - -# SZIP_FOUND, true if the SZIP headers and libraries were found. -# SZIP_INCLUDE_DIR, the directory containing the SZIP headers. 
-# SZIP_INCLUDE_DIRS, the directory containing the SZIP headers. -# SZIP_LIBRARIES, libraries to link against to use SZIP. - -# Cache variables -################# - -# The following variables may also be set: - -# SZIP_LIBRARY, where to find the SZIP library. -# SZIP_LIBRARY_DEBUG - Debug version of SZIP library -# SZIP_LIBRARY_RELEASE - Release Version of SZIP library - -# message (STATUS "Finding SZIP library and headers..." ) -######################################################################### - - -find_path(SZIP_INCLUDE_DIR szlib.h) - -set(szip_names ${SZIP_NAMES} sz szip szip-static libsz libszip libszip-static) -foreach(name ${szip_names}) - list (APPEND szip_names_debug "${name}d") -endforeach() - -if(NOT SZIP_LIBRARY) - find_library(SZIP_LIBRARY_RELEASE NAMES ${szip_names}) - find_library(SZIP_LIBRARY_DEBUG NAMES ${szip_names_debug}) - include(SelectLibraryConfigurations) - select_library_configurations(SZIP) - mark_as_advanced(SZIP_LIBRARY_RELEASE SZIP_LIBRARY_DEBUG) -endif() -unset(szip_names) -unset(szip_names_debug) - -if(SZIP_INCLUDE_DIR AND EXISTS "${SZIP_INCLUDE_DIR}/SZconfig.h") - file(STRINGS "${SZIP_INCLUDE_DIR}/SZconfig.h" szip_version_str - REGEX "^#define[\t ]+SZIP_PACKAGE_VERSION[\t ]+.*") - - string(REGEX REPLACE "^#define[\t ]+SZIP_PACKAGE_VERSION[\t ]+([0-9]+).*" - "\\1" SZIP_VERSION "${szip_version_str}") - unset(szip_version_str) -endif() - -include(FindPackageHandleStandardArgs) -find_package_handle_standard_args(SZIP - REQUIRED_VARS SZIP_LIBRARY SZIP_INCLUDE_DIR - VERSION_VAR SZIP_VERSION) - -if(SZIP_FOUND) - set(SZIP_LIBRARIES ${SZIP_LIBRARY}) - set(SZIP_INCLUDE_DIRS "${SZIP_INCLUDE_DIR}") - - if(NOT TARGET SZIP::SZIP) - add_library(SZIP::SZIP UNKNOWN IMPORTED) - if(SZIP_INCLUDE_DIRS) - set_target_properties(SZIP::SZIP PROPERTIES - INTERFACE_INCLUDE_DIRECTORIES "${SZIP_INCLUDE_DIRS}") - endif() - if(EXISTS "${SZIP_LIBRARY}") - set_target_properties(SZIP::SZIP PROPERTIES - IMPORTED_LINK_INTERFACE_LANGUAGES "C" - IMPORTED_LOCATION 
"${SZIP_LIBRARY}") - endif() - if(EXISTS "${SZIP_LIBRARY_RELEASE}") - set_property(TARGET SZIP::SZIP APPEND PROPERTY - IMPORTED_CONFIGURATIONS RELEASE) - set_target_properties(SZIP::SZIP PROPERTIES - IMPORTED_LINK_INTERFACE_LANGUAGES_RELEASE "C" - IMPORTED_LOCATION_RELEASE "${SZIP_LIBRARY_RELEASE}") - endif() - if(EXISTS "${SZIP_LIBRARY_DEBUG}") - set_property(TARGET SZIP::SZIP APPEND PROPERTY - IMPORTED_CONFIGURATIONS DEBUG) - set_target_properties(SZIP::SZIP PROPERTIES - IMPORTED_LINK_INTERFACE_LANGUAGES_DEBUG "C" - IMPORTED_LOCATION_DEBUG "${SZIP_LIBRARY_DEBUG}") - endif() - endif() -endif() - -mark_as_advanced(SZIP_LIBRARY SZIP_INCLUDE_DIR) - -# Report the results. -if (NOT SZIP_FOUND) - set (SZIP_DIR_MESSAGE - "SZip was not found. Make sure SZIP_LIBRARY and SZIP_INCLUDE_DIR are set or set the SZIP_INSTALL environment variable." - ) - if (NOT SZIP_FIND_QUIETLY) - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "${SZIP_DIR_MESSAGE}") - endif () - else () - if (SZIP_FIND_REQUIRED) - message (FATAL_ERROR "SZip was NOT found and is Required by this project") - endif () - endif () -endif () diff --git a/config/cmake/HDF5Macros.cmake b/config/cmake/HDF5Macros.cmake index 8b8b334d7b0..710c28b95b7 100644 --- a/config/cmake/HDF5Macros.cmake +++ b/config/cmake/HDF5Macros.cmake @@ -75,23 +75,23 @@ macro (H5_SET_VFD_LIST) ) if (H5_HAVE_DIRECT) - set (VFD_LIST ${VFD_LIST} direct) + list (APPEND VFD_LIST direct) endif () if (H5_HAVE_PARALLEL) # MPI I/O VFD is currently incompatible with too many tests in the VFD test set - # set (VFD_LIST ${VFD_LIST} mpio) + # list (APPEND VFD_LIST mpio) endif () if (H5_HAVE_MIRROR_VFD) - set (VFD_LIST ${VFD_LIST} mirror) + list (APPEND VFD_LIST mirror) endif () if (H5_HAVE_ROS3_VFD) - set (VFD_LIST ${VFD_LIST} ros3) + list (APPEND VFD_LIST ros3) endif () if (H5_HAVE_LIBHDFS) - set (VFD_LIST ${VFD_LIST} hdfs) + list (APPEND VFD_LIST hdfs) endif () if (H5_HAVE_WINDOWS) - set (VFD_LIST ${VFD_LIST} windows) + list (APPEND 
VFD_LIST windows) endif () endmacro () diff --git a/config/cmake/HDF5UseFortran.cmake b/config/cmake/HDF5UseFortran.cmake index 43e006c520e..5cf50883cdf 100644 --- a/config/cmake/HDF5UseFortran.cmake +++ b/config/cmake/HDF5UseFortran.cmake @@ -43,11 +43,16 @@ macro (FORTRAN_RUN FUNCTION_NAME SOURCE_CODE RUN_RESULT_VAR1 COMPILE_RESULT_VAR1 ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/testFortranCompiler1.f90 "${SOURCE_CODE}" ) + if (CMAKE_VERSION VERSION_LESS 3.25) + set (_RUN_OUTPUT_VARIABLE "RUN_OUTPUT_VARIABLE") + else () + set (_RUN_OUTPUT_VARIABLE "RUN_OUTPUT_STDOUT_VARIABLE") + endif() TRY_RUN (RUN_RESULT_VAR COMPILE_RESULT_VAR ${CMAKE_BINARY_DIR} ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/testFortranCompiler1.f90 LINK_LIBRARIES "${HDF5_REQUIRED_LIBRARIES}" - RUN_OUTPUT_VARIABLE OUTPUT_VAR + ${_RUN_OUTPUT_VARIABLE} OUTPUT_VAR ) set (${RETURN_OUTPUT_VAR} ${OUTPUT_VAR}) @@ -187,10 +192,10 @@ foreach (KIND ${VAR}) " PROGRAM main USE ISO_C_BINDING - USE, INTRINSIC :: ISO_FORTRAN_ENV, ONLY : stderr=>ERROR_UNIT + USE, INTRINSIC :: ISO_FORTRAN_ENV, ONLY : stdout=>OUTPUT_UNIT IMPLICIT NONE INTEGER (KIND=${KIND}) a - WRITE(stderr,'(I0)') ${FC_SIZEOF_A} + WRITE(stdout,'(I0)') ${FC_SIZEOF_A} END " ) @@ -230,10 +235,10 @@ foreach (KIND ${VAR} ) " PROGRAM main USE ISO_C_BINDING - USE, INTRINSIC :: ISO_FORTRAN_ENV, ONLY : stderr=>ERROR_UNIT + USE, INTRINSIC :: ISO_FORTRAN_ENV, ONLY : stdout=>OUTPUT_UNIT IMPLICIT NONE REAL (KIND=${KIND}) a - WRITE(stderr,'(I0)') ${FC_SIZEOF_A} + WRITE(stdout,'(I0)') ${FC_SIZEOF_A} END " ) @@ -272,17 +277,17 @@ set (PROG_SRC3 " PROGRAM main USE ISO_C_BINDING - USE, INTRINSIC :: ISO_FORTRAN_ENV, ONLY : stderr=>ERROR_UNIT + USE, INTRINSIC :: ISO_FORTRAN_ENV, ONLY : stdout=>OUTPUT_UNIT IMPLICIT NONE INTEGER a REAL b DOUBLE PRECISION c - WRITE(stderr,*) ${FC_SIZEOF_A} - WRITE(stderr,*) kind(a) - WRITE(stderr,*) ${FC_SIZEOF_B} - WRITE(stderr,*) kind(b) - WRITE(stderr,*) ${FC_SIZEOF_C} - WRITE(stderr,*) kind(c) + WRITE(stdout,*) 
${FC_SIZEOF_A} + WRITE(stdout,*) kind(a) + WRITE(stdout,*) ${FC_SIZEOF_B} + WRITE(stdout,*) kind(b) + WRITE(stdout,*) ${FC_SIZEOF_C} + WRITE(stdout,*) kind(c) END " ) diff --git a/config/cmake/HDFCompilerFlags.cmake b/config/cmake/HDFCompilerFlags.cmake index 15b158da8ae..1560bbd440f 100644 --- a/config/cmake/HDFCompilerFlags.cmake +++ b/config/cmake/HDFCompilerFlags.cmake @@ -56,7 +56,9 @@ if (CMAKE_COMPILER_IS_GNUCC) set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Og -ftrapv -fno-common") endif () else () - if (CMAKE_C_COMPILER_ID STREQUAL "GNU" AND NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 5.0) + if (CMAKE_C_COMPILER_ID STREQUAL "GNU" AND NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 5.0 AND + NOT CMAKE_C_CLANG_TIDY) + # `clang-tidy` does not understand -fstdarg-opt set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fstdarg-opt") endif () if (CMAKE_C_COMPILER_ID STREQUAL "GNU" AND NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 10.0) @@ -66,6 +68,7 @@ if (CMAKE_COMPILER_IS_GNUCC) # This should NOT be on by default as it can cause process issues. #----------------------------------------------------------------------------- option (HDF5_ENABLE_BUILD_DIAGS "Enable color and URL extended diagnostic messages" OFF) + mark_as_advanced (HDF5_ENABLE_BUILD_DIAGS) if (HDF5_ENABLE_BUILD_DIAGS) message (STATUS "... 
default color and URL extended diagnostic messages enabled") else () diff --git a/config/cmake/HDFLibMacros.cmake b/config/cmake/HDFLibMacros.cmake index 6d77ab54e4f..752d8172b8d 100644 --- a/config/cmake/HDFLibMacros.cmake +++ b/config/cmake/HDFLibMacros.cmake @@ -17,13 +17,14 @@ macro (ORIGINAL_ZLIB_LIBRARY compress_type) GIT_TAG ${ZLIB_BRANCH} ) elseif (${compress_type} MATCHES "TGZ") + message (VERBOSE "Filter ZLIB file ${ZLIB_URL}") FetchContent_Declare (HDF5_ZLIB URL ${ZLIB_URL} URL_HASH "" ) endif () FetchContent_GetProperties(HDF5_ZLIB) - if(NOT zlib_POPULATED) + if(NOT hdf5_zlib_POPULATED) FetchContent_Populate(HDF5_ZLIB) # Copy an additional/replacement files into the populated source @@ -51,6 +52,7 @@ macro (ORIGINAL_SZIP_LIBRARY compress_type encoding) GIT_TAG ${SZIP_BRANCH} ) elseif (${compress_type} MATCHES "TGZ") + message (VERBOSE "Filter SZIP file ${SZIP_URL}") FetchContent_Declare (SZIP URL ${SZIP_URL} URL_HASH "" diff --git a/config/cmake/UserMacros/Windows_MT.cmake b/config/cmake/UserMacros/Windows_MT.cmake index 15cffbaae99..c8edbe4a0d1 100644 --- a/config/cmake/UserMacros/Windows_MT.cmake +++ b/config/cmake/UserMacros/Windows_MT.cmake @@ -47,6 +47,7 @@ endmacro () #----------------------------------------------------------------------------- option (BUILD_STATIC_CRT_LIBS "Build With Static CRT Libraries" OFF) +mark_as_advanced (BUILD_STATIC_CRT_LIBS) if (BUILD_STATIC_CRT_LIBS) TARGET_STATIC_CRT_FLAGS () endif () diff --git a/config/sanitizer/tools.cmake b/config/sanitizer/tools.cmake index 3a41ad448c1..88d3bafcb59 100644 --- a/config/sanitizer/tools.cmake +++ b/config/sanitizer/tools.cmake @@ -14,101 +14,90 @@ # the License. option(CLANG_TIDY "Turns on clang-tidy processing if it is found." OFF) -option(IWYU "Turns on include-what-you-use processing if it is found." OFF) -option(CPPCHECK "Turns on cppcheck processing if it is found." 
OFF) +if(CLANG_TIDY) + find_program(CLANG_TIDY_EXE NAMES "clang-tidy") + mark_as_advanced(FORCE CLANG_TIDY_EXE) + if(CLANG_TIDY_EXE) + message(STATUS "clang-tidy found: ${CLANG_TIDY_EXE}") + set(CMAKE_C_CLANG_TIDY ${CLANG_TIDY_EXE} --checks=-*,clang-analyzer-*)#${ARGN}) + set(CMAKE_CXX_CLANG_TIDY ${CLANG_TIDY_EXE} --checks=-*,clang-analyzer-*)#${ARGN}) + else() + message(STATUS "clang-tidy not found!") + set(CMAKE_C_CLANG_TIDY "" CACHE STRING "" FORCE) # delete it + set(CMAKE_CXX_CLANG_TIDY "" CACHE STRING "" FORCE) # delete it + endif() +else() + #message(STATUS "clang-tidy not enabled!") + set(CMAKE_C_CLANG_TIDY + "" + CACHE STRING "" FORCE) # delete it + set(CMAKE_CXX_CLANG_TIDY + "" + CACHE STRING "" FORCE) # delete it +endif() # Adds clang-tidy checks to the compilation, with the given arguments being used # as the options set. macro(clang_tidy) if(CLANG_TIDY AND CLANG_TIDY_EXE) + set(CMAKE_C_CLANG_TIDY ${CLANG_TIDY_EXE} ${ARGN}) set(CMAKE_CXX_CLANG_TIDY ${CLANG_TIDY_EXE} ${ARGN}) endif() endmacro() +option(IWYU "Turns on include-what-you-use processing if it is found." OFF) +if(IWYU) + find_program(IWYU_EXE NAMES "include-what-you-use") + mark_as_advanced(FORCE IWYU_EXE) + if(IWYU_EXE) + message(STATUS "include-what-you-use found: ${IWYU_EXE}") + else() + message(SEND_ERROR "Cannot enable include-what-you-use, as executable not found!") + set(CMAKE_C_INCLUDE_WHAT_YOU_USE "" CACHE STRING "" FORCE) # delete it + set(CMAKE_CXX_INCLUDE_WHAT_YOU_USE "" CACHE STRING "" FORCE) # delete it + endif() +else() + #message(STATUS "include-what-you-use NOT ENABLED via 'IWYU' variable!") + set(CMAKE_C_INCLUDE_WHAT_YOU_USE "" CACHE STRING "" FORCE) # delete it + set(CMAKE_CXX_INCLUDE_WHAT_YOU_USE "" CACHE STRING "" FORCE) # delete it +endif() + # Adds include_what_you_use to the compilation, with the given arguments being # used as the options set. 
macro(include_what_you_use) if(IWYU AND IWYU_EXE) + set(CMAKE_C_INCLUDE_WHAT_YOU_USE ${IWYU_EXE} ${ARGN}) set(CMAKE_CXX_INCLUDE_WHAT_YOU_USE ${IWYU_EXE} ${ARGN}) endif() endmacro() +option(CPPCHECK "Turns on cppcheck processing if it is found." OFF) +if(CPPCHECK) + if(CPPCHECK_EXE) + message(STATUS "cppcheck found: ${CPPCHECK_EXE}") + set(CMAKE_C_CPPCHECK + "${CPPCHECK_EXE};--enable=warning,performance,portability,missingInclude;--template=\"[{severity}][{id}] {message} {callstack} \(On {file}:{line}\)\";--suppress=missingIncludeSystem;--quiet;--verbose;--force" + ) + set(CMAKE_CXX_CPPCHECK + "${CPPCHECK_EXE};--enable=warning,performance,portability,missingInclude;--template=\"[{severity}][{id}] {message} {callstack} \(On {file}:{line}\)\";--suppress=missingIncludeSystem;--quiet;--verbose;--force" + ) + else() + message(SEND_ERROR "Cannot enable cppcheck, as executable not found!") + set(CMAKE_C_CPPCHECK "" CACHE STRING "" FORCE) # delete it + set(CMAKE_CXX_CPPCHECK "" CACHE STRING "" FORCE) # delete it + endif() +else() + # message(SEND_ERROR "cppcheck NOT ENABLED via 'CPPCHECK' variable!") + set(CMAKE_C_CPPCHECK "" CACHE STRING "" FORCE) # delete it + set(CMAKE_CXX_CPPCHECK "" CACHE STRING "" FORCE) # delete it +endif() + # Adds cppcheck to the compilation, with the given arguments being used as the # options set. 
macro(cppcheck) if(CPPCHECK AND CPPCHECK_EXE) + set(CMAKE_C_CPPCHECK ${CPPCHECK_EXE} ${ARGN}) set(CMAKE_CXX_CPPCHECK ${CPPCHECK_EXE} ${ARGN}) endif() endmacro() -find_program(CLANG_TIDY_EXE NAMES "clang-tidy") -mark_as_advanced(FORCE CLANG_TIDY_EXE) -if(CLANG_TIDY_EXE) - message(STATUS "clang-tidy found: ${CLANG_TIDY_EXE}") - if(NOT CLANG_TIDY) - message(STATUS "clang-tidy NOT ENABLED via 'CLANG_TIDY' variable!") - set(CMAKE_CXX_CLANG_TIDY - "" - CACHE STRING "" FORCE) # delete it - endif() -elseif(CLANG_TIDY) - message(SEND_ERROR "Cannot enable clang-tidy, as executable not found!") - set(CMAKE_CXX_CLANG_TIDY - "" - CACHE STRING "" FORCE) # delete it -else() - message(STATUS "clang-tidy not found!") - set(CMAKE_CXX_CLANG_TIDY - "" - CACHE STRING "" FORCE) # delete it -endif() - -find_program(IWYU_EXE NAMES "include-what-you-use") -mark_as_advanced(FORCE IWYU_EXE) -if(IWYU_EXE) - message(STATUS "include-what-you-use found: ${IWYU_EXE}") - if(NOT IWYU) - message(STATUS "include-what-you-use NOT ENABLED via 'IWYU' variable!") - set(CMAKE_CXX_INCLUDE_WHAT_YOU_USE - "" - CACHE STRING "" FORCE) # delete it - endif() -elseif(IWYU) - message( - SEND_ERROR "Cannot enable include-what-you-use, as executable not found!") - set(CMAKE_CXX_INCLUDE_WHAT_YOU_USE - "" - CACHE STRING "" FORCE) # delete it -else() - message(STATUS "include-what-you-use not found!") - set(CMAKE_CXX_INCLUDE_WHAT_YOU_USE - "" - CACHE STRING "" FORCE) # delete it -endif() - -find_program(CPPCHECK_EXE NAMES "cppcheck") -mark_as_advanced(FORCE CPPCHECK_EXE) -if(CPPCHECK_EXE) - message(STATUS "cppcheck found: ${CPPCHECK_EXE}") - if(CPPCHECK) - set(CMAKE_CXX_CPPCHECK - "${CPPCHECK_EXE};--enable=warning,performance,portability,missingInclude;--template=\"[{severity}][{id}] {message} {callstack} \(On {file}:{line}\)\";--suppress=missingIncludeSystem;--quiet;--verbose;--force" - ) - endif() - if(NOT CPPCHECK) - message(STATUS "cppcheck NOT ENABLED via 'CPPCHECK' variable!") - set(CMAKE_CXX_CPPCHECK - "" - 
CACHE STRING "" FORCE) # delete it - endif() -elseif(CPPCHECK) - message(SEND_ERROR "Cannot enable cppcheck, as executable not found!") - set(CMAKE_CXX_CPPCHECK - "" - CACHE STRING "" FORCE) # delete it -else() - message(STATUS "cppcheck not found!") - set(CMAKE_CXX_CPPCHECK - "" - CACHE STRING "" FORCE) # delete it -endif() diff --git a/doxygen/Doxyfile.in b/doxygen/Doxyfile.in index 66e4826be51..cddf17cdf2a 100644 --- a/doxygen/Doxyfile.in +++ b/doxygen/Doxyfile.in @@ -625,7 +625,7 @@ WARN_NO_PARAMDOC = NO # a warning is encountered. # The default value is: NO. -WARN_AS_ERROR = NO +WARN_AS_ERROR = FAIL_ON_WARNINGS # The WARN_FORMAT tag determines the format of the warning messages that doxygen # can produce. The string should contain the $file, $line, and $text tags, which diff --git a/doxygen/dox/LearnBasics3.dox b/doxygen/dox/LearnBasics3.dox index 06afacd7d27..748745827f2 100644 --- a/doxygen/dox/LearnBasics3.dox +++ b/doxygen/dox/LearnBasics3.dox @@ -945,7 +945,7 @@ For a more complete script (and to help resolve issues) see the script provided \subsection subsecLBCompilingCMakeExamples HDF5 Examples The installed HDF5 can be verified by compiling the HDF5 Examples project, included with the CMake built HDF5 binaries -in the share folder or you can go to the HDF5 Examples github repository. +in the share folder or you can go to the HDF5 Examples github repository. Go into the share directory and follow the instructions in USING_CMake_examples.txt to build the examples. diff --git a/doxygen/dox/ReferenceManual.dox b/doxygen/dox/ReferenceManual.dox index 40f8b8af183..86a7b2cec80 100644 --- a/doxygen/dox/ReferenceManual.dox +++ b/doxygen/dox/ReferenceManual.dox @@ -31,10 +31,6 @@ The functions provided by the HDF5 API are grouped into the following -Event Set (H5ES)@ref H5ES "C""C++""Fortran""Java"HDF5 event set life cycle used with HDF5 VOL connectors that enable the asynchronous feature in HDF5. 
- - - Files (H5F)@ref H5F "C"@ref H5::H5File "C++"@ref FH5F "Fortran"@ref JH5F "Java"Manage HDF5 files. diff --git a/doxygen/examples/menus/core_menu.md b/doxygen/examples/menus/core_menu.md index 3fd7d11aa2a..8c82cc55bd7 100644 --- a/doxygen/examples/menus/core_menu.md +++ b/doxygen/examples/menus/core_menu.md @@ -20,10 +20,6 @@ HDF5 datatypes describe the element type of HDF5 datasets and attributes.
    HDF5 library error reporting. -- @ref H5ES "Event Set (H5ES)" -
    -HDF5 event set life cycle used with HDF5 VOL connectors that enable the asynchronous feature in HDF5. - - @ref H5F "Files (H5F)"
    Manage HDF5 files. diff --git a/doxygen/examples/tables/propertyLists.dox b/doxygen/examples/tables/propertyLists.dox index 039ac979b23..cac7fd2c3bb 100644 --- a/doxygen/examples/tables/propertyLists.dox +++ b/doxygen/examples/tables/propertyLists.dox @@ -274,10 +274,6 @@ versions used when creating objects. Sets the file VOL connector for a file access property list. -#H5Pget_vol_cap_flags -Retrieves the capability flags for the VOL connector that will be used with a file access property list. - - #H5Pget_vol_id Retrieves the identifier of the current VOL connector. @@ -316,18 +312,6 @@ versions used when creating objects. Returns a pointer to file driver information. -#H5Pset_driver_by_name -Sets a file driver according to a given driver name. - - -#H5Pset_driver_by_value -Sets a file driver according to a given driver value. - - -#H5Pget_driver_config_str -Retrieves a string representation of the configuration for the driver. - - #H5Pset_fapl_core/#H5Pget_fapl_core Sets the driver for buffered memory files (in RAM) or retrieves information regarding the driver. @@ -346,10 +330,6 @@ larger than 2 gigabytes, or retrieves information regarding driver. . -#H5Pset_fapl_ioc/#H5Pget_fapl_ioc -Modifies/queries the file driver properties of the I/O concentrator driver. - - #H5Pset_fapl_log Sets logging driver. @@ -385,10 +365,6 @@ and one raw data file. Sets driver for buffered permanent files. -#H5Pset_fapl_subfiling/#H5Pget_fapl_subfiling -Modifies/queries the file driver properties of the subfiling driver. - - #H5Pset_fapl_windows Sets the Windows I/O driver. @@ -627,10 +603,6 @@ pointers to application-allocated buffers. Sets/gets a data transform expression. -#H5Pset_dataset_io_hyperslab_selection -Sets a hyperslab file selection for a dataset I/O operation. - - #H5Pset_edc_check/#H5Pget_edc_check Sets/gets whether to enable error-detection when reading a dataset. 
diff --git a/java/CMakeLists.txt b/java/CMakeLists.txt index cc22d9e4bbc..af1aaff2be7 100644 --- a/java/CMakeLists.txt +++ b/java/CMakeLists.txt @@ -1,7 +1,7 @@ cmake_minimum_required (VERSION 3.12) project (HDF5_JAVA C Java) -set (CMAKE_MODULE_PATH "${HDF_RESOURCES_DIR};${HDF_RESOURCES_DIR}") +set (CMAKE_MODULE_PATH "${HDF_RESOURCES_DIR}") find_package (Java) #----------------------------------------------------------------------------- diff --git a/java/examples/datasets/CMakeLists.txt b/java/examples/datasets/CMakeLists.txt index e63ead0a66f..6ed03cabce0 100644 --- a/java/examples/datasets/CMakeLists.txt +++ b/java/examples/datasets/CMakeLists.txt @@ -82,6 +82,7 @@ if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) get_property (target_name TARGET ${HDF5_JAVA_JNI_LIB_TARGET} PROPERTY OUTPUT_NAME) set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$:${CMAKE_DEBUG_POSTFIX}>;") + set (last_test "") foreach (example ${HDF_JAVA_EXAMPLES}) if (example STREQUAL "H5Ex_D_External") add_test ( @@ -93,9 +94,13 @@ if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) else () add_test ( NAME JAVA_datasets-${example}-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5 + COMMAND ${CMAKE_COMMAND} -E remove + ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5 ) endif () + if (last_test) + set_tests_properties (JAVA_datasets-${example}-clear-objects PROPERTIES DEPENDS ${last_test}) + endif () add_test ( NAME JAVA_datasets-${example}-copy-objects @@ -122,5 +127,24 @@ if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) set_tests_properties (JAVA_datasets-${example} PROPERTIES DEPENDS JAVA_datasets-${example}-copy-objects ) + + if (example STREQUAL "H5Ex_D_External") + add_test ( + NAME JAVA_datasets-${example}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove + ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5 + ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.data + ) + else () + add_test ( + NAME 
JAVA_datasets-${example}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove + ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5 + ) + endif () + set_tests_properties (JAVA_datasets-${example}-clean-objects PROPERTIES + DEPENDS JAVA_datasets-${example} + ) + set (last_test "JAVA_datasets-${example}-clean-objects") endforeach () endif () diff --git a/java/examples/datatypes/CMakeLists.txt b/java/examples/datatypes/CMakeLists.txt index 45122218bb2..47df1f89001 100644 --- a/java/examples/datatypes/CMakeLists.txt +++ b/java/examples/datatypes/CMakeLists.txt @@ -67,11 +67,16 @@ if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) get_property (target_name TARGET ${HDF5_JAVA_JNI_LIB_TARGET} PROPERTY OUTPUT_NAME) set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$:${CMAKE_DEBUG_POSTFIX}>;") + set (last_test "") foreach (example ${HDF_JAVA_EXAMPLES}) add_test ( NAME JAVA_datatypes-${example}-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5 + COMMAND ${CMAKE_COMMAND} -E remove + ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5 ) + if (last_test) + set_tests_properties (JAVA_datatypes-${example}-clear-objects PROPERTIES DEPENDS ${last_test}) + endif () add_test ( NAME JAVA_datatypes-${example}-copy-objects @@ -97,5 +102,15 @@ if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) set_tests_properties (JAVA_datatypes-${example} PROPERTIES DEPENDS JAVA_datatypes-${example}-copy-objects ) + + add_test ( + NAME JAVA_datatypes-${example}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove + ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5 + ) + set_tests_properties (JAVA_datatypes-${example}-clean-objects PROPERTIES + DEPENDS JAVA_datatypes-${example} + ) + set (last_test "JAVA_datatypes-${example}-clean-objects") endforeach () endif () diff --git a/java/examples/groups/CMakeLists.txt b/java/examples/groups/CMakeLists.txt index 9f20abf8da3..fdaf95cdbd5 100644 --- a/java/examples/groups/CMakeLists.txt +++ 
b/java/examples/groups/CMakeLists.txt @@ -66,6 +66,7 @@ if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) get_property (target_name TARGET ${HDF5_JAVA_JNI_LIB_TARGET} PROPERTY OUTPUT_NAME) set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$:${CMAKE_DEBUG_POSTFIX}>;") + set (last_test "") foreach (example ${HDF_JAVA_EXAMPLES}) if (NOT example STREQUAL "H5Ex_G_Iterate" AND NOT example STREQUAL "H5Ex_G_Visit") if (example STREQUAL "H5Ex_G_Compact") @@ -78,7 +79,8 @@ if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) else () add_test ( NAME JAVA_groups-${example}-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5 + COMMAND ${CMAKE_COMMAND} -E remove + ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5 ) endif () else () @@ -87,6 +89,9 @@ if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) COMMAND ${CMAKE_COMMAND} -E echo "${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5 exists" ) endif () + if (last_test) + set_tests_properties (JAVA_groups-${example}-clear-objects PROPERTIES DEPENDS ${last_test}) + endif () add_test ( NAME JAVA_groups-${example}-copy-objects @@ -112,5 +117,28 @@ if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) set_tests_properties (JAVA_groups-${example} PROPERTIES DEPENDS JAVA_groups-${example}-copy-objects ) + + if (NOT example STREQUAL "H5Ex_G_Iterate" AND NOT example STREQUAL "H5Ex_G_Visit") + if (example STREQUAL "H5Ex_G_Compact") + add_test ( + NAME JAVA_groups-${example}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove + ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}1.h5 + ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}2.h5 + ) + else () + add_test ( + NAME JAVA_groups-${example}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove + ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5 + ) + endif () + set_tests_properties (JAVA_groups-${example}-clean-objects PROPERTIES + DEPENDS JAVA_groups-${example} + ) + set (last_test "JAVA_groups-${example}-clean-objects") + 
else () + set (last_test "JAVA_groups-${example}") + endif () endforeach () endif () diff --git a/java/examples/intro/CMakeLists.txt b/java/examples/intro/CMakeLists.txt index a5e2a671227..f74455984b9 100644 --- a/java/examples/intro/CMakeLists.txt +++ b/java/examples/intro/CMakeLists.txt @@ -56,11 +56,16 @@ if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) get_property (target_name TARGET ${HDF5_JAVA_JNI_LIB_TARGET} PROPERTY OUTPUT_NAME) set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$:${CMAKE_DEBUG_POSTFIX}>;") + set (last_test "") foreach (example ${HDF_JAVA_EXAMPLES}) add_test ( NAME JAVA_intro-${example}-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5 + COMMAND ${CMAKE_COMMAND} -E remove + ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5 ) + if (last_test) + set_tests_properties (JAVA_intro-${example}-clear-objects PROPERTIES DEPENDS ${last_test}) + endif () add_test ( NAME JAVA_intro-${example}-copy-objects @@ -87,6 +92,16 @@ if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) set_tests_properties (JAVA_intro-${example} PROPERTIES DEPENDS JAVA_intro-${example}-copy-objects ) + + add_test ( + NAME JAVA_intro-${example}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove + ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5 + ) + set_tests_properties (JAVA_intro-${example}-clean-objects PROPERTIES + DEPENDS JAVA_intro-${example} + ) + set (last_test "JAVA_intro-${example}-clean-objects") endforeach () endif () diff --git a/java/src/Makefile.am b/java/src/Makefile.am index c9b0d7c71f3..67d21f81cfb 100644 --- a/java/src/Makefile.am +++ b/java/src/Makefile.am @@ -108,30 +108,25 @@ hdf5_java_JAVA = \ ${pkgpath}/structs/H5AC_cache_config_t.java \ ${pkgpath}/H5.java \ ${pkgpath}/HDF5Constants.java \ - ${pkgpath}/HDF5GroupInfo.java \ ${pkgpath}/HDFArray.java \ ${pkgpath}/HDFNativeData.java -$(jarfile): classhdf5_java.stamp classes docs +$(jarfile): classhdf5_java.stamp classes $(JAR) cvf $@ 
-C $(JAVAROOT)/ $(pkgpath) hdf5_java_DATA = $(jarfile) -.PHONY: docs classes +.PHONY: classes WINDOWTITLE = 'HDF5 Java' DOCTITLE = '

    HDF5 Java Wrapper

    ' SRCDIR = '$(pkgpath)' -docs: - $(JAVADOC) -sourcepath $(srcdir) -d javadoc -Xdoclint:none -use -splitIndex -windowtitle $(WINDOWTITLE) -doctitle $(DOCTITLE) -J-Xmx180m -verbose -overview $(top_srcdir)/java/src/hdf/overview.html -classpath $(CLASSPATH_ENV) hdf.hdf5lib - CLEANFILES = classhdf5_java.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/callbacks/*.class $(JAVAROOT)/$(pkgpath)/exceptions/*.class $(JAVAROOT)/$(pkgpath)/structs/*.class $(JAVAROOT)/$(pkgpath)/*.class clean: rm -rf $(JAVAROOT)/* rm -f $(jarfile) - rm -rf javadoc rm -f classhdf5_java.stamp diff --git a/java/src/hdf/hdf5lib/CMakeLists.txt b/java/src/hdf/hdf5lib/CMakeLists.txt index 1afc8b0e533..1738bcf971a 100644 --- a/java/src/hdf/hdf5lib/CMakeLists.txt +++ b/java/src/hdf/hdf5lib/CMakeLists.txt @@ -101,7 +101,6 @@ set (HDF5_JAVADOC_HDF_HDF5_STRUCTS_SOURCES set (HDF5_JAVA_HDF_HDF5_SOURCES HDFArray.java HDF5Constants.java - HDF5GroupInfo.java HDFNativeData.java H5.java ) @@ -135,15 +134,3 @@ set_target_properties (${HDF5_JAVA_HDF5_LIB_TARGET} PROPERTIES FOLDER libraries/ if (HDF5_ENABLE_FORMATTERS) clang_format (HDF5_JAVA_SRC_FORMAT ${HDF5_JAVA_HDF_HDF5_CALLBACKS_SOURCES} ${HDF5_JAVA_HDF_HDF5_EXCEPTIONS_SOURCES} ${HDF5_JAVA_HDF_HDF5_STRUCTS_SOURCES} ${HDF5_JAVA_HDF_HDF5_SOURCES}) endif () - -create_javadoc(hdf5_java_doc - FILES ${HDF5_JAVADOC_HDF_HDF5_CALLBACKS_SOURCES} ${HDF5_JAVADOC_HDF_HDF5_EXCEPTIONS_SOURCES} ${HDF5_JAVADOC_HDF_HDF5_STRUCTS_SOURCES} ${HDF5_JAVADOC_HDF_HDF5_SOURCES} - OVERVIEW ${HDF5_JAVA_HDF5_SRC_DIR}/overview.html - CLASSPATH ${CMAKE_JAVA_INCLUDE_PATH} - WINDOWTITLE "HDF5 Java" - DOCTITLE "

    HDF5 Java Wrapper

    " - INSTALLPATH ${HDF5_INSTALL_DOC_DIR} - AUTHOR TRUE - USE TRUE - VERSION TRUE -) diff --git a/java/src/hdf/hdf5lib/H5.java b/java/src/hdf/hdf5lib/H5.java index bb421066519..90c4575003d 100644 --- a/java/src/hdf/hdf5lib/H5.java +++ b/java/src/hdf/hdf5lib/H5.java @@ -228,7 +228,7 @@ * which prints out the HDF5 error stack, as described in the HDF5 C API @ref H5Eprint(). This * may be used by Java exception handlers to print out the HDF5 error stack.
    * - * @version HDF5 1.10.10
    + * @version HDF5 1.12.3
    * See also: * @ref HDFARRAY hdf.hdf5lib.HDFArray
    * @ref HDF5CONST hdf.hdf5lib.HDF5Constants
    @@ -5082,8 +5082,8 @@ public synchronized static native boolean H5Fis_hdf5(String name) * * @param name * IN: File name to check. - * @param file_id - * IN: File identifier for a currently-open HDF5 file + * @param fapl_id + * IN: File access property list identifier * * @return true if file is accessible, false if not. * @@ -5092,7 +5092,7 @@ public synchronized static native boolean H5Fis_hdf5(String name) * @exception NullPointerException * name is null. **/ - public synchronized static native boolean H5Fis_accessible(String name, long file_id) + public synchronized static native boolean H5Fis_accessible(String name, long fapl_id) throws HDF5LibraryException, NullPointerException; /** @@ -13773,7 +13773,7 @@ public static long H5Tcopy(long type_id) throws HDF5LibraryException /** * @ingroup JH5T * - * H5Tcreate creates a new dataype of the specified class with the specified number of bytes. + * H5Tcreate creates a new datatype of the specified class with the specified number of bytes. * * @param tclass * IN: Class of datatype to create. @@ -15228,7 +15228,7 @@ public synchronized static native void H5Treclaim(long type_id, long space_id, l /** * @ingroup JH5T * - * H5Tvlen_create creates a new variable-length (VL) dataype. + * H5Tvlen_create creates a new variable-length (VL) datatype. * * @param base_id * IN: Identifier of parent datatype. diff --git a/java/src/hdf/hdf5lib/HDF5GroupInfo.java b/java/src/hdf/hdf5lib/HDF5GroupInfo.java deleted file mode 100644 index 50c7db0e1a8..00000000000 --- a/java/src/hdf/hdf5lib/HDF5GroupInfo.java +++ /dev/null @@ -1,188 +0,0 @@ -/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * - * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * - * All rights reserved. * - * * - * This file is part of HDF5. 
The full HDF5 copyright notice, including * - * terms governing use, modification, and redistribution, is contained in * - * the COPYING file, which can be found at the root of the source code * - * distribution tree, or in https://www.hdfgroup.org/licenses. * - * If you do not have access to either file, you may request a copy from * - * help@hdfgroup.org. * - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ - -package hdf.hdf5lib; - -/** - *

    - * This class is a container for the information reported about an HDF5 Object - * from the H5Gget_obj_info() method. - *

    - * The fileno and objno fields contain four values which uniquely identify an - * object among those HDF5 files which are open: if all four values are the same - * between two objects, then the two objects are the same (provided both files - * are still open). The nlink field is the number of hard links to the object or - * zero when information is being returned about a symbolic link (symbolic links - * do not have hard links but all other objects always have at least one). The - * type field contains the type of the object, one of H5G_GROUP, H5G_DATASET, or - * H5G_LINK. The mtime field contains the modification time. If information is - * being returned about a symbolic link then linklen will be the length of the - * link value (the name of the pointed-to object with the null terminator); - * otherwise linklen will be zero. Other fields may be added to this structure - * in the future. - * - * @deprecated Not for public use. It is not used by the library. - * This class assumes that an object can contain four values which uniquely identify an - * object among those HDF5 files which are open. This is no longer valid in future - * HDF5 releases. - */ - -@Deprecated -public class HDF5GroupInfo { - long[] fileno; - long[] objno; - int nlink; - int type; - long mtime; - int linklen; - - /** - * Container for the information reported about an HDF5 Object - * from the H5Gget_obj_info() method - */ - public HDF5GroupInfo() - { - fileno = new long[2]; - objno = new long[2]; - nlink = -1; - type = -1; - mtime = 0; - linklen = 0; - } - - /** - * Sets the HDF5 group information. Used by the JHI5. 
- * - * @param fn - * File id number - * @param on - * Object id number - * @param nl - * Number of links - * @param t - * Type of the object - * @param mt - * Modification time - * @param len - * Length of link - **/ - public void setGroupInfo(long[] fn, long[] on, int nl, int t, long mt, int len) - { - fileno = fn; - objno = on; - nlink = nl; - type = t; - mtime = mt; - linklen = len; - } - - /** Resets all the group information to defaults. */ - public void reset() - { - fileno[0] = 0; - fileno[1] = 0; - objno[0] = 0; - objno[1] = 0; - nlink = -1; - type = -1; - mtime = 0; - linklen = 0; - } - - /** - * fileno accessors - * @return the file number if successful - */ - public long[] getFileno() { return fileno; } - - /** - * accessors - * @return the object number if successful - */ - public long[] getObjno() { return objno; } - - /** - * accessors - * @return type of group if successful - */ - public int getType() { return type; } - - /** - * accessors - * @return the number of links in the group if successful - */ - public int getNlink() { return nlink; } - - /** - * accessors - * @return the modified time value if successful - */ - public long getMtime() { return mtime; } - - /** - * accessors - * @return a length of link name if successful - */ - public int getLinklen() { return linklen; } - - /** - * The fileno and objno fields contain four values which uniquely identify - * an object among those HDF5 files. - */ - @Override - public boolean equals(Object obj) - { - if (!(obj instanceof HDF5GroupInfo)) { - return false; - } - - HDF5GroupInfo target = (HDF5GroupInfo)obj; - if ((fileno[0] == target.fileno[0]) && (fileno[1] == target.fileno[1]) && - (objno[0] == target.objno[0]) && (objno[1] == target.objno[1])) { - return true; - } - else { - return false; - } - } - - /** - * Returns the object id. - * - * @return the object id - */ - public long getOID() { return objno[0]; } - - /** - * /** Converts this object to a String representation. 
- * - * @return a string representation of this object - */ - @Override - public String toString() - { - String fileStr = "fileno=null"; - String objStr = "objno=null"; - - if (fileno != null) { - fileStr = "fileno[0]=" + fileno[0] + ",fileno[1]=" + fileno[1]; - } - - if (objno != null) { - objStr = "objno[0]=" + objno[0] + ",objno[1]=" + objno[1]; - } - - return getClass().getName() + "[" + fileStr + "," + objStr + ",type=" + type + ",nlink=" + nlink + - ",mtime=" + mtime + ",linklen=" + linklen + "]"; - } -} diff --git a/java/src/hdf/hdf5lib/HDFArray.java b/java/src/hdf/hdf5lib/HDFArray.java index 28d5117a112..637a8966e6c 100644 --- a/java/src/hdf/hdf5lib/HDFArray.java +++ b/java/src/hdf/hdf5lib/HDFArray.java @@ -455,6 +455,7 @@ else if (ArrayDescriptor.className.equals("java.lang.Double")) new HDF5JavaException("HDFArray: unsupported Object type: " + ArrayDescriptor.NT); throw(ex); } + break; } // end of statement for arrays of boxed objects default: HDF5JavaException ex = @@ -527,6 +528,7 @@ else if (ArrayDescriptor.className.equals("java.lang.Double")) new HDF5JavaException("HDFArray: unsupported Object type: " + ArrayDescriptor.NT); throw(ex); } + break; } // end of statement for arrays of boxed numerics } // end of switch statement for arrays of primitives diff --git a/java/src/hdf/hdf5lib/callbacks/package-info.java b/java/src/hdf/hdf5lib/callbacks/package-info.java index 5ef3fabc736..bf7bf3c79c1 100644 --- a/java/src/hdf/hdf5lib/callbacks/package-info.java +++ b/java/src/hdf/hdf5lib/callbacks/package-info.java @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * * * All rights reserved. * * * * This file is part of HDF5. 
The full HDF5 copyright notice, including * diff --git a/java/src/hdf/hdf5lib/exceptions/package-info.java b/java/src/hdf/hdf5lib/exceptions/package-info.java index 2ac7806365f..784e57d6dda 100644 --- a/java/src/hdf/hdf5lib/exceptions/package-info.java +++ b/java/src/hdf/hdf5lib/exceptions/package-info.java @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * @@ -29,4 +28,4 @@ * error code returned by the HDF5 library. * */ -package hdf.hdf5lib.exceptions; \ No newline at end of file +package hdf.hdf5lib.exceptions; diff --git a/java/src/hdf/hdf5lib/package-info.java b/java/src/hdf/hdf5lib/package-info.java index 7ae4df96fd2..e7421975917 100644 --- a/java/src/hdf/hdf5lib/package-info.java +++ b/java/src/hdf/hdf5lib/package-info.java @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/java/src/hdf/hdf5lib/structs/H5FD_hdfs_fapl_t.java b/java/src/hdf/hdf5lib/structs/H5FD_hdfs_fapl_t.java index 181d6814f48..986133448f0 100644 --- a/java/src/hdf/hdf5lib/structs/H5FD_hdfs_fapl_t.java +++ b/java/src/hdf/hdf5lib/structs/H5FD_hdfs_fapl_t.java @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * * * All rights reserved. * * * * This file is part of HDF5. 
The full HDF5 copyright notice, including * diff --git a/java/src/hdf/hdf5lib/structs/H5FD_ros3_fapl_t.java b/java/src/hdf/hdf5lib/structs/H5FD_ros3_fapl_t.java index 67c24636761..62b29218e39 100644 --- a/java/src/hdf/hdf5lib/structs/H5FD_ros3_fapl_t.java +++ b/java/src/hdf/hdf5lib/structs/H5FD_ros3_fapl_t.java @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/java/src/hdf/hdf5lib/structs/package-info.java b/java/src/hdf/hdf5lib/structs/package-info.java index a3ec6cd5a1a..7bd3dee9bba 100644 --- a/java/src/hdf/hdf5lib/structs/package-info.java +++ b/java/src/hdf/hdf5lib/structs/package-info.java @@ -1,6 +1,5 @@ /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * - * * * All rights reserved. * * * * This file is part of HDF5. The full HDF5 copyright notice, including * diff --git a/java/src/jni/h5fImp.c b/java/src/jni/h5fImp.c index 8cfecc50f3c..af06bdcf55a 100644 --- a/java/src/jni/h5fImp.c +++ b/java/src/jni/h5fImp.c @@ -188,7 +188,7 @@ Java_hdf_hdf5lib_H5_H5Fis_1hdf5(JNIEnv *env, jclass clss, jstring name) * Signature: (Ljava/lang/String;J)Z */ JNIEXPORT jboolean JNICALL -Java_hdf_hdf5lib_H5_H5Fis_1accessible(JNIEnv *env, jclass clss, jstring name, jlong file_id) +Java_hdf_hdf5lib_H5_H5Fis_1accessible(JNIEnv *env, jclass clss, jstring name, jlong fapl_id) { const char *fileName = NULL; htri_t bval = JNI_FALSE; @@ -200,7 +200,7 @@ Java_hdf_hdf5lib_H5_H5Fis_1accessible(JNIEnv *env, jclass clss, jstring name, jl PIN_JAVA_STRING(ENVONLY, name, fileName, NULL, "H5Fis_accessible: file name not pinned"); - if ((bval = H5Fis_accessible(fileName, (hid_t)file_id)) < 0) + if ((bval = H5Fis_accessible(fileName, (hid_t)fapl_id)) < 0) H5_LIBRARY_ERROR(ENVONLY); bval = (bval > 0) ? 
JNI_TRUE : JNI_FALSE; @@ -741,7 +741,7 @@ Java_hdf_hdf5lib_H5_H5Fset_1dset_1no_1attrs_1hint(JNIEnv *env, jclass clss, jlon done: return; -} +} /* end Java_hdf_hdf5lib_H5_H5Fset_1dset_1no_1attrs_1hint */ /* * Class: hdf_hdf5lib_H5 @@ -764,7 +764,7 @@ Java_hdf_hdf5lib_H5_H5Fget_1dset_1no_1attrs_1hint(JNIEnv *env, jclass clss, jlon done: return bval; -} +} /* end Java_hdf_hdf5lib_H5_H5Fget_1dset_1no_1attrs_1hint */ /* * Class: hdf_hdf5lib_H5 diff --git a/java/src/jni/h5lImp.c b/java/src/jni/h5lImp.c index 5389600a04f..67f48507aba 100644 --- a/java/src/jni/h5lImp.c +++ b/java/src/jni/h5lImp.c @@ -277,7 +277,7 @@ Java_hdf_hdf5lib_H5_H5Lexists(JNIEnv *env, jclass clss, jlong loc_id, jstring na JNIEXPORT jobject JNICALL Java_hdf_hdf5lib_H5_H5Lget_1info(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jlong access_id) { - H5L_info2_t infobuf = {H5L_TYPE_ERROR, FALSE, -1, H5T_CSET_ERROR, 0}; + H5L_info2_t infobuf = {H5L_TYPE_ERROR, FALSE, -1, H5T_CSET_ERROR, {{{0}}}}; const char *linkName = NULL; jvalue args[5]; herr_t status = FAIL; @@ -340,7 +340,7 @@ JNIEXPORT jobject JNICALL Java_hdf_hdf5lib_H5_H5Lget_1info_1by_1idx(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jint index_field, jint order, jlong link_n, jlong access_id) { - H5L_info2_t infobuf = {H5L_TYPE_ERROR, FALSE, -1, H5T_CSET_ERROR, 0}; + H5L_info2_t infobuf = {H5L_TYPE_ERROR, FALSE, -1, H5T_CSET_ERROR, {{{0}}}}; const char *groupName = NULL; jvalue args[5]; herr_t status = FAIL; @@ -454,7 +454,7 @@ JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Lget_1value(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jobjectArray link_value, jlong access_id) { - H5L_info2_t infobuf = {H5L_TYPE_ERROR, FALSE, -1, H5T_CSET_ERROR, 0}; + H5L_info2_t infobuf = {H5L_TYPE_ERROR, FALSE, -1, H5T_CSET_ERROR, {{{0}}}}; const char *file_name = NULL; const char *obj_name = NULL; const char *linkName = NULL; @@ -544,7 +544,7 @@ Java_hdf_hdf5lib_H5_H5Lget_1value_1by_1idx(JNIEnv *env, jclass clss, jlong loc_i jint 
index_field, jint order, jlong link_n, jobjectArray link_value, jlong access_id) { - H5L_info2_t infobuf = {H5L_TYPE_ERROR, FALSE, -1, H5T_CSET_ERROR, 0}; + H5L_info2_t infobuf = {H5L_TYPE_ERROR, FALSE, -1, H5T_CSET_ERROR, {{{0}}}}; const char *file_name = NULL; const char *obj_name = NULL; const char *grpName = NULL; diff --git a/java/src/jni/h5pDAPLImp.c b/java/src/jni/h5pDAPLImp.c index 01108b360b1..666f47dcf33 100644 --- a/java/src/jni/h5pDAPLImp.c +++ b/java/src/jni/h5pDAPLImp.c @@ -80,6 +80,8 @@ Java_hdf_hdf5lib_H5_H5Pget_1chunk_1cache(JNIEnv *env, jclass clss, jlong dapl, j if (NULL != rdcc_nbytes) PIN_LONG_ARRAY(ENVONLY, rdcc_nbytes, nbytesArray, &isCopy, "H5Pget_chunk_cache: nbytesArray array not pinned"); + if (NULL == nbytesArray) + H5_NULL_ARGUMENT_ERROR(ENVONLY, "nbytesArray should not be NULL after pinning"); { /* direct cast (size_t *)variable fails on 32-bit environment */ diff --git a/java/src/jni/h5pFAPLImp.c b/java/src/jni/h5pFAPLImp.c index f094aaf564c..80ed6c61829 100644 --- a/java/src/jni/h5pFAPLImp.c +++ b/java/src/jni/h5pFAPLImp.c @@ -1252,6 +1252,8 @@ Java_hdf_hdf5lib_H5_H5Pget_1cache(JNIEnv *env, jclass clss, jlong plist, jintArr if (NULL != rdcc_nbytes) PIN_LONG_ARRAY(ENVONLY, rdcc_nbytes, nbytesArray, &isCopy, "H5Pget_cache: nbytesArray array not pinned"); + if (NULL == nbytesArray) + H5_NULL_ARGUMENT_ERROR(ENVONLY, "nbytesArray should not be NULL after pinning"); { /* direct cast (size_t *)variable fails on 32-bit environment */ diff --git a/java/src/jni/h5tImp.c b/java/src/jni/h5tImp.c index a81d2706505..3a3a28867dc 100644 --- a/java/src/jni/h5tImp.c +++ b/java/src/jni/h5tImp.c @@ -1687,7 +1687,7 @@ Java_hdf_hdf5lib_H5_H5Tflush(JNIEnv *env, jclass clss, jlong loc_id) done: return; -} +} /* end Java_hdf_hdf5lib_H5_H5Tflush */ /* * Class: hdf_hdf5lib_H5 diff --git a/m4/aclocal_fc.f90 b/m4/aclocal_fc.f90 index bcefab5c8c8..68a8f1b590c 100644 --- a/m4/aclocal_fc.f90 +++ b/m4/aclocal_fc.f90 @@ -82,7 +82,7 @@ END PROGRAM 
PROG_FC_C_LONG_DOUBLE_EQ_C_DOUBLE !---- START ----- Determine the available KINDs for REALs and INTEGERs PROGRAM FC_AVAIL_KINDS - USE, INTRINSIC :: ISO_FORTRAN_ENV, ONLY : stderr=>ERROR_UNIT + USE, INTRINSIC :: ISO_FORTRAN_ENV, ONLY : stdout=>OUTPUT_UNIT IMPLICIT NONE INTEGER :: ik, jk, k, kk, max_decimal_prec INTEGER :: prev_rkind, num_rkinds = 1, num_ikinds = 1 @@ -102,11 +102,11 @@ PROGRAM FC_AVAIL_KINDS ENDDO DO k = 1, num_ikinds - WRITE(stderr,'(I0)', ADVANCE='NO') list_ikinds(k) + WRITE(stdout,'(I0)', ADVANCE='NO') list_ikinds(k) IF(k.NE.num_ikinds)THEN - WRITE(stderr,'(A)',ADVANCE='NO') ',' + WRITE(stdout,'(A)',ADVANCE='NO') ',' ELSE - WRITE(stderr,'()') + WRITE(stdout,'()') ENDIF ENDDO @@ -139,17 +139,17 @@ PROGRAM FC_AVAIL_KINDS ENDDO prec DO k = 1, num_rkinds - WRITE(stderr,'(I0)', ADVANCE='NO') list_rkinds(k) + WRITE(stdout,'(I0)', ADVANCE='NO') list_rkinds(k) IF(k.NE.num_rkinds)THEN - WRITE(stderr,'(A)',ADVANCE='NO') ',' + WRITE(stdout,'(A)',ADVANCE='NO') ',' ELSE - WRITE(stderr,'()') + WRITE(stdout,'()') ENDIF ENDDO - WRITE(stderr,'(I0)') max_decimal_prec - WRITE(stderr,'(I0)') num_ikinds - WRITE(stderr,'(I0)') num_rkinds + WRITE(stdout,'(I0)') max_decimal_prec + WRITE(stdout,'(I0)') num_ikinds + WRITE(stdout,'(I0)') num_rkinds END PROGRAM FC_AVAIL_KINDS !---- END ----- Determine the available KINDs for REALs and INTEGERs diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 06d1c34a579..48a8e60743f 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -409,7 +409,15 @@ Bug Fixes since HDF5-1.12.2 release Java Library ------------ - - + - Fixed switch case 'L' block missing a break statement. + + The HDF5Array.arrayify method is missing a break statement in the case 'L': section + which causes it to fall through and throw an HDF5JavaException when attempting to + read an Array[Array[Long]]. + + The error was fixed by inserting a break statement at the end of the case 'L': sections. 
+ + Fixes GitHub issue #3056 Configuration diff --git a/src/H5Dpublic.h b/src/H5Dpublic.h index 13a6d3400af..32d4f64e37c 100644 --- a/src/H5Dpublic.h +++ b/src/H5Dpublic.h @@ -279,7 +279,7 @@ extern "C" { * * \p loc_id may specify a file, group, dataset, named datatype, * or attribute. If an attribute, dataset, or named datatype is - * specified then the dataset will be created at the location + * specified, then the dataset will be created at the location * where the attribute, dataset, or named datatype is attached. * * \p name may be either an absolute path in the file or a relative @@ -290,7 +290,7 @@ extern "C" { * file location where the dataset will be created, the datatype * is copied and converted to a transient type. * - * The link creation property list, \p lcpl_id, governs creation + * The link creation property list, \p lcpl_id, governs the creation * of the link(s) by which the new dataset is accessed and the * creation of any intermediate groups that may be missing. * @@ -334,12 +334,12 @@ H5_DLL hid_t H5Dcreate2(hid_t loc_id, const char *name, hid_t type_id, hid_t spa * * \p loc_id may specify a file, group, dataset, named datatype, * or attribute. If an attribute, dataset, or named datatype is - * specified then the dataset will be created at the location + * specified, then the dataset will be created at the location * where the attribute, dataset, or named datatype is attached. * * The dataset’s datatype and dataspace are specified by * \p type_id and \p space_id, respectively. These are the - * datatype and dataspace of the dataset as it will exist in + * datatype and dataspace of the dataset as they will exist in * the file, which may differ from the datatype and dataspace * in application memory. * @@ -668,7 +668,7 @@ H5_DLL herr_t H5Dget_chunk_info_by_coord(hid_t dset_id, const hsize_t *offset, u * context \p op_data. * * \par Example - * For each chunk, print the allocated chunk size (0 for un-allocated chunks). 
+ * For each chunk, print the allocated chunk size (0 for unallocated chunks). * \snippet H5D_examples.c H5Dchunk_iter_cb * Iterate over all chunked datasets and chunks in a file. * \snippet H5D_examples.c H5Ovisit_cb @@ -700,22 +700,22 @@ H5_DLL herr_t H5Dchunk_iter(hid_t dset_id, hid_t dxpl_id, H5D_chunk_iter_op_t cb * specified by the index \p index. The chunk belongs to a set of * chunks in the selection specified by \p fspace_id. If the queried * chunk does not exist in the file, the size will be set to 0 and - * address to #HADDR_UNDEF. The value pointed to by filter_mask will + * address to HADDR_UNDEF. The value pointed to by filter_mask will * not be modified. \c NULL can be passed in for any \p out parameters. * * \p chk_idx is the chunk index in the selection. The index value * may have a value of 0 up to the number of chunks stored in - * the file that have a nonempty intersection with the file - * dataspace selection + * the file that has a nonempty intersection with the file + * dataspace selection. * * \note As of 1.10.5, the dataspace intersection is not yet - * supported, hence, the index is of all the written chunks. + * supported. Hence, the index is of all the written chunks. * * \p fspace_id specifies the file dataspace selection. It is - * intended to take #H5S_ALL for specifying the current selection. + * intended to take #H5S_ALL to specify the current selection. * * \note Please be aware that this function currently does not - * support non-trivial selections, thus \p fspace_id has no + * support non-trivial selections; thus \p fspace_id has no * effect. Also, the implementation does not handle the #H5S_ALL * macro correctly. 
As a workaround, an application can get * the dataspace for the dataset using H5Dget_space() and pass that @@ -735,7 +735,7 @@ H5_DLL herr_t H5Dget_chunk_info(hid_t dset_id, hid_t fspace_id, hsize_t chk_idx, * * \dset_id * - * \return Returns the offset in bytes; otherwise, returns #HADDR_UNDEF, + * \return Returns the offset in bytes; otherwise, returns HADDR_UNDEF, * a negative value. * * \details H5Dget_offset() returns the address in the file of @@ -983,7 +983,7 @@ H5_DLL herr_t H5Dwrite(hid_t dset_id, hid_t mem_type_id, hid_t mem_space_id, hid * the file. Only one chunk can be written with this function. * * \p filters is a mask providing a record of which filters are - * used with the the chunk. The default value of the mask is + * used with the chunk. The default value of the mask is * zero (0), indicating that all enabled filters are applied. A * filter is skipped if the bit corresponding to the filter’s * position in the pipeline (0 ≤ position < 32) is turned on. @@ -1102,6 +1102,8 @@ H5_DLL herr_t H5Dread_chunk(hid_t dset_id, hid_t dxpl_id, const hsize_t *offset, * be restarted at the point of exit; a second H5Diterate() * call will always restart at the beginning. * + * \warning Modifying the selection of \p space_id during iteration + * will lead to undefined behavior. * * \since 1.10.2 * @@ -1304,7 +1306,7 @@ H5_DLL herr_t H5Drefresh(hid_t dset_id); * \p op and scatters it to the supplied buffer \p dst_buf in a * manner similar to data being written to a dataset. * - * \p dst_space_id is a dataspace which defines the extent of \p + * \p dst_space_id is a dataspace that defines the extent of \p * dst_buf and the selection within it to scatter the data to. * * \p type_id is the datatype of the data to be scattered in both @@ -1361,7 +1363,7 @@ H5_DLL herr_t H5Dscatter(H5D_scatter_func_t op, void *op_data, hid_t type_id, hi * enough to hold all the data if the callback function \p op is * not provided. 
* - * \p op is a callback function which handles the gathered data. + * \p op is a callback function that handles the gathered data. * It is optional if \p dst_buf is large enough to hold all of the * gathered data; required otherwise. * @@ -1407,10 +1409,12 @@ H5_DLL herr_t H5Dgather(hid_t src_space_id, const void *src_buf, hid_t type_id, */ H5_DLL herr_t H5Dclose(hid_t dset_id); +/// \cond DEV /* Internal API routines */ H5_DLL herr_t H5Ddebug(hid_t dset_id); H5_DLL herr_t H5Dformat_convert(hid_t dset_id); H5_DLL herr_t H5Dget_chunk_index_type(hid_t did, H5D_chunk_index_t *idx_type); +/// \endcond /* Symbols defined for compatibility with previous versions of the HDF5 API. * @@ -1489,10 +1493,10 @@ H5_DLL herr_t H5Dget_chunk_index_type(hid_t did, H5D_chunk_index_t *idx_type); * * H5Dcreate() and H5Dcreate_anon() return a dataset identifier for * success or a negative value for failure. The dataset identifier - * should eventually be closed by calling H5Dclose() to release + * should eventually be closed by calling H5Dclose() to release the * resources it uses. * - * See H5Dcreate_anon() for discussion of the differences between + * See H5Dcreate_anon() for a discussion of the differences between * H5Dcreate() and H5Dcreate_anon(). * * The HDF5 library provides flexible means of specifying a fill value, @@ -1568,7 +1572,7 @@ H5_DLL hid_t H5Dopen1(hid_t loc_id, const char *name); * * This function ensures that the dataset dimensions are of at least * the sizes specified in size. The function H5Dset_extent() must be - * used if the dataset dimension sizes are are to be reduced. + * used if the dataset dimension sizes are to be reduced. * * \version 1.8.0 Function deprecated in this release. Parameter size * syntax changed to \Code{const hsize_t size[]} in this release. @@ -1596,7 +1600,7 @@ H5_DLL herr_t H5Dextend(hid_t dset_id, const hsize_t size[]); * The \p type_id must be the datatype stored in the buffer. 
The \p * space_id describes the selection for the memory buffer to free the * VL datatypes within. The \p dxpl_id is the dataset transfer property - * list which was used for the I/O transfer to create the buffer. And + * list that was used for the I/O transfer to create the buffer. And * \p buf is the pointer to the buffer to be reclaimed. * * The VL structures (\ref hvl_t) in the user's buffer are modified to diff --git a/src/H5Fpublic.h b/src/H5Fpublic.h index 195495c260e..65b30b8727f 100644 --- a/src/H5Fpublic.h +++ b/src/H5Fpublic.h @@ -385,7 +385,7 @@ H5_DLL hid_t H5Fcreate(const char *filename, unsigned flags, hid_t fcpl_id, hid_ * opened. * * The \p fapl_id parameter specifies the file access property list. - * Use of #H5P_DEFAULT specifies that default I/O access properties + * The use of #H5P_DEFAULT specifies that default I/O access properties * are to be used. * * The \p flags parameter specifies whether the file will be opened in @@ -495,7 +495,7 @@ H5_DLL hid_t H5Freopen(hid_t file_id); * \snippet H5F_examples.c flush * * \attention HDF5 does not possess full control over buffering. H5Fflush() - * flushes the internal HDF5 buffers then asks the operating system + * flushes the internal HDF5 buffers and then asks the operating system * (the OS) to flush the system buffers for the open files. After * that, the OS is responsible for ensuring that the data is * actually flushed to disk. @@ -522,7 +522,7 @@ H5_DLL herr_t H5Fflush(hid_t object_id, H5F_scope_t scope); * \snippet H5F_examples.c minimal * * \note \Bold{Delayed close:} Note the following deviation from the - * above-described behavior. If H5Fclose() is called for a file but one + * above-described behavior. If H5Fclose() is called for a file, but one * or more objects within the file remain open, those objects will remain * accessible until they are individually closed. 
Thus, if the dataset * \c data_sample is open when H5Fclose() is called for the file @@ -531,7 +531,7 @@ H5_DLL herr_t H5Fflush(hid_t object_id, H5F_scope_t scope); * automatically closed once all objects in the file have been closed.\n * Be warned, however, that there are circumstances where it is not * possible to delay closing a file. For example, an MPI-IO file close is - * a collective call; all of the processes that opened the file must + * a collective call; all of the processes that open the file must * close it collectively. The file cannot be closed at some time in the * future by each process in an independent fashion. Another example is * that an application using an AFS token-based file access privilege may @@ -1303,7 +1303,7 @@ H5_DLL herr_t H5Fstart_swmr_write(hid_t file_id); * \snippet this H5F_sect_info_t_snip * * This routine retrieves free-space section information for \p nsects - * sections or at most the maximum number of sections in the specified + * sections or, at most, the maximum number of sections in the specified * free-space manager. If the number of sections is not known, a * preliminary H5Fget_free_sections() call can be made by setting \p * sect_info to NULL and the total number of free-space sections for @@ -1569,7 +1569,7 @@ H5_DLL herr_t H5Fget_page_buffering_stats(hid_t file_id, unsigned accesses[2], u * \brief Obtains information about a cache image if it exists * * \file_id - * \param[out] image_addr Offset of the cache image if it exists, or #HADDR_UNDEF if it does not + * \param[out] image_addr Offset of the cache image if it exists, or HADDR_UNDEF if it does not * \param[out] image_size Length of the cache image if it exists, or 0 if it does not * \returns \herr_t * @@ -1611,7 +1611,7 @@ H5_DLL herr_t H5Fget_mdc_image_info(hid_t file_id, haddr_t *image_addr, hsize_t * file_id. This setting is used to inform the library to create * minimized dataset object headers when \c TRUE. 
* - * The setting's value is returned in the boolean pointer minimize. + * The setting's value is returned in the boolean pointer minimized. * * \since 1.10.5 * @@ -1810,7 +1810,7 @@ H5_DLL herr_t H5Fget_info1(hid_t obj_id, H5F_info1_t *file_info); /** * \ingroup H5F * - * \brief Sets thelatest version of the library to be used for writing objects + * \brief Sets the latest version of the library to be used for writing objects * * \file_id * \param[in] latest_format Latest format flag diff --git a/src/H5Rpublic.h b/src/H5Rpublic.h index becbd155e2a..5d2701c5a52 100644 --- a/src/H5Rpublic.h +++ b/src/H5Rpublic.h @@ -75,7 +75,7 @@ typedef haddr_t hobj_ref_t; /** * Buffer to store heap ID and index * - * This needs to be large enough to store largest #haddr_t in a worst case + * This needs to be large enough to store largest haddr_t in a worst case * machine (8 bytes currently) plus an int. * * \deprecated Dataset region reference type that is used with deprecated @@ -259,7 +259,7 @@ H5_DLL herr_t H5Rdestroy(H5R_ref_t *ref_ptr); * \snippet this H5R_type_t_snip * * Note that #H5R_OBJECT1 and #H5R_DATASET_REGION1 can never be - * associated to an \ref H5R_ref_t reference and can therefore never be + * associated with an \ref H5R_ref_t reference and can, therefore, never be * returned through that function. * * \ref H5R_ref_t is defined in H5Rpublic.h as: @@ -302,7 +302,7 @@ H5_DLL htri_t H5Requal(const H5R_ref_t *ref1_ptr, const H5R_ref_t *ref2_ptr); * \return \herr_t * * \details H5Rcopy() creates a copy of an existing reference. - * \p src_ref_ptr points to the reference to copy and \p dst_ref_ptr is the + * \p src_ref_ptr points to the reference to copy, and \p dst_ref_ptr is the * pointer to the destination reference. * */ @@ -336,7 +336,7 @@ H5_DLL herr_t H5Rcopy(const H5R_ref_t *src_ref_ptr, H5R_ref_t *dst_ref_ptr); * * The object opened with this function should be closed when it * is no longer needed so that resource leaks will not develop. 
Use - * the appropriate close function such as H5Oclose() or H5Dclose() + * the appropriate close function, such as H5Oclose() or H5Dclose() * for datasets. * */ @@ -574,7 +574,7 @@ H5_DLL ssize_t H5Rget_attr_name(const H5R_ref_t *ref_ptr, char *name, size_t siz * * A \Emph{reference type} is the type of reference, either an object * reference or a dataset region reference. An \Emph{object reference} - * points to an HDF5 object while a \Emph{dataset region reference} + * points to an HDF5 object, while a \Emph{dataset region reference} * points to a defined region within a dataset. * * The \Emph{referenced object} is the object the reference points @@ -646,7 +646,7 @@ H5_DLL H5G_obj_t H5Rget_obj_type1(hid_t id, H5R_type_t ref_type, const void *ref * * The object opened with this function should be closed when it is no * longer needed so that resource leaks will not develop. Use the - * appropriate close function such as H5Oclose() or H5Dclose() for + * appropriate close function, such as H5Oclose() or H5Dclose() for * datasets. * * \version 1.10.0 Function H5Rdereference() renamed to H5Rdereference1() and @@ -783,7 +783,7 @@ H5_DLL herr_t H5Rget_obj_type2(hid_t id, H5R_type_t ref_type, const void *ref, H * * The object opened with this function should be closed when it is no * longer needed so that resource leaks will not develop. Use the - * appropriate close function such as H5Oclose() or H5Dclose() for + * appropriate close function, such as H5Oclose() or H5Dclose() for * datasets. * * \since 1.10.0 From 31d098776e5de670e0616dae2ba7f52758374ec8 Mon Sep 17 00:00:00 2001 From: mattjala <124107509+mattjala@users.noreply.github.com> Date: Thu, 15 Jun 2023 15:54:12 -0500 Subject: [PATCH 082/108] Add java options to build scripts (#3137) Previously, cmakehdf5 turned on compiling of the java interface by default due to a value set in cacheinit.cmake. 
Now, consistent with how Fortran and CPP interfaces are handled, the script overwrites this default value to disable the libraries, fixing #2958. I also implemented the --enable-java/--disable java options for cmakehdf5, and -java for buildhdf5. Allen said these scripts should mention that compilers are to be specified in environment variables, but missing compilers causes errors at the CMake level, and CMake's error messages are already pretty informative (See the one in #2958 about JAVA_COMPILER). --- bin/buildhdf5 | 6 +++++- bin/cmakehdf5 | 11 +++++++++++ test/dt_arith.c | 2 +- test/testerror.sh.in | 2 +- 4 files changed, 18 insertions(+), 3 deletions(-) diff --git a/bin/buildhdf5 b/bin/buildhdf5 index 4c848841b00..a2a13cb225c 100755 --- a/bin/buildhdf5 +++ b/bin/buildhdf5 @@ -25,7 +25,7 @@ can continue from a previous build. Command Syntax ============== -buildhdf5 [-config] [-szlib] [-help] [-n] [-srcdir dir] [-fortran] [-cxx] [-pp] config-arguments ... +buildhdf5 [-config] [-szlib] [-help] [-n] [-srcdir dir] [-fortran] [-cxx] [-java] [-pp] config-arguments ... -config: run configure only. 
[default to do build too] -szlib: configure in the szlib option -help: show this help page @@ -35,6 +35,7 @@ buildhdf5 [-config] [-szlib] [-help] [-n] [-srcdir dir] [-fortran] [-cxx] [-pp] which will be passed to configure] -fortran: add --enable-fortran -cxx: add --enable-cxx + -java: add --enable-java -pp: add --enable-parallel all other arguments are passed to configure @@ -251,6 +252,9 @@ while [ $# -gt 0 ]; do -cxx) CONFIG_OP="$CONFIG_OP --enable-cxx" ;; + -java) + CONFIG_OP="$CONFIG_OP --enable-java" + ;; -fortran) CONFIG_OP="$CONFIG_OP --enable-fortran" ;; diff --git a/bin/cmakehdf5 b/bin/cmakehdf5 index e59c772a3b3..f17b9c4d131 100755 --- a/bin/cmakehdf5 +++ b/bin/cmakehdf5 @@ -45,6 +45,7 @@ fi cacheinit=$srcdir/config/cmake/cacheinit.cmake build_cpp_lib=-DHDF5_BUILD_CPP_LIB:BOOL=OFF # C++ interface default off build_fortran=-DHDF5_BUILD_FORTRAN:BOOL=OFF # Fortran interface default off +build_java=-DHDF5_BUILD_JAVA:BOOL=OFF # Java interface default off build_hl_lib=-DHDF5_BUILD_HL_LIB:BOOL=ON # High Level interface default on build_threadsafe=-DHDF5_ENABLE_THREADSAFE:BOOL=OFF # Threadsafe feature default off build_testing=-DBUILD_TESTING:BOOL=ON # Build tests default on @@ -82,6 +83,8 @@ Usage: $progname [] enable or disable fortran API. Default is off. --enable-cxx | --disable-cxx: enable or disable c++ API. Default is off. + --enable-java | --disable-java: + enable or disable Java API. Default is off. --enable-hl | --disable-hl: enable or disable high level API. Default is on. 
--enable-threadsafe | --disable-threadsafe: @@ -210,6 +213,7 @@ fi # XXX can be: # fortran Fortran interface # cxx C++ interface +# java Java interface # hl Highlevel interface # testing Build tests # tools Build tools @@ -227,6 +231,12 @@ while [ $# -gt 0 ]; do --disable-cxx) build_cpp_lib=-DHDF5_BUILD_CPP_LIB:BOOL=OFF ;; + --enable-java) + build_java=-DHDF5_BUILD_JAVA:BOOL=ON + ;; + --disable-java) + build_java=-DHDF5_BUILD_JAVA:BOOL=OFF + ;; --enable-hl) build_hl_lib=-DHDF5_BUILD_HL_LIB:BOOL=ON ;; @@ -345,6 +355,7 @@ STEP "Configure..." \ -C $cacheinit \ $build_cpp_lib \ $build_fortran \ + $build_java \ $build_hl_lib \ $build_threadsafe \ $shared_lib \ diff --git a/test/dt_arith.c b/test/dt_arith.c index b2742aeac81..f09e08dcd73 100644 --- a/test/dt_arith.c +++ b/test/dt_arith.c @@ -2847,7 +2847,7 @@ test_conv_flt_1(const char *name, int run_test, hid_t src, hid_t dst) int status; /*child exit status */ /* - * Some systems generage SIGFPE during floating point overflow and we + * Some systems generate SIGFPE during floating point overflow and we * cannot assume that we can continue from such a signal. Therefore, we * fork here and let the child run the test and return the number of * failures with the exit status. 
diff --git a/test/testerror.sh.in b/test/testerror.sh.in index 72bb943eb58..1e6a5603a6d 100644 --- a/test/testerror.sh.in +++ b/test/testerror.sh.in @@ -14,7 +14,7 @@ srcdir=@srcdir@ -# Determine backward compatibility options eneabled +# Check if backward compatibility options are enabled DEPRECATED_SYMBOLS="@DEPRECATED_SYMBOLS@" CMP='cmp -s' From 25a90599784abc7d96426fe03621debd3490fd36 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Mon, 19 Jun 2023 11:22:56 -0500 Subject: [PATCH 083/108] Update github files/workflow from develop (#3155) --- .github/workflows/cmake-ctest.yml | 6 +++--- .github/workflows/daily-build.yml | 5 +++++ .github/workflows/release.yml | 26 ++++++++++++++++++++++++-- 3 files changed, 32 insertions(+), 5 deletions(-) diff --git a/.github/workflows/cmake-ctest.yml b/.github/workflows/cmake-ctest.yml index ffd56e2f82e..1d26fdbe2a3 100644 --- a/.github/workflows/cmake-ctest.yml +++ b/.github/workflows/cmake-ctest.yml @@ -113,7 +113,7 @@ jobs: - name: List files for the space (Linux) run: | - ls ${{ github.workspace }} + ls -l ${{ github.workspace }} ls ${{ runner.workspace }} - name: Uncompress source (Linux) @@ -141,7 +141,7 @@ jobs: - name: List files in the space (Linux) run: | ls ${{ github.workspace }} - ls ${{ runner.workspace }} + ls -l ${{ runner.workspace }} # Save files created by ctest script - name: Save published binary (Linux) @@ -216,7 +216,7 @@ jobs: - name: List files in the space (MacOS) run: | ls ${{ github.workspace }} - ls ${{ runner.workspace }} + ls -l ${{ runner.workspace }} # Save files created by ctest script - name: Save published binary (MacOS) diff --git a/.github/workflows/daily-build.yml b/.github/workflows/daily-build.yml index 25768c2e06c..815178136d7 100644 --- a/.github/workflows/daily-build.yml +++ b/.github/workflows/daily-build.yml @@ -6,6 +6,9 @@ on: schedule: - cron: "8 0 * * *" +permissions: + contents: read + # A workflow run is made up of one or more jobs 
that can run sequentially or # in parallel. jobs: @@ -21,6 +24,8 @@ jobs: call-workflow-release: needs: [call-workflow-tarball, call-workflow-ctest] + permissions: + contents: write # In order to allow tag creation uses: ./.github/workflows/release.yml with: file_base: ${{ needs.call-workflow-tarball.outputs.file_base }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index ab2efb7ef6f..735c21fe23d 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -17,6 +17,10 @@ on: required: true type: string +# Minimal permissions to be inherited by any job that doesn't declare its own permissions +permissions: + contents: read + # Previous workflows must pass to get here so tag the commit that created the files jobs: create-tag: @@ -63,7 +67,10 @@ jobs: uses: actions/download-artifact@v3 with: name: docs-doxygen - path: ${{ github.workspace }}/${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen + path: ${{ github.workspace }} + + - name: Zip Folder + run: zip -r ${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen.zip. 
-i ${{ github.workspace }}/${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen - name: Get tgz-tarball (Linux) uses: actions/download-artifact@v3 @@ -97,15 +104,30 @@ jobs: path: ${{ github.workspace }} - name: PreRelease tag + id: create_release uses: softprops/action-gh-release@v1 with: tag_name: "snapshot_1_12" prerelease: true files: | - ${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen + ${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen.zip ${{ steps.get-file-base.outputs.FILE_BASE }}.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}.zip ${{ steps.get-file-base.outputs.FILE_BASE }}-osx12.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}-win_vs2022.zip if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` + + - name: Store Release url + run: | + echo "${{ steps.create_release.outputs.upload_url }}" > ./upload_url + +# - uses: actions/upload-artifact@v3 +# with: +# path: ./upload_url +# name: upload_url + + - name: List files for the space (Linux) + run: | + ls -l ${{ github.workspace }} + ls ${{ runner.workspace }} From 74eaaa5946fdf295e859491681ac66863bb4d2e4 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Tue, 20 Jun 2023 11:04:52 -0500 Subject: [PATCH 084/108] Remove duplicate code (#3161) --- fortran/src/H5Pff.F90 | 23 ----------------------- 1 file changed, 23 deletions(-) diff --git a/fortran/src/H5Pff.F90 b/fortran/src/H5Pff.F90 index 867e26fc44e..60b89891bf4 100644 --- a/fortran/src/H5Pff.F90 +++ b/fortran/src/H5Pff.F90 @@ -4036,29 +4036,6 @@ SUBROUTINE h5pset_f(prp_id, name, value, hdferr) CHARACTER(LEN=*), INTENT(IN) :: name INTEGER , INTENT(IN) :: value INTEGER , INTENT(OUT) :: hdferr - END SUBROUTINE h5pset - -!> -!! \ingroup FH5P -!! -!! \brief Sets a property list value. -!! -!! \note \fortran_obsolete -!! -!! \param prp_id Property list identifier to modify. -!! 
\param name Name of property to modify. -!! \param value Property value, supported types are: -!! \li INTEGER -!! \li REAL -!! \li DOUBLE PRECISION -!! \li CHARACTER(LEN=*) -!! \param hdferr \fortran_error -!! - SUBROUTINE h5pset_f(prp_id, name, value, hdferr) - INTEGER(HID_T), INTENT(IN) :: prp_id - CHARACTER(LEN=*), INTENT(IN) :: name - INTEGER, INTENT(IN) :: value - INTEGER, INTENT(OUT) :: hdferr END SUBROUTINE h5pset_f !> !! \ingroup FH5P From 36cb3a9e2a5e41d07d789d880198dc95a47f6f05 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Thu, 22 Jun 2023 10:10:13 -0500 Subject: [PATCH 085/108] Add missing space in zip command (#3169) --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 735c21fe23d..8e3824f46b6 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -70,7 +70,7 @@ jobs: path: ${{ github.workspace }} - name: Zip Folder - run: zip -r ${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen.zip. -i ${{ github.workspace }}/${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen + run: zip -r ${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen.zip . 
-i ${{ github.workspace }}/${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen - name: Get tgz-tarball (Linux) uses: actions/download-artifact@v3 From 30da5a24b08e88b49c8f6ff7be1a1d9c50cd98b1 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Thu, 29 Jun 2023 20:09:29 -0500 Subject: [PATCH 086/108] Hdf5 1 12 release action (#3199) * Update actions for release option, fix branch for daily build * Scheduled workflows run on latest commit on the develop --- .github/workflows/daily-build.yml | 11 +- .github/workflows/hdfeos5.yml | 2 +- .github/workflows/main.yml | 2 +- .github/workflows/release-files.yml | 144 ++++++++++++++++++++++++ .github/workflows/release.yml | 166 ++++++++-------------------- .github/workflows/tarball.yml | 43 ++++++- README.md | 2 +- 7 files changed, 241 insertions(+), 129 deletions(-) create mode 100644 .github/workflows/release-files.yml diff --git a/.github/workflows/daily-build.yml b/.github/workflows/daily-build.yml index 815178136d7..d371e0e39ff 100644 --- a/.github/workflows/daily-build.yml +++ b/.github/workflows/daily-build.yml @@ -3,8 +3,6 @@ name: hdf5 1.12 daily build # Controls when the action will run. 
Triggers the workflow on a schedule on: workflow_dispatch: - schedule: - - cron: "8 0 * * *" permissions: contents: read @@ -14,22 +12,29 @@ permissions: jobs: call-workflow-tarball: uses: ./.github/workflows/tarball.yml + with: + #use_tag: snapshot-1.12 + use_environ: snapshots call-workflow-ctest: needs: call-workflow-tarball uses: ./.github/workflows/cmake-ctest.yml with: file_base: ${{ needs.call-workflow-tarball.outputs.file_base }} + #use_tag: snapshot-1.12 + #use_environ: snapshots if: ${{ needs.call-workflow-tarball.outputs.has_changes == 'true' }} call-workflow-release: needs: [call-workflow-tarball, call-workflow-ctest] permissions: contents: write # In order to allow tag creation - uses: ./.github/workflows/release.yml + uses: ./.github/workflows/release-files.yml with: file_base: ${{ needs.call-workflow-tarball.outputs.file_base }} file_branch: ${{ needs.call-workflow-tarball.outputs.file_branch }} file_sha: ${{ needs.call-workflow-tarball.outputs.file_sha }} + use_tag: snapshot-1.12 + use_environ: snapshots if: ${{ needs.call-workflow-tarball.outputs.has_changes == 'true' }} diff --git a/.github/workflows/hdfeos5.yml b/.github/workflows/hdfeos5.yml index 0deadc62546..9bdce203ed1 100644 --- a/.github/workflows/hdfeos5.yml +++ b/.github/workflows/hdfeos5.yml @@ -1,4 +1,4 @@ -name: hdfeos5 +name: hdfeos5 1.12 on: workflow_dispatch: diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 68c8d9e31c4..e87b62763f6 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -1,4 +1,4 @@ -name: hdf5 dev CI +name: hdf5 1.12 CI # Controls when the action will run. Triggers the workflow on push or pull request on: diff --git a/.github/workflows/release-files.yml b/.github/workflows/release-files.yml new file mode 100644 index 00000000000..0f483d4972c --- /dev/null +++ b/.github/workflows/release-files.yml @@ -0,0 +1,144 @@ +name: hdf5 1.12 release + +# Controls when the action will run. 
Triggers the workflow on a schedule +on: + workflow_call: + inputs: + use_tag: + description: 'Release version tag' + type: string + required: false + default: snapshot-1.12 + use_environ: + description: 'Environment to locate files' + type: string + required: true + default: snapshots + file_base: + description: "The common base name of the source tarballs" + required: true + type: string + file_branch: + description: "The branch name for the source tarballs" + required: true + type: string + file_sha: + description: "The sha for the source tarballs" + required: true + type: string + +# Minimal permissions to be inherited by any job that doesn't declare its own permissions +permissions: + contents: read + +# Previous workflows must pass to get here so tag the commit that created the files +jobs: + create-tag: + runs-on: ubuntu-latest + permissions: + contents: write # In order to allow tag creation + steps: + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - name: Get Sources + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - run: | + git checkout ${{ inputs.file_sha }} + + - uses: rickstaa/action-create-tag@v1 + id: "tag_create" + with: + commit_sha: ${{ inputs.file_sha }} + tag: "${{ inputs.use_tag }}" + force_push_tag: true + message: "Latest snapshot" + if: ${{ inputs.use_environ == 'snapshots' }} + + # Print result using the action output. 
+ - run: | + echo "Tag already present: ${{ steps.tag_create.outputs.tag_exists }}" + + PreRelease-getfiles: + runs-on: ubuntu-latest + needs: create-tag + environment: ${{ inputs.use_environ }} + permissions: + contents: write + steps: + - name: Get file base name + id: get-file-base + run: | + FILE_NAME_BASE=$(echo "${{ inputs.file_base }}") + echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT + + # Get files created by tarball script + - name: Get doxygen (Linux) + uses: actions/download-artifact@v3 + with: + name: docs-doxygen + path: ${{ github.workspace }}/${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen + + - name: Zip Folder + run: zip -r ${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen.zip ./${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen + + - name: Get tgz-tarball (Linux) + uses: actions/download-artifact@v3 + with: + name: tgz-tarball + path: ${{ github.workspace }} + + - name: Get zip-tarball (Windows) + uses: actions/download-artifact@v3 + with: + name: zip-tarball + path: ${{ github.workspace }} + + # Get files created by cmake-ctest script + - name: Get published binary (Windows) + uses: actions/download-artifact@v3 + with: + name: zip-vs2022-binary + path: ${{ github.workspace }} + + - name: Get published binary (MacOS) + uses: actions/download-artifact@v3 + with: + name: tgz-osx12-binary + path: ${{ github.workspace }} + + - name: Get published binary (Linux) + uses: actions/download-artifact@v3 + with: + name: tgz-ubuntu-2204-binary + path: ${{ github.workspace }} + + - name: PreRelease tag + id: create_release + uses: softprops/action-gh-release@v1 + with: + tag_name: "${{ inputs.use_tag }}" + prerelease: true + files: | + ${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen.zip + ${{ steps.get-file-base.outputs.FILE_BASE }}.tar.gz + ${{ steps.get-file-base.outputs.FILE_BASE }}.zip + ${{ steps.get-file-base.outputs.FILE_BASE }}-osx12.tar.gz + ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204.tar.gz + ${{ 
steps.get-file-base.outputs.FILE_BASE }}-win_vs2022.zip + if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` + + - name: Store Release url + run: | + echo "${{ steps.create_release.outputs.upload_url }}" > ./upload_url + +# - uses: actions/upload-artifact@v3 +# with: +# path: ./upload_url +# name: upload_url + + - name: List files for the space (Linux) + run: | + ls -l ${{ github.workspace }} + ls ${{ runner.workspace }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 8e3824f46b6..26c08a90259 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,133 +1,63 @@ -name: hdf5 1.12 release +name: hdf5 1.12 release build -# Controls when the action will run. Triggers the workflow on a schedule +# Controls when the action will run. Triggers the workflow on a manual run on: - workflow_call: + workflow_dispatch: inputs: - file_base: - description: "The common base name of the source tarballs" - required: true - type: string - file_branch: - description: "The branch name for the source tarballs" - required: true + use_tag: + description: 'Release version tag' type: string - file_sha: - description: "The sha for the source tarballs" + required: false + default: snapshot-1.12 + use_environ: + description: 'Environment to locate files' + type: choice required: true - type: string + default: snapshots + options: + - snapshots + - release -# Minimal permissions to be inherited by any job that doesn't declare its own permissions permissions: contents: read -# Previous workflows must pass to get here so tag the commit that created the files +# A workflow run is made up of one or more jobs that can run sequentially or +# in parallel. 
jobs: - create-tag: + log-the-inputs: runs-on: ubuntu-latest - permissions: - contents: write # In order to allow tag creation steps: - # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - name: Get Sources - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - run: | - git checkout ${{ inputs.file_sha }} - - - uses: rickstaa/action-create-tag@v1 - id: "tag_create" - with: - commit_sha: ${{ inputs.file_sha }} - tag: "snapshot" - force_push_tag: true - message: "Latest snapshot" - - # Print result using the action output. - - run: | - echo "Tag already present: ${{ steps.tag_create.outputs.tag_exists }}" - - PreRelease-getfiles: - runs-on: ubuntu-latest - needs: create-tag - environment: snapshots_1_12 + echo "Tag: $TAG" + echo "Environment: $ENVIRONMENT" + env: + TAG: ${{ inputs.tag }} + ENVIRONMENT: ${{ inputs.environment }} + + call-workflow-tarball: + needs: log-the-inputs + uses: ./.github/workflows/tarball.yml + with: + use_tag: ${{ inputs.use_tag }} + use_environ: ${{ inputs.use_environ }} + + call-workflow-ctest: + needs: call-workflow-tarball + uses: ./.github/workflows/cmake-ctest.yml + with: + file_base: ${{ needs.call-workflow-tarball.outputs.file_base }} + use_tag: ${{ inputs.use_tag }} + use_environ: ${{ inputs.use_environ }} + + call-workflow-release: + needs: [call-workflow-tarball, call-workflow-ctest] permissions: - contents: write - steps: - - name: Get file base name - id: get-file-base - run: | - FILE_NAME_BASE=$(echo "${{ inputs.file_base }}") - echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT - - # Get files created by tarball script - - name: Get doxygen (Linux) - uses: actions/download-artifact@v3 - with: - name: docs-doxygen - path: ${{ github.workspace }} - - - name: Zip Folder - run: zip -r ${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen.zip . 
-i ${{ github.workspace }}/${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen - - - name: Get tgz-tarball (Linux) - uses: actions/download-artifact@v3 - with: - name: tgz-tarball - path: ${{ github.workspace }} - - - name: Get zip-tarball (Windows) - uses: actions/download-artifact@v3 - with: - name: zip-tarball - path: ${{ github.workspace }} - - # Get files created by cmake-ctest script - - name: Get published binary (Windows) - uses: actions/download-artifact@v3 - with: - name: zip-vs2022-binary - path: ${{ github.workspace }} - - - name: Get published binary (MacOS) - uses: actions/download-artifact@v3 - with: - name: tgz-osx12-binary - path: ${{ github.workspace }} - - - name: Get published binary (Linux) - uses: actions/download-artifact@v3 - with: - name: tgz-ubuntu-2204-binary - path: ${{ github.workspace }} - - - name: PreRelease tag - id: create_release - uses: softprops/action-gh-release@v1 - with: - tag_name: "snapshot_1_12" - prerelease: true - files: | - ${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen.zip - ${{ steps.get-file-base.outputs.FILE_BASE }}.tar.gz - ${{ steps.get-file-base.outputs.FILE_BASE }}.zip - ${{ steps.get-file-base.outputs.FILE_BASE }}-osx12.tar.gz - ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204.tar.gz - ${{ steps.get-file-base.outputs.FILE_BASE }}-win_vs2022.zip - if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` - - - name: Store Release url - run: | - echo "${{ steps.create_release.outputs.upload_url }}" > ./upload_url - -# - uses: actions/upload-artifact@v3 -# with: -# path: ./upload_url -# name: upload_url + contents: write # In order to allow tag creation + uses: ./.github/workflows/release-files.yml + with: + file_base: ${{ needs.call-workflow-tarball.outputs.file_base }} + file_branch: ${{ needs.call-workflow-tarball.outputs.file_branch }} + file_sha: ${{ needs.call-workflow-tarball.outputs.file_sha }} + use_tag: ${{ inputs.use_tag }} + use_environ: ${{ inputs.use_environ 
}} - - name: List files for the space (Linux) - run: | - ls -l ${{ github.workspace }} - ls ${{ runner.workspace }} diff --git a/.github/workflows/tarball.yml b/.github/workflows/tarball.yml index 4a9d0d88c5a..cc9a535d88e 100644 --- a/.github/workflows/tarball.yml +++ b/.github/workflows/tarball.yml @@ -3,6 +3,17 @@ name: hdf5 1.12 tarball # Controls when the action will run. Triggers the workflow on a schedule on: workflow_call: + inputs: +# use_tag: +# description: 'Release version tag' +# type: string +# required: false +# default: snapshot-1.12 + use_environ: + description: 'Environment to locate files' + type: string + required: true + default: snapshots outputs: has_changes: description: "Whether there were changes the previous day" @@ -55,6 +66,7 @@ jobs: with: seconds: 86400 # One day in seconds branch: '${{ steps.get-branch-name.outputs.branch_ref }}' + if: ${{ inputs.use_environ == 'snapshots' }} - run: echo "You have ${{ steps.check-new-commits.outputs.new-commits-number }} new commit(s) in ${{ steps.get-branch-name.outputs.BRANCH_REF }} ✅!" 
if: ${{ steps.check-new-commits.outputs.has-new-commits == 'true' }} @@ -65,7 +77,7 @@ jobs: name: Create a source tarball runs-on: ubuntu-latest needs: check_commits - if: ${{ needs.check_commits.outputs.has_changes == 'true' }} + if: ${{ ((inputs.use_environ == 'snapshots') && (needs.check_commits.outputs.has_changes == 'true')) || (inputs.use_environ == 'release') }} outputs: file_base: ${{ steps.set-file-base.outputs.FILE_BASE }} steps: @@ -80,22 +92,43 @@ jobs: sudo apt update sudo apt install automake autoconf libtool libtool-bin gzip dos2unix + - name: Retrieve version + id: version + run: | + cd "$GITHUB_WORKSPACE/hdfsrc" + echo "TAG_VERSION=$(bin/h5vers)" >> $GITHUB_OUTPUT + - name: Set file base name id: set-file-base run: | - FILE_NAME_BASE=$(echo "hdf5-${{ needs.check_commits.outputs.branch_ref }}-${{ needs.check_commits.outputs.branch_sha }}") + if [[ '${{ inputs.use_environ }}' == 'snapshots' && '${{ needs.check_commits.outputs.has_changes }}' == 'true' ]] + then + FILE_NAME_BASE=$(echo "hdf5-${{ needs.check_commits.outputs.branch_ref }}-${{ needs.check_commits.outputs.branch_sha }}") + else + FILE_NAME_BASE=$(echo "hdf5-${{ steps.version.outputs.TAG_VERSION }}") + fi echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT + shell: bash - - name: Run release script - id: run-release-script + - name: Create snapshot file base name + id: create-file-base + if: ${{ (inputs.use_environ == 'snapshots') && (needs.check_commits.outputs.has_changes == 'true') }} run: | cd "$GITHUB_WORKSPACE/hdfsrc" bin/bbrelease -d $GITHUB_WORKSPACE --branch ${{ needs.check_commits.outputs.branch_ref }} --revision gzip zip shell: bash + - name: Create release file base name + id: create-rel-base + if: ${{ (inputs.use_environ == 'release') }} + run: | + cd "$GITHUB_WORKSPACE/hdfsrc" + bin/release -d $GITHUB_WORKSPACE gzip zip cmake-tgz cmake-zip + shell: bash + - name: List files in the repository run: | - ls ${{ github.workspace }} + ls -l ${{ github.workspace }} ls 
$GITHUB_WORKSPACE # Save files created by release script diff --git a/README.md b/README.md index 99e0fa52c2d..29608d9ab9f 100644 --- a/README.md +++ b/README.md @@ -75,7 +75,7 @@ SNAPSHOTS, PREVIOUS RELEASES AND SOURCE CODE -------------------------------------------- Periodically development code snapshots are provided at the following URL: - https://gamma.hdfgroup.org/ftp/pub/outgoing/hdf5/snapshots/ + https://github.com/HDFGroup/hdf5/releases/tag/snapshot-1.12 Source packages for current and previous releases are located at: From 7dd821c9a2a207ea9ed8b9d406da26d5ee533f0c Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Fri, 30 Jun 2023 15:59:26 -0500 Subject: [PATCH 087/108] Adjust presets timeout and fix build script VS versions (#3217) --- config/cmake-presets/hidden-presets.json | 2 +- config/cmake/scripts/HDF5config.cmake | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/config/cmake-presets/hidden-presets.json b/config/cmake-presets/hidden-presets.json index c616e7d1f4b..18ffdd17f5f 100644 --- a/config/cmake-presets/hidden-presets.json +++ b/config/cmake-presets/hidden-presets.json @@ -360,7 +360,7 @@ "hidden": true, "execution": { "noTestsAction": "error", - "timeout": 180, + "timeout": 600, "jobs": 8 } }, diff --git a/config/cmake/scripts/HDF5config.cmake b/config/cmake/scripts/HDF5config.cmake index 2c9155a040b..35684f47dca 100644 --- a/config/cmake/scripts/HDF5config.cmake +++ b/config/cmake/scripts/HDF5config.cmake @@ -11,8 +11,8 @@ # ############################################################################################# ### ${CTEST_SCRIPT_ARG} is of the form OPTION=VALUE ### -### BUILD_GENERATOR required [Unix, VS2022, VS2019, VS201964, VS2017, VS201764, VS2015, VS201564] ### -### ctest -S HDF5config.cmake,BUILD_GENERATOR=VS201764 -C Release -VV -O hdf5.log ### +### BUILD_GENERATOR required [Unix, VS2022, VS202264, VS2019, VS201964] ### +### ctest -S 
HDF5config.cmake,BUILD_GENERATOR=VS202264 -C Release -VV -O hdf5.log ### ############################################################################################# cmake_minimum_required (VERSION 3.12) @@ -68,7 +68,7 @@ endif () # build generator must be defined if (NOT DEFINED BUILD_GENERATOR) - message (FATAL_ERROR "BUILD_GENERATOR must be defined - Unix, VS2022, VS2019, VS201964, VS2017, VS201764, VS2015, VS201564") + message (FATAL_ERROR "BUILD_GENERATOR must be defined - Unix, VS2022, VS202264, VS2019, VS201964") endif () ################################################################### @@ -105,7 +105,7 @@ endif () ######### Following describes compiler ############ if (NOT DEFINED HPC) if (NOT DEFINED BUILD_GENERATOR) - message (FATAL_ERROR "BUILD_GENERATOR must be defined - Unix, VS2022, VS2019, VS201964, VS2017, VS201764, VS2015, VS201564") + message (FATAL_ERROR "BUILD_GENERATOR must be defined - Unix, VS2022, VS202264, VS2019, VS201964") endif () if (WIN32 AND NOT MINGW) set (SITE_OS_NAME "Windows") @@ -175,10 +175,10 @@ if (NOT DEFINED HPC) set (SITE_COMPILER_NAME "vs2012") set (SITE_COMPILER_VERSION "11") else () - message (FATAL_ERROR "Invalid BUILD_GENERATOR must be - Unix, VS2022, VS2019, VS201964, VS2017, VS201764, VS2015, VS201564") + message (FATAL_ERROR "Invalid BUILD_GENERATOR must be - Unix, VS2022, VS202264, VS2019, VS201964") endif () ## Set the following to unique id your computer ## - set (CTEST_SITE "WIN7${BUILD_GENERATOR}.XXXX") + set (CTEST_SITE "WIN10${BUILD_GENERATOR}.XXXX") else () if (MINGW) set (CTEST_CMAKE_GENERATOR "MinGW Makefiles") @@ -187,7 +187,7 @@ if (NOT DEFINED HPC) endif () ## Set the following to unique id your computer ## if (APPLE) - set (CTEST_SITE "MAC.XXXX") + set (CTEST_SITE "MAC.XXXX") else () set (CTEST_SITE "LINUX.XXXX") endif () From 8a364dd086df040af0bef02515f90841e0a8dc88 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Tue, 18 Jul 2023 06:27:54 -0500 Subject: 
[PATCH 088/108] Merge changes from develop (#3238) * Merge changes from develop * Add last snapshot name * Merge bbrelease into release --- .gitattributes | 1 - .github/workflows/release-files.yml | 34 ++- .github/workflows/release.yml | 97 +++++--- .github/workflows/tarball.yml | 2 +- bin/bbrelease | 339 ---------------------------- bin/release | 67 ++++-- 6 files changed, 136 insertions(+), 404 deletions(-) delete mode 100755 bin/bbrelease diff --git a/.gitattributes b/.gitattributes index a1a7f8fc8ce..52722dacd73 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,5 +1,4 @@ * text=auto !eol -bin/bbrelease -text bin/genparser -text bin/switch_maint_mode -text c++/src/H5OcreatProp.cpp -text diff --git a/.github/workflows/release-files.yml b/.github/workflows/release-files.yml index 0f483d4972c..d7d95b38b4b 100644 --- a/.github/workflows/release-files.yml +++ b/.github/workflows/release-files.yml @@ -53,7 +53,7 @@ jobs: commit_sha: ${{ inputs.file_sha }} tag: "${{ inputs.use_tag }}" force_push_tag: true - message: "Latest snapshot" + message: "Latest snapshot-1.12" if: ${{ inputs.use_environ == 'snapshots' }} # Print result using the action output. 
@@ -114,13 +114,19 @@ jobs: name: tgz-ubuntu-2204-binary path: ${{ github.workspace }} + - name: Store snapshot name + run: | + echo "${{ steps.get-file-base.outputs.FILE_BASE }}" > ./last-file.txt + - name: PreRelease tag - id: create_release + id: create_prerelease + if: ${{ (inputs.use_environ == 'snapshots') }} uses: softprops/action-gh-release@v1 with: tag_name: "${{ inputs.use_tag }}" prerelease: true files: | + last-file.txt ${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen.zip ${{ steps.get-file-base.outputs.FILE_BASE }}.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}.zip @@ -129,14 +135,22 @@ jobs: ${{ steps.get-file-base.outputs.FILE_BASE }}-win_vs2022.zip if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` - - name: Store Release url - run: | - echo "${{ steps.create_release.outputs.upload_url }}" > ./upload_url - -# - uses: actions/upload-artifact@v3 -# with: -# path: ./upload_url -# name: upload_url + - name: Release tag + id: create_release + if: ${{ (inputs.use_environ == 'release') }} + uses: softprops/action-gh-release@v1 + with: + tag_name: "${{ inputs.use_tag }}" + prerelease: false + #body_path: ${{ github.workspace }}-CHANGELOG.txt + files: | + ${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen.zip + ${{ steps.get-file-base.outputs.FILE_BASE }}.tar.gz + ${{ steps.get-file-base.outputs.FILE_BASE }}.zip + ${{ steps.get-file-base.outputs.FILE_BASE }}-osx12.tar.gz + ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204.tar.gz + ${{ steps.get-file-base.outputs.FILE_BASE }}-win_vs2022.zip + if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` - name: List files for the space (Linux) run: | diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 26c08a90259..75352a02445 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -9,14 +9,6 @@ on: type: string required: false default: snapshot-1.12 - use_environ: - description: 
'Environment to locate files' - type: choice - required: true - default: snapshots - options: - - snapshots - - release permissions: contents: read @@ -26,38 +18,83 @@ permissions: jobs: log-the-inputs: runs-on: ubuntu-latest + outputs: + rel_tag: ${{ steps.get-tag-name.outputs.RELEASE_TAG }} steps: - - run: | - echo "Tag: $TAG" - echo "Environment: $ENVIRONMENT" - env: - TAG: ${{ inputs.tag }} - ENVIRONMENT: ${{ inputs.environment }} - - call-workflow-tarball: + - name: Get tag name + id: get-tag-name + env: + TAG: ${{ inputs.use_tag }} + run: echo "RELEASE_TAG=$TAG" >> $GITHUB_OUTPUT + +# tarball.yml will be used for releases when pre-tag actions are implemented +# call-workflow-tarball: +# needs: log-the-inputs +# uses: ./.github/workflows/tarball.yml +# with: +# use_tag: ${{ inputs.use_tag }} +# use_environ: release + + create-files-ctest: needs: log-the-inputs - uses: ./.github/workflows/tarball.yml - with: - use_tag: ${{ inputs.use_tag }} - use_environ: ${{ inputs.use_environ }} + runs-on: ubuntu-latest + outputs: + file_base: ${{ steps.set-file-base.outputs.FILE_BASE }} + steps: + - name: Set file base name + id: set-file-base + run: | + FILE_NAME_BASE=$(echo "${{ needs.log-the-inputs.outputs.rel_tag }}") + echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT + shell: bash + + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - name: Get Sources + uses: actions/checkout@v3 + with: + path: hdfsrc + + - name: Zip Folder + run: | + zip -r ${{ steps.set-file-base.outputs.FILE_BASE }}.zip ./hdfsrc + tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}.tar.gz ./hdfsrc + + - name: List files in the repository + run: | + ls -l ${{ github.workspace }} + ls $GITHUB_WORKSPACE + + # Save files created by release script + - name: Save tgz-tarball + uses: actions/upload-artifact@v3 + with: + name: tgz-tarball + path: ${{ steps.set-file-base.outputs.FILE_BASE }}.tar.gz + if-no-files-found: error # 'warn' or 'ignore' are also available, 
defaults to `warn` + + - name: Save zip-tarball + uses: actions/upload-artifact@v3 + with: + name: zip-tarball + path: ${{ steps.set-file-base.outputs.FILE_BASE }}.zip + if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` call-workflow-ctest: - needs: call-workflow-tarball + needs: create-files-ctest uses: ./.github/workflows/cmake-ctest.yml with: - file_base: ${{ needs.call-workflow-tarball.outputs.file_base }} - use_tag: ${{ inputs.use_tag }} - use_environ: ${{ inputs.use_environ }} + file_base: ${{ needs.create-files-ctest.outputs.file_base }} call-workflow-release: - needs: [call-workflow-tarball, call-workflow-ctest] + #needs: [call-workflow-tarball, call-workflow-ctest] + needs: [log-the-inputs, create-files-ctest, call-workflow-ctest] permissions: contents: write # In order to allow tag creation uses: ./.github/workflows/release-files.yml with: - file_base: ${{ needs.call-workflow-tarball.outputs.file_base }} - file_branch: ${{ needs.call-workflow-tarball.outputs.file_branch }} - file_sha: ${{ needs.call-workflow-tarball.outputs.file_sha }} - use_tag: ${{ inputs.use_tag }} - use_environ: ${{ inputs.use_environ }} + file_base: ${{ needs.create-files-ctest.outputs.file_base }} + file_branch: ${{ needs.log-the-inputs.outputs.rel_tag }} + file_sha: ${{ needs.log-the-inputs.outputs.rel_tag }} + use_tag: ${{ needs.log-the-inputs.outputs.rel_tag }} + use_environ: release diff --git a/.github/workflows/tarball.yml b/.github/workflows/tarball.yml index cc9a535d88e..1f1ad242544 100644 --- a/.github/workflows/tarball.yml +++ b/.github/workflows/tarball.yml @@ -115,7 +115,7 @@ jobs: if: ${{ (inputs.use_environ == 'snapshots') && (needs.check_commits.outputs.has_changes == 'true') }} run: | cd "$GITHUB_WORKSPACE/hdfsrc" - bin/bbrelease -d $GITHUB_WORKSPACE --branch ${{ needs.check_commits.outputs.branch_ref }} --revision gzip zip + bin/release -d $GITHUB_WORKSPACE --branch ${{ needs.check_commits.outputs.branch_ref }} --revision gzip zip 
shell: bash - name: Create release file base name diff --git a/bin/bbrelease b/bin/bbrelease deleted file mode 100755 index c59e2e5fcf4..00000000000 --- a/bin/bbrelease +++ /dev/null @@ -1,339 +0,0 @@ -#!/bin/sh -# -# Copyright by The HDF Group. -# All rights reserved. -# -# This file is part of HDF5. The full HDF5 copyright notice, including -# terms governing use, modification, and redistribution, is contained in -# the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://www.hdfgroup.org/licenses. -# If you do not have access to either file, you may request a copy from -# help@hdfgroup.org. -# - -# Make a release of hdf5. -# -# NOTE: -# This script differs from bin/release in that this has an added -# --revision option to create private releases with the code revision -# hash in the version strings. -# -# This script can probably be merged into the original release script in -# the future. - -# Function definitions -# -# Print Usage page -USAGE() -{ -cat << EOF -Usage: $0 -d

    [-h] [--private] [--revision [--branch BRANCHNAME]] ... - -d DIR The name of the directory where the release(s) should be - placed. - --branch BRANCHNAME This is to get the correct version of the branch name from the - repository. BRANCHNAME for v1.8 should be hdf5_1_8. - -h print the help page. - --private Make a private release with today's date in version information. - --revision Make a private release with the code revision number in version information. - This allows --branch to be used for the file name. - --branch BRANCHNAME This is to get the correct version of the branch name from the - repository. BRANCHNAME for v1.8 should be hdf5_1_8. - -This must be run at the top level of the source directory. -The other command-line options are the names of the programs to use -for compressing the resulting tar archive (if none are given then -"tar" is assumed): - - tar -- use tar and don't do any compressing. - gzip -- use gzip with "-9" and append ".gz" to the output name. - bzip2 -- use bzip2 with "-9" and append ".bz2" to the output name. - zip -- convert all text files to DOS style and form a zip file for Windows use. - -An md5 checksum is produced for each archive created and stored in the md5 file. - -Examples: - - $ bin/release -d /tmp - /tmp/hdf5-1.8.13-RELEASE.txt - /tmp/hdf5-1.8.13.md5 - /tmp/hdf5-1.8.13.tar - - $ bin/release -d /tmp gzip - /tmp/hdf5-1.8.13-RELEASE.txt - /tmp/hdf5-1.8.13.md5 - /tmp/hdf5-1.8.13.tar.gz - - $ bin/release -d /tmp tar gzip zip - /tmp/hdf5-1.8.13-RELEASE.txt - /tmp/hdf5-1.8.13.md5 - /tmp/hdf5-1.8.13.tar - /tmp/hdf5-1.8.13.tar.gz - /tmp/hdf5-1.8.13.tar.zip - -EOF - -} - -# Function name: tar2zip -# Convert the release tarball to a Windows zipball. -# -# Steps: -# 1. untar the tarball in a temporary directory; -# Note: do this in a temporary directory to avoid changing -# the original source directory which may be around. -# 2. convert all its text files to DOS (LF-CR) style; -# 3. 
form a zip file which is usable by Windows users. -# -# Parameters: -# $1 version -# $2 release tarball -# $3 output zipball file name -# -# Returns 0 if successful; 1 otherwise -# -tar2zip() -{ - if [ $# -ne 3 ]; then - echo "usage: tar2zip " - return 1 - fi - ztmpdir=/tmp/tmpdir$$ - mkdir -p $ztmpdir - version=$1 - tarfile=$2 - zipfile=$3 - - # step 1: untar tarball in ztmpdir - (cd $ztmpdir; tar xf -) < $tarfile - # sanity check - if [ ! -d $ztmpdir/$version ]; then - echo "untar did not create $ztmpdir/$version source dir" - # cleanup - rm -rf $ztmpdir - return 1 - fi - # step 2: convert text files - # There maybe a simpler way to do this. - # options used in unix2dos: - # -k Keep the date stamp - # -q quiet mode - # grep redirect output to /dev/null because -q or -s are not portable. - find $ztmpdir/$version | \ - while read inf; do \ - if file $inf | grep "$inf\: .*text" > /dev/null 2>&1 ; then \ - unix2dos -q -k $inf; \ - fi\ - done - # step 3: make zipball - # -9 maximum compression - # -y Store symbolic links as such in the zip archive - # -r recursive - # -q quiet - (cd $ztmpdir; zip -9 -y -r -q $version.zip $version) - mv $ztmpdir/$version.zip $zipfile - - # cleanup - rm -rf $ztmpdir -} - -# This command must be run at the top level of the hdf5 source directory. -# Verify this requirement. -if [ ! 
\( -f configure.ac -a -f bin/bbrelease \) ]; then - echo "$0 must be run at the top level of the hdf5 source directory" - exit 1 -fi - -# Defaults -DEST=releases -VERS=`perl bin/h5vers` -VERS_OLD= -test "$VERS" || exit 1 -verbose=yes -release_date=`date +%F` -today=`date +%Y%m%d` -pmode='no' -revmode='no' -tmpdir="../#release_tmp.$$" # tmp work directory -CPPLUS_RM_NAME=cpplus_RM - -# Restore previous Version information -RESTORE_VERSION() -{ - if [ X-${VERS_OLD} != X- ]; then - echo restoring version information back to $VERS_OLD - rm -f config/lt_vers.am - cp $tmpdir/lt_vers.am config/lt_vers.am - bin/h5vers -s $VERS_OLD - VERS_OLD= - fi -} - - -# Command-line arguments -while [ -n "$1" ]; do - arg=$1 - shift - case "$arg" in - -d) - DEST=$1 - shift - ;; - -h) - USAGE - exit 0 - ;; - --private) - pmode=yes - ;; - --revision) - revmode=yes - ;; - --branch) - BRANCHNAME=$1 - shift - ;; - -*) - echo "Unknown switch: $arg" 1>&2 - USAGE - exit 1 - ;; - *) - methods="$methods $arg" - ;; - esac -done - -# Default method is tar -if [ "X$methods" = "X" ]; then - methods="tar" -fi - -# Create the temporary work directory. -if mkdir $tmpdir; then - echo "temporary work directory for release. "\ - "Can be deleted after release completes." > $tmpdir/README -else - echo "Failed to mkdir tmpdir($tmpdir)" - exit 1 -fi - -# setup restoration in case of abort. -trap RESTORE_VERSION 0 - -if [ X$pmode = Xyes ]; then - VERS_OLD=$VERS - # Copy old version of config/lt_vers.am, since it's hard to - # "undo" changes to it. - cp config/lt_vers.am $tmpdir - # Set version information to m.n.r-of$today. - # (h5vers does not correctly handle just m.n.r-$today.) - VERS=`echo $VERS | sed -e s/-.*//`-of$today - echo Private release of $VERS - bin/h5vers -s $VERS -fi - -if [ X$revmode = Xyes ]; then - VERS_OLD=$VERS - echo "Save old version $VERS_OLD for restoration later." - # Copy old version of config/lt_vers.am, since it's hard to - # "undo" changes to it. 
- cp config/lt_vers.am $tmpdir - if [ "${BRANCHNAME}" = "" ]; then - BRANCHNAME=`git symbolic-ref -q --short HEAD` - fi - revision=`git rev-parse --short HEAD` - # Set version information to m.n.r-r$revision. - # (h5vers does not correctly handle just m.n.r-$today.) - VERS=`echo $VERS | sed -e s/-.*//`-$revision - echo Private release of $VERS - HDF5_VERS=hdf5-$BRANCHNAME-$revision - echo file base of $HDF5_VERS - bin/h5vers -s $VERS - # use a generic directory name for revision releases - HDF5_IN_VERS=hdfsrc -else - # Store hdf5-$VERS ("hdf5-1.7.51", e.g.) to a variable to avoid typos - HDF5_VERS=hdf5-$VERS - # directory name matches tar file name for non-revision releases - HDF5_IN_VERS=$HDF5_VERS -fi - -test "$verbose" && echo "Releasing $HDF5_VERS to $DEST" 1>&2 -if [ ! -d $DEST ]; then - echo " Destination directory $DEST does not exist" 1>&2 - exit 1 -fi - -# Create a symlink to the source so files in the tarball have the prefix -# we want (gnu's --transform isn't portable) -ln -s `pwd` $tmpdir/$HDF5_IN_VERS || exit 1 - -# Save a backup copy of Makefile if exists. -test -f Makefile && mv Makefile $tmpdir/Makefile.x -cp -p Makefile.dist Makefile - -# Update README.md and release_docs/RELEASE.txt with release information in -# line 1. -for f in README.md release_docs/RELEASE.txt; do - echo "HDF5 version $VERS released on $release_date" >$f.x - sed -e 1d $f >>$f.x - mv $f.x $f - # Make sure new files are of the right access mode - chmod 644 $f -done - -# Create the tar file -test "$verbose" && echo " Running tar..." 1>&2 -(cd "$tmpdir" && exec tar -ch --exclude-vcs -f "$HDF5_VERS.tar" "./$HDF5_IN_VERS" || exit 1 ) - -# Compress -MD5file=$HDF5_VERS.md5 -cp /dev/null $DEST/$MD5file -for comp in $methods; do - case $comp in - tar) - cp -p $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.tar - (cd $DEST; md5sum $HDF5_VERS.tar >> $MD5file) - ;; - gzip) - test "$verbose" && echo " Running gzip..." 
1>&2 - gzip -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.gz - (cd $DEST; md5sum $HDF5_VERS.tar.gz >> $MD5file) - ;; - bzip2) - test "$verbose" && echo " Running bzip2..." 1>&2 - bzip2 -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.bz2 - (cd $DEST; md5sum $HDF5_VERS.tar.bz2 >> $MD5file) - ;; - zip) - test "$verbose" && echo " Creating zip ball..." 1>&2 - tar2zip $HDF5_IN_VERS $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.zip 1>&2 - (cd $DEST; md5sum $HDF5_VERS.zip >> $MD5file) - ;; - *) - echo "***Error*** Unknown method $comp" - exit 1 - ;; - esac -done - -# Copy the RELEASE.txt to the release area. -cp release_docs/RELEASE.txt $DEST/$HDF5_VERS-RELEASE.txt - -# Remove distributed Makefile and restore previous Makefile if existed. -rm -f Makefile -test -f $tmpdir/Makefile.x && mv $tmpdir/Makefile.x Makefile - -# Restore OLD version information, then no need for trap. -if [ X$pmode = Xyes ] || [ X$revmode = Xyes ]; then - echo "Restore the original version $VERS_OLD" - RESTORE_VERSION - trap 0 -fi - -# Remove temporary things -rm -rf $tmpdir - -echo "DONE" - -exit 0 diff --git a/bin/release b/bin/release index 9435657d09d..d9e4f7b41b4 100755 --- a/bin/release +++ b/bin/release @@ -19,14 +19,14 @@ USAGE() { cat << EOF -Usage: $0 -d [--docver BRANCHNAME] [-h] [--private] ... +Usage: $0 -d [-h] [--private] [--revision [--branch BRANCHNAME]] ... -d DIR The name of the directory where the release(s) should be placed. - --docver BRANCHNAME This is added for 1.8 and beyond to get the correct - version of documentation files from the hdf5docs + --branch BRANCHNAME This is to get the correct version of the branch name from the repository. BRANCHNAME for v1.8 should be hdf5_1_8. -h print the help page. --private Make a private release with today's date in version information. + --revision Make a private release with the code revision number in version information. This must be run at the top level of the source directory. 
The other command-line options are the names of the programs to use @@ -193,19 +193,19 @@ tar2cmakezip() fi # step 2: add batch file for building CMake on window - (cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS2013 -C Release -V -O hdf5.log" > build-VS2013-32.bat; chmod 755 build-VS2013-32.bat) - (cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS201364 -C Release -V -O hdf5.log" > build-VS2013-64.bat; chmod 755 build-VS2013-64.bat) (cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS2015 -C Release -V -O hdf5.log" > build-VS2015-32.bat; chmod 755 build-VS2015-32.bat) (cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS201564 -C Release -V -O hdf5.log" > build-VS2015-64.bat; chmod 755 build-VS2015-64.bat) (cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS2017 -C Release -V -O hdf5.log" > build-VS2017-32.bat; chmod 755 build-VS2017-32.bat) (cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS201764 -C Release -V -O hdf5.log" > build-VS2017-64.bat; chmod 755 build-VS2017-64.bat) (cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS2019 -C Release -V -O hdf5.log" > build-VS2019-32.bat; chmod 755 build-VS2019-32.bat) (cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS201964 -C Release -V -O hdf5.log" > build-VS2019-64.bat; chmod 755 build-VS2019-64.bat) + (cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS2022 -C Release -V -O hdf5.log" > build-VS2022-32.bat; chmod 755 build-VS2022-32.bat) + (cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS202264 -C Release -V -O hdf5.log" > build-VS2022-64.bat; chmod 755 build-VS2022-64.bat) # step 3: add LIBAEC.tar.gz, ZLib.tar.gz and cmake files cp /mnt/scr1/pre-release/hdf5/CMake/LIBAEC.tar.gz $cmziptmpsubdir cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmziptmpsubdir - cp 
/mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-2.0.1-Source.zip $cmziptmpsubdir + cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-2.0.3-Source.zip $cmziptmpsubdir cp /mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-1_12_2.zip $cmziptmpsubdir cp $cmziptmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmziptmpsubdir cp $cmziptmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmziptmpsubdir @@ -297,7 +297,7 @@ tar2cmaketgz() # step 3: add LIBAEC.tar.gz, ZLib.tar.gz and cmake files cp /mnt/scr1/pre-release/hdf5/CMake/LIBAEC.tar.gz $cmgztmpsubdir cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmgztmpsubdir - cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-2.0.1-Source.tar.gz $cmgztmpsubdir + cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-2.0.3-Source.tar.gz $cmgztmpsubdir cp /mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-1_12_2.tar.gz $cmgztmpsubdir cp $cmgztmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmgztmpsubdir cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmgztmpsubdir @@ -376,7 +376,7 @@ tar2hpccmaketgz() # step 3: add LIBAEC.tar.gz, ZLib.tar.gz and cmake files cp /mnt/scr1/pre-release/hdf5/CMake/LIBAEC.tar.gz $cmgztmpsubdir cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmgztmpsubdir - cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-2.0.1-Source.tar.gz $cmgztmpsubdir + cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-2.0.3-Source.tar.gz $cmgztmpsubdir cp /mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-1_12_2.tar.gz $cmgztmpsubdir cp $cmgztmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmgztmpsubdir cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmgztmpsubdir @@ -407,8 +407,8 @@ verbose=yes release_date=`date +%F` today=`date +%Y%m%d` pmode='no' +revmode='no' tmpdir="../#release_tmp.$$" # tmp work directory -DOC_URL=https://git@bitbucket.hdfgroup.org/scm/hdffv/hdf5doc.git CPPLUS_RM_NAME=cpplus_RM MAINT_MODE_ENABLED="" @@ -449,8 +449,11 @@ while [ -n "$1" ]; do --private) pmode=yes ;; 
- --docver) - DOCVERSION=$1 + --revision) + revmode=yes + ;; + --branch) + BRANCHNAME=$1 shift ;; -*) @@ -490,12 +493,34 @@ if [ X$pmode = Xyes ]; then # (h5vers does not correctly handle just m.n.r-$today.) VERS=`echo $VERS | sed -e s/-.*//`-of$today echo Private release of $VERS -else bin/h5vers -s $VERS fi -# Store hdf5-$VERS ("hdf5-1.7.51", e.g.) to a variable to avoid typos -HDF5_VERS=hdf5-$VERS +if [ X$revmode = Xyes ]; then + VERS_OLD=$VERS + echo "Save old version $VERS_OLD for restoration later." + # Copy old version of config/lt_vers.am, since it's hard to + # "undo" changes to it. + cp config/lt_vers.am $tmpdir + if [ "${BRANCHNAME}" = "" ]; then + BRANCHNAME=`git symbolic-ref -q --short HEAD` + fi + revision=`git rev-parse --short HEAD` + # Set version information to m.n.r-r$revision. + # (h5vers does not correctly handle just m.n.r-$today.) + VERS=`echo $VERS | sed -e s/-.*//`-$revision + echo Private release of $VERS + HDF5_VERS=hdf5-$BRANCHNAME-$revision + echo file base of $HDF5_VERS + bin/h5vers -s $VERS + # use a generic directory name for revision releases + HDF5_IN_VERS=hdfsrc +else + # Store hdf5-$VERS ("hdf5-1.7.51", e.g.) to a variable to avoid typos + HDF5_VERS=hdf5-$VERS + # directory name matches tar file name for non-revision releases + HDF5_IN_VERS=$HDF5_VERS +fi test "$verbose" && echo "Releasing $HDF5_VERS to $DEST" 1>&2 if [ ! -d $DEST ]; then @@ -505,7 +530,7 @@ fi # Create a symlink to the source so files in the tarball have the prefix # we want (gnu's --transform isn't portable) -ln -s `pwd` $tmpdir/$HDF5_VERS || exit 1 +ln -s `pwd` $tmpdir/$HDF5_IN_VERS || exit 1 # Save a backup copy of Makefile if exists. test -f Makefile && mv Makefile $tmpdir/Makefile.x @@ -521,14 +546,9 @@ for f in README.md release_docs/RELEASE.txt; do chmod 644 $f done -# develop is different than branches. -if [ "${DOCVERSION}" ]; then - DOC_URL="$DOC_URL -b ${DOCVERSION}" -fi - # Create the tar file test "$verbose" && echo " Running tar..." 
1>&2 -(cd "$tmpdir" && exec tar -ch --exclude-vcs -f "$HDF5_VERS.tar" "./$HDF5_VERS" || exit 1 ) +(cd "$tmpdir" && exec tar -ch --exclude-vcs -f "$HDF5_VERS.tar" "./$HDF5_IN_VERS" || exit 1 ) # Compress SHA256=$HDF5_VERS.sha256 @@ -561,7 +581,7 @@ for comp in $methods; do ;; zip) test "$verbose" && echo " Creating zip ball..." 1>&2 - tar2zip $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.zip 1>&2 + tar2zip $HDF5_IN_VERS $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.zip 1>&2 (cd $DEST; sha256sum $HDF5_VERS.zip >> $SHA256) ;; cmake-zip) @@ -607,7 +627,8 @@ rm -f Makefile test -f $tmpdir/Makefile.x && mv $tmpdir/Makefile.x Makefile # Restore OLD version information, then no need for trap. -if [ X$pmode = Xyes ]; then +if [ X$pmode = Xyes ] || [ X$revmode = Xyes ]; then + echo "Restore the original version $VERS_OLD" RESTORE_VERSION trap 0 fi From b78729638bf69dca9e0dd86eddd530d5fc1aef40 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Tue, 25 Jul 2023 12:46:12 -0500 Subject: [PATCH 089/108] sync bin dir with develop (#3270) --- COPYING | 2 +- README.md | 30 ++++++++++++++---------------- bin/buildhdf5 | 2 -- bin/chkcopyright | 6 +----- bin/cmakehdf5 | 9 ++------- bin/format_source | 16 +++++++++++++++- bin/h5vers | 7 +++---- bin/make_err | 2 -- bin/make_overflow | 2 -- bin/make_vers | 2 -- bin/output_filter.sh | 12 +++++------- bin/switch_maint_mode | 2 -- bin/warnhist | 3 --- 13 files changed, 41 insertions(+), 54 deletions(-) diff --git a/COPYING b/COPYING index 9d32232bdf2..9bd04025250 100644 --- a/COPYING +++ b/COPYING @@ -49,7 +49,7 @@ works thereof, in binary and source code form. Limited portions of HDF5 were developed by Lawrence Berkeley National Laboratory (LBNL). LBNL's Copyright Notice and Licensing Terms can be found here: COPYING_LBNL_HDF5 file in this directory or at -http://support.hdfgroup.org/ftp/HDF5/releases/COPYING_LBNL_HDF5. +https://raw.githubusercontent.com/hdfgroup/hdf5/develop/COPYING_LBNL_HDF5. 
----------------------------------------------------------------------------- ----------------------------------------------------------------------------- diff --git a/README.md b/README.md index 29608d9ab9f..e0d94b79573 100644 --- a/README.md +++ b/README.md @@ -14,25 +14,26 @@ in science, engineering, and research communities worldwide. The HDF Group is the developer, maintainer, and steward of HDF5 software. Find more information about The HDF Group, the HDF5 Community, and other HDF5 software projects, -tools, and services at The HDF Group's website. - - https://www.hdfgroup.org/ - +tools, and services at [The HDF Group's website](https://www.hdfgroup.org/). DOCUMENTATION ------------- This release is fully functional for the API described in the documentation. - + https://portal.hdfgroup.org/display/HDF5/The+HDF5+API Full Documentation and Programming Resources for this release can be found at https://portal.hdfgroup.org/display/HDF5 -See the RELEASE.txt file in the release_docs/ directory for information specific +The latest doxygen documentation generated on changes to develop is available at: + + https://hdfgroup.github.io/hdf5/ + +See the [RELEASE.txt](/release_docs/RELEASE.txt) file in the [release_docs/](/release_docs/) directory for information specific to the features and updates included in this release of the library. -Several more files are located within the release_docs/ directory with specific +Several more files are located within the [release_docs/](/release_docs/) directory with specific details for several common platforms and configurations. INSTALL - Start Here. 
General instructions for compiling and installing the library @@ -52,23 +53,20 @@ Information regarding Help Desk and Support services is available at FORUM and NEWS -------------- -The following public forums are provided for public announcements and discussions +The [HDF Forum](https://forum.hdfgroup.org) is provided for public announcements and discussions of interest to the general HDF5 Community. - - Homepage of the Forum - https://forum.hdfgroup.org - - - News and Announcement + - News and Announcements https://forum.hdfgroup.org/c/news-and-announcements-from-the-hdf-group - - HDF5 and HDF4 Topics + - HDF5 Topics https://forum.hdfgroup.org/c/hdf5 These forums are provided as an open and public service for searching and reading. Posting requires completing a simple registration and allows one to join in the -conversation. Please read the following instructions pertaining to the Forum's -use and configuration - https://forum.hdfgroup.org/t/quickstart-guide-welcome-to-the-new-hdf-forum +conversation. Please read the [instructions](https://forum.hdfgroup.org/t/quickstart-guide-welcome-to-the-new-hdf-forum +) pertaining to the Forum's use and configuration. + SNAPSHOTS, PREVIOUS RELEASES AND SOURCE CODE diff --git a/bin/buildhdf5 b/bin/buildhdf5 index a2a13cb225c..8dfb82115e8 100755 --- a/bin/buildhdf5 +++ b/bin/buildhdf5 @@ -12,8 +12,6 @@ # # Build HDF5 library by doing configure, make, and tests. # Usage: See USAGE() -# Programmer: Albert Cheng -# Creation date: Jul 9, 2003 # Some handy definitions USAGE() diff --git a/bin/chkcopyright b/bin/chkcopyright index 756afe8ee6d..34649277267 100755 --- a/bin/chkcopyright +++ b/bin/chkcopyright @@ -14,10 +14,6 @@ # Check that all the files have the proper copyright notice. # It goes down directories recursively. # -# Programmer: Albert Cheng -# Created Data: 2003/07/22 -# Modification: -# Rewrote most of it. Albert Cheng, 2005/10/10. 
# Setup # @@ -363,7 +359,7 @@ MATCH_COPYRIGHT() false else if [ $begin -gt 1 ]; then - begin=`expr $begin - 1` + begin=`expr $begin` fi end=`expr $begin + $nlines - 1` sed -n -e "${begin},${end}p" < $f > ${EXTRACTEDFILE} diff --git a/bin/cmakehdf5 b/bin/cmakehdf5 index f17b9c4d131..6d31b3450f9 100755 --- a/bin/cmakehdf5 +++ b/bin/cmakehdf5 @@ -1,10 +1,5 @@ -#! /bin/sh +#!/bin/sh # Build and Test HDF5 using cmake. -# Author: Allen Byrne -# Albert Cheng -# Creation Date: Nov 2012 -# Modified: -# Changed to use the quick steps described in INSTALL_CMake.txt. (AKC 2014/1/1) # Copyright: The HDF Group, 2012-14 @@ -365,7 +360,7 @@ STEP "Configure..." \ $with_zlib \ $with_szlib \ $srcdir" $configlog &&\ - cat $config_summary >> $configlog + cat src/$config_summary >> $configlog # 5. Build the C library, tools and tests with this command: STEP "Build the library, tools and tests, ..." "cmake --build . --config Release -- $njobs" $makelog diff --git a/bin/format_source b/bin/format_source index fb0264cc3c6..dea771ce7f4 100755 --- a/bin/format_source +++ b/bin/format_source @@ -6,6 +6,20 @@ # Note that any files or directories that are excluded here should also be # added to the 'exclude' list in .github/workflows/clang-format-check.yml +COMMAND="clang-format" + +if [ $# -eq 1 ]; then + COMMAND="$COMMAND-$1" +fi + +echo "" +echo "bin/format_source " +echo "" +echo "Format the HDF5 C source using clang-format. The " +echo "parameter is optional and can be used to force a specific" +echo "installed version of clang-format to be used." +echo "" + find . \( -type d -path ./config -prune -and -not -path ./config \) \ -or \( \( \! \( \ -name H5LTanalyze.c \ @@ -19,6 +33,6 @@ find . 
\( -type d -path ./config -prune -and -not -path ./config \) \ -or -name H5overflow.h \ \) \) \ -and \( -iname *.h -or -iname *.c -or -iname *.cpp -or -iname *.hpp -or -iname *.java \) \) \ - | xargs -P0 -n1 clang-format -style=file -i -fallback-style=none + | xargs -P0 -n1 ${COMMAND} -style=file -i -fallback-style=none exit 0 diff --git a/bin/h5vers b/bin/h5vers index 6716794098f..1133451a407 100755 --- a/bin/h5vers +++ b/bin/h5vers @@ -16,8 +16,6 @@ use strict; # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # -# Robb Matzke -# 17 July 1998 ### Purpose # Increments the hdf5 version number by changing the value of @@ -118,8 +116,9 @@ Usage: $prog [OPTS] [FILE] as \"version 1.1 release 0 (pre1)\" will be printed. FILE The name of the file that contains version information. This is - seldom necessary since files H5public.h, src/H5public.h and - ../src/H5public.h are automatically checked. + seldom necessary since the file H5public.h is checked for current + working directory at the top level, one level down, or in the src + directory where H5public.h resides. EOF exit 1; } diff --git a/bin/make_err b/bin/make_err index 31cb9644133..39c6ce5b7b0 100755 --- a/bin/make_err +++ b/bin/make_err @@ -20,8 +20,6 @@ use warnings; # Read in the error description text file and create the appropriate headers # needed by the library. 
# -# Programmer: Quincey Koziol -# Creation Date: 2003/08/12 ############################################################################## # Print the copyright into an open file diff --git a/bin/make_overflow b/bin/make_overflow index 33cbdfee3ec..1cb5104901a 100755 --- a/bin/make_overflow +++ b/bin/make_overflow @@ -22,8 +22,6 @@ my @ctypes = ( () ); # Create assignment overflow #ifdefs # -# Programmer: Quincey Koziol -# Creation Date: 2009/04/09 ############################################################################## # Parse a meaningful line (not a comment or blank line) into the appropriate diff --git a/bin/make_vers b/bin/make_vers index 936d49559ca..e883eb038ca 100755 --- a/bin/make_vers +++ b/bin/make_vers @@ -34,8 +34,6 @@ $indent = 2; # Read in the public symbol version description text file and create the # appropriate headers needed by the library. # -# Programmer: Quincey Koziol -# Creation Date: 2007/07/10 ############################################################################## # Print the copyright into an open file diff --git a/bin/output_filter.sh b/bin/output_filter.sh index ba68cb3007a..64f9befc155 100644 --- a/bin/output_filter.sh +++ b/bin/output_filter.sh @@ -11,8 +11,6 @@ # This contains function definitions of output filtering. # This file should only be sourced in by another shell script. # -# Programmer: Albert Cheng -# Created Date: 2011/5/3 # Comment added to address HDFFV-8270: @@ -61,26 +59,26 @@ STDOUT_FILTER() { # Remove them from the stderr result file. # $1 is the file name of the file to be filtered. # Cases of filter needed: -# 1. MPE: +# * MPE: # In parallel mode and if MPE library is used, it prints the following # two message lines whether the MPE tracing is used or not. # Writing logfile. # Finished writing logfile. -# 2. 
LANL MPI: +# * LANL MPI: # The LANL MPI will print some messages like the following, # LA-MPI: *** mpirun (1.5.10) # LA-MPI: *** 3 process(es) on 2 host(s): 2*fln21 1*fln22 # LA-MPI: *** libmpi (1.5.10) # LA-MPI: *** Copyright 2001-2004, ACL, Los Alamos National Laboratory -# 3. h5diff debug output: +# * h5diff debug output: # Debug output all have prefix "h5diff debug: ". -# 4. AIX system prints messages like these when it is aborting: +# * AIX system prints messages like these when it is aborting: # ERROR: 0031-300 Forcing all remote tasks to exit due to exit code 1 in task 0 # ERROR: 0031-250 task 4: Terminated # ERROR: 0031-250 task 3: Terminated # ERROR: 0031-250 task 2: Terminated # ERROR: 0031-250 task 1: Terminated -# 5. LLNL Blue-Gene mpirun prints messages like there when it exit non-zero: +# * LLNL Blue-Gene mpirun prints messages like there when it exit non-zero: # BE_MPI (ERROR): The error message in the job record is as follows: # BE_MPI (ERROR): "killed by exit(1) on node 0" STDERR_FILTER() { diff --git a/bin/switch_maint_mode b/bin/switch_maint_mode index 9813eebb09d..7f894ffff87 100755 --- a/bin/switch_maint_mode +++ b/bin/switch_maint_mode @@ -12,8 +12,6 @@ # # Switch AM_MAINTAINER_MODE value in configure.ac # Usage: See USAGE() -# Programmer: Dana Robinson -# Creation date: January 2016 USAGE() { diff --git a/bin/warnhist b/bin/warnhist index fc4220930e6..1e63a550b39 100755 --- a/bin/warnhist +++ b/bin/warnhist @@ -13,9 +13,6 @@ use warnings; # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. 
# -# Quincey Koziol -# 9 Aug 2013 -# # Purpose: Given an input file containing the output from a build of the # library, gather the file names and line numbers, alias # identical types of warnings together into a single bin and From 89b4afd4b317c5131cbc8607c99d21b369d050eb Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Wed, 2 Aug 2023 11:28:08 -0500 Subject: [PATCH 090/108] Fix loading plugin fails with missing directory GH issue #3248 (#3324) --- release_docs/RELEASE.txt | 7 ++++++- src/H5PLint.c | 36 ++++++++++++++++++++++++++++++++---- src/H5PLpath.c | 12 ++++++------ src/H5PLplugin_cache.c | 6 ++---- src/H5PLpublic.h | 9 ++++----- test/filter_plugin.c | 28 ++++++++++++++++++---------- 6 files changed, 68 insertions(+), 30 deletions(-) diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 48a8e60743f..d6f3a570079 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -124,7 +124,12 @@ New Features Library: -------- - - + - Change the error handling for a not found path in the find plugin process. + + While attempting to load a plugin the HDF5 library will fail if one of the + directories in the plugin paths does not exist, even if there are more paths + to check. Instead of exiting the function with an error, just logged the error + and continue processing the list of paths to check. 
Parallel Library: diff --git a/src/H5PLint.c b/src/H5PLint.c index e29bc10dfbe..1b58be150ac 100644 --- a/src/H5PLint.c +++ b/src/H5PLint.c @@ -258,11 +258,17 @@ H5PL_load(H5PL_type_t type, const H5PL_key_t *key) /* If not found, try iterating through the path table to find an appropriate plugin */ if (!found) if (H5PL__find_plugin_in_path_table(&search_params, &found, &plugin_info) < 0) - HGOTO_ERROR(H5E_PLUGIN, H5E_CANTGET, NULL, "search in path table failed") + HGOTO_ERROR(H5E_PLUGIN, H5E_CANTGET, NULL, + "can't find plugin in the paths either set by HDF5_PLUGIN_PATH, or default location, " + "or set by H5PLxxx functions") /* Set the return value we found the plugin */ if (found) ret_value = plugin_info; + else + HGOTO_ERROR(H5E_PLUGIN, H5E_NOTFOUND, NULL, + "can't find plugin. Check either HDF5_PLUGIN_PATH, default location, " + "or path set by H5PLxxx functions") done: FUNC_LEAVE_NOAPI(ret_value) @@ -273,9 +279,31 @@ H5PL_load(H5PL_type_t type, const H5PL_key_t *key) * * Purpose: Opens a plugin. * - * The success parameter will be set to TRUE and the plugin_info - * parameter will be filled in on success. Otherwise, they - * will be FALSE and NULL, respectively. + * `path` specifies the path to the plugin library file. + * + * `type` specifies the type of plugin being searched for and + * will be used to verify that a loaded plugin matches the + * type requested. H5PL_TYPE_NONE may be passed, in which case + * no plugin type verification is performed. This is most + * useful when iterating over available plugins without regard + * to their types. + * + * `key` specifies the information that will be used to find a + * specific plugin. For filter plugins, this is typically an + * integer identifier. For VOL connectors, this + * is typically either an integer identifier or a name string. + * After a plugin has been opened, this information will be + * compared against the relevant information provided by the + * plugin to ensure that the plugin is a match. 
If + * H5PL_TYPE_NONE is provided for `type`, then `key` should be + * NULL. + * + * On successful open of a plugin, the `success` parameter + * will be set to TRUE and the `plugin_type` and `plugin_info` + * parameters will be filled appropriately. On failure, the + * `success` parameter will be set to FALSE, the `plugin_type` + * parameter will be set to H5PL_TYPE_ERROR and the + * `plugin_info` parameter will be set to NULL. * * Return: SUCCEED/FAIL * diff --git a/src/H5PLpath.c b/src/H5PLpath.c index a6734cb134c..10a668cf7e2 100644 --- a/src/H5PLpath.c +++ b/src/H5PLpath.c @@ -615,9 +615,9 @@ H5PL__path_table_iterate_process_path(const char *plugin_path, H5PL_iterate_type HDassert(plugin_path); HDassert(iter_op); - /* Open the directory */ + /* Open the directory - skip the path if the directory can't be opened */ if (!(dirp = HDopendir(plugin_path))) - HGOTO_ERROR(H5E_PLUGIN, H5E_OPENERROR, H5_ITER_ERROR, "can't open directory: %s", plugin_path) + HGOTO_DONE(H5_ITER_CONT); /* Iterate through all entries in the directory */ while (NULL != (dp = HDreaddir(dirp))) { @@ -710,7 +710,7 @@ H5PL__path_table_iterate_process_path(const char *plugin_path, H5PL_iterate_type * skip the path if the directory can't be opened */ HDsnprintf(service, sizeof(service), "%s\\*.dll", plugin_path); if ((hFind = FindFirstFileA(service, &fdFile)) == INVALID_HANDLE_VALUE) - HGOTO_ERROR(H5E_PLUGIN, H5E_OPENERROR, H5_ITER_ERROR, "can't open directory") + HGOTO_DONE(H5_ITER_CONT); /* Loop over all the files */ do { @@ -799,8 +799,7 @@ H5PL__find_plugin_in_path_table(const H5PL_search_params_t *search_params, hbool /* Search for the plugin in this path */ if (H5PL__find_plugin_in_path(search_params, found, H5PL_paths_g[u], plugin_info) < 0) - HGOTO_ERROR(H5E_PLUGIN, H5E_CANTGET, FAIL, "search in path %s encountered an error", - H5PL_paths_g[u]) + HERROR(H5E_PLUGIN, H5E_CANTGET, "search in path %s encountered an error", H5PL_paths_g[u]); /* Break out if found */ if (*found) { @@ -852,7 
+851,8 @@ H5PL__find_plugin_in_path(const H5PL_search_params_t *search_params, hbool_t *fo /* Open the directory */ if (!(dirp = HDopendir(dir))) - HGOTO_ERROR(H5E_PLUGIN, H5E_OPENERROR, FAIL, "can't open directory: %s", dir) + HGOTO_ERROR(H5E_PLUGIN, H5E_OPENERROR, FAIL, "can't open directory (%s). Please verify its existence", + dir) /* Iterate through all entries in the directory */ while (NULL != (dp = HDreaddir(dirp))) { diff --git a/src/H5PLplugin_cache.c b/src/H5PLplugin_cache.c index cf0fd233b0b..b74f6a1944e 100644 --- a/src/H5PLplugin_cache.c +++ b/src/H5PLplugin_cache.c @@ -273,7 +273,7 @@ H5PL__find_plugin_in_cache(const H5PL_search_params_t *search_params, hbool_t *f /* Get the "get plugin info" function from the plugin. */ if (NULL == (get_plugin_info_function = (H5PL_get_plugin_info_t)H5PL_GET_LIB_FUNC( - (H5PL_cache_g[u]).handle, "H5PLget_plugin_info"))) + H5PL_cache_g[u].handle, "H5PLget_plugin_info"))) HGOTO_ERROR(H5E_PLUGIN, H5E_CANTGET, FAIL, "can't get function for H5PLget_plugin_info") /* Call the "get plugin info" function */ @@ -286,9 +286,7 @@ H5PL__find_plugin_in_cache(const H5PL_search_params_t *search_params, hbool_t *f /* No need to continue processing */ break; - - } /* end if */ - + } } /* end for */ done: diff --git a/src/H5PLpublic.h b/src/H5PLpublic.h index a886375ee9e..8ef1c3ef134 100644 --- a/src/H5PLpublic.h +++ b/src/H5PLpublic.h @@ -17,8 +17,7 @@ #ifndef H5PLpublic_H #define H5PLpublic_H -/* Public headers needed by this file */ -#include "H5public.h" /* Generic Functions */ +#include "H5public.h" /* Generic Functions */ /*******************/ /* Public Typedefs */ @@ -93,9 +92,9 @@ H5_DLL herr_t H5PLset_loading_state(unsigned int plugin_control_mask); * \brief Queries the loadability of dynamic plugin types * * \param[out] plugin_control_mask List of dynamic plugin types that are enabled or disabled.\n - * A plugin bit set to 0 (zero) indicates that that the dynamic plugin type is + * A plugin bit set to 0 (zero) indicates that 
the dynamic plugin type is * disabled.\n - * A plugin bit set to 1 (one) indicates that that the dynamic plugin type is + * A plugin bit set to 1 (one) indicates that the dynamic plugin type is * enabled.\n * If the value of \p plugin_control_mask is negative, all dynamic plugin * types are enabled.\n @@ -103,7 +102,7 @@ H5_DLL herr_t H5PLset_loading_state(unsigned int plugin_control_mask); * are disabled. * \return \herr_t * - * \details H5PLget_loading_state() retrieves the bitmask that controls whether a certain type of plugins + * \details H5PLget_loading_state() retrieves the bitmask that controls whether a certain type of plugin * (e.g.: filters, VOL drivers) will be loaded by the HDF5 library. * * Bit positions allocated to date are specified in \ref H5PL_type_t as follows: diff --git a/test/filter_plugin.c b/test/filter_plugin.c index ea5c0005b86..7ed8ced9daa 100644 --- a/test/filter_plugin.c +++ b/test/filter_plugin.c @@ -438,9 +438,11 @@ static herr_t test_dataset_write_with_filters(hid_t fid) { hid_t dcpl_id = -1; /* Dataset creation property list ID */ - unsigned int compress_level; /* Deflate compression level */ unsigned int filter1_data; /* Data used by filter 1 */ unsigned int libver_values[4]; /* Used w/ the filter that makes HDF5 calls */ +#ifdef H5_HAVE_FILTER_DEFLATE + unsigned int compress_level; /* Deflate compression level */ +#endif /*---------------------------------------------------------- * STEP 1: Test deflation by itself. 
@@ -847,7 +849,7 @@ test_creating_groups_using_plugins(hid_t fid) for (i = 0; i < N_SUBGROUPS; i++) { char *sp = subgroup_name; - sp += HDsprintf(subgroup_name, SUBGROUP_PREFIX); + sp += HDsnprintf(subgroup_name, sizeof(subgroup_name), SUBGROUP_PREFIX); HDsprintf(sp, "%d", i); if ((sub_gid = H5Gcreate2(gid, subgroup_name, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) @@ -906,7 +908,7 @@ test_opening_groups_using_plugins(hid_t fid) for (i = 0; i < N_SUBGROUPS; i++) { char *sp = subgroup_name; - sp += HDsprintf(subgroup_name, SUBGROUP_PREFIX); + sp += HDsnprintf(subgroup_name, sizeof(subgroup_name), SUBGROUP_PREFIX); HDsprintf(sp, "%d", i); if ((sub_gid = H5Gopen2(gid, subgroup_name, H5P_DEFAULT)) < 0) @@ -1025,7 +1027,7 @@ test_path_api_calls(void) /* Add a bunch of paths to the path table */ for (u = 0; u < n_starting_paths; u++) { - HDsprintf(path, "a_path_%u", u); + HDsnprintf(path, sizeof(path), "a_path_%u", u); if (H5PLappend(path) < 0) { HDfprintf(stderr, " at %u: %s\n", u, path); TEST_ERROR; @@ -1091,7 +1093,7 @@ test_path_api_calls(void) /* Get path at the last index */ if ((path_len = H5PLget(n_starting_paths - 1, path, 256)) <= 0) TEST_ERROR; - HDsprintf(temp_name, "a_path_%u", n_starting_paths - 1); + HDsnprintf(temp_name, sizeof(temp_name), "a_path_%u", n_starting_paths - 1); if (HDstrcmp(path, temp_name) != 0) { HDfprintf(stderr, " get %u: %s\n", n_starting_paths - 1, path); TEST_ERROR; @@ -1145,7 +1147,7 @@ test_path_api_calls(void) TESTING(" prepend"); /* Prepend one path */ - HDsprintf(path, "a_path_%d", n_starting_paths + 1); + HDsnprintf(path, sizeof(path), "a_path_%d", n_starting_paths + 1); if (H5PLprepend(path) < 0) { HDfprintf(stderr, " prepend %u: %s\n", n_starting_paths + 1, path); TEST_ERROR; @@ -1168,7 +1170,7 @@ test_path_api_calls(void) /* Verify that the path was inserted at index zero */ if (H5PLget(0, path, 256) <= 0) TEST_ERROR; - HDsprintf(temp_name, "a_path_%d", n_starting_paths + 1); + HDsnprintf(temp_name, sizeof(temp_name), 
"a_path_%d", n_starting_paths + 1); if (HDstrcmp(path, temp_name) != 0) { HDfprintf(stderr, " get 0: %s\n", path); TEST_ERROR; @@ -1183,7 +1185,7 @@ test_path_api_calls(void) TESTING(" replace"); /* Replace one path at index 1 */ - HDsprintf(path, "a_path_%u", n_starting_paths + 4); + HDsnprintf(path, sizeof(path), "a_path_%u", n_starting_paths + 4); if (H5PLreplace(path, 1) < 0) { HDfprintf(stderr, " replace 1: %s\n", path); TEST_ERROR; @@ -1202,7 +1204,7 @@ test_path_api_calls(void) /* Check path at index 0 */ if (H5PLget(0, path, 256) <= 0) TEST_ERROR; - HDsprintf(temp_name, "a_path_%u", n_starting_paths + 1); + HDsnprintf(temp_name, sizeof(temp_name), "a_path_%u", n_starting_paths + 1); if (HDstrcmp(path, temp_name) != 0) { HDfprintf(stderr, " get 0: %s\n", path); TEST_ERROR; @@ -1250,7 +1252,7 @@ test_path_api_calls(void) TESTING(" insert"); /* Insert one path at index 3*/ - HDsprintf(path, "a_path_%d", n_starting_paths + 5); + HDsnprintf(path, sizeof(path), "a_path_%d", n_starting_paths + 5); if (H5PLinsert(path, 3) < 0) { HDfprintf(stderr, " insert 3: %s\n", path); TEST_ERROR; @@ -1436,6 +1438,12 @@ main(void) else my_fapl_id = old_ff_fapl_id; + /* Add extra path to check for correct error process */ + if (H5PLprepend("bogus") < 0) { + fprintf(stderr, "Could not prepend path:bogus\n"); + TEST_ERROR; + } + /* Reopen the file for testing data reading */ if ((fid = H5Fopen(filename, H5F_ACC_RDONLY, my_fapl_id)) < 0) TEST_ERROR; From ceef4a9ebd055f9ec0378a97ec2ce9421cdec866 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Mon, 21 Aug 2023 17:13:03 -0500 Subject: [PATCH 091/108] Merge/update CMake, presets,java and tools (#3393) --- .gitattributes | 7 +- CMakeFilters.cmake | 12 +- CMakeInstallation.cmake | 48 +- CMakeLists.txt | 27 +- CMakePresets.json | 72 +- bin/README.md | 30 + bin/release | 6 +- c++/examples/CMakeLists.txt | 4 +- c++/examples/CMakeTests.cmake | 59 +- c++/src/CMakeLists.txt | 4 +- 
c++/test/CMakeLists.txt | 2 +- c++/test/CMakeTests.cmake | 18 +- c++/test/CMakeVFDTests.cmake | 14 - config/README.md | 88 ++ config/apple | 127 +-- config/clang-cxxflags | 2 +- config/clang-warnings/error-general | 3 - config/cmake-presets/hidden-presets.json | 4 +- config/cmake/CTestCustom.cmake | 17 + config/cmake/ConfigureChecks.cmake | 44 +- config/cmake/ConversionTests.c | 290 +++--- config/cmake/H5pubconf.h.in | 4 +- config/cmake/HDF5DeveloperBuild.cmake | 196 ++++ config/cmake/HDF5UseFortran.cmake | 6 + config/cmake/HDFCXXCompilerFlags.cmake | 2 +- config/cmake/HDFCompilerFlags.cmake | 51 +- config/cmake/HDFFortranCompilerFlags.cmake | 18 +- config/cmake/HDFMacros.cmake | 6 +- config/cmake/ZLIB/zlib-config.cmake.in | 2 +- config/cmake/cacheinit.cmake | 7 + config/cmake/{ => examples}/CTestScript.cmake | 27 +- .../{ => examples}/HDF5_Examples.cmake.in | 2 +- .../HDF5_Examples_options.cmake | 0 config/cmake/grepTest.cmake | 16 + config/cmake/hdf5-config.cmake.in | 6 +- config/cmake/jrunTest.cmake | 16 + config/cmake/libhdf5.settings.cmake.in | 32 +- config/cmake/mccacheinit.cmake | 21 +- config/cmake/runTest.cmake | 16 + config/cmake/scripts/HDF5options.cmake | 6 +- config/cmake/vfdTest.cmake | 16 + config/cmake/volTest.cmake | 16 + config/conclude_fc.am | 4 +- config/gnu-cxxflags | 2 +- config/gnu-fflags | 7 +- config/gnu-warnings/4.8 | 7 - config/gnu-warnings/cxx-4.8 | 7 - config/gnu-warnings/cxx-developer-4.8 | 7 +- config/gnu-warnings/developer-12 | 4 + config/gnu-warnings/developer-4.8 | 7 + config/gnu-warnings/developer-general | 9 +- config/gnu-warnings/error-8 | 15 +- config/gnu-warnings/error-general | 38 +- config/gnu-warnings/gfort-4.8 | 1 - config/gnu-warnings/gfort-developer-4.8 | 3 + .../{developer-gfort-5 => gfort-developer-5} | 0 config/gnu-warnings/gfort-developer-general | 3 + config/gnu-warnings/gfort-no-developer-4.8 | 3 + .../gnu-warnings/gfort-no-developer-general | 3 + config/gnu-warnings/no-cxx-developer-4.8 | 3 + 
config/gnu-warnings/no-developer-4.8 | 3 +- config/ibm-aix | 2 - config/intel-cxxflags | 2 +- config/intel-fflags | 2 +- config/intel-warnings/developer-general | 9 +- config/pgi-cxxflags | 2 +- config/sanitizer/README.md | 2 +- configure.ac | 148 +-- doxygen/dox/IntroHDF5.dox | 8 +- doxygen/dox/LearnBasics.dox | 20 +- doxygen/dox/UsersGuide.dox | 2 +- doxygen/dox/high_level/extension.dox | 2 +- doxygen/examples/H5Fclose.c | 2 +- doxygen/examples/H5Fcreate.c | 2 +- doxygen/examples/hello_hdf5.c | 2 +- doxygen/examples/tables/fileDriverLists.dox | 4 +- doxygen/examples/tables/propertyLists.dox | 2 +- examples/CMakeLists.txt | 4 +- examples/CMakeTests.cmake | 15 +- examples/h5_cmprss.c | 2 +- examples/h5_crtatt.c | 2 +- examples/h5_crtdat.c | 2 +- examples/h5_crtgrp.c | 2 +- examples/h5_crtgrpar.c | 2 +- examples/h5_crtgrpd.c | 2 +- examples/h5_extlink.c | 6 +- examples/h5_rdwt.c | 2 +- examples/testh5cc.sh.in | 8 - fortran/examples/CMakeLists.txt | 12 +- fortran/examples/CMakeTests.cmake | 13 +- fortran/examples/testh5fc.sh.in | 4 - fortran/src/CMakeLists.txt | 85 +- fortran/src/H5config_f.inc.cmake | 8 +- fortran/src/H5config_f.inc.in | 3 + fortran/src/hdf5_fortrandll.def.in | 2 +- fortran/test/CMakeLists.txt | 4 +- fortran/test/CMakeTests.cmake | 23 +- fortran/testpar/CMakeLists.txt | 2 - hl/CMakeLists.txt | 10 +- hl/Makefile.am | 4 - hl/c++/examples/CMakeLists.txt | 2 +- hl/c++/examples/CMakeTests.cmake | 19 +- hl/c++/src/CMakeLists.txt | 4 +- hl/c++/test/CMakeLists.txt | 2 +- hl/c++/test/CMakeTests.cmake | 20 +- hl/examples/CMakeLists.txt | 2 +- hl/examples/CMakeTests.cmake | 56 +- hl/fortran/examples/CMakeTests.cmake | 23 +- hl/fortran/src/CMakeLists.txt | 34 +- hl/fortran/test/CMakeTests.cmake | 33 +- hl/src/CMakeLists.txt | 4 +- hl/test/CMakeLists.txt | 8 +- hl/test/CMakeTests.cmake | 13 +- hl/tools/CMakeLists.txt | 8 +- hl/tools/Makefile.am | 8 +- hl/tools/gif2h5/CMakeLists.txt | 10 +- hl/tools/gif2h5/CMakeTests.cmake | 18 +- hl/tools/h5watch/CMakeLists.txt 
| 8 +- hl/tools/h5watch/CMakeTests.cmake | 17 +- .../datasets/JavaDatasetExample.sh.in | 6 +- java/examples/datasets/Makefile.am | 2 +- java/examples/datatypes/CMakeLists.txt | 2 +- .../datatypes/JavaDatatypeExample.sh.in | 6 +- java/examples/datatypes/Makefile.am | 2 +- java/examples/groups/CMakeLists.txt | 2 +- java/examples/groups/JavaGroupExample.sh.in | 6 +- java/examples/groups/Makefile.am | 2 +- java/examples/intro/CMakeLists.txt | 2 +- java/examples/intro/JavaIntroExample.sh.in | 6 +- java/examples/intro/Makefile.am | 2 +- java/lib/ext/slf4j-nop-1.7.33.jar | Bin 4020 -> 0 bytes java/lib/ext/slf4j-nop-2.0.6.jar | Bin 0 -> 4071 bytes java/lib/ext/slf4j-simple-1.7.33.jar | Bin 15400 -> 0 bytes java/lib/ext/slf4j-simple-2.0.6.jar | Bin 0 -> 15239 bytes java/lib/slf4j-api-1.7.33.jar | Bin 41473 -> 0 bytes java/lib/slf4j-api-2.0.6.jar | Bin 0 -> 62531 bytes java/src/Makefile.am | 2 +- java/src/hdf/hdf5lib/H5.java | 5 +- java/src/jni/CMakeLists.txt | 4 +- java/src/jni/h5aImp.c | 597 ++++-------- java/src/jni/h5dImp.c | 561 +++-------- java/src/jni/h5util.c | 882 +++++++++++++++++- java/src/jni/h5util.h | 5 + java/test/CMakeLists.txt | 57 +- java/test/Makefile.am | 2 +- java/test/TestH5A.java | 436 ++++++++- java/test/TestH5D.java | 819 +++++++++++++++- java/test/TestH5R.java | 24 +- java/test/junit.sh.in | 6 +- java/test/testfiles/JUnit-TestH5A.txt | 5 +- java/test/testfiles/JUnit-TestH5D.txt | 8 +- release_docs/INSTALL_CMake.txt | 6 +- release_docs/README_HDF5_CMake | 2 +- release_docs/USING_HDF5_VS.txt | 5 +- src/CMakeLists.txt | 36 +- src/H5private.h | 3 + src/H5system.c | 43 + src/H5win32defs.h | 2 + test/CMakeLists.txt | 111 ++- test/CMakeTests.cmake | 415 ++++---- test/CMakeVFDTests.cmake | 48 +- testpar/CMakeLists.txt | 12 +- testpar/CMakeTests.cmake | 12 + testpar/CMakeVFDTests.cmake | 46 +- tools/lib/CMakeLists.txt | 4 +- tools/libtest/CMakeLists.txt | 2 +- tools/libtest/CMakeTests.cmake | 11 - tools/src/h5copy/CMakeLists.txt | 4 +- 
tools/src/h5diff/CMakeLists.txt | 8 +- tools/src/h5dump/CMakeLists.txt | 4 +- tools/src/h5format_convert/CMakeLists.txt | 4 +- tools/src/h5import/CMakeLists.txt | 4 +- tools/src/h5jam/CMakeLists.txt | 8 +- tools/src/h5ls/CMakeLists.txt | 4 +- tools/src/h5perf/CMakeLists.txt | 44 +- tools/src/h5repack/CMakeLists.txt | 4 +- tools/src/h5stat/CMakeLists.txt | 4 +- tools/src/misc/CMakeLists.txt | 16 +- tools/test/h5copy/CMakeLists.txt | 4 +- tools/test/h5copy/CMakeTests.cmake | 194 ++-- tools/test/h5diff/CMakeLists.txt | 4 +- tools/test/h5diff/CMakeTests.cmake | 411 +------- tools/test/h5dump/CMakeLists.txt | 4 +- tools/test/h5dump/CMakeTests.cmake | 292 ++++-- tools/test/h5dump/CMakeTestsPBITS.cmake | 7 +- tools/test/h5dump/CMakeTestsVDS.cmake | 7 +- tools/test/h5dump/CMakeTestsXML.cmake | 7 +- tools/test/h5dump/h5dump_plugin.sh.in | 4 +- tools/test/h5format_convert/CMakeLists.txt | 4 +- tools/test/h5format_convert/CMakeTests.cmake | 236 +++-- .../testfiles/h5fc_ext1_f.ddl | 2 +- .../testfiles/h5fc_ext1_i.ddl | 2 +- .../testfiles/h5fc_ext1_s.ddl | 2 +- .../testfiles/h5fc_ext2_if.ddl | 2 +- .../testfiles/h5fc_ext2_is.ddl | 2 +- .../testfiles/h5fc_ext2_sf.ddl | 2 +- .../testfiles/h5fc_ext3_isf.ddl | 2 +- .../h5format_convert/testfiles/h5fc_v_all.ddl | 2 +- .../h5format_convert/testfiles/h5fc_v_bt1.ddl | 2 +- .../h5format_convert/testfiles/h5fc_v_err.ddl | 2 +- .../testfiles/h5fc_v_n_1d.ddl | 2 +- .../testfiles/h5fc_v_n_all.ddl | 2 +- .../testfiles/h5fc_v_ndata_bt1.ddl | 2 +- .../testfiles/h5fc_v_non_chunked.ddl | 2 +- .../testfiles/old_h5fc_ext1_f.ddl | 2 +- .../testfiles/old_h5fc_ext1_i.ddl | 2 +- .../testfiles/old_h5fc_ext1_s.ddl | 2 +- .../testfiles/old_h5fc_ext2_if.ddl | 2 +- .../testfiles/old_h5fc_ext2_is.ddl | 2 +- .../testfiles/old_h5fc_ext2_sf.ddl | 2 +- .../testfiles/old_h5fc_ext3_isf.ddl | 2 +- tools/test/h5format_convert/testh5fc.sh.in | 125 +-- tools/test/h5import/CMakeLists.txt | 2 +- tools/test/h5import/CMakeTests.cmake | 150 ++- 
tools/test/h5import/h5importtestutil.sh.in | 6 +- tools/test/h5import/testfiles/tall_fp32.ddl | 2 +- tools/test/h5import/testfiles/tall_i32.ddl | 2 +- .../h5import/testfiles/tintsattrs_u32.ddl | 2 +- tools/test/h5jam/CMakeLists.txt | 6 +- tools/test/h5jam/testh5jam.sh.in | 2 +- tools/test/h5ls/CMakeLists.txt | 2 +- tools/test/h5ls/CMakeTests.cmake | 15 +- tools/test/h5ls/h5ls_plugin.sh.in | 4 +- tools/test/h5repack/CMakeLists.txt | 10 +- tools/test/h5repack/CMakeTests.cmake | 357 ++++--- tools/test/h5repack/CMakeVFDTests.cmake | 108 ++- tools/test/h5repack/h5repack.sh.in | 54 +- tools/test/h5repack/h5repack_plugin.sh.in | 4 +- tools/test/h5stat/CMakeLists.txt | 2 +- tools/test/h5stat/CMakeTests.cmake | 17 +- tools/test/h5stat/testh5stat.sh.in | 4 - tools/test/misc/CMakeLists.txt | 8 +- tools/test/misc/CMakeTestsRepart.cmake | 14 + tools/test/misc/testh5mkgrp.sh.in | 3 - tools/test/perform/CMakeLists.txt | 118 +-- tools/test/perform/CMakeTests.cmake | 62 +- utils/mirror_vfd/CMakeLists.txt | 4 +- utils/test/CMakeLists.txt | 2 +- 238 files changed, 5413 insertions(+), 3388 deletions(-) create mode 100644 bin/README.md create mode 100644 config/README.md create mode 100644 config/cmake/HDF5DeveloperBuild.cmake rename config/cmake/{ => examples}/CTestScript.cmake (88%) rename config/cmake/{ => examples}/HDF5_Examples.cmake.in (99%) rename config/cmake/{ => examples}/HDF5_Examples_options.cmake (100%) create mode 100644 config/gnu-warnings/developer-12 create mode 100644 config/gnu-warnings/gfort-developer-4.8 rename config/gnu-warnings/{developer-gfort-5 => gfort-developer-5} (100%) create mode 100644 config/gnu-warnings/gfort-developer-general create mode 100644 config/gnu-warnings/gfort-no-developer-4.8 create mode 100644 config/gnu-warnings/gfort-no-developer-general delete mode 100644 java/lib/ext/slf4j-nop-1.7.33.jar create mode 100644 java/lib/ext/slf4j-nop-2.0.6.jar delete mode 100644 java/lib/ext/slf4j-simple-1.7.33.jar create mode 100644 
java/lib/ext/slf4j-simple-2.0.6.jar delete mode 100644 java/lib/slf4j-api-1.7.33.jar create mode 100644 java/lib/slf4j-api-2.0.6.jar diff --git a/.gitattributes b/.gitattributes index 52722dacd73..0b51cd870c2 100644 --- a/.gitattributes +++ b/.gitattributes @@ -191,19 +191,18 @@ java/examples/testfiles/examples.intro.H5_CreateGroup.txt -text java/examples/testfiles/examples.intro.H5_CreateGroupAbsoluteRelative.txt -text java/examples/testfiles/examples.intro.H5_CreateGroupDataset.txt -text java/examples/testfiles/examples.intro.H5_ReadWrite.txt -text -java/lib/ext/slf4j-nop-1.7.33.jar -text svneol=unset#application/zip -java/lib/ext/slf4j-simple-1.7.33.jar -text svneol=unset#application/zip +java/lib/ext/slf4j-nop-2.0.6.jar -text svneol=unset#application/zip +java/lib/ext/slf4j-simple-2.0.6.jar -text svneol=unset#application/zip java/lib/hamcrest-core.jar -text svneol=unset#application/java-archive java/lib/junit.jar -text svneol=unset#application/java-archive java/lib/simplelogger.properties -text -java/lib/slf4j-api-1.7.33.jar -text svneol=unset#application/zip +java/lib/slf4j-api-2.0.6.jar -text svneol=unset#application/zip java/src/CMakeLists.txt -text java/src/Makefile.am -text java/src/hdf/CMakeLists.txt -text java/src/hdf/hdf5lib/CMakeLists.txt -text java/src/hdf/hdf5lib/H5.java -text java/src/hdf/hdf5lib/HDF5Constants.java -text -java/src/hdf/hdf5lib/HDF5GroupInfo.java -text java/src/hdf/hdf5lib/HDFArray.java -text java/src/hdf/hdf5lib/HDFNativeData.java -text java/src/hdf/hdf5lib/callbacks/Callbacks.java -text diff --git a/CMakeFilters.cmake b/CMakeFilters.cmake index b81d6e21a7e..200634e1cb3 100644 --- a/CMakeFilters.cmake +++ b/CMakeFilters.cmake @@ -164,11 +164,11 @@ if (HDF5_ENABLE_SZIP_SUPPORT) if (NOT SZIP_FOUND) find_package (SZIP) # Legacy find endif () - if (SZIP_FOUND) - set (SZIP_INCLUDE_DIR_GEN ${SZIP_INCLUDE_DIR}) - set (SZIP_INCLUDE_DIRS ${SZIP_INCLUDE_DIRS} ${SZIP_INCLUDE_DIR}) - set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_LIBRARIES}) - 
endif () + endif () + if (SZIP_FOUND) + set (SZIP_INCLUDE_DIR_GEN ${SZIP_INCLUDE_DIR}) + set (SZIP_INCLUDE_DIRS ${SZIP_INCLUDE_DIRS} ${SZIP_INCLUDE_DIR}) + set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_LIBRARIES}) endif () else () if (BUILD_SZIP_WITH_FETCHCONTENT) @@ -200,8 +200,6 @@ if (HDF5_ENABLE_SZIP_SUPPORT) set (SZIP_PACKAGE_NAME ${SZIP_PACKAGE_NAME}) endif () set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_STATIC_LIBRARY}) - else () - message (FATAL_ERROR "SZIP is Required for SZIP support in HDF5") endif () endif () if (SZIP_FOUND) diff --git a/CMakeInstallation.cmake b/CMakeInstallation.cmake index 67fd493e9f4..1fb77141240 100644 --- a/CMakeInstallation.cmake +++ b/CMakeInstallation.cmake @@ -138,7 +138,7 @@ install ( option (HDF5_PACK_EXAMPLES "Package the HDF5 Library Examples Compressed File" OFF) if (HDF5_PACK_EXAMPLES) configure_file ( - ${HDF_RESOURCES_DIR}/HDF5_Examples.cmake.in + ${HDF_RESOURCES_DIR}/examples/HDF5_Examples.cmake.in ${HDF5_BINARY_DIR}/HDF5_Examples.cmake @ONLY ) install ( @@ -150,15 +150,20 @@ if (HDF5_PACK_EXAMPLES) option (EXAMPLES_USE_RELEASE_NAME "Use the released examples artifact name" OFF) option (EXAMPLES_DOWNLOAD "Download to use released examples files" OFF) if (EXAMPLES_DOWNLOAD) - if (NOT EXAMPLES_USE_LOCALCONTENT) - set (EXAMPLES_URL ${EXAMPLES_TGZ_ORIGPATH}/${EXAMPLES_TGZ_ORIGNAME}) + if (EXAMPLES_USE_RELEASE_NAME) + set (EXAMPLES_NAME ${EXAMPLES_TGZ_ORIGNAME}) else () - set (EXAMPLES_URL ${TGZPATH}/${EXAMPLES_TGZ_ORIGNAME}) + set (EXAMPLES_NAME ${HDF5_EXAMPLES_COMPRESSED}) endif () - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "Examples file is ${EXAMPLES_URL}") + if (NOT EXAMPLES_USE_LOCALCONTENT) + set (EXAMPLES_URL ${EXAMPLES_TGZ_ORIGPATH}/${EXAMPLES_NAME}) + file (DOWNLOAD ${EXAMPLES_URL} ${HDF5_BINARY_DIR}/${HDF5_EXAMPLES_COMPRESSED} STATUS EX_DL) + message (STATUS "Examples file is ${EXAMPLES_URL} STATUS=${EX_DL}") + else () + set (EXAMPLES_URL ${TGZPATH}/${EXAMPLES_NAME}) + file (COPY_FILE 
${EXAMPLES_URL} ${HDF5_BINARY_DIR}/${HDF5_EXAMPLES_COMPRESSED} RESULT EX_DL) + message (STATUS "Examples file is ${EXAMPLES_URL} RESULT=${EX_DL}") endif () - file (DOWNLOAD ${EXAMPLES_URL} ${HDF5_BINARY_DIR}/${HDF5_EXAMPLES_COMPRESSED}) if (EXISTS "${HDF5_BINARY_DIR}/${HDF5_EXAMPLES_COMPRESSED}") execute_process( COMMAND ${CMAKE_COMMAND} -E tar xzf ${HDF5_EXAMPLES_COMPRESSED} @@ -166,7 +171,6 @@ if (HDF5_PACK_EXAMPLES) COMMAND_ECHO STDOUT ) endif () - set (EXAMPLES_USE_RELEASE_NAME ON CACHE BOOL "" FORCE) else () if (EXISTS "${HDF5_EXAMPLES_COMPRESSED_DIR}/${HDF5_EXAMPLES_COMPRESSED}") execute_process( @@ -176,20 +180,18 @@ if (HDF5_PACK_EXAMPLES) ) endif () endif () - if (EXAMPLES_USE_RELEASE_NAME) - get_filename_component (EX_LAST_EXT ${HDF5_EXAMPLES_COMPRESSED} LAST_EXT) - if (${EX_LAST_EXT} STREQUAL ".zip") - get_filename_component (EX_DIR_NAME ${HDF5_EXAMPLES_COMPRESSED} NAME_WLE) - else () - get_filename_component (EX_DIR_NAME ${HDF5_EXAMPLES_COMPRESSED} NAME_WLE) - get_filename_component (EX_DIR_NAME ${EX_DIR_NAME} NAME_WLE) - endif () - execute_process( - COMMAND ${CMAKE_COMMAND} -E rename ${EX_DIR_NAME} HDF5Examples - WORKING_DIRECTORY ${HDF5_BINARY_DIR} - COMMAND_ECHO STDOUT - ) + get_filename_component (EX_LAST_EXT ${HDF5_EXAMPLES_COMPRESSED} LAST_EXT) + if (${EX_LAST_EXT} STREQUAL ".zip") + get_filename_component (EX_DIR_NAME ${HDF5_EXAMPLES_COMPRESSED} NAME_WLE) + else () + get_filename_component (EX_DIR_NAME ${HDF5_EXAMPLES_COMPRESSED} NAME_WLE) + get_filename_component (EX_DIR_NAME ${EX_DIR_NAME} NAME_WLE) endif () + execute_process( + COMMAND ${CMAKE_COMMAND} -E rename ${EX_DIR_NAME} HDF5Examples + WORKING_DIRECTORY ${HDF5_BINARY_DIR} + COMMAND_ECHO STDOUT + ) install ( DIRECTORY ${HDF5_BINARY_DIR}/HDF5Examples DESTINATION ${HDF5_INSTALL_DATA_DIR} @@ -204,13 +206,13 @@ if (HDF5_PACK_EXAMPLES) ) install ( FILES - ${HDF_RESOURCES_DIR}/CTestScript.cmake + ${HDF_RESOURCES_DIR}/examples/CTestScript.cmake DESTINATION ${HDF5_INSTALL_DATA_DIR} COMPONENT 
hdfdocuments ) install ( FILES - ${HDF_RESOURCES_DIR}/HDF5_Examples_options.cmake + ${HDF_RESOURCES_DIR}/examples/HDF5_Examples_options.cmake DESTINATION ${HDF5_INSTALL_DATA_DIR} COMPONENT hdfdocuments ) diff --git a/CMakeLists.txt b/CMakeLists.txt index 2b51a31364f..28fe23dc510 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -222,6 +222,7 @@ set (HDF_CONFIG_DIR ${HDF5_SOURCE_DIR}/config) set (HDF_RESOURCES_DIR ${HDF5_SOURCE_DIR}/config/cmake) set (HDF5_SRC_DIR ${HDF5_SOURCE_DIR}/src) set (HDF5_TEST_SRC_DIR ${HDF5_SOURCE_DIR}/test) +set (HDF5_TEST_PAR_DIR ${HDF5_SOURCE_DIR}/testpar) set (HDF5_CPP_SRC_DIR ${HDF5_SOURCE_DIR}/c++) set (HDF5_CPP_TST_DIR ${HDF5_SOURCE_DIR}/c++/test) set (HDF5_HL_SRC_DIR ${HDF5_SOURCE_DIR}/hl) @@ -236,9 +237,9 @@ set (HDF5_JAVA_JNI_SRC_DIR ${HDF5_SOURCE_DIR}/java/src/jni) set (HDF5_JAVA_HDF5_SRC_DIR ${HDF5_SOURCE_DIR}/java/src/hdf) set (HDF5_JAVA_TEST_SRC_DIR ${HDF5_SOURCE_DIR}/java/test) set (HDF5_JAVA_LIB_DIR ${HDF5_SOURCE_DIR}/java/lib) -set (HDF5_JAVA_LOGGING_JAR ${HDF5_SOURCE_DIR}/java/lib/slf4j-api-1.7.33.jar) -set (HDF5_JAVA_LOGGING_NOP_JAR ${HDF5_SOURCE_DIR}/java/lib/ext/slf4j-nop-1.7.33.jar) -set (HDF5_JAVA_LOGGING_SIMPLE_JAR ${HDF5_SOURCE_DIR}/java/lib/ext/slf4j-simple-1.7.33.jar) +set (HDF5_JAVA_LOGGING_JAR ${HDF5_SOURCE_DIR}/java/lib/slf4j-api-2.0.6.jar) +set (HDF5_JAVA_LOGGING_NOP_JAR ${HDF5_SOURCE_DIR}/java/lib/ext/slf4j-nop-2.0.6.jar) +set (HDF5_JAVA_LOGGING_SIMPLE_JAR ${HDF5_SOURCE_DIR}/java/lib/ext/slf4j-simple-2.0.6.jar) set (HDF5_DOXYGEN_DIR ${HDF5_SOURCE_DIR}/doxygen) set (HDF5_SRC_INCLUDE_DIRS ${HDF5_SRC_DIR}) @@ -573,15 +574,6 @@ if (HDF5_ENABLE_COVERAGE) endif () endif () -#----------------------------------------------------------------------------- -# Option to indicate using dmalloc -#----------------------------------------------------------------------------- -# option (HDF5_ENABLE_USING_DMALLOC "Indicate that dmalloc is used" OFF) -# if (HDF5_ENABLE_USING_DMALLOC) -# find_package (DMALLOC) -# set 
(H5_HAVE_DMALLOC DMALLOC_FOUND) -# endif () - #----------------------------------------------------------------------------- # Option to indicate using a memory checker #----------------------------------------------------------------------------- @@ -661,6 +653,13 @@ if (${HDF_CFG_NAME} MATCHES "Debug" OR ${HDF_CFG_NAME} MATCHES "Developer") mark_as_advanced (HDF5_ENABLE_INSTRUMENT) endif () +#----------------------------------------------------------------------------- +# Add some definitions for Developer Builds +#----------------------------------------------------------------------------- +if (${HDF_CFG_NAME} MATCHES "Developer") + include (${HDF_RESOURCES_DIR}/HDF5DeveloperBuild.cmake) +endif () + #----------------------------------------------------------------------------- # Option to embed library info into executables #----------------------------------------------------------------------------- @@ -989,11 +988,11 @@ if (BUILD_TESTING) mark_as_advanced (HDF5_TEST_JAVA) if (NOT HDF5_EXTERNALLY_CONFIGURED) - if (EXISTS "${HDF5_SOURCE_DIR}/test" AND IS_DIRECTORY "${HDF5_SOURCE_DIR}/test") + if (EXISTS "${HDF5_TEST_SRC_DIR}" AND IS_DIRECTORY "${HDF5_TEST_SRC_DIR}") add_subdirectory (test) endif () if (H5_HAVE_PARALLEL) - if (EXISTS "${HDF5_SOURCE_DIR}/testpar" AND IS_DIRECTORY "${HDF5_SOURCE_DIR}/testpar") + if (EXISTS "${HDF5_TEST_PAR_DIR}" AND IS_DIRECTORY "${HDF5_TEST_PAR_DIR}") add_subdirectory (testpar) endif () endif () diff --git a/CMakePresets.json b/CMakePresets.json index 13766ca66f8..152aa414873 100644 --- a/CMakePresets.json +++ b/CMakePresets.json @@ -14,28 +14,35 @@ } }, { - "name": "ci-StdCompression", + "name": "ci-CompressionVars", "hidden": true, - "inherits": "ci-base-tgz", "cacheVariables": { "BUILD_ZLIB_WITH_FETCHCONTENT": "ON", "ZLIB_PACKAGE_NAME": {"type": "STRING", "value": "zlib"}, "ZLIB_TGZ_ORIGPATH": {"type": "STRING", "value": "https://github.com/madler/zlib/releases/download/v1.2.13"}, "ZLIB_TGZ_ORIGNAME": {"type": "STRING", 
"value": "zlib-1.2.13.tar.gz"}, - "ZLIB_USE_LOCALCONTENT": "OFF", "BUILD_SZIP_WITH_FETCHCONTENT": "ON", "LIBAEC_PACKAGE_NAME": {"type": "STRING", "value": "libaec"}, "LIBAEC_TGZ_ORIGPATH": {"type": "STRING", "value": "https://github.com/MathisRosenhauer/libaec/releases/download/v1.0.6"}, - "LIBAEC_TGZ_ORIGNAME": {"type": "STRING", "value": "libaec-1.0.6.tar.gz"}, - "LIBAEC_USE_LOCALCONTENT": "OFF" + "LIBAEC_TGZ_ORIGNAME": {"type": "STRING", "value": "libaec-1.0.6.tar.gz"} + } + }, + { + "name": "ci-StdCompression", + "hidden": true, + "inherits": ["ci-base-tgz", "ci-CompressionVars"], + "cacheVariables": { + "HDF5_PACKAGE_EXTLIBS": "ON", + "ZLIB_USE_LOCALCONTENT": "OFF", + "LIBAEC_USE_LOCALCONTENT": "OFF", + "HDF5_ENABLE_SZIP_SUPPORT": "ON" } }, { "name": "ci-base-plugins", "hidden": true, - "inherits": "ci-base-tgz", "cacheVariables": { - "PLUGIN_TGZ_NAME": {"type": "STRING", "value": "hdf5_plugins-1.14.0.tar.gz"}, + "PLUGIN_TGZ_NAME": {"type": "STRING", "value": "hdf5_plugins.tar.gz"}, "PLUGIN_PACKAGE_NAME": {"type": "STRING", "value": "pl"}, "BSHUF_TGZ_NAME": {"type": "STRING", "value": "bitshuffle.tar.gz"}, "BSHUF_PACKAGE_NAME": {"type": "STRING", "value": "bshuf"}, @@ -63,32 +70,49 @@ } }, { - "name": "ci-StdPlugins", + "name": "ci-PluginsVars", "hidden": true, - "inherits": ["ci-base-plugins", "ci-base-tgz"], "cacheVariables": { "HDF5_ENABLE_PLUGIN_SUPPORT": "ON", "PLUGIN_TGZ_ORIGPATH": {"type": "STRING", "value": "https://github.com/HDFGroup/hdf5_plugins/archive/refs/tags"}, - "PLUGIN_TGZ_ORIGNAME": {"type": "STRING", "value": "hdf5_plugins-1.14.0.tar.gz"} + "PLUGIN_TGZ_ORIGNAME": {"type": "STRING", "value": "hdf5_plugins-master.tar.gz"} } }, - { - "name": "ci-StdExamples", + { + "name": "ci-StdPlugins", "hidden": true, - "inherits": "ci-base", + "inherits": ["ci-base-plugins", "ci-PluginsVars", "ci-base-tgz"], "cacheVariables": { - "HDF5_PACK_EXAMPLES": "ON", - "HDF5_EXAMPLES_COMPRESSED": {"type": "STRING", "value": "hdf5-examples-2.0.3.tar.gz"}, + 
"H5PL_VERS_MAJOR": "1", + "H5PL_VERS_MINOR": "12", + "H5PL_VERS_RELEASE": "3" + } + }, + { + "name": "ci-ExamplesVars", + "hidden": true, + "cacheVariables": { + "HDF5_EXAMPLES_COMPRESSED": {"type": "STRING", "value": "hdf5-examples-2.0.4.tar.gz"}, "HDF5_EXAMPLES_COMPRESSED_DIR": {"type": "STRING", "value": "${sourceParentDir}/temp"}, "EXAMPLES_TGZ_ORIGPATH": {"type": "STRING", "value": "https://github.com/HDFGroup/hdf5-examples/archive/refs/tags/"}, - "EXAMPLES_TGZ_ORIGNAME": {"type": "STRING", "value": "2.0.3.tar.gz"}, + "EXAMPLES_TGZ_ORIGNAME": {"type": "STRING", "value": "2.0.4.tar.gz"} + } + }, + { + "name": "ci-StdExamples", + "hidden": true, + "inherits": ["ci-base", "ci-ExamplesVars", "ci-base-tgz"], + "cacheVariables": { + "EXAMPLES_USE_RELEASE_NAME": "ON", + "EXAMPLES_USE_LOCALCONTENT": "OFF", + "HDF5_PACK_EXAMPLES": "ON", "EXAMPLES_DOWNLOAD": "ON" } }, - { + { "name": "ci-StdShar", "hidden": true, - "inherits": "ci-StdCompression", + "inherits": ["ci-StdCompression", "ci-StdExamples"], "cacheVariables": { "HDF_PACKAGE_NAMESPACE": {"type": "STRING", "value": "hdf5::"}, "HDF5_INSTALL_MOD_FORTRAN": "NO", @@ -105,8 +129,7 @@ "ci-x64-Release-MSVC", "ci-CPP", "ci-Java", - "ci-StdShar", - "ci-StdExamples" + "ci-StdShar" ] }, { @@ -117,8 +140,7 @@ "ci-CPP", "ci-Fortran", "ci-Java", - "ci-StdShar", - "ci-StdExamples" + "ci-StdShar" ] }, { @@ -129,8 +151,7 @@ "ci-CPP", "ci-Fortran", "ci-Java", - "ci-StdShar", - "ci-StdExamples" + "ci-StdShar" ] }, { @@ -141,8 +162,7 @@ "ci-CPP", "ci-Fortran", "ci-Java", - "ci-StdShar", - "ci-StdExamples" + "ci-StdShar" ] } ], diff --git a/bin/README.md b/bin/README.md new file mode 100644 index 00000000000..e2e25701893 --- /dev/null +++ b/bin/README.md @@ -0,0 +1,30 @@ +# Scripts in `bin` and their purpose + +Programs run via `autogen.sh` (or the equivalent in CMake) are indicated. 
+ +|Program|Purpose| +|-------|-------| +|`buildhdf5`|Convenience script to build HDF5 using the Autotools| +|`checkapi`|Checks if public API calls are used in internal functions| +|`chkcopyright`|Checks if files have appropriate copyright statements| +|`cmakehdf5`|Convenience script to build HDF5 using CMake| +|`debug-ohdr`|Examines debug output from `H5O_open/close` to look for open objects| +|`format_source`|Runs `clang-format` over the source files, applying our rules| +|`genparser`|Creates the flex/bison-based parser files in the high-level library| +|`h5cc.in`|Input file from which h5cc is created| +|`h5redeploy.in`|Input file from which h5redeploy is created| +|`h5vers`|Updates the library version number| +|`make_err`|Generates the H5E header files (called in `autogen.sh`)| +|`make_vers`|Generates H5version.h (called in `autogen.sh`)| +|`make_overflow`|Generates H5overflow.h (called in `autogen.sh`)| +|`output_filter`|Used in the tools test code to strip extraneous output before we diff files| +|`restore.sh`|Removes files generated by `autogen.sh`| +|`runbkprog`|Used by CMake to run test programs in the background| +|`switch_maint_mode`|Switches maintainer mode on/off in `configure.ac`| +|`trace`|Adds `TRACE` macros to HDF5 C library source files (run by `autogen.sh`)| +|`warnhist`|Generates compiler warning statistics for gcc/clang when fed output of make| + +## TODO + +* chkcopyright is currently semi-broken as it doesn't handle the full variety of copyright headers we need. We're leaving it in place, though, in the hopes that someone will update it in the future. +* Extending warnhist to better understand the output of additional compilers/languages would be nice. 
diff --git a/bin/release b/bin/release index d9e4f7b41b4..1226d33d8a1 100755 --- a/bin/release +++ b/bin/release @@ -206,7 +206,7 @@ tar2cmakezip() cp /mnt/scr1/pre-release/hdf5/CMake/LIBAEC.tar.gz $cmziptmpsubdir cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmziptmpsubdir cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-2.0.3-Source.zip $cmziptmpsubdir - cp /mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-1_12_2.zip $cmziptmpsubdir + cp /mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-master.zip $cmziptmpsubdir cp $cmziptmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmziptmpsubdir cp $cmziptmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmziptmpsubdir cp $cmziptmpsubdir/$version/config/cmake/scripts/HDF5options.cmake $cmziptmpsubdir @@ -298,7 +298,7 @@ tar2cmaketgz() cp /mnt/scr1/pre-release/hdf5/CMake/LIBAEC.tar.gz $cmgztmpsubdir cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmgztmpsubdir cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-2.0.3-Source.tar.gz $cmgztmpsubdir - cp /mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-1_12_2.tar.gz $cmgztmpsubdir + cp /mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-master.tar.gz $cmgztmpsubdir cp $cmgztmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmgztmpsubdir cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmgztmpsubdir cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5options.cmake $cmgztmpsubdir @@ -377,7 +377,7 @@ tar2hpccmaketgz() cp /mnt/scr1/pre-release/hdf5/CMake/LIBAEC.tar.gz $cmgztmpsubdir cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmgztmpsubdir cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-2.0.3-Source.tar.gz $cmgztmpsubdir - cp /mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-1_12_2.tar.gz $cmgztmpsubdir + cp /mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-master.tar.gz $cmgztmpsubdir cp $cmgztmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmgztmpsubdir cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmgztmpsubdir diff --git 
a/c++/examples/CMakeLists.txt b/c++/examples/CMakeLists.txt index c50315f6a52..606c2210533 100644 --- a/c++/examples/CMakeLists.txt +++ b/c++/examples/CMakeLists.txt @@ -34,7 +34,7 @@ set (tutr_examples foreach (example ${examples}) add_executable (cpp_ex_${example} ${HDF5_CPP_EXAMPLES_SOURCE_DIR}/${example}.cpp) - target_include_directories (cpp_ex_${example} PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (cpp_ex_${example} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (cpp_ex_${example} STATIC) target_link_libraries (cpp_ex_${example} PRIVATE ${HDF5_CPP_LIB_TARGET} ${HDF5_LIB_TARGET}) @@ -59,7 +59,7 @@ endforeach () foreach (example ${tutr_examples}) add_executable (cpp_ex_${example} ${HDF5_CPP_EXAMPLES_SOURCE_DIR}/${example}.cpp) - target_include_directories (cpp_ex_${example} PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (cpp_ex_${example} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (cpp_ex_${example} STATIC) target_link_libraries (cpp_ex_${example} PRIVATE ${HDF5_CPP_LIB_TARGET} ${HDF5_LIB_TARGET}) diff --git a/c++/examples/CMakeTests.cmake b/c++/examples/CMakeTests.cmake index 5af0b2a8546..f710204a948 100644 --- a/c++/examples/CMakeTests.cmake +++ b/c++/examples/CMakeTests.cmake @@ -16,17 +16,31 @@ ############################################################################## ############################################################################## # Remove any output file left over from previous test run +set (CPP_EX_CLEANFILES + Group.h5 + SDS.h5 + SDScompound.h5 + SDSextendible.h5 + Select.h5 +) add_test ( NAME CPP_ex-clear-objects COMMAND ${CMAKE_COMMAND} - -E remove - Group.h5 - SDS.h5 - SDScompound.h5 - SDSextendible.h5 - Select.h5 + -E remove 
${CPP_EX_CLEANFILES} +) +set_tests_properties (CPP_ex-clear-objects PROPERTIES + FIXTURES_SETUP clear_cppex + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} +) +add_test ( + NAME CPP_ex-clean-objects + COMMAND ${CMAKE_COMMAND} + -E remove ${CPP_EX_CLEANFILES} +) +set_tests_properties (CPP_ex-clean-objects PROPERTIES + FIXTURES_CLEANUP clear_cppex + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} ) -set_tests_properties (CPP_ex-clear-objects PROPERTIES FIXTURES_SETUP clear_cppex) foreach (example ${examples}) if (HDF5_ENABLE_USING_MEMCHECKER) @@ -53,19 +67,32 @@ endforeach () #the following dependencies are handled by the order of the files # SET_TESTS_PROPERTIES(CPP_ex_readdata PROPERTIES DEPENDS CPP_ex_create) # SET_TESTS_PROPERTIES(CPP_ex_chunks PROPERTIES DEPENDS CPP_ex_extend_ds) - +set (CPP_EX_TUTR_CLEANFILES + h5tutr_cmprss.h5 + h5tutr_dset.h5 + h5tutr_extend.h5 + h5tutr_group.h5 + h5tutr_groups.h5 + h5tutr_subset.h5 +) add_test ( NAME CPP_ex_tutr-clear-objects COMMAND ${CMAKE_COMMAND} - -E remove - h5tutr_cmprss.h5 - h5tutr_dset.h5 - h5tutr_extend.h5 - h5tutr_group.h5 - h5tutr_groups.h5 - h5tutr_subset.h5 + -E remove ${CPP_EX_TUTR_CLEANFILES} +) +set_tests_properties (CPP_ex_tutr-clear-objects PROPERTIES + FIXTURES_SETUP clear_cppex_tutr + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} +) +add_test ( + NAME CPP_ex_tutr-clean-objects + COMMAND ${CMAKE_COMMAND} + -E remove ${CPP_EX_TUTR_CLEANFILES} +) +set_tests_properties (CPP_ex_tutr-clean-objects PROPERTIES + FIXTURES_CLEANUP clear_cppex_tutr + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} ) -set_tests_properties (CPP_ex_tutr-clear-objects PROPERTIES FIXTURES_SETUP clear_cppex_tutr) foreach (example ${tutr_examples}) if (HDF5_ENABLE_USING_MEMCHECKER) diff --git a/c++/src/CMakeLists.txt b/c++/src/CMakeLists.txt index 6bf0e8d4ca1..f8ac7296097 100644 --- a/c++/src/CMakeLists.txt +++ b/c++/src/CMakeLists.txt @@ -80,7 +80,7 @@ set (CPP_HDRS if (BUILD_STATIC_LIBS) add_library (${HDF5_CPP_LIB_TARGET} STATIC ${CPP_SOURCES} ${CPP_HDRS}) 
target_include_directories (${HDF5_CPP_LIB_TARGET} - PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" + PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" INTERFACE "$/include>" ) target_compile_options(${HDF5_CPP_LIB_TARGET} PRIVATE "${HDF5_CMAKE_CXX_FLAGS}") @@ -98,7 +98,7 @@ endif () if (BUILD_SHARED_LIBS) add_library (${HDF5_CPP_LIBSH_TARGET} SHARED ${CPP_SOURCES} ${CPP_HDRS}) target_include_directories (${HDF5_CPP_LIBSH_TARGET} - PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" + PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" INTERFACE "$/include>" ) target_compile_options(${HDF5_CPP_LIBSH_TARGET} PRIVATE "${HDF5_CMAKE_CXX_FLAGS}") diff --git a/c++/test/CMakeLists.txt b/c++/test/CMakeLists.txt index 331cada33d4..3511c5f20cb 100644 --- a/c++/test/CMakeLists.txt +++ b/c++/test/CMakeLists.txt @@ -38,7 +38,7 @@ set (srcdir ${CMAKE_CURRENT_SOURCE_DIR}) configure_file (${HDF5_CPP_TEST_SOURCE_DIR}/H5srcdir_str.h.in H5srcdir_str.h @ONLY) add_executable (cpp_testhdf5 ${CPP_TEST_SOURCES} ${HDF5_CPP_TEST_SOURCE_DIR}/h5cpputil.h) -target_include_directories (cpp_testhdf5 PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};${HDF5_TEST_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (cpp_testhdf5 PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};${HDF5_TEST_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(cpp_testhdf5 PRIVATE "${HDF5_CMAKE_CXX_FLAGS}") target_compile_definitions(cpp_testhdf5 PRIVATE "$<$:MPICH_SKIP_MPICXX;MPICH_IGNORE_CXX_SEEK>"# Parallel/MPI, prevent spurious cpp/cxx warnings diff --git a/c++/test/CMakeTests.cmake b/c++/test/CMakeTests.cmake index e34f62970c8..fc94a68d32f 100644 --- a/c++/test/CMakeTests.cmake +++ b/c++/test/CMakeTests.cmake @@ -17,20 +17,6 @@ add_custom_target(cpp_testhdf5_files ALL COMMENT "Copying files needed by cpp_te ### T E S T I N G ### 
############################################################################## ############################################################################## -# Remove any output file left over from previous test run -add_test ( - NAME CPP_testhdf5-clear-objects - COMMAND ${CMAKE_COMMAND} - -E remove - tattr_basic.h5 - tattr_compound.h5 - tattr_dtype.h5 - tattr_multi.h5 - tattr_scalar.h5 - tfattrs.h5 - titerate.h5 -) - if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME CPP_testhdf5 COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) else () @@ -46,7 +32,9 @@ else () -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () -set_tests_properties (CPP_testhdf5 PROPERTIES DEPENDS CPP_testhdf5-clear-objects) +set_tests_properties (CPP_testhdf5 PROPERTIES + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} +) ############################################################################## ############################################################################## diff --git a/c++/test/CMakeVFDTests.cmake b/c++/test/CMakeVFDTests.cmake index ced9b0ca0ec..5405df3a252 100644 --- a/c++/test/CMakeVFDTests.cmake +++ b/c++/test/CMakeVFDTests.cmake @@ -22,21 +22,8 @@ H5_CREATE_VFD_DIR() ### T H E T E S T S M A C R O S ### ############################################################################## ############################################################################## - macro (ADD_VFD_TEST vfdname resultcode) if (NOT HDF5_ENABLE_USING_MEMCHECKER) - add_test ( - NAME CPP_VFD-${vfdname}-cpp_testhdf5-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove - tattr_basic.h5 - tattr_compound.h5 - tattr_dtype.h5 - tattr_multi.h5 - tattr_scalar.h5 - tfattrs.h5 - titerate.h5 - WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/${vfdname} - ) add_test ( NAME CPP_VFD-${vfdname}-cpp_testhdf5 COMMAND "${CMAKE_COMMAND}" @@ -49,7 +36,6 @@ macro (ADD_VFD_TEST vfdname resultcode) -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/${vfdname}" -P "${HDF_RESOURCES_DIR}/vfdTest.cmake" ) - set_tests_properties (CPP_VFD-${vfdname}-cpp_testhdf5 
PROPERTIES DEPENDS CPP_VFD-${vfdname}-cpp_testhdf5-clear-objects) set_tests_properties (CPP_VFD-${vfdname}-cpp_testhdf5 PROPERTIES TIMEOUT ${CTEST_SHORT_TIMEOUT}) endif () endmacro () diff --git a/config/README.md b/config/README.md new file mode 100644 index 00000000000..8459b7724ae --- /dev/null +++ b/config/README.md @@ -0,0 +1,88 @@ +# The `config` directory + +## Intro + +HDF5 can be configured using both the GNU Autotools and CMake. We try to keep +them in sync, but you can expect minor differences to crop up. Please create +a GitHub issue for any differences noted. Note that with the Autotools, we +do NOT check generated files into GitHub until release time, so you will +need to generate `configure`, `Makefile.in`(s), etc. via `autogen.sh` in the +project root if you want to build with that system. + +Configuration information for the HDF5 library and tools is (unfortunately) +spread across the repository. Basic library configuration will generally +be found in `configure.ac` (Autotools) and the root's `CMakeLists.txt` (CMake). +Each subdirectory of the project also has its own `Makefile.am` or CMake build +and test files. + +This directory contains a few important things: + +* Autotools OS- and compiler-specific configuration +* CMake support files (in `cmake`) +* Warning files shared between the two systems (in `*-warnings` directories) +* CMake toolchain files (in `toolchain`) +* CMake sanitizer files (in `sanitizer`) + +CMake will be documented elsewhere. This document focuses on the Autotools files +and the shared warning files. + +## Autotools + +An Autotools build will first use `$host_cpu`, `$host_os`, etc. to try to find a +suitable platform file in `config` to source and start checking compilers. The +code that does this is in `configure.ac` (search for `host_os`). For example, +MacOS will source the `apple` file and FreeBSD will source the `freebsd` file. +There are a bunch of Linux files, but they all eventually invoke +`linux-gnulibc1`. 
+ +If you dig into one of these files, the way that they check for compilers is +rather crude. Each OS script will simply source the various C, C++, and +Fortran compiler files that are listed inside. Each compiler file checks +the designated compiler's version output to see if there's a match, and if so, +the flag processing proceeds, and a variable like `cc_flags_set` will be set +at the end. + +In case it's not obvious, the C files end in `-flags`, C++ in `-cxxflags`, and +Fortran in `-fflags`. + +When a compiler matches, the script will attempt to set the `CFLAGS`, etc. +variables based on the platform and compiler's properties. There are typically +a large number of flag categories (e.g., `DEBUG_OPT_CFLAGS`) that are +conditionally appended to the canonical variables, like `AM_FLAGS`, by the +remainder of the `configure` script. + +For the major compilers, like Clang and gcc, there will be a section at the +end where we append version-specific flags, mainly for warnings. These are +imported via a function in the script (`load_gnu_arguments()` for gcc). See +below for more detail. + +## Warnings files + +Keeping the Autotools and CMake build files in sync has always been a bit of a +struggle. One way that we help to ensure that the same flags are used in each +build system is to import the warnings settings from text files that are +maintained separately from the Autotools and CMake build files. We like to +configure the compiler to be as crabby as possible so as to catch subtle bugs, +so there are a LOT of warning flags for popular compilers like Clang and gcc. + +We've located these files in `config/*-warnings` directories. Each file +represents a compiler version and contains the warning flags we set, one to a +line. Lines that start with `#` are considered comment lines. You'll also see +`developer` and `no-developer` flavors of compiler version files. 
The former +corresponds to "developer flags" that are usually either only semi-useful and/or +generate a lot of (usually unfixable) noise. The latter corresponds to things +that we want to ensure do NOT appear in non-developer builds of the library. +These might involve a different level setting (`-Wfoo=x`) or something that +gets incorporated in a "conglomerate" flag like `-Wextra` so we need to set +`-Wno-foo` in non-developer builds. Developer warnings can be turned on +via a configure option. You will also sometimes see `error` files. Those are +files that include warnings that will be considered errors if you have enabled +the "warnings as errors" configure option set. Now that the library is largely +warning-free, these are less useful than in the past as you can now just set +-Werror directly in many cases (our configure script is smart about not running +configure checks with -Werror). + +For anyone interested, we are always interested in improving both the OS and +compiler files, so pull requests for those are always welcome, especially for +platforms we don't have routine access to. If you are a compiler or platform +expert/aficionado, please help us out! diff --git a/config/apple b/config/apple index 3eb65a7c2aa..a8a219b6798 100644 --- a/config/apple +++ b/config/apple @@ -21,58 +21,60 @@ # No support for OS older than darwin 10.X. if test "X-" = "X-$CC"; then case "$host_os" in - darwin10.*) # Snow Leopard. Use gcc/g++ because clang++ is not available. - CC=gcc - CC_BASENAME=gcc - ;; - *) - CC=clang - CC_BASENAME=clang - - # Production - PROD_CFLAGS="-O3" - PROD_CPPFLAGS= - - # Debug - DEBUG_CFLAGS="-g -O0" - DEBUG_CPPFLAGS= - - # Profile - # Use this for profiling with gprof - # Just "-g" for now. More later. - PROFILE_CFLAGS="-g" - PROFILE_CPPFLAGS= - ;; + darwin10.*) # Snow Leopard. Use gcc/g++ because clang++ is not available. 
+ CC=gcc + CC_BASENAME=gcc + ;; + *) + if test "X-$enable_parallel" = "X-yes"; then + # default to use mpicc which is the defacto MPI compiler name + CC=mpicc + CC_BASENAME=mpicc + else + CC=clang + CC_BASENAME=clang + fi + + # Production + PROD_CFLAGS="-O3" + PROD_CPPFLAGS= + + # Debug + DEBUG_CFLAGS="-g -O0" + DEBUG_CPPFLAGS= + + # Profile + # Use this for profiling with gprof + # Just "-g" for now. More later. + PROFILE_CFLAGS="-g" + PROFILE_CPPFLAGS= + ;; esac fi -# Figure out compiler flags +# Figure out C compiler flags . $srcdir/config/gnu-flags . $srcdir/config/clang-flags +. $srcdir/config/intel-flags + # temp patch: if GCC 4.2.1 is used in Lion or Mountain Lion systems, do not # use -O option as it causes failures in test/dt_arith. case "$host_os" in darwin1[12].*) # lion & mountain lion - #echo cc_vendor=$cc_vendor'-'cc_version=$cc_version - case "$cc_vendor-$cc_version" in - gcc-4.2.1) - # Remove any -O flags - #echo PROD_CFLAGS=$PROD_CFLAGS - PROD_CFLAGS="`echo $PROD_CFLAGS | sed -e 's/-O[0-3]*//'`" - #echo new PROD_CFLAGS=$PROD_CFLAGS - ;; - esac - ;; + #echo cc_vendor=$cc_vendor'-'cc_version=$cc_version + case "$cc_vendor-$cc_version" in + gcc-4.2.1) + # Remove any -O flags + #echo PROD_CFLAGS=$PROD_CFLAGS + PROD_CFLAGS="`echo $PROD_CFLAGS | sed -e 's/-O[0-3]*//'`" + #echo new PROD_CFLAGS=$PROD_CFLAGS + ;; + esac + ;; esac - -. $srcdir/config/intel-flags + if test "X-" = "X-$FC"; then case $CC_BASENAME in - clang) - # clang has no fortran compiler. Use gfortran. - FC=gfortran - FC_BASENAME=gfortran - ;; gcc*) FC=gfortran FC_BASENAME=gfortran @@ -81,15 +83,26 @@ if test "X-" = "X-$FC"; then FC=ifort FC_BASENAME=ifort ;; + mpicc*) + FC=mpif90 + FC_BASENAME=mpif90 + ;; + clang) + # clang has no fortran compiler. Use gfortran. + FC=gfortran + FC_BASENAME=gfortran + ;; esac fi +# Figure out FORTRAN compiler flags +. $srcdir/config/gnu-fflags +. $srcdir/config/intel-fflags + + +# The default C++ compiler is `clang++'. 
if test "X-" = "X-$CXX"; then case $CC_BASENAME in - clang) - CXX=clang++ - CXX_BASENAME=clang++ - ;; gcc) CXX=g++ CXX_BASENAME=g++ @@ -98,18 +111,21 @@ if test "X-" = "X-$CXX"; then CXX=icpc CXX_BASENAME=icpc ;; + mpicc*) + FC=mpif90 + FC_BASENAME=mpif90 + ;; + clang) + CXX=clang++ + CXX_BASENAME=clang++ + ;; esac fi -case $CXX_BASENAME in - clang++) - PROD_CXXFLAGS="-O3" - DEBUG_CXXFLAGS="-g -O0" - # Use this for profiling with gprof - # Just "-g" for now. More later. - PROFILE_CXXFLAGS="-g" - ;; -esac +# Figure out C++ compiler flags +. $srcdir/config/intel-cxxflags # Do this ahead of GNU to avoid icpc being detected as g++ +. $srcdir/config/gnu-cxxflags +. $srcdir/config/clang-cxxflags # compiler version strings case $CC in @@ -132,16 +148,15 @@ case $CC in echo "No match to get cc_version_info for $CC" ;; esac + # Figure out Fortran compiler flags and version strings case $FC in *gfortran*) - . $srcdir/config/gnu-fflags fc_version_info=`$FC $FCFLAGS $H5_FCFLAGS --version 2>&1 |\ grep 'GCC' | sed 's/\(.*(GCC) [-a-z0-9\. ]*\).*/\1/'` ;; *ifc*|*ifort*) - . $srcdir/config/intel-fflags fc_version_info=`$FC $FCFLAGS $H5_FCFLAGS -V 2>&1 | grep 'Version' |\ sed 's/\(Intel.* Compiler\).*\( Version [a-z0-9\.]*\).*\( Build [0-9]*\)/\1\2\3/'` ;; @@ -155,13 +170,11 @@ esac # get c++ version info case $CXX in clang++) - . $srcdir/config/clang-cxxflags cxx_version_info=`$CXX $CXXFLAGS $H5_CXXFLAGS --version 2>&1 |\ grep 'Apple' | sed 's/(.*//'` ;; *g++*) - . $srcdir/config/gnu-cxxflags cxx_version_info=`$CXX $CXXFLAGS $H5_CXXFLAGS --version 2>&1 |\ grep 'GCC' | sed 's/.*\((GCC) [-a-z0-9\. 
]*.*\)/\1/'` ;; diff --git a/config/clang-cxxflags b/config/clang-cxxflags index 5685ca1ee19..c279d67b882 100644 --- a/config/clang-cxxflags +++ b/config/clang-cxxflags @@ -112,7 +112,7 @@ if test "X-clang" = "X-$cxx_vendor" -o "X-Apple LLVM" = "X-$cxx_vendor"; then ;; esac - H5_CXXFLAGS="$H5_CXXFLAGS $arch" + H5_CXXFLAGS="$H5_CXXFLAGS $arch -std=c++98" ############## # Production # diff --git a/config/clang-warnings/error-general b/config/clang-warnings/error-general index 883dff76f7a..384fcc6ddc2 100644 --- a/config/clang-warnings/error-general +++ b/config/clang-warnings/error-general @@ -26,9 +26,6 @@ # -Wunused-variable # -# H5VLpassthru.c -# -Werror=unused-parameter -# -Wunused-parameter # # diff --git a/config/cmake-presets/hidden-presets.json b/config/cmake-presets/hidden-presets.json index 18ffdd17f5f..02d200aed8a 100644 --- a/config/cmake-presets/hidden-presets.json +++ b/config/cmake-presets/hidden-presets.json @@ -7,8 +7,8 @@ "description": "Basic build using Ninja generator", "generator": "Ninja", "hidden": true, - "binaryDir": "${sourceParentDir}/build/${presetName}", - "installDir": "${sourceParentDir}/install/${presetName}" + "binaryDir": "${sourceParentDir}/build112/${presetName}", + "installDir": "${sourceParentDir}/install112/${presetName}" }, { "name": "ci-x64", diff --git a/config/cmake/CTestCustom.cmake b/config/cmake/CTestCustom.cmake index 94a6481694b..30ff60f655d 100644 --- a/config/cmake/CTestCustom.cmake +++ b/config/cmake/CTestCustom.cmake @@ -23,6 +23,7 @@ set (CTEST_CUSTOM_WARNING_EXCEPTION "stamp.verify" "CMake Warning*stamp" "src.ZLIB.*:[ \t]*warning" + "src.HDF5_ZLIB.*:[ \t]*warning" "warning LNK4197:.*ZLIB-prefix" "src.SZIP.*:[ \t]*warning" # "POSIX name for this item is deprecated" @@ -36,6 +37,7 @@ set (CTEST_CUSTOM_WARNING_EXCEPTION ".*note.*expected.*void.*but argument is of type.*volatile.*" ".*src.SZIP.*:[ \t]*warning.*" ".*src.ZLIB.*:[ \t]*warning.*" + ".*src.HDF5_ZLIB.*:[ \t]*warning.*" ".*src.JPEG.*:[ \t]*warning.*" 
".*POSIX name for this item is deprecated.*" ".*disabling jobserver mode.*" @@ -64,19 +66,32 @@ set (CTEST_CUSTOM_MEMCHECK_IGNORE H5TEST-vds_env-clear-objects PERFORM_h5perform-clear-objects HL_test-clear-objects + HL_test-clean-objects HL_FORTRAN_test-clear-objects + HL_FORTRAN_test-clean-objects FORTRAN_testhdf5-clear-objects + FORTRAN_testhdf5-clean-objects FORTRAN_flush1-clear-objects + FORTRAN_flush1-clean-objects CPP_testhdf5-clear-objects + CPP_testhdf5-clean-objects ######### examples ######### EXAMPLES-clear-objects + EXAMPLES-clean-objects CPP_ex-clear-objects + CPP_ex-clean-objects CPP_ex_tutr-clear-objects + CPP_ex_tutr-clean-objects HL_ex-clear-objects + HL_ex-clean-objects f90_ex-clear-objects + f90_ex-clean-objects HL_CPP_ptableTest-clear-objects + HL_CPP_ptableTest-clean-objects HL_CPP_ex_ptExampleFL-clear-objects + HL_CPP_ex_ptExampleFL-clean-objects HL_FORTRAN_f90_ex-clear-objects + HL_FORTRAN_f90_ex-clean-objects ######### tools/h5clear ######### H5CLEAR-clearall-objects H5CLEAR-h5clear_gentest # does not close ids by design @@ -196,5 +211,7 @@ set (CTEST_CUSTOM_MEMCHECK_IGNORE PERFORM_h5perform-clearall-objects ######### hl/tools ######### HL_TOOLS-clear-objects + HL_TOOLS-clean-objects H5WATCH-clearall-objects + H5WATCH-cleanall-objects ) diff --git a/config/cmake/ConfigureChecks.cmake b/config/cmake/ConfigureChecks.cmake index 8900d4c6d1c..ee57eb0ecf2 100644 --- a/config/cmake/ConfigureChecks.cmake +++ b/config/cmake/ConfigureChecks.cmake @@ -159,7 +159,7 @@ CHECK_INCLUDE_FILE_CONCAT (stdbool.h ${HDF_PREFIX}_HAVE_STDBOOL_H) ## Check for non-standard extension quadmath.h CHECK_INCLUDE_FILES(quadmath.h C_HAVE_QUADMATH) -if (${C_HAVE_QUADMATH}) +if (C_HAVE_QUADMATH) set(${HDF_PREFIX}_HAVE_QUADMATH_H 1) else () set(${HDF_PREFIX}_HAVE_QUADMATH_H 0) @@ -898,7 +898,7 @@ endif() if (HDF5_BUILD_FORTRAN) HDF_CHECK_TYPE_SIZE(__float128 _SIZEOF___FLOAT128) - if (${_SIZEOF___FLOAT128}) + if (_SIZEOF___FLOAT128) set (${HDF_PREFIX}_HAVE_FLOAT128 1) set 
(${HDF_PREFIX}_SIZEOF___FLOAT128 ${_SIZEOF___FLOAT128}) else () @@ -907,7 +907,7 @@ if (HDF5_BUILD_FORTRAN) endif () HDF_CHECK_TYPE_SIZE(_Quad _SIZEOF__QUAD) - if (NOT ${_SIZEOF__QUAD}) + if (NOT _SIZEOF__QUAD) set (${HDF_PREFIX}_SIZEOF__QUAD 0) else () set (${HDF_PREFIX}_SIZEOF__QUAD ${_SIZEOF__QUAD}) @@ -927,12 +927,17 @@ if (HDF5_BUILD_FORTRAN) ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/testCCompiler1.c ${SOURCE_CODE} ) + if (CMAKE_VERSION VERSION_LESS 3.25) + set (_RUN_OUTPUT_VARIABLE "RUN_OUTPUT_VARIABLE") + else () + set (_RUN_OUTPUT_VARIABLE "RUN_OUTPUT_STDOUT_VARIABLE") + endif() TRY_RUN (RUN_RESULT_VAR COMPILE_RESULT_VAR ${CMAKE_BINARY_DIR} ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/testCCompiler1.c COMPILE_DEFINITIONS "-D_SIZEOF___FLOAT128=${H5_SIZEOF___FLOAT128};-D_HAVE_QUADMATH_H=${H5_HAVE_QUADMATH_H}" COMPILE_OUTPUT_VARIABLE COMPILEOUT - RUN_OUTPUT_VARIABLE OUTPUT_VAR + ${_RUN_OUTPUT_VARIABLE} OUTPUT_VAR ) set (${RETURN_OUTPUT_VAR} ${OUTPUT_VAR}) @@ -969,7 +974,6 @@ if (HDF5_BUILD_FORTRAN) message (FATAL_ERROR "Compilation of C ${FUNCTION_NAME} - Failed") endif () endmacro () - set (PROG_SRC " #include \n\ @@ -991,7 +995,7 @@ if (HDF5_BUILD_FORTRAN) #define C_LDBL_DIG DECIMAL_DIG\n\ #else\n\ #define C_LDBL_DIG LDBL_DIG\n\ -#endif\n\nint main() {\nprintf(\"\\%d\\\;\\%d\\\;\", C_LDBL_DIG, C_FLT128_DIG)\\\;\n\nreturn 0\\\;\n}\n +#endif\n\nint main(void) {\nprintf(\"\\%d\\\;\\%d\\\;\", C_LDBL_DIG, C_FLT128_DIG)\\\;\n\nreturn 0\\\;\n}\n " ) @@ -1003,21 +1007,23 @@ if (HDF5_BUILD_FORTRAN) list (GET PROG_OUTPUT4 0 H5_LDBL_DIG) list (GET PROG_OUTPUT4 1 H5_FLT128_DIG) - endif () - if (${HDF_PREFIX}_SIZEOF___FLOAT128 EQUAL "0" OR FLT128_DIG EQUAL "0") - set (${HDF_PREFIX}_HAVE_FLOAT128 0) - set (${HDF_PREFIX}_SIZEOF___FLOAT128 0) - set (_PAC_C_MAX_REAL_PRECISION ${H5_LDBL_DIG}) + if (${HDF_PREFIX}_SIZEOF___FLOAT128 EQUAL "0" OR FLT128_DIG EQUAL "0") + set (${HDF_PREFIX}_HAVE_FLOAT128 0) + set (${HDF_PREFIX}_SIZEOF___FLOAT128 0) + set 
(_PAC_C_MAX_REAL_PRECISION ${H5_LDBL_DIG}) + else () + set (_PAC_C_MAX_REAL_PRECISION ${H5_FLT128_DIG}) + endif () + if (NOT ${_PAC_C_MAX_REAL_PRECISION}) + set (${HDF_PREFIX}_PAC_C_MAX_REAL_PRECISION 0) + else () + set (${HDF_PREFIX}_PAC_C_MAX_REAL_PRECISION ${_PAC_C_MAX_REAL_PRECISION}) + endif () + message (STATUS "maximum decimal precision for C var - ${${HDF_PREFIX}_PAC_C_MAX_REAL_PRECISION}") else () - set (_PAC_C_MAX_REAL_PRECISION ${H5_FLT128_DIG}) - endif () - if (NOT ${_PAC_C_MAX_REAL_PRECISION}) set (${HDF_PREFIX}_PAC_C_MAX_REAL_PRECISION 0) - else () - set (${HDF_PREFIX}_PAC_C_MAX_REAL_PRECISION ${_PAC_C_MAX_REAL_PRECISION}) endif () - message (STATUS "maximum decimal precision for C var - ${${HDF_PREFIX}_PAC_C_MAX_REAL_PRECISION}") endif() @@ -1067,7 +1073,7 @@ endmacro () # ---------------------------------------------------------------------- # Set the flag to indicate that the machine is using a special algorithm to convert # 'long double' to '(unsigned) long' values. (This flag should only be set for -# the IBM Power6 Linux. When the bit sequence of long double is +# the IBM Power Linux. When the bit sequence of long double is # 0x4351ccf385ebc8a0bfcc2a3c3d855620, the converted value of (unsigned)long # is 0x004733ce17af227f, not the same as the library's conversion to 0x004733ce17af2282. # The machine's conversion gets the correct value. We define the macro and disable @@ -1077,7 +1083,7 @@ H5ConversionTests (${HDF_PREFIX}_LDOUBLE_TO_LONG_SPECIAL "Checking IF your syst # ---------------------------------------------------------------------- # Set the flag to indicate that the machine is using a special algorithm # to convert some values of '(unsigned) long' to 'long double' values. 
-# (This flag should be off for all machines, except for IBM Power6 Linux, +# (This flag should be off for all machines, except for IBM Power Linux, # when the bit sequences are 003fff..., 007fff..., 00ffff..., 01ffff..., # ..., 7fffff..., the compiler uses a unknown algorithm. We define a # macro and skip the test for now until we know about the algorithm. diff --git a/config/cmake/ConversionTests.c b/config/cmake/ConversionTests.c index f80959fdc79..725f0496f01 100644 --- a/config/cmake/ConversionTests.c +++ b/config/cmake/ConversionTests.c @@ -11,13 +11,13 @@ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ #if defined(__has_attribute) -#if __has_attribute(no_sanitize) -#define HDF_NO_UBSAN __attribute__((no_sanitize("undefined"))) +# if __has_attribute(no_sanitize) +# define HDF_NO_UBSAN __attribute__((no_sanitize("undefined"))) +# else +# define HDF_NO_UBSAN +# endif #else -#define HDF_NO_UBSAN -#endif -#else -#define HDF_NO_UBSAN +# define HDF_NO_UBSAN #endif #ifdef H5_LDOUBLE_TO_LONG_SPECIAL_TEST @@ -34,45 +34,51 @@ int HDF_NO_UBSAN main(void) unsigned char s2[8]; int ret = 1; - if(sizeof(long double) == 16 && sizeof(long) == 8) { - /*make sure the long double type has 16 bytes in size and - * 11 bits of exponent. If it is, - *the bit sequence should be like below. It's not - *a decent way to check but this info isn't available. */ - memcpy(s, &ld, 16); - if(s[0]==0x43 && s[1]==0x51 && s[2]==0xcc && s[3]==0xf3 && - s[4]==0x85 && s[5]==0xeb && s[6]==0xc8 && s[7]==0xa0 && - s[8]==0xbf && s[9]==0xcc && s[10]==0x2a && s[11]==0x3c) { - - /* Assign the hexadecimal value of long double type. 
*/ - s[0]=0x43; s[1]=0x51; s[2]=0xcc; s[3]=0xf3; - s[4]=0x85; s[5]=0xeb; s[6]=0xc8; s[7]=0xa0; - s[8]=0xbf; s[9]=0xcc; s[10]=0x2a; s[11]=0x3c; - s[12]=0x3d; s[13]=0x85; s[14]=0x56; s[15]=0x20; - - memcpy(&ld, s, 16); - - ll = (long)ld; - memcpy(s2, &ll, 8); - - /* The library's algorithm converts it to 0x 00 47 33 ce 17 af 22 82 - * and gets wrong value 20041683600089730 on the IBM Power6 Linux. - * But the IBM Power6 Linux converts it to 0x00 47 33 ce 17 af 22 7f - * and gets the correct value 20041683600089727. It uses some special - * algorithm. We're going to define the macro and skip the test until - * we can figure out how they do it. */ - if(s2[0]==0x00 && s2[1]==0x47 && s2[2]==0x33 && s2[3]==0xce && - s2[4]==0x17 && s2[5]==0xaf && s2[6]==0x22 && s2[7]==0x7f) - ret = 0; - - ull = (unsigned long)ld; - memcpy(s2, &ull, 8); - - /* The unsigned long is the same as signed long. */ - if(s2[0]==0x00 && s2[1]==0x47 && s2[2]==0x33 && s2[3]==0xce && - s2[4]==0x17 && s2[5]==0xaf && s2[6]==0x22 && s2[7]==0x7f) - ret = 0; - } + if (sizeof(long double) == 16 && sizeof(long) == 8) { + /* Make sure the long double type has 16 bytes in size and + * 11 bits of exponent. If it is, the bit sequence should be + * like below. It's not a decent way to check but this info + * isn't available. + */ + memcpy(s, &ld, 16); + if (s[0]==0x43 && s[1]==0x51 && s[2]==0xcc && s[3]==0xf3 && + s[4]==0x85 && s[5]==0xeb && s[6]==0xc8 && s[7]==0xa0 && + s[8]==0xbf && s[9]==0xcc && s[10]==0x2a && s[11]==0x3c) { + + /* Assign the hexadecimal value of long double type. */ + s[0]=0x43; s[1]=0x51; s[2]=0xcc; s[3]=0xf3; + s[4]=0x85; s[5]=0xeb; s[6]=0xc8; s[7]=0xa0; + s[8]=0xbf; s[9]=0xcc; s[10]=0x2a; s[11]=0x3c; + s[12]=0x3d; s[13]=0x85; s[14]=0x56; s[15]=0x20; + + memcpy(&ld, s, 16); + + ll = (long)ld; + memcpy(s2, &ll, 8); + + /* The library's algorithm converts it to 0x00 47 33 ce 17 af 22 82 + * and gets wrong value 20041683600089730 on Linux on IBM Power + * architecture. 
+ * + * But Linux on IBM Power converts it to 0x00 47 33 ce 17 af 22 7f + * and gets the correct value 20041683600089727. It uses some special + * algorithm. We're going to define the macro and skip the test until + * we can figure out how they do it. + */ + if (s2[0]==0x00 && s2[1]==0x47 && s2[2]==0x33 && s2[3]==0xce && + s2[4]==0x17 && s2[5]==0xaf && s2[6]==0x22 && s2[7]==0x7f) + + ret = 0; + + ull = (unsigned long)ld; + memcpy(s2, &ull, 8); + + /* The unsigned long is the same as signed long */ + if(s2[0]==0x00 && s2[1]==0x47 && s2[2]==0x33 && s2[3]==0xce && + s2[4]==0x17 && s2[5]==0xaf && s2[6]==0x22 && s2[7]==0x7f) + + ret = 0; + } } done: @@ -94,50 +100,61 @@ int HDF_NO_UBSAN main(void) unsigned char s[16]; int flag=0, ret=1; - /*Determine if long double has 16 byte in size, 11 bit exponent, and - *the bias is 0x3ff */ - if(sizeof(long double) == 16) { - ld = 1.0L; - memcpy(s, &ld, 16); - if(s[0]==0x3f && s[1]==0xf0 && s[2]==0x00 && s[3]==0x00 && - s[4]==0x00 && s[5]==0x00 && s[6]==0x00 && s[7]==0x00) - flag = 1; - } + /* Determine if long double has 16 byte in size, 11 bit exponent, and + * the bias is 0x3ff + */ + if (sizeof(long double) == 16) { + ld = 1.0L; + memcpy(s, &ld, 16); + + if (s[0]==0x3f && s[1]==0xf0 && s[2]==0x00 && s[3]==0x00 && + s[4]==0x00 && s[5]==0x00 && s[6]==0x00 && s[7]==0x00) - if(flag==1 && sizeof(long)==8) { - ll = 0x003fffffffffffffL; - ld = (long double)ll; - memcpy(s, &ld, 16); - /* The library converts the value to 0x434fffffffffffff8000000000000000. - * In decimal it is 18014398509481982.000000, one value short of the original. - * The IBM Power6 Linux converts it to 0x4350000000000000bff0000000000000. - * The value is correct in decimal. It uses some special - * algorithm. We're going to define the macro and skip the test until - * we can figure out how they do it. 
*/ - if(s[0]==0x43 && s[1]==0x50 && s[2]==0x00 && s[3]==0x00 && - s[4]==0x00 && s[5]==0x00 && s[6]==0x00 && s[7]==0x00 && - s[8]==0xbf && s[9]==0xf0 && s[10]==0x00 && s[11]==0x00 && - s[12]==0x00 && s[13]==0x00 && s[14]==0x00 && s[15]==0x00) - ret = 0; + flag = 1; } - if(flag==1 && sizeof(unsigned long)==8) { - ull = 0xffffffffffffffffUL; - ld = (long double)ull; - memcpy(s, &ld, 16); - /* Use a different value from signed long to test. The problem is the same - * for both long and unsigned long. The value is 18446744073709551615. - * The library converts the value to 0x43effffffffffffffe000000000000000. - * In decimal it's 18446744073709548544.000000, very different from the original. - * The IBM Power6 Linux converts it to 0x43f0000000000000bff0000000000000. - * The value is correct in decimal. It uses some special - * algorithm. We're going to define the macro and skip the test until - * we can figure out how they do it. */ - if(s[0]==0x43 && s[1]==0xf0 && s[2]==0x00 && s[3]==0x00 && - s[4]==0x00 && s[5]==0x00 && s[6]==0x00 && s[7]==0x00 && - s[8]==0xbf && s[9]==0xf0 && s[10]==0x00 && s[11]==0x00 && - s[12]==0x00 && s[13]==0x00 && s[14]==0x00 && s[15]==0x00) - ret = 0; + + if (flag==1 && sizeof(long)==8) { + ll = 0x003fffffffffffffL; + ld = (long double)ll; + memcpy(s, &ld, 16); + + /* The library converts the value to 0x434fffffffffffff8000000000000000. + * In decimal it is 18014398509481982.000000, one value short of the original. + * + * Linux on IBM Power architecture converts it to + * 0x4350000000000000bff0000000000000. The value is correct in decimal. + * It uses some special algorithm. We're going to define the macro and + * skip the test until we can figure out how they do it. 
+ */ + if (s[0]==0x43 && s[1]==0x50 && s[2]==0x00 && s[3]==0x00 && + s[4]==0x00 && s[5]==0x00 && s[6]==0x00 && s[7]==0x00 && + s[8]==0xbf && s[9]==0xf0 && s[10]==0x00 && s[11]==0x00 && + s[12]==0x00 && s[13]==0x00 && s[14]==0x00 && s[15]==0x00) + + ret = 0; } + + if (flag==1 && sizeof(unsigned long)==8) { + ull = 0xffffffffffffffffUL; + ld = (long double)ull; + memcpy(s, &ld, 16); + + /* Use a different value from signed long to test. The problem is the + * same for both long and unsigned long. The value is 18446744073709551615. + * The library converts the value to 0x43effffffffffffffe000000000000000. + * In decimal it's 18446744073709548544.000000, very different from the + * original. Linux on IBM Power architecture converts it to + * 0x43f0000000000000bff0000000000000. The value is correct in decimal. + * It uses some special algorithm. We're going to define the macro and + * skip the test until we can figure out how they do it. + */ + if (s[0]==0x43 && s[1]==0xf0 && s[2]==0x00 && s[3]==0x00 && + s[4]==0x00 && s[5]==0x00 && s[6]==0x00 && s[7]==0x00 && + s[8]==0xbf && s[9]==0xf0 && s[10]==0x00 && s[11]==0x00 && + s[12]==0x00 && s[13]==0x00 && s[14]==0x00 && s[15]==0x00) + + ret = 0; + } done: exit(ret); } @@ -157,18 +174,21 @@ int HDF_NO_UBSAN main(void) unsigned char s[16]; int ret = 0; - if(sizeof(long double) == 16) { - /*make sure the long double type is the same as the failing type - *which has 16 bytes in size and 11 bits of exponent. If it is, - *the bit sequence should be like below. It's not - *a decent way to check but this info isn't available. */ + if (sizeof(long double) == 16) { + /* Make sure the long double type is the same as the failing type + * which has 16 bytes in size and 11 bits of exponent. If it is, + * the bit sequence should be like below. It's not + * a decent way to check but this info isn't available. 
+ */ memcpy(s, &ld, 16); - if(s[0]==0x43 && s[1]==0x51 && s[2]==0xcc && s[3]==0xf3 && + + if (s[0]==0x43 && s[1]==0x51 && s[2]==0xcc && s[3]==0xf3 && s[4]==0x85 && s[5]==0xeb && s[6]==0xc8 && s[7]==0xa0 && s[8]==0xbf && s[9]==0xcc && s[10]==0x2a && s[11]==0x3c) { - /*slightly adjust the bit sequence (s[8]=0xdf). The converted - *values will go wild on Mac OS 10.4 and IRIX64 6.5.*/ + /* Slightly adjust the bit sequence (s[8]=0xdf). The converted + * values will go wild on Mac OS 10.4 and IRIX64 6.5. + */ s[0]=0x43; s[1]=0x51; s[2]=0xcc; s[3]=0xf3; s[4]=0x85; s[5]=0xeb; s[6]=0xc8; s[7]=0xa0; s[8]=0xdf; s[9]=0xcc; s[10]=0x2a; s[11]=0x3c; @@ -178,7 +198,7 @@ int HDF_NO_UBSAN main(void) ll = (long long)ld; ull = (unsigned long long)ld; - if(ll != 20041683600089728 || ull != 20041683600089728) + if (ll != 20041683600089728 || ull != 20041683600089728) ret = 1; } } @@ -200,93 +220,47 @@ int HDF_NO_UBSAN main(void) unsigned char s[16]; int flag=0, ret=0; - /*Determine if long double has 16 byte in size, 11 bit exponent, and - *the bias is 0x3ff */ - if(sizeof(long double) == 16) { + /* Determine if long double has 16 byte in size, 11 bit exponent, and + * the bias is 0x3ff + */ + if (sizeof(long double) == 16) { ld = 1.0L; memcpy(s, &ld, 16); - if(s[0]==0x3f && s[1]==0xf0 && s[2]==0x00 && s[3]==0x00 && + if (s[0]==0x3f && s[1]==0xf0 && s[2]==0x00 && s[3]==0x00 && s[4]==0x00 && s[5]==0x00 && s[6]==0x00 && s[7]==0x00) + flag = 1; } - if(flag==1 && sizeof(long long)==8) { + if (flag==1 && sizeof(long long)==8) { ll = 0x01ffffffffffffffLL; ld = (long double)ll; memcpy(s, &ld, 16); - /*Check if the bit sequence is as supposed to be*/ - if(s[0]!=0x43 || s[1]!=0x7f || s[2]!=0xff || s[3]!=0xff || + + /* Check if the bit sequence is as expected*/ + if (s[0]!=0x43 || s[1]!=0x7f || s[2]!=0xff || s[3]!=0xff || s[4]!=0xff || s[5]!=0xff || s[6]!=0xff || s[7]!=0xff || s[8]!=0xf0 || s[9]!=0x00 || s[10]!=0x00 || s[11]!=0x00) + ret = 1; } - if(flag==1 && sizeof(unsigned long long)==8) { + 
if (flag==1 && sizeof(unsigned long long)==8) { ull = 0x01ffffffffffffffULL; ld = (long double)ull; memcpy(s, &ld, 16); - if(s[0]!=0x43 || s[1]!=0x7f || s[2]!=0xff || s[3]!=0xff || + + if (s[0]!=0x43 || s[1]!=0x7f || s[2]!=0xff || s[3]!=0xff || s[4]!=0xff || s[5]!=0xff || s[6]!=0xff || s[7]!=0xff || s[8]!=0xf0 || s[9]!=0x00 || s[10]!=0x00 || s[11]!=0x00) + ret = 1; } + done: exit(ret); } #endif -#ifdef H5_NO_ALIGNMENT_RESTRICTIONS_TEST - -#include -#include - -typedef struct { - size_t len; - void *p; -} hvl_t; - -#ifdef FC_DUMMY_MAIN -#ifndef FC_DUMMY_MAIN_EQ_F77 -# ifdef __cplusplus -extern "C" -# endif -int FC_DUMMY_MAIN() -{ return 1;} -#endif -#endif -int HDF_NO_UBSAN -main () -{ - - char *chp = "beefs"; - char **chpp = malloc (2 * sizeof (char *)); - char **chpp2; - hvl_t vl = { 12345, (void *) chp }; - hvl_t *vlp; - hvl_t *vlp2; - - memcpy ((void *) ((char *) chpp + 1), &chp, sizeof (char *)); - chpp2 = (char **) ((char *) chpp + 1); - if (strcmp (*chpp2, chp)) { - free (chpp); - return 1; - } - free (chpp); - - vlp = malloc (2 * sizeof (hvl_t)); - memcpy ((void *) ((char *) vlp + 1), &vl, sizeof (hvl_t)); - vlp2 = (hvl_t *) ((char *) vlp + 1); - if (vlp2->len != vl.len || vlp2->p != vl.p) { - free (vlp); - return 1; - } - free (vlp); - - ; - return 0; -} - -#endif - #ifdef H5_DISABLE_SOME_LDOUBLE_CONV_TEST #include @@ -304,7 +278,7 @@ int HDF_NO_UBSAN main(void) pclose(fp); - if(strncmp(cpu, "ppc64le", 7) == 0) + if (strncmp(cpu, "ppc64le", 7) == 0) return 0; return 1; diff --git a/config/cmake/H5pubconf.h.in b/config/cmake/H5pubconf.h.in index 761717d8412..838726c4126 100644 --- a/config/cmake/H5pubconf.h.in +++ b/config/cmake/H5pubconf.h.in @@ -14,13 +14,13 @@ #ifndef H5_CONFIG_H_ #define H5_CONFIG_H_ -/* Define if the Windows virtual file driver should be compiled */ +/* Define if this is a Windows machine */ #cmakedefine H5_HAVE_WINDOWS @H5_HAVE_WINDOWS@ /* Define if using MinGW */ #cmakedefine H5_HAVE_MINGW @H5_HAVE_MINGW@ -/* Define if on the Windows 
platform and default WIN32 API */ +/* Define if on the Windows platform and using the Win32 API */ #cmakedefine H5_HAVE_WIN32_API @H5_HAVE_WIN32_API@ /* Define if using a Windows compiler (i.e. Visual Studio) */ diff --git a/config/cmake/HDF5DeveloperBuild.cmake b/config/cmake/HDF5DeveloperBuild.cmake new file mode 100644 index 00000000000..40efb0e0711 --- /dev/null +++ b/config/cmake/HDF5DeveloperBuild.cmake @@ -0,0 +1,196 @@ +# +# Copyright by The HDF Group. +# All rights reserved. +# +# This file is part of HDF5. The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in +# the COPYING file, which can be found at the root of the source code +# distribution tree, or in https://www.hdfgroup.org/licenses. +# If you do not have access to either file, you may request a copy from +# help@hdfgroup.org. +# + +# CMake settings for HDF5 Developer mode builds + +# Set CMake C++ flags based off of Debug build flags +set (CMAKE_CXX_FLAGS_DEVELOPER ${CMAKE_CXX_FLAGS_DEBUG} CACHE STRING + "Flags used by the C++ compiler during developer builds." FORCE +) + +# Set CMake C flags based off of Debug build flags. Add in -Og +# option to disable some GCC optimizations that might affect +# debugging negatively and also include some GCC compiler passes +# that collect debugging information +set (CMAKE_C_FLAGS_DEVELOPER "${CMAKE_C_FLAGS_DEBUG} -Og" CACHE STRING + "Flags used by the C compiler during developer builds." FORCE +) + +# Set CMake binary linker flags based off of Debug binary linker flags +set (CMAKE_EXE_LINKER_FLAGS_DEVELOPER ${CMAKE_EXE_LINKER_FLAGS_DEBUG} + CACHE STRING "Flags used for linking binaries during developer builds." + FORCE +) + +# Set CMake shared library linker flags based off of Debug shared library +# linker flags +set (CMAKE_SHARED_LINKER_FLAGS_DEVELOPER ${CMAKE_SHARED_LINKER_FLAGS_DEBUG} + CACHE STRING "Flags used by the shared libraries linker during developer builds." 
+ FORCE +) + +mark_as_advanced ( + CMAKE_CXX_FLAGS_DEVELOPER + CMAKE_C_FLAGS_DEVELOPER + CMAKE_EXE_LINKER_FLAGS_DEVELOPER + CMAKE_SHARED_LINKER_FLAGS_DEVELOPER +) + +#----------------------------------------------------------------------------- +# Define various HDF5 macros for debugging the library +#----------------------------------------------------------------------------- + +# Enable debugging of various HDF5 modules +set (HDF5_ENABLE_DEBUG_APIS ON CACHE BOOL "Turn on extra debug output in all packages" FORCE) + +# HDF5 module debug definitions for debug code which either isn't +# currently integrated with HDF5_ENABLE_DEBUG_APIS, or which isn't +# well integrated with HDF5's H5DEBUG(X) (where 'X' is a package +# letter) system. This type of debug code usually always prints output +# to stdout, regardless of whether debugging for its particular module +# has been requested via the HDF5_DEBUG environment variable. Therefore, +# we don't automatically enable this debug code, but allow developers +# to quickly add those definitions into their build here, without +# needing to hack up source files. 
+option (HDF5_ENABLE_DEBUG_H5AC_DIRTY_BYTES "Enable printing of H5AC module dirty bytes information" OFF) +mark_as_advanced (HDF5_ENABLE_DEBUG_H5AC_DIRTY_BYTES) +if (HDF5_ENABLE_DEBUG_H5AC_DIRTY_BYTES) + list (APPEND HDF5_DEBUG_APIS H5AC_DEBUG_DIRTY_BYTES_CREATION) +endif () + +option (HDF5_ENABLE_DEBUG_H5FA "Enable debugging of H5FA module" OFF) +mark_as_advanced (HDF5_ENABLE_DEBUG_H5FA) +if (HDF5_ENABLE_DEBUG_H5FA) + list (APPEND HDF5_DEBUG_APIS H5FA_DEBUG) +endif () + +option (HDF5_ENABLE_DEBUG_H5FD_ALLOC "Enable debugging of H5FD module allocation code" OFF) +mark_as_advanced (HDF5_ENABLE_DEBUG_H5FD_ALLOC) +if (HDF5_ENABLE_DEBUG_H5FD_ALLOC) + list (APPEND HDF5_DEBUG_APIS H5FD_ALLOC_DEBUG) +endif () + +option (HDF5_ENABLE_DEBUG_H5FL "Enable debugging of H5FL module" OFF) +mark_as_advanced (HDF5_ENABLE_DEBUG_H5FL) +if (HDF5_ENABLE_DEBUG_H5FL) + list (APPEND HDF5_DEBUG_APIS H5FL_DEBUG) +endif () + +option (HDF5_ENABLE_DEBUG_H5FS "Enable debugging of H5FS module" OFF) +mark_as_advanced (HDF5_ENABLE_DEBUG_H5FS) +if (HDF5_ENABLE_DEBUG_H5FS) + list (APPEND HDF5_DEBUG_APIS H5FS_DEBUG) +endif () + +option (HDF5_ENABLE_DEBUG_H5FS_SINFO "Enable debugging of H5FS module section info" OFF) +mark_as_advanced (HDF5_ENABLE_DEBUG_H5FS_SINFO) +if (HDF5_ENABLE_DEBUG_H5FS_SINFO) + list (APPEND HDF5_DEBUG_APIS H5FS_SINFO_DEBUG) +endif () + +option (HDF5_ENABLE_DEBUG_H5MF_AGGR "Enable debugging of H5MF module aggregation code" OFF) +mark_as_advanced (HDF5_ENABLE_DEBUG_H5MF_AGGR) +if (HDF5_ENABLE_DEBUG_H5MF_AGGR) + list (APPEND HDF5_DEBUG_APIS H5MF_AGGR_DEBUG) +endif () + +option (HDF5_ENABLE_DEBUG_H5MF_ALLOC "Enable debugging of H5MF module allocation code" OFF) +mark_as_advanced (HDF5_ENABLE_DEBUG_H5MF_ALLOC) +if (HDF5_ENABLE_DEBUG_H5MF_ALLOC) + list (APPEND HDF5_DEBUG_APIS H5MF_ALLOC_DEBUG) +endif () + +option (HDF5_ENABLE_DEBUG_H5MF_ALLOC_MORE "Enable extra debugging of H5MF module allocation code" OFF) +mark_as_advanced (HDF5_ENABLE_DEBUG_H5MF_ALLOC_MORE) +if 
(HDF5_ENABLE_DEBUG_H5MF_ALLOC_MORE) + list (APPEND HDF5_DEBUG_APIS H5MF_ALLOC_DEBUG_MORE) +endif () + +option (HDF5_ENABLE_DEBUG_H5MF_ALLOC_DUMP "Enable printing of debugging info for H5MF module allocation code" OFF) +mark_as_advanced (HDF5_ENABLE_DEBUG_H5MF_ALLOC_DUMP) +if (HDF5_ENABLE_DEBUG_H5MF_ALLOC_DUMP) + list (APPEND HDF5_DEBUG_APIS H5MF_ALLOC_DEBUG_DUMP) +endif () + +option (HDF5_ENABLE_DEBUG_H5R "Enable debugging of H5R module" OFF) +mark_as_advanced (HDF5_ENABLE_DEBUG_H5R) +if (HDF5_ENABLE_DEBUG_H5R) + list (APPEND HDF5_DEBUG_APIS H5R_DEBUG) +endif () + +option (HDF5_ENABLE_DEBUG_H5S_HYPER "Enable debugging of H5S hyperslab code" OFF) +mark_as_advanced (HDF5_ENABLE_DEBUG_H5S_HYPER) +if (HDF5_ENABLE_DEBUG_H5S_HYPER) + list (APPEND HDF5_DEBUG_APIS H5S_HYPER_DEBUG) +endif () + +option (HDF5_ENABLE_DEBUG_H5T_REF "Enable debugging of H5T module reference code" OFF) +mark_as_advanced (HDF5_ENABLE_DEBUG_H5T_REF) +if (HDF5_ENABLE_DEBUG_H5T_REF) + list (APPEND HDF5_DEBUG_APIS H5T_REF_DEBUG) +endif () + +# HDF5 module debug definitions for debug code which may add +# considerable amounts of overhead when enabled and is usually +# only useful for specific circumstances rather than general +# developer use. 
+option (HDF5_ENABLE_DEBUG_H5B "Enable debugging of H5B module" OFF) +mark_as_advanced (HDF5_ENABLE_DEBUG_H5B) +if (HDF5_ENABLE_DEBUG_H5B) + list (APPEND HDF5_DEBUG_APIS H5B_DEBUG) +endif () + +option (HDF5_ENABLE_DEBUG_H5B2 "Enable debugging of H5B2 module" OFF) +mark_as_advanced (HDF5_ENABLE_DEBUG_H5B2) +if (HDF5_ENABLE_DEBUG_H5B2) + list (APPEND HDF5_DEBUG_APIS H5B2_DEBUG) +endif () + +option (HDF5_ENABLE_DEBUG_H5C_SANITY_CHECKS "Enable full sanity checking in H5C module" OFF) +mark_as_advanced (HDF5_ENABLE_DEBUG_H5C_SANITY_CHECKS) +if (HDF5_ENABLE_DEBUG_H5C_SANITY_CHECKS) + list (APPEND HDF5_DEBUG_APIS H5C_DO_SANITY_CHECKS) + list (APPEND HDF5_DEBUG_APIS H5C_DO_SLIST_SANITY_CHECKS) + list (APPEND HDF5_DEBUG_APIS H5C_DO_TAGGING_SANITY_CHECKS) + list (APPEND HDF5_DEBUG_APIS H5C_DO_EXTREME_SANITY_CHECKS) + + # See note in H5Cprivate.h about this #define + # list (APPEND HDF5_DEBUG_APIS H5C_DO_MEMORY_SANITY_CHECKS=1) +endif () + +option (HDF5_ENABLE_DEBUG_H5FL_TRACK "Enable tracking of free list allocations" OFF) +mark_as_advanced (HDF5_ENABLE_DEBUG_H5FL_TRACK) +if (HDF5_ENABLE_DEBUG_H5FL_TRACK) + list (APPEND HDF5_DEBUG_APIS H5FL_TRACK) + + # Free list tracking requires the codestack functionality + set (HDF5_ENABLE_CODESTACK ON CACHE BOOL "Enable the function stack tracing (for developer debugging)." 
FORCE) +else () + unset (HDF5_ENABLE_CODESTACK CACHE) +endif () + +option (HDF5_ENABLE_DEBUG_H5FS_ASSERT "Enable extra debugging of H5FS module" OFF) +mark_as_advanced (HDF5_ENABLE_DEBUG_H5FS_ASSERT) +if (HDF5_ENABLE_DEBUG_H5FS_ASSERT) + list (APPEND HDF5_DEBUG_APIS H5FS_DEBUG_ASSERT) +endif () + +# If HDF5 free list debugging wasn't specifically enabled, disable +# free lists entirely for developer build modes, as they can +# make certain types of issues (like references to stale pointers) +# much more difficult to debug +if (NOT HDF5_ENABLE_DEBUG_H5FL AND NOT HDF5_ENABLE_DEBUG_H5FL_TRACK) + list (APPEND HDF5_DEVELOPER_DEFS H5_NO_FREE_LISTS) +endif () + +# Enable strict checking of the file format +list (APPEND HDF5_DEVELOPER_DEFS H5_STRICT_FORMAT_CHECKS) diff --git a/config/cmake/HDF5UseFortran.cmake b/config/cmake/HDF5UseFortran.cmake index 5cf50883cdf..fd09c51b7e8 100644 --- a/config/cmake/HDF5UseFortran.cmake +++ b/config/cmake/HDF5UseFortran.cmake @@ -17,6 +17,12 @@ enable_language (Fortran) set (HDF_PREFIX "H5") + +# Force lowercase Fortran module file names +if (CMAKE_Fortran_COMPILER_ID STREQUAL "Cray") + set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -ef") +endif () + include (CheckFortranFunctionExists) if (NOT CMAKE_VERSION VERSION_LESS "3.14.0") diff --git a/config/cmake/HDFCXXCompilerFlags.cmake b/config/cmake/HDFCXXCompilerFlags.cmake index e6561465cb0..bd730a9ca8a 100644 --- a/config/cmake/HDFCXXCompilerFlags.cmake +++ b/config/cmake/HDFCXXCompilerFlags.cmake @@ -53,7 +53,7 @@ endif () if (CMAKE_COMPILER_IS_GNUCXX AND CMAKE_CXX_COMPILER_LOADED) set (CMAKE_CXX_FLAGS "${CMAKE_ANSI_CFLAGS} ${CMAKE_CXX_FLAGS}") - if (${HDF_CFG_NAME} MATCHES "Debug") + if (${HDF_CFG_NAME} MATCHES "Debug" OR ${HDF_CFG_NAME} MATCHES "Developer") if (NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 5.0) set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Og -ftrapv -fno-common") endif () diff --git a/config/cmake/HDFCompilerFlags.cmake b/config/cmake/HDFCompilerFlags.cmake index 
1560bbd440f..3320ca880e6 100644 --- a/config/cmake/HDFCompilerFlags.cmake +++ b/config/cmake/HDFCompilerFlags.cmake @@ -51,7 +51,7 @@ endif() if (CMAKE_COMPILER_IS_GNUCC) set (CMAKE_C_FLAGS "${CMAKE_ANSI_CFLAGS} ${CMAKE_C_FLAGS}") - if (${HDF_CFG_NAME} MATCHES "Debug") + if (${HDF_CFG_NAME} MATCHES "Debug" OR ${HDF_CFG_NAME} MATCHES "Developer") if (NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 5.0) set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Og -ftrapv -fno-common") endif () @@ -176,6 +176,10 @@ endif () # Developer warnings (suggestions from gcc, not code problems) #----------------------------------------------------------------------------- option (HDF5_ENABLE_DEV_WARNINGS "Enable HDF5 developer group warnings" OFF) +if (${HDF_CFG_NAME} MATCHES "Developer") + # Developer build modes should always have these types of warnings enabled + set (HDF5_ENABLE_DEV_WARNINGS ON CACHE BOOL "Enable HDF5 developer group warnings" FORCE) +endif () if (HDF5_ENABLE_DEV_WARNINGS) message (STATUS "....HDF5 developer group warnings are enabled") if (CMAKE_C_COMPILER_ID STREQUAL "Intel") @@ -189,6 +193,19 @@ if (HDF5_ENABLE_DEV_WARNINGS) elseif (CMAKE_C_COMPILER_ID MATCHES "IntelLLVM" OR CMAKE_C_COMPILER_ID MATCHES "[Cc]lang") ADD_H5_FLAGS (H5_CFLAGS "${HDF5_SOURCE_DIR}/config/clang-warnings/developer-general") endif () + + # Turn on -Winline warnings now only for non-Debug and + # non-Developer builds. 
For at least GNU compilers this + # flag appears to conflict specifically with the -Og + # optimization flag and will produce warnings about functions + # not being considered for inlining + if (NOT ${HDF_CFG_NAME} MATCHES "Debug" AND NOT ${HDF_CFG_NAME} MATCHES "Developer") + if (CMAKE_C_COMPILER_ID STREQUAL "GNU") + list (APPEND H5_CFLAGS "-Winline") + elseif (CMAKE_C_COMPILER_ID STREQUAL "Intel" AND NOT _INTEL_WINDOWS) + list (APPEND H5_CFLAGS "-Winline") + endif () + endif () else () if (CMAKE_C_COMPILER_ID STREQUAL "GNU" AND CMAKE_C_COMPILER_VERSION VERSION_GREATER_EQUAL 4.8) ADD_H5_FLAGS (H5_CFLAGS "${HDF5_SOURCE_DIR}/config/gnu-warnings/no-developer-general") @@ -270,6 +287,38 @@ if (CMAKE_C_COMPILER_ID STREQUAL "GNU") # ADD_H5_FLAGS (H5_CFLAGS "${HDF5_SOURCE_DIR}/config/gnu-warnings/no-developer-10") endif () endif () + + # Append more extra warning flags that only gcc 12.x+ knows about + # or which should only be enabled for gcc 12.x+ + if (NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 12.0) + if (HDF5_ENABLE_DEV_WARNINGS) + ADD_H5_FLAGS (H5_CFLAGS "${HDF5_SOURCE_DIR}/config/gnu-warnings/developer-12") + #else () + # ADD_H5_FLAGS (H5_CFLAGS "${HDF5_SOURCE_DIR}/config/gnu-warnings/no-developer-12") + endif () + endif () +endif () + +#----------------------------------------------------------------------------- +# Option to allow the user to enable debug output +# from various HDF5 modules +#----------------------------------------------------------------------------- +option (HDF5_ENABLE_DEBUG_APIS "Turn on extra debug output in all packages" OFF) +if (HDF5_ENABLE_DEBUG_APIS) + # Add standard debug definitions to any existing ones + list (APPEND HDF5_DEBUG_APIS + H5AC_DEBUG + H5CX_DEBUG + H5D_DEBUG + H5D_CHUNK_DEBUG + H5F_DEBUG + H5HL_DEBUG + H5I_DEBUG + H5O_DEBUG + H5S_DEBUG + H5T_DEBUG + H5Z_DEBUG + ) endif () #----------------------------------------------------------------------------- diff --git a/config/cmake/HDFFortranCompilerFlags.cmake 
b/config/cmake/HDFFortranCompilerFlags.cmake index 84b3ebebe3f..65a1cdc677e 100644 --- a/config/cmake/HDFFortranCompilerFlags.cmake +++ b/config/cmake/HDFFortranCompilerFlags.cmake @@ -54,17 +54,18 @@ if (CMAKE_Fortran_COMPILER_ID STREQUAL "GNU" AND NOT CMAKE_Fortran_COMPILER_VERS endif () endif () -#----------------------------------------------------------------------------- -# CDash is configured to only allow 3000 warnings, so -# break into groups (from the config/gnu-flags file) -#----------------------------------------------------------------------------- if (NOT MSVC AND NOT MINGW) # General flags if (CMAKE_Fortran_COMPILER_ID STREQUAL "Intel") ADD_H5_FLAGS (HDF5_CMAKE_Fortran_FLAGS "${HDF5_SOURCE_DIR}/config/intel-warnings/ifort-general") - list (APPEND HDF5_CMAKE_Fortran_FLAGS "-stand f03" "-free") + list (APPEND HDF5_CMAKE_Fortran_FLAGS "-free") elseif (CMAKE_Fortran_COMPILER_ID STREQUAL "GNU") ADD_H5_FLAGS (HDF5_CMAKE_Fortran_FLAGS "${HDF5_SOURCE_DIR}/config/gnu-warnings/gfort-general") + if (HDF5_ENABLE_DEV_WARNINGS) + ADD_H5_FLAGS (HDF5_CMAKE_Fortran_FLAGS "${HDF5_SOURCE_DIR}/config/gnu-warnings/gfort-developer-general") + else () + ADD_H5_FLAGS (HDF5_CMAKE_Fortran_FLAGS "${HDF5_SOURCE_DIR}/config/gnu-warnings/gfort-no-developer-general") + endif () list (APPEND HDF5_CMAKE_Fortran_FLAGS "-ffree-form" "-fimplicit-none") if (CMAKE_Fortran_COMPILER_VERSION VERSION_LESS 8.0 AND NOT CMAKE_Fortran_COMPILER_VERSION VERSION_LESS 4.6) list (APPEND HDF5_CMAKE_Fortran_FLAGS "-std=f2008ts") @@ -82,6 +83,11 @@ if (NOT MSVC AND NOT MINGW) # Append more extra warning flags that only gcc 4.8+ knows about if (NOT CMAKE_Fortran_COMPILER_VERSION VERSION_LESS 4.8) ADD_H5_FLAGS (HDF5_CMAKE_Fortran_FLAGS "${HDF5_SOURCE_DIR}/config/gnu-warnings/gfort-4.8") + if (HDF5_ENABLE_DEV_WARNINGS) + ADD_H5_FLAGS (HDF5_CMAKE_Fortran_FLAGS "${HDF5_SOURCE_DIR}/config/gnu-warnings/gfort-developer-4.8") + else () + ADD_H5_FLAGS (HDF5_CMAKE_Fortran_FLAGS 
"${HDF5_SOURCE_DIR}/config/gnu-warnings/gfort-no-developer-4.8") + endif () endif () # Append more extra warning flags that only gcc 4.9+ knows about @@ -92,7 +98,7 @@ if (NOT MSVC AND NOT MINGW) # Append more extra warning flags that only gcc 5.x+ knows about if (NOT CMAKE_Fortran_COMPILER_VERSION VERSION_LESS 5.0) if (HDF5_ENABLE_DEV_WARNINGS) - ADD_H5_FLAGS (HDF5_CMAKE_Fortran_FLAGS "${HDF5_SOURCE_DIR}/config/gnu-warnings/developer-gfort-5") + ADD_H5_FLAGS (HDF5_CMAKE_Fortran_FLAGS "${HDF5_SOURCE_DIR}/config/gnu-warnings/gfort-developer-5") endif () endif () diff --git a/config/cmake/HDFMacros.cmake b/config/cmake/HDFMacros.cmake index 6037570af16..42b6a22d72f 100644 --- a/config/cmake/HDFMacros.cmake +++ b/config/cmake/HDFMacros.cmake @@ -77,7 +77,7 @@ macro (INSTALL_TARGET_PDB libtarget targetdestination targetcomponent) if (${target_type} MATCHES "SHARED") set (targetfilename $) else () - get_property (target_name TARGET ${libtarget} PROPERTY "$,OUTPUT_NAME_DEBUG,OUTPUT_NAME_RELWITHDEBINFO>") + get_property (target_name TARGET ${libtarget} PROPERTY "$,$>,OUTPUT_NAME_DEBUG,OUTPUT_NAME_RELWITHDEBINFO>") set (targetfilename $/${target_name}.pdb) endif () install ( @@ -121,6 +121,7 @@ macro (HDF_SET_LIB_OPTIONS libtarget libname libtype) set_target_properties (${libtarget} PROPERTIES OUTPUT_NAME ${LIB_RELEASE_NAME} # OUTPUT_NAME_DEBUG ${LIB_DEBUG_NAME} + OUTPUT_NAME_DEVELOPER ${LIB_DEBUG_NAME} OUTPUT_NAME_RELEASE ${LIB_RELEASE_NAME} OUTPUT_NAME_MINSIZEREL ${LIB_RELEASE_NAME} OUTPUT_NAME_RELWITHDEBINFO ${LIB_RELEASE_NAME} @@ -130,6 +131,7 @@ macro (HDF_SET_LIB_OPTIONS libtarget libname libtype) if (WIN32) set_target_properties (${libtarget} PROPERTIES COMPILE_PDB_NAME_DEBUG ${LIB_DEBUG_NAME} + COMPILE_PDB_NAME_DEVELOPER ${LIB_DEBUG_NAME} COMPILE_PDB_NAME_RELEASE ${LIB_RELEASE_NAME} COMPILE_PDB_NAME_MINSIZEREL ${LIB_RELEASE_NAME} COMPILE_PDB_NAME_RELWITHDEBINFO ${LIB_RELEASE_NAME} @@ -155,7 +157,7 @@ macro (HDF_IMPORT_SET_LIB_OPTIONS libtarget libname libtype 
libversion) if (${importtype} MATCHES "IMPORT") set (importprefix "${CMAKE_STATIC_LIBRARY_PREFIX}") endif () - if (${HDF_CFG_NAME} MATCHES "Debug") + if (${HDF_CFG_NAME} MATCHES "Debug" OR ${HDF_CFG_NAME} MATCHES "Developer") set (IMPORT_LIB_NAME ${LIB_DEBUG_NAME}) else () set (IMPORT_LIB_NAME ${LIB_RELEASE_NAME}) diff --git a/config/cmake/ZLIB/zlib-config.cmake.in b/config/cmake/ZLIB/zlib-config.cmake.in index 307896b61a9..9e6ef48fcbe 100644 --- a/config/cmake/ZLIB/zlib-config.cmake.in +++ b/config/cmake/ZLIB/zlib-config.cmake.in @@ -30,7 +30,7 @@ set (${ZLIB_PACKAGE_NAME}_VERSION_MINOR @ZLIB_VERSION_MINOR@) #----------------------------------------------------------------------------- # Don't include targets if this file is being picked up by another -# project which has already build ZLIB as a subproject +# project which has already built ZLIB as a subproject #----------------------------------------------------------------------------- if (NOT TARGET "@ZLIB_PACKAGE@") include (@PACKAGE_SHARE_INSTALL_DIR@/@ZLIB_PACKAGE@@ZLIB_PACKAGE_EXT@-targets.cmake) diff --git a/config/cmake/cacheinit.cmake b/config/cmake/cacheinit.cmake index 3cae5ab89c3..47135782072 100644 --- a/config/cmake/cacheinit.cmake +++ b/config/cmake/cacheinit.cmake @@ -32,6 +32,10 @@ set_property (CACHE HDF5_INSTALL_MOD_FORTRAN PROPERTY STRINGS NO SHARED STATIC) set (HDF5_BUILD_GENERATORS ON CACHE BOOL "Build Test Generators" FORCE) +set (HDF5_ENABLE_SZIP_SUPPORT ON CACHE BOOL "Use SZip Filter" FORCE) + +set (HDF5_ENABLE_SZIP_ENCODING ON CACHE BOOL "Use SZip Encoding" FORCE) + set (MPIEXEC_MAX_NUMPROCS "4" CACHE STRING "Minimum number of processes for HDF parallel tests" FORCE) set (HDF5_ENABLE_ALL_WARNINGS ON CACHE BOOL "Enable all warnings" FORCE) @@ -69,6 +73,9 @@ set (LIBAEC_USE_LOCALCONTENT ON CACHE BOOL "Use local file for LIBAEC FetchConte set (PLUGIN_TGZ_NAME "hdf5_plugins.tar.gz" CACHE STRING "Use PLUGINS from compressed file" FORCE) set (PLUGIN_PACKAGE_NAME "pl" CACHE STRING "Name of 
PLUGIN package" FORCE) +set (H5PL_VERS_MAJOR "1" CACHE STRING "Major version of hdf5 package for PLUGIN package" FORCE) +set (H5PL_VERS_MINOR "12" CACHE STRING "Minor version of hdf5 package for PLUGIN package" FORCE) +set (H5PL_VERS_RELEASE "3" CACHE STRING "Release version of hdf5 package for PLUGIN package" FORCE) ############# # bitshuffle diff --git a/config/cmake/CTestScript.cmake b/config/cmake/examples/CTestScript.cmake similarity index 88% rename from config/cmake/CTestScript.cmake rename to config/cmake/examples/CTestScript.cmake index dcf26851278..76bce5680d5 100644 --- a/config/cmake/CTestScript.cmake +++ b/config/cmake/examples/CTestScript.cmake @@ -35,23 +35,30 @@ if (NOT SITE_OS_NAME) message (STATUS "Dashboard script uname output: ${osname}-${osrel}-${cpu}\n") set (CTEST_BUILD_NAME "${osname}-${osrel}-${cpu}") - if (SITE_BUILDNAME_SUFFIX) - set (CTEST_BUILD_NAME "${SITE_BUILDNAME_SUFFIX}-${CTEST_BUILD_NAME}") - endif () - set (BUILD_OPTIONS "${ADD_BUILD_OPTIONS}") else () ## machine name provided ## -------------------------- if (CMAKE_HOST_UNIX) - set(CTEST_BUILD_NAME "${SITE_OS_NAME}-${SITE_OS_VERSION}-${SITE_OS_BITS}-${SITE_COMPILER_NAME}-${SITE_COMPILER_VERSION}") + set (CTEST_BUILD_NAME "${SITE_OS_NAME}-${SITE_OS_VERSION}-${SITE_OS_BITS}-${SITE_COMPILER_NAME}-${SITE_COMPILER_VERSION}") else () - set(CTEST_BUILD_NAME "${SITE_OS_NAME}-${SITE_OS_VERSION}-${SITE_COMPILER_NAME}") - endif () - if (SITE_BUILDNAME_SUFFIX) - set(CTEST_BUILD_NAME "${CTEST_BUILD_NAME}-${SITE_BUILDNAME_SUFFIX}") + set (CTEST_BUILD_NAME "${SITE_OS_NAME}-${SITE_OS_VERSION}-${SITE_COMPILER_NAME}") endif () - set (BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DSITE:STRING=${CTEST_SITE} -DBUILDNAME:STRING=${CTEST_BUILD_NAME}") endif () +if (SITE_BUILDNAME_SUFFIX) + set (CTEST_BUILD_NAME "${SITE_BUILDNAME_SUFFIX}-${CTEST_BUILD_NAME}") +endif () +set (BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DSITE:STRING=${CTEST_SITE} -DBUILDNAME:STRING=${CTEST_BUILD_NAME}") + +# Launchers work only with 
Makefile and Ninja generators. +if(NOT "${CTEST_CMAKE_GENERATOR}" MATCHES "Make|Ninja" OR LOCAL_SKIP_TEST) + set(CTEST_USE_LAUNCHERS 0) + set(ENV{CTEST_USE_LAUNCHERS_DEFAULT} 0) + set(BUILD_OPTIONS "${BUILD_OPTIONS} -DCTEST_USE_LAUNCHERS:BOOL=OFF") +else() + set(CTEST_USE_LAUNCHERS 1) + set(ENV{CTEST_USE_LAUNCHERS_DEFAULT} 1) + set(BUILD_OPTIONS "${BUILD_OPTIONS} -DCTEST_USE_LAUNCHERS:BOOL=ON") +endif() #----------------------------------------------------------------------------- # MacOS machines need special options diff --git a/config/cmake/HDF5_Examples.cmake.in b/config/cmake/examples/HDF5_Examples.cmake.in similarity index 99% rename from config/cmake/HDF5_Examples.cmake.in rename to config/cmake/examples/HDF5_Examples.cmake.in index 9f3db03fb80..da2502c0c18 100644 --- a/config/cmake/HDF5_Examples.cmake.in +++ b/config/cmake/examples/HDF5_Examples.cmake.in @@ -87,7 +87,7 @@ if(WIN32) set(CTEST_BINARY_NAME ${CTEST_SOURCE_NAME}\\build) set(CTEST_SOURCE_DIRECTORY "${CTEST_DASHBOARD_ROOT}\\${CTEST_SOURCE_NAME}") set(CTEST_BINARY_DIRECTORY "${CTEST_DASHBOARD_ROOT}\\${CTEST_BINARY_NAME}") -else(APPLE) +elseif(APPLE) set(ENV{DYLD_LIBRARY_PATH} "${INSTALLDIR}/lib") set(CTEST_BINARY_NAME ${CTEST_SOURCE_NAME}/build) set(CTEST_SOURCE_DIRECTORY "${CTEST_DASHBOARD_ROOT}/${CTEST_SOURCE_NAME}") diff --git a/config/cmake/HDF5_Examples_options.cmake b/config/cmake/examples/HDF5_Examples_options.cmake similarity index 100% rename from config/cmake/HDF5_Examples_options.cmake rename to config/cmake/examples/HDF5_Examples_options.cmake diff --git a/config/cmake/grepTest.cmake b/config/cmake/grepTest.cmake index c4a6eb7e98c..9dcedb51db2 100644 --- a/config/cmake/grepTest.cmake +++ b/config/cmake/grepTest.cmake @@ -192,6 +192,22 @@ if (TEST_FILTER) endif () endif () +if (NOT DEFINED ENV{HDF5_NOCLEANUP}) + if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}") + file (REMOVE ${TEST_FOLDER}/${TEST_OUTPUT}) + endif () + + if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}.err") + file (REMOVE 
${TEST_FOLDER}/${TEST_OUTPUT}.err) + endif () + + if (TEST_DELETE_LIST) + foreach (dfile in ${TEST_DELETE_LIST}) + file (REMOVE ${dfile}) + endforeach () + endif () +endif () + # everything went fine... message (STATUS "Passed: The output of ${TEST_PROGRAM} matched") diff --git a/config/cmake/hdf5-config.cmake.in b/config/cmake/hdf5-config.cmake.in index 1a3fb7bbf2f..699db896476 100644 --- a/config/cmake/hdf5-config.cmake.in +++ b/config/cmake/hdf5-config.cmake.in @@ -38,7 +38,7 @@ set (${HDF5_PACKAGE_NAME}_BUILD_CPP_LIB @HDF5_BUILD_CPP_LIB@) set (${HDF5_PACKAGE_NAME}_BUILD_JAVA @HDF5_BUILD_JAVA@) set (${HDF5_PACKAGE_NAME}_BUILD_TOOLS @HDF5_BUILD_TOOLS@) set (${HDF5_PACKAGE_NAME}_BUILD_HL_LIB @HDF5_BUILD_HL_LIB@) -set (${HDF5_PACKAGE_NAME}_BUILD_HL_TOOLS @HDF5_BUILD_HL_TOOLS@) +set (${HDF5_PACKAGE_NAME}_BUILD_HL_GIF_TOOLS @HDF5_BUILD_HL_GIF_TOOLS@) set (${HDF5_PACKAGE_NAME}_ENABLE_THREADSAFE @HDF5_ENABLE_THREADSAFE@) set (${HDF5_PACKAGE_NAME}_ENABLE_PLUGIN_SUPPORT @HDF5_ENABLE_PLUGIN_SUPPORT@) set (${HDF5_PACKAGE_NAME}_ENABLE_Z_LIB_SUPPORT @HDF5_ENABLE_Z_LIB_SUPPORT@) @@ -70,8 +70,8 @@ endif () if (${HDF5_PACKAGE_NAME}_BUILD_JAVA) set (${HDF5_PACKAGE_NAME}_JAVA_INCLUDE_DIRS @PACKAGE_CURRENT_BUILD_DIR@/lib/jarhdf5-@HDF5_VERSION_STRING@.jar - @PACKAGE_CURRENT_BUILD_DIR@/lib/slf4j-api-1.7.33.jar - @PACKAGE_CURRENT_BUILD_DIR@/lib/slf4j-nop-1.7.33.jar + @PACKAGE_CURRENT_BUILD_DIR@/lib/slf4j-api-2.0.6.jar + @PACKAGE_CURRENT_BUILD_DIR@/lib/slf4j-nop-2.0.6.jar ) set (${HDF5_PACKAGE_NAME}_JAVA_LIBRARY "@PACKAGE_CURRENT_BUILD_DIR@/lib") set (${HDF5_PACKAGE_NAME}_JAVA_LIBRARIES "${${HDF5_PACKAGE_NAME}_JAVA_LIBRARY}") diff --git a/config/cmake/jrunTest.cmake b/config/cmake/jrunTest.cmake index f6fa3b3fe9b..294b05f8232 100644 --- a/config/cmake/jrunTest.cmake +++ b/config/cmake/jrunTest.cmake @@ -300,6 +300,22 @@ if (TEST_SKIP_COMPARE AND NOT TEST_NO_DISPLAY) endif () endif () +if (NOT DEFINED ENV{HDF5_NOCLEANUP}) + if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}") + file (REMOVE 
${TEST_FOLDER}/${TEST_OUTPUT}) + endif () + + if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}.err") + file (REMOVE ${TEST_FOLDER}/${TEST_OUTPUT}.err) + endif () + + if (TEST_DELETE_LIST) + foreach (dfile in ${TEST_DELETE_LIST}) + file (REMOVE ${dfile}) + endforeach () + endif () +endif () + # everything went fine... message (STATUS "${TEST_PROGRAM} Passed") diff --git a/config/cmake/libhdf5.settings.cmake.in b/config/cmake/libhdf5.settings.cmake.in index 0e2c0134562..46aadb7024f 100644 --- a/config/cmake/libhdf5.settings.cmake.in +++ b/config/cmake/libhdf5.settings.cmake.in @@ -28,6 +28,7 @@ Linking Options: AM_LDFLAGS: @AM_LDFLAGS@ Extra libraries: @LINK_LIBS@ Archiver: @CMAKE_AR@ + AR_FLAGS: Ranlib: @CMAKE_RANLIB@ Languages: @@ -44,23 +45,24 @@ Languages: Static C Library: @H5_ENABLE_STATIC_LIB@ Fortran: @HDF5_BUILD_FORTRAN@ -@BUILD_FORTRAN_CONDITIONAL_TRUE@ Fortran Compiler: @CMAKE_Fortran_COMPILER@ @CMAKE_Fortran_COMPILER_VERSION@ -@BUILD_FORTRAN_CONDITIONAL_TRUE@ Fortran Flags: @CMAKE_Fortran_FLAGS@ -@BUILD_FORTRAN_CONDITIONAL_TRUE@ H5 Fortran Flags: @HDF5_CMAKE_Fortran_FLAGS@ -@BUILD_FORTRAN_CONDITIONAL_TRUE@ AM Fortran Flags: @AM_FCFLAGS@ -@BUILD_FORTRAN_CONDITIONAL_TRUE@ Shared Fortran Library: @H5_ENABLE_SHARED_LIB@ -@BUILD_FORTRAN_CONDITIONAL_TRUE@ Static Fortran Library: @H5_ENABLE_STATIC_LIB@ + Fortran Compiler: @CMAKE_Fortran_COMPILER@ @CMAKE_Fortran_COMPILER_VERSION@ + Fortran Flags: @CMAKE_Fortran_FLAGS@ + H5 Fortran Flags: @HDF5_CMAKE_Fortran_FLAGS@ + AM Fortran Flags: @AM_FCFLAGS@ + Shared Fortran Library: @H5_ENABLE_SHARED_LIB@ + Static Fortran Library: @H5_ENABLE_STATIC_LIB@ + Module Directory: @CMAKE_Fortran_MODULE_DIRECTORY@ C++: @HDF5_BUILD_CPP_LIB@ -@BUILD_CXX_CONDITIONAL_TRUE@ C++ Compiler: @CMAKE_CXX_COMPILER@ @CMAKE_CXX_COMPILER_VERSION@ -@BUILD_CXX_CONDITIONAL_TRUE@ C++ Flags: @CMAKE_CXX_FLAGS@ -@BUILD_CXX_CONDITIONAL_TRUE@ H5 C++ Flags: @HDF5_CMAKE_CXX_FLAGS@ -@BUILD_CXX_CONDITIONAL_TRUE@ AM C++ Flags: @AM_CXXFLAGS@ 
-@BUILD_CXX_CONDITIONAL_TRUE@ Shared C++ Library: @H5_ENABLE_SHARED_LIB@ -@BUILD_CXX_CONDITIONAL_TRUE@ Static C++ Library: @H5_ENABLE_STATIC_LIB@ + C++ Compiler: @CMAKE_CXX_COMPILER@ @CMAKE_CXX_COMPILER_VERSION@ + C++ Flags: @CMAKE_CXX_FLAGS@ + H5 C++ Flags: @HDF5_CMAKE_CXX_FLAGS@ + AM C++ Flags: @AM_CXXFLAGS@ + Shared C++ Library: @H5_ENABLE_SHARED_LIB@ + Static C++ Library: @H5_ENABLE_STATIC_LIB@ - JAVA: @HDF5_BUILD_JAVA@ -@BUILD_JAVA_CONDITIONAL_TRUE@ JAVA Compiler: @CMAKE_Java_COMPILER@ @Java_VERSION@ + JAVA: @HDF5_BUILD_JAVA@ + JAVA Compiler: @CMAKE_Java_COMPILER@ @Java_VERSION@ Features: --------- @@ -71,7 +73,7 @@ Features: Dimension scales w/ new references: @DIMENSION_SCALES_WITH_NEW_REF@ Build HDF5 Tests: @BUILD_TESTING@ Build HDF5 Tools: @HDF5_BUILD_TOOLS@ - Build High-level HDF5 Tools: @HDF5_BUILD_HL_TOOLS@ + Build GIF Tools: @HDF5_BUILD_HL_GIF_TOOLS@ Threadsafety: @HDF5_ENABLE_THREADSAFE@ Default API mapping: @DEFAULT_API_VERSION@ With deprecated public symbols: @HDF5_ENABLE_DEPRECATED_SYMBOLS@ diff --git a/config/cmake/mccacheinit.cmake b/config/cmake/mccacheinit.cmake index dd0c9ece455..ef1ac4ab662 100644 --- a/config/cmake/mccacheinit.cmake +++ b/config/cmake/mccacheinit.cmake @@ -11,9 +11,9 @@ # # This is the CMakeCache file. 
-######################## +######################### # EXTERNAL cache entries -######################## +######################### set (CMAKE_INSTALL_FRAMEWORK_PREFIX "Library/Frameworks" CACHE STRING "Frameworks installation directory" FORCE) @@ -25,14 +25,14 @@ set (HDF_PACKAGE_NAMESPACE "hdf5::" CACHE STRING "Name for HDF package namespace set (HDF5_BUILD_CPP_LIB ON CACHE BOOL "Build HDF5 C++ Library" FORCE) -set (HDF5_BUILD_EXAMPLES ON CACHE BOOL "Build HDF5 Library Examples" FORCE) - set (HDF5_BUILD_FORTRAN ON CACHE BOOL "Build FORTRAN support" FORCE) set (HDF5_BUILD_HL_LIB ON CACHE BOOL "Build HIGH Level HDF5 Library" FORCE) set (HDF5_BUILD_TOOLS ON CACHE BOOL "Build HDF5 Tools" FORCE) +set (HDF5_BUILD_EXAMPLES ON CACHE BOOL "Build HDF5 Library Examples" FORCE) + set (HDF5_ENABLE_Z_LIB_SUPPORT ON CACHE BOOL "Enable Zlib Filters" FORCE) set (HDF5_ENABLE_SZIP_SUPPORT ON CACHE BOOL "Use SZip Filter" FORCE) @@ -45,11 +45,18 @@ set (HDF5_ENABLE_USING_MEMCHECKER ON CACHE BOOL "Indicate that a memory checker set (HDF5_NO_PACKAGES ON CACHE BOOL "CPACK - Disable packaging" FORCE) -set (HDF5_ALLOW_EXTERNAL_SUPPORT "NO" CACHE STRING "Allow External Library Building (NO GIT TGZ)" FORCE) +set (HDF_TEST_EXPRESS "2" CACHE STRING "Control testing framework (0-3)" FORCE) + +set (HDF5_MINGW_STATIC_GCC_LIBS ON CACHE BOOL "Statically link libgcc/libstdc++" FORCE) + +set (HDF5_ALLOW_EXTERNAL_SUPPORT "TGZ" CACHE STRING "Allow External Library Building (NO GIT TGZ)" FORCE) set_property (CACHE HDF5_ALLOW_EXTERNAL_SUPPORT PROPERTY STRINGS NO GIT TGZ) +set (ZLIB_PACKAGE_NAME "zlib" CACHE STRING "Name of ZLIB package" FORCE) set (ZLIB_TGZ_NAME "ZLib.tar.gz" CACHE STRING "Use ZLib from compressed file" FORCE) +set (LIBAEC_PACKAGE_NAME "libaec" CACHE STRING "Name of AEC SZIP package" FORCE) +set (SZIP_PACKAGE_NAME "szip" CACHE STRING "Name of SZIP package" FORCE) set (SZIP_TGZ_NAME "SZip.tar.gz" CACHE STRING "Use SZip from compressed file" FORCE) set (SZAEC_TGZ_NAME "LIBAEC.tar.gz" CACHE 
STRING "Use SZip AEC from compressed file" FORCE) set (USE_LIBAEC ON CACHE BOOL "Use libaec szip replacement" FORCE) @@ -57,7 +64,3 @@ set (USE_LIBAEC ON CACHE BOOL "Use libaec szip replacement" FORCE) set (CMAKE_BUILD_TYPE "Debug" CACHE STRING "Build Debug" FORCE) set (CTEST_CONFIGURATION_TYPE "Debug" CACHE STRING "Build Debug" FORCE) - -set (ZLIB_PACKAGE_NAME "zlib" CACHE STRING "Name of ZLIB package" FORCE) -set (LIBAEC_PACKAGE_NAME "libaec" CACHE STRING "Name of AEC SZIP package" FORCE) -set (SZIP_PACKAGE_NAME "szip" CACHE STRING "Name of SZIP package" FORCE) diff --git a/config/cmake/runTest.cmake b/config/cmake/runTest.cmake index d1c454f0ed4..e26b8ea0c09 100644 --- a/config/cmake/runTest.cmake +++ b/config/cmake/runTest.cmake @@ -378,6 +378,22 @@ if (TEST_SKIP_COMPARE AND NOT TEST_NO_DISPLAY) ) endif () +if (NOT DEFINED ENV{HDF5_NOCLEANUP}) + if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}" AND NOT TEST_SAVE) + file (REMOVE ${TEST_FOLDER}/${TEST_OUTPUT}) + endif () + + if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}.err") + file (REMOVE ${TEST_FOLDER}/${TEST_OUTPUT}.err) + endif () + + if (TEST_DELETE_LIST) + foreach (dfile in ${TEST_DELETE_LIST}) + file (REMOVE ${dfile}) + endforeach () + endif () +endif () + # everything went fine... 
message (STATUS "${TEST_PROGRAM} Passed") diff --git a/config/cmake/scripts/HDF5options.cmake b/config/cmake/scripts/HDF5options.cmake index 92bfd37ecbe..5267212a09c 100644 --- a/config/cmake/scripts/HDF5options.cmake +++ b/config/cmake/scripts/HDF5options.cmake @@ -69,9 +69,9 @@ set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ALLOW_EXTERNAL_SUPPORT:STRIN ### disable using ext zlib #set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=OFF") -### disable using ext szip -#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF") -#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_ENCODING:BOOL=OFF") +### enable using ext szip +#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=ON") +#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_ENCODING:BOOL=ON") #### package examples #### #set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_PACK_EXAMPLES:BOOL=ON -DHDF5_EXAMPLES_COMPRESSED:STRING=HDF5Examples-2.0.3-Source.tar.gz -DHDF5_EXAMPLES_COMPRESSED_DIR:PATH=${CTEST_SCRIPT_DIRECTORY}") diff --git a/config/cmake/vfdTest.cmake b/config/cmake/vfdTest.cmake index f5a4e3e9351..fadc67f94ca 100644 --- a/config/cmake/vfdTest.cmake +++ b/config/cmake/vfdTest.cmake @@ -72,5 +72,21 @@ endif () message (STATUS "COMMAND Error: ${TEST_ERROR}") +if (NOT DEFINED ENV{HDF5_NOCLEANUP}) + if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}") + file (REMOVE ${TEST_FOLDER}/${TEST_OUTPUT}) + endif () + + if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}.err") + file (REMOVE ${TEST_FOLDER}/${TEST_OUTPUT}.err) + endif () + + if (TEST_DELETE_LIST) + foreach (dfile in ${TEST_DELETE_LIST}) + file (REMOVE ${dfile}) + endforeach () + endif () +endif () + # everything went fine... 
message (STATUS "Passed: The ${TEST_PROGRAM} program used vfd ${TEST_VFD}") diff --git a/config/cmake/volTest.cmake b/config/cmake/volTest.cmake index 27ef8a2e113..a47c3e94f34 100644 --- a/config/cmake/volTest.cmake +++ b/config/cmake/volTest.cmake @@ -72,5 +72,21 @@ endif () message (STATUS "COMMAND Error: ${TEST_ERROR}") +if (NOT DEFINED ENV{HDF5_NOCLEANUP}) + if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}") + file (REMOVE ${TEST_FOLDER}/${TEST_OUTPUT}) + endif () + + if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}.err") + file (REMOVE ${TEST_FOLDER}/${TEST_OUTPUT}.err) + endif () + + if (TEST_DELETE_LIST) + foreach (dfile in ${TEST_DELETE_LIST}) + file (REMOVE ${dfile}) + endforeach () + endif () +endif () + # everything went fine... message (STATUS "Passed: The ${TEST_PROGRAM} program used vol ${TEST_VOL}") diff --git a/config/conclude_fc.am b/config/conclude_fc.am index 77403cbe1a7..d26016a03c7 100644 --- a/config/conclude_fc.am +++ b/config/conclude_fc.am @@ -19,12 +19,14 @@ # AM_FCCPPFLAGS, FCCPPFLAGS are currently not used. PPFCCOMPILE = $(FC) $(FCDEFS) $(DEFAULT_INCLUDES) $(FCINCLUDES) $(AM_FCCPPFLAGS) $(FCCPPFLAGS) $(AM_FCFLAGS) $(FCFLAGS) -LTPPFCCOMPILE = $(LIBTOOL) --tag=FC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(PPFCCOMPILE) +LTPPFCCOMPILE = $(LIBTOOL) --silent --tag=FC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(PPFCCOMPILE) # Treat all .f90 and .F90 files as preprocessed Fortran. 
.f90.o: + @if $(AM_V_P); then set -x; else echo " PPFC $@"; fi; \ $(PPFCCOMPILE) -c -o $@ $< .F90.o: + @if $(AM_V_P); then set -x; else echo " PPFC $@"; fi; \ $(PPFCCOMPILE) -c -o $@ $< include $(top_srcdir)/config/conclude.am diff --git a/config/gnu-cxxflags b/config/gnu-cxxflags index 5668c569cda..33a47c8af01 100644 --- a/config/gnu-cxxflags +++ b/config/gnu-cxxflags @@ -118,7 +118,7 @@ if test "X-g++" = "X-$cxx_vendor"; then esac # C++-specific - H5_CXXFLAGS="$H5_CXXFLAGS $arch" + H5_CXXFLAGS="$H5_CXXFLAGS $arch -std=c++98" ############## # Production # diff --git a/config/gnu-fflags b/config/gnu-fflags index 3096cee9f5f..b3385ec8836 100644 --- a/config/gnu-fflags +++ b/config/gnu-fflags @@ -149,6 +149,9 @@ if test "X-gfortran" = "X-$f9x_vendor"; then H5_FCFLAGS="$H5_FCFLAGS $(load_gnu_arguments gfort-general)" + NO_DEVELOPER_WARNING_FCFLAGS="$NO_DEVELOPER_WARNING_FCFLAGS $(load_gnu_arguments gfort-no-developer-general)" + DEVELOPER_WARNING_FCFLAGS="$DEVELOPER_WARNING_FCFLAGS $(load_gnu_arguments gfort-developer-general)" + ############################# # Version-specific warnings # ############################# @@ -156,13 +159,15 @@ if test "X-gfortran" = "X-$f9x_vendor"; then # gfortran >= 4.8 if test $f9x_vers_major -ge 5 -o $f9x_vers_major -eq 4 -a $f9x_vers_minor -ge 8; then H5_FCFLAGS="$H5_FCFLAGS $(load_gnu_arguments gfort-4.8)" + DEVELOPER_WARNING_FCFLAGS="$DEVELOPER_WARNING_FCFLAGS $(load_gnu_arguments gfort-developer-4.8)" + NO_DEVELOPER_WARNING_FCFLAGS="$NO_DEVELOPER_WARNING_FCFLAGS $(load_gnu_arguments gfort-no-developer-4.8)" fi # gfortran 4.9 (nothing new) # gfortran >= 5 if test $f9x_vers_major -ge 5; then - DEVELOPER_WARNING_FCFLAGS="$DEVELOPER_WARNING_FCFLAGS $(load_gnu_arguments developer-gfort-5)" + DEVELOPER_WARNING_FCFLAGS="$DEVELOPER_WARNING_FCFLAGS $(load_gnu_arguments gfort-developer-5)" fi # gfortran >= 6 diff --git a/config/gnu-warnings/4.8 b/config/gnu-warnings/4.8 index fa678ff6ecc..8a1928991b8 100644 --- a/config/gnu-warnings/4.8 +++ 
b/config/gnu-warnings/4.8 @@ -7,9 +7,6 @@ -Wpacked-bitfield-compat -Wsync-nand -# warning flag added for GCC >= 4.5 --Wstrict-overflow=5 - # This warning can only be truly addressed using the gcc extension of # using D to indicate doubles (e.g., 1.23D). -Wno-unsuffixed-float-constants @@ -19,10 +16,6 @@ -Wtrampolines # warning flag added for GCC >= 4.7 -# -# -Wstack-usage=8192 warnings need to be swept up on a branch so -# that we can stop burdening the whole development team. -# -Wstack-usage=8192 # warning flag added for GCC >= 4.8 diff --git a/config/gnu-warnings/cxx-4.8 b/config/gnu-warnings/cxx-4.8 index 744ebdc611c..387c41ebd31 100644 --- a/config/gnu-warnings/cxx-4.8 +++ b/config/gnu-warnings/cxx-4.8 @@ -7,18 +7,11 @@ -Wpacked-bitfield-compat -Wsync-nand -# warning flag added for GCC >= 4.5 --Wstrict-overflow=5 - # warning flags added for GCC >= 4.6 -Wdouble-promotion -Wtrampolines # warning flag added for GCC >= 4.7 -# -# -Wstack-usage=8192 warnings need to be swept up on a branch so -# that we can stop burdening the whole development team. -# -Wstack-usage=8192 # warning flag added for GCC >= 4.8 diff --git a/config/gnu-warnings/cxx-developer-4.8 b/config/gnu-warnings/cxx-developer-4.8 index e0d975f2b03..f5fbdad0c1d 100644 --- a/config/gnu-warnings/cxx-developer-4.8 +++ b/config/gnu-warnings/cxx-developer-4.8 @@ -1,5 +1,10 @@ -# developer warning flags added for GCC >= 4.5 +# developer warning flag added for GCC >= 4.5 # +# strict-overflow level 5 catches a LOT of false positives, but is useful +# for detecting overflow conditions. 
+# +-Wstrict-overflow=5 + # developer warning flag added for GCC >= 4.6 -Wsuggest-attribute=const diff --git a/config/gnu-warnings/developer-12 b/config/gnu-warnings/developer-12 new file mode 100644 index 00000000000..0835d96b140 --- /dev/null +++ b/config/gnu-warnings/developer-12 @@ -0,0 +1,4 @@ +# Enable GCC's static analyzer for GCC 12+ +# (Temporarily disabled as the analyzer currently adds significant +# compile time overhead for a few test files like cache.c) +# -fanalyzer diff --git a/config/gnu-warnings/developer-4.8 b/config/gnu-warnings/developer-4.8 index bfd15a1b56f..be7720cb90b 100644 --- a/config/gnu-warnings/developer-4.8 +++ b/config/gnu-warnings/developer-4.8 @@ -6,6 +6,13 @@ # -Wjump-misses-init +# developer warning flag added for GCC >= 4.5 +# +# strict-overflow level 5 catches a LOT of false positives, but is useful +# for detecting overflow conditions. +# +-Wstrict-overflow=5 + # developer warning flag added for GCC >= 4.6 -Wsuggest-attribute=const diff --git a/config/gnu-warnings/developer-general b/config/gnu-warnings/developer-general index 460b874a7d4..79ecd6a054b 100644 --- a/config/gnu-warnings/developer-general +++ b/config/gnu-warnings/developer-general @@ -1,10 +1,17 @@ # (suggestions from gcc, not code problems) -Waggregate-return -Wdisabled-optimization --Winline -Wmissing-format-attribute -Wmissing-noreturn -Wswitch-default -Wswitch-enum -Wunsafe-loop-optimizations -Wunused-macros +# -Winline warnings aren't included here because, for at least +# GNU compilers, this flag appears to conflict specifically with +# the -Og optimization level flag added for Debug and Developer +# builds and will produce warnings about functions not being +# considered for inlining. 
The flag will be added to the list +# of compiler flags separately if developer warnings are enabled +# and the build type is not Debug or Developer +#-Winline diff --git a/config/gnu-warnings/error-8 b/config/gnu-warnings/error-8 index cbb25f67e55..36c14143228 100644 --- a/config/gnu-warnings/error-8 +++ b/config/gnu-warnings/error-8 @@ -6,17 +6,4 @@ # in order to detect initializations that occur there. It's possible # that GCC 8 only performs that analysis at -O3, though. # -# -# NOTE: File Driver files are not compatible with these warnings as errors -# H5FDlog.c, -# -Werror=maybe-uninitialized --Wmaybe-uninitialized -# NOTE: src/ files are not compatible with these warnings as errors -# src/H5Shyper.c,src/H5SL.c,src/H5Shyper.c -# -Werror=maybe-uninitialized -# NOTE: Test files are not compatible with these warnings as errors -# test/cache_common.c, -# -Werror=maybe-uninitialized -# NOTE: hl/src/ files are not compatible with these warnings as errors -# hl/src/H5DS.c, -# -Werror=maybe-uninitialized +-Werror=maybe-uninitialized diff --git a/config/gnu-warnings/error-general b/config/gnu-warnings/error-general index a66d28411ec..73d1dd564cb 100644 --- a/config/gnu-warnings/error-general +++ b/config/gnu-warnings/error-general @@ -3,7 +3,10 @@ # circumstances, so ask the compiler to treat them as errors: # -Werror=bad-function-cast +-Werror=cast-align +-Werror=format -Werror=implicit-function-declaration +-Werror=int-to-pointer-cast -Werror=missing-declarations -Werror=missing-prototypes -Werror=nested-externs @@ -11,10 +14,11 @@ -Werror=packed -Werror=pointer-sign -Werror=pointer-to-int-cast --Werror=int-to-pointer-cast -Werror=redundant-decls -Werror=strict-prototypes -Werror=switch +-Werror=unused-but-set-variable +-Werror=unused-variable # #-Werror=discarded-qualifiers # @@ -25,43 +29,11 @@ # -Wunused-function # -# H5FDdrvr_module.h -# -Werror=unused-variable -# --Wunused-variable -# # H5VLpassthru.c # -Werror=unused-parameter # -Wunused-parameter # -# -# -# 
NOTE: Tools files are not compatible with these warnings as errors -# lib/h5tools.c -# -Werror=cast-align -# --Wcast-align -# -# lib/h5diff_array.c -# -Werror=unused-but-set-variable -# --Wunused-but-set-variable -# -# lib/h5tools_utils.c -# -Werror=unused-parameter -# -# -# NOTE: JNI files are not compatible with these warnings as errors -# jni/h5pDCPLImp.c,jni/nativeData.c,jni/h5util.c,jni/h5rImp.c -# jni/h5sImp.c,jni/h5tImp.c -# -Werror=cast-align -# jni/h5util.c -# -Werror=format(-overflow) -# --Werror=format -# -# #Examples and tests do not use the same set of extensive warning flags as libraries # Here is a list of tests and examples that have issues with the stricter warnings as error # diff --git a/config/gnu-warnings/gfort-4.8 b/config/gnu-warnings/gfort-4.8 index 9d880dea0af..f986072fa90 100644 --- a/config/gnu-warnings/gfort-4.8 +++ b/config/gnu-warnings/gfort-4.8 @@ -1,5 +1,4 @@ # warning flags added for gfortran >= 4.4 --Warray-temporaries -Wintrinsics-std # warning flag added for gfortran >= 4.5 diff --git a/config/gnu-warnings/gfort-developer-4.8 b/config/gnu-warnings/gfort-developer-4.8 new file mode 100644 index 00000000000..20effdc04dc --- /dev/null +++ b/config/gnu-warnings/gfort-developer-4.8 @@ -0,0 +1,3 @@ +# warning flags added for gfortran >= 4.4 +-Warray-temporaries + diff --git a/config/gnu-warnings/developer-gfort-5 b/config/gnu-warnings/gfort-developer-5 similarity index 100% rename from config/gnu-warnings/developer-gfort-5 rename to config/gnu-warnings/gfort-developer-5 diff --git a/config/gnu-warnings/gfort-developer-general b/config/gnu-warnings/gfort-developer-general new file mode 100644 index 00000000000..dde15feddc1 --- /dev/null +++ b/config/gnu-warnings/gfort-developer-general @@ -0,0 +1,3 @@ +# There is no easy way in Fortran to indicate parameters are intentionally +# unused and many callbacks will have these, generating a lot of noise. 
+-Wunused-dummy-argument diff --git a/config/gnu-warnings/gfort-no-developer-4.8 b/config/gnu-warnings/gfort-no-developer-4.8 new file mode 100644 index 00000000000..82274ef89fd --- /dev/null +++ b/config/gnu-warnings/gfort-no-developer-4.8 @@ -0,0 +1,3 @@ +# warning flags added for gfortran >= 4.4 +-Wno-array-temporaries + diff --git a/config/gnu-warnings/gfort-no-developer-general b/config/gnu-warnings/gfort-no-developer-general new file mode 100644 index 00000000000..bfcba22342e --- /dev/null +++ b/config/gnu-warnings/gfort-no-developer-general @@ -0,0 +1,3 @@ +# There is no easy way in Fortran to indicate parameters are intentionally +# unused and many callbacks will have these, generating a lot of noise. +-Wno-unused-dummy-argument diff --git a/config/gnu-warnings/no-cxx-developer-4.8 b/config/gnu-warnings/no-cxx-developer-4.8 index 3c3de93e0ce..d4d00e3b916 100644 --- a/config/gnu-warnings/no-cxx-developer-4.8 +++ b/config/gnu-warnings/no-cxx-developer-4.8 @@ -1,3 +1,6 @@ +# no-developer warning flag added for GCC >= 4.5 +-Wstrict-overflow=2 + # no-developer warning flag added for GCC >= 4.6 -Wno-suggest-attribute=const diff --git a/config/gnu-warnings/no-developer-4.8 b/config/gnu-warnings/no-developer-4.8 index 09a9a96f221..412b907789d 100644 --- a/config/gnu-warnings/no-developer-4.8 +++ b/config/gnu-warnings/no-developer-4.8 @@ -1,5 +1,6 @@ -# no-developer warning flag added for GCC >= 4.5 +# no-developer warning flags added for GCC >= 4.5 -Wno-jump-misses-init +-Wstrict-overflow=2 # no-developer warning flag added for GCC >= 4.6 -Wno-suggest-attribute=const diff --git a/config/ibm-aix b/config/ibm-aix index fb7e1df0c70..b2f5d05b79c 100644 --- a/config/ibm-aix +++ b/config/ibm-aix @@ -148,8 +148,6 @@ ac_cv_sizeof_char=${ac_cv_sizeof_char=1} ac_cv_sizeof_short=${ac_cv_sizeof_short=2} ac_cv_sizeof_int=${ac_cv_sizeof_int=4} ac_cv_sizeof_long_long=${ac_cv_sizeof_long_long=8} -# Do not cache __int64 since it is not a standard C type and some compilers -# (e.g., 
gcc) does not support it. ac_cv_sizeof_float=${ac_cv_sizeof_float=4} ac_cv_sizeof_double=${ac_cv_sizeof_double=8} ac_cv_sizeof_long_double=${ac_cv_sizeof_long_double=8} diff --git a/config/intel-cxxflags b/config/intel-cxxflags index 484100f403f..9d2be690b88 100644 --- a/config/intel-cxxflags +++ b/config/intel-cxxflags @@ -81,7 +81,7 @@ if test "X-icpc" = "X-$cxx_vendor"; then # General # ########### - H5_CXXFLAGS="$H5_CXXFLAGS $arch" + H5_CXXFLAGS="$H5_CXXFLAGS $arch -std=c++98" ############## # Production # diff --git a/config/intel-fflags b/config/intel-fflags index 843d21e9e49..ad1ce7c4bb5 100644 --- a/config/intel-fflags +++ b/config/intel-fflags @@ -122,7 +122,7 @@ if test "X-ifort" = "X-$f9x_vendor"; then # General # ########### - H5_FCFLAGS="$H5_FCFLAGS -stand:f03 -free" + H5_FCFLAGS="$H5_FCFLAGS -free" H5_FCFLAGS="$H5_FCFLAGS $(load_intel_arguments ifort-general)" ############################# diff --git a/config/intel-warnings/developer-general b/config/intel-warnings/developer-general index fae56f0acc8..861218eecb9 100644 --- a/config/intel-warnings/developer-general +++ b/config/intel-warnings/developer-general @@ -1,4 +1,11 @@ --Winline -Wreorder -Wport -Wstrict-aliasing +# -Winline warnings aren't included here because, for at least +# GNU compilers, this flag appears to conflict specifically with +# the -Og optimization level flag added for Debug and Developer +# builds and will produce warnings about functions not being +# considered for inlining. 
The flag will be added to the list +# of compiler flags separately if developer warnings are enabled +# and the build type is not Debug or Developer +#-Winline diff --git a/config/pgi-cxxflags b/config/pgi-cxxflags index 5fc74ae24f4..84654cb2536 100644 --- a/config/pgi-cxxflags +++ b/config/pgi-cxxflags @@ -48,7 +48,7 @@ if test "X-pgc++" = "X-$cxx_vendor"; then # General # ########### - H5_CXXFLAGS="$H5_CXXFLAGS -Minform=warn" + H5_CXXFLAGS="$H5_CXXFLAGS -std=c++98 -Minform=warn" ############## # Production # diff --git a/config/sanitizer/README.md b/config/sanitizer/README.md index 0d5fb6cdde1..308f9c393aa 100644 --- a/config/sanitizer/README.md +++ b/config/sanitizer/README.md @@ -53,7 +53,7 @@ These obviously force the standard to be required, and also disables compiler-sp ## Sanitizer Builds [`sanitizers.cmake`](sanitizers.cmake) -Sanitizers are tools that perform checks during a program’s runtime and returns issues, and as such, along with unit testing, code coverage and static analysis, is another tool to add to the programmers toolbox. And of course, like the previous tools, are tragically simple to add into any project using CMake, allowing any project and developer to quickly and easily use. +Sanitizers are tools that perform checks during a program's runtime and return issues, and as such, along with unit testing, code coverage and static analysis, are another tool to add to the programmer's toolbox. And, of course, like the previous tools, they are simple to add to any project using CMake, allowing any project and developer to quickly and easily use them. A quick rundown of the tools available, and what they do: - [LeakSanitizer](https://clang.llvm.org/docs/LeakSanitizer.html) detects memory leaks, or issues where memory is allocated and never deallocated, causing programs to slowly consume more and more memory, eventually leading to a crash. 
diff --git a/configure.ac b/configure.ac index 403a82b3879..0fad0ae411b 100644 --- a/configure.ac +++ b/configure.ac @@ -81,14 +81,21 @@ AM_MAINTAINER_MODE([enable]) ## This allows multiple src-dir builds within one host. AC_PREFIX_DEFAULT([`pwd`/hdf5]) -## Run post processing on files created by configure. +## Run post processing on files created by configure +## ## src/H5pubconf.h: ## Generate src/H5pubconf.h from src/H5config.h by prepending H5_ to all ## macro names. This avoid name conflict between HDF5 macro names and those ## generated by another software package that uses the HDF5 library. +## ## src/libhdf5.settings: -## Remove all lines begun with "#" which are generated by CONDITIONAL's of -## configure. +## Remove all lines beginning with "#" which are generated by CONDITIONAL's +## of configure. +## +## src/H5build_settings.c +## Remove all lines beginning with "#" which are generated by CONDITIONAL's +## of configure. This uses a check for whitespace after the pound sign +## to avoid clobbering include statements. AC_CONFIG_COMMANDS([pubconf], [ echo "creating src/H5pubconf.h" sed 's/#define /#define H5_/' \subsubsection subsec_intro_desc_prop_dspace Dataspaces -A dataspace describes the layout of a dataset’s data elements. It can consist of no elements (NULL), +A dataspace describes the layout of a dataset's data elements. It can consist of no elements (NULL), a single element (scalar), or a simple array. @@ -141,7 +141,7 @@ in size (i.e. they are extendible). There are two roles of a dataspace: \li It contains the spatial information (logical layout) of a dataset stored in a file. This includes the rank and dimensions of a dataset, which are a permanent part of the dataset definition. -\li It describes an application’s data buffers and data elements participating in I/O. In other words, it can be used to select a portion or subset of a dataset. +\li It describes an application's data buffers and data elements participating in I/O. 
In other words, it can be used to select a portion or subset of a dataset.
    @@ -602,8 +602,8 @@ Navigate back: \ref index "Main" / \ref GettingStarted @page HDF5Examples HDF5 Examples Example programs of how to use HDF5 are provided below. For HDF-EOS specific examples, see the examples -of how to access and visualize NASA HDF-EOS files using IDL, MATLAB, and NCL on the -HDF-EOS Tools and Information Center page. +of how to access and visualize NASA HDF-EOS files using Python, IDL, MATLAB, and NCL +on the HDF-EOS Tools and Information Center page. \section secHDF5Examples Examples \li \ref LBExamples diff --git a/doxygen/dox/LearnBasics.dox b/doxygen/dox/LearnBasics.dox index 298672d1856..1f57cca26c9 100644 --- a/doxygen/dox/LearnBasics.dox +++ b/doxygen/dox/LearnBasics.dox @@ -59,7 +59,7 @@ These examples (C, C++, Fortran) are provided in the HDF5 source code and (Unix) - @@ -67,7 +67,7 @@ These examples (C, C++, Fortran) are provided in the HDF5 source code and (Unix) - @@ -75,7 +75,7 @@ These examples (C, C++, Fortran) are provided in the HDF5 source code and (Unix) - @@ -83,7 +83,7 @@ These examples (C, C++, Fortran) are provided in the HDF5 source code and (Unix) - @@ -91,7 +91,7 @@ These examples (C, C++, Fortran) are provided in the HDF5 source code and (Unix) - @@ -99,7 +99,7 @@ These examples (C, C++, Fortran) are provided in the HDF5 source code and (Unix) - @@ -107,7 +107,7 @@ These examples (C, C++, Fortran) are provided in the HDF5 source code and (Unix) - @@ -115,7 +115,7 @@ These examples (C, C++, Fortran) are provided in the HDF5 source code and (Unix) - @@ -123,7 +123,7 @@ These examples (C, C++, Fortran) are provided in the HDF5 source code and (Unix) - @@ -131,7 +131,7 @@ These examples (C, C++, Fortran) are provided in the HDF5 source code and (Unix) - diff --git a/doxygen/dox/UsersGuide.dox b/doxygen/dox/UsersGuide.dox index a8ff060fccc..3a9c74595f3 100644 --- a/doxygen/dox/UsersGuide.dox +++ b/doxygen/dox/UsersGuide.dox @@ -126,7 +126,7 @@ HDF5 Release 1.12
  • \ref subsubsec_dataset_program_transfer
  • \ref subsubsec_dataset_program_read -\li \ref subsec_dataset_transfer Data Transfer +\li \ref subsec_dataset_transfer
    • \ref subsubsec_dataset_transfer_pipe
    • \ref subsubsec_dataset_transfer_filter diff --git a/doxygen/dox/high_level/extension.dox b/doxygen/dox/high_level/extension.dox index d754b96bf11..20a099a0e0b 100644 --- a/doxygen/dox/high_level/extension.dox +++ b/doxygen/dox/high_level/extension.dox @@ -392,7 +392,7 @@ H5_HLRDLL herr_t H5LRread_region(hid_t obj_id, * * \details H5LRget_region_info() queries information about the data * pointed by a region reference \p ref. It returns one of the - * absolute paths to a dataset, length of the path, dataset’s rank + * absolute paths to a dataset, length of the path, dataset's rank * and datatype, description of the referenced region and type of * the referenced region. Any output argument can be NULL if that * argument does not need to be returned. diff --git a/doxygen/examples/H5Fclose.c b/doxygen/examples/H5Fclose.c index 525bad38f0b..8c2ef08c439 100644 --- a/doxygen/examples/H5Fclose.c +++ b/doxygen/examples/H5Fclose.c @@ -1,7 +1,7 @@ #include "hdf5.h" int -main() +main(void) { hid_t file; if ((file = H5Fcreate("foo.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) diff --git a/doxygen/examples/H5Fcreate.c b/doxygen/examples/H5Fcreate.c index 525bad38f0b..8c2ef08c439 100644 --- a/doxygen/examples/H5Fcreate.c +++ b/doxygen/examples/H5Fcreate.c @@ -1,7 +1,7 @@ #include "hdf5.h" int -main() +main(void) { hid_t file; if ((file = H5Fcreate("foo.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) diff --git a/doxygen/examples/hello_hdf5.c b/doxygen/examples/hello_hdf5.c index a37d39ff808..f07809002ff 100644 --- a/doxygen/examples/hello_hdf5.c +++ b/doxygen/examples/hello_hdf5.c @@ -1,7 +1,7 @@ #include "hdf5.h" int -main() +main(void) { herr_t retval; unsigned majnum, minnum, relnum; diff --git a/doxygen/examples/tables/fileDriverLists.dox b/doxygen/examples/tables/fileDriverLists.dox index 1aae3ce492a..125df635da3 100644 --- a/doxygen/examples/tables/fileDriverLists.dox +++ b/doxygen/examples/tables/fileDriverLists.dox @@ -94,8 +94,8 @@ version of the file can 
be written back to disk or abandoned.
  • - diff --git a/doxygen/examples/tables/propertyLists.dox b/doxygen/examples/tables/propertyLists.dox index cac7fd2c3bb..dccefb552f6 100644 --- a/doxygen/examples/tables/propertyLists.dox +++ b/doxygen/examples/tables/propertyLists.dox @@ -920,4 +920,4 @@ encoding for object names. //! [acpl_table] * */ - + \ No newline at end of file diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index f453467f10d..371dea8ab90 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -52,7 +52,7 @@ endif () foreach (example ${examples}) add_executable (${example} ${HDF5_EXAMPLES_SOURCE_DIR}/${example}.c) - target_include_directories (${example} PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (${example} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (${example} STATIC) target_link_libraries (${example} PRIVATE ${HDF5_LIB_TARGET}) @@ -73,7 +73,7 @@ endforeach () if (H5_HAVE_PARALLEL) foreach (parallel_example ${parallel_examples}) add_executable (${parallel_example} ${HDF5_EXAMPLES_SOURCE_DIR}/${parallel_example}.c) - target_include_directories (${parallel_example} PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (${parallel_example} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (${parallel_example} STATIC) target_link_libraries (${parallel_example} PRIVATE ${HDF5_LIB_TARGET} MPI::MPI_C) diff --git a/examples/CMakeTests.cmake b/examples/CMakeTests.cmake index 6c84d1d0971..30f73c436b2 100644 --- a/examples/CMakeTests.cmake +++ b/examples/CMakeTests.cmake @@ -70,7 +70,18 @@ if (HDF5_TEST_SERIAL) NAME EXAMPLES-clear-objects COMMAND ${CMAKE_COMMAND} -E remove ${test_ex_CLEANFILES} ) - set_tests_properties (EXAMPLES-clear-objects PROPERTIES FIXTURES_SETUP clear_EXAMPLES) + 
set_tests_properties (EXAMPLES-clear-objects PROPERTIES + FIXTURES_SETUP clear_EXAMPLES + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} + ) + add_test ( + NAME EXAMPLES-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove ${test_ex_CLEANFILES} + ) + set_tests_properties (EXAMPLES-clean-objects PROPERTIES + FIXTURES_CLEANUP clear_EXAMPLES + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} + ) foreach (example ${examples}) if (HDF5_ENABLE_USING_MEMCHECKER) @@ -83,7 +94,6 @@ if (HDF5_TEST_SERIAL) -D "TEST_EXPECT=0" -D "TEST_SKIP_COMPARE=TRUE" -D "TEST_OUTPUT=${example}.txt" - #-D "TEST_REFERENCE=${example}.out" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) @@ -113,7 +123,6 @@ if (H5_HAVE_PARALLEL AND HDF5_TEST_PARALLEL AND NOT WIN32) -D "TEST_SKIP_COMPARE=TRUE" -D "TEST_OUTPUT=${parallel_example}.out" -D "TEST_REFERENCE:STRING=PHDF5 example finished with no errors" - #-D "TEST_FILTER:STRING=PHDF5 tests finished with no errors" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" -P "${HDF_RESOURCES_DIR}/grepTest.cmake" ) diff --git a/examples/h5_cmprss.c b/examples/h5_cmprss.c index 9aefe32ca7b..ffd319174d2 100644 --- a/examples/h5_cmprss.c +++ b/examples/h5_cmprss.c @@ -23,7 +23,7 @@ #define DIM1 20 int -main() +main(void) { hid_t file_id, dataset_id, dataspace_id; /* identifiers */ diff --git a/examples/h5_crtatt.c b/examples/h5_crtatt.c index f33e1f10224..8534703feea 100644 --- a/examples/h5_crtatt.c +++ b/examples/h5_crtatt.c @@ -19,7 +19,7 @@ #define FILE "dset.h5" int -main() +main(void) { hid_t file_id, dataset_id, attribute_id, dataspace_id; /* identifiers */ diff --git a/examples/h5_crtdat.c b/examples/h5_crtdat.c index 8de907739bd..d8ac072bba7 100644 --- a/examples/h5_crtdat.c +++ b/examples/h5_crtdat.c @@ -19,7 +19,7 @@ #define FILE "dset.h5" int -main() +main(void) { hid_t file_id, dataset_id, dataspace_id; /* identifiers */ diff --git a/examples/h5_crtgrp.c b/examples/h5_crtgrp.c index f9066f49852..3517bf3bc03 100644 --- a/examples/h5_crtgrp.c +++ 
b/examples/h5_crtgrp.c @@ -19,7 +19,7 @@ #define FILE "group.h5" int -main() +main(void) { hid_t file_id, group_id; /* identifiers */ diff --git a/examples/h5_crtgrpar.c b/examples/h5_crtgrpar.c index f397155cd16..1b474299dec 100644 --- a/examples/h5_crtgrpar.c +++ b/examples/h5_crtgrpar.c @@ -19,7 +19,7 @@ #define FILE "groups.h5" int -main() +main(void) { hid_t file_id, group1_id, group2_id, group3_id; /* identifiers */ diff --git a/examples/h5_crtgrpd.c b/examples/h5_crtgrpd.c index 364a3b9fa05..f1750d6e03e 100644 --- a/examples/h5_crtgrpd.c +++ b/examples/h5_crtgrpd.c @@ -19,7 +19,7 @@ #define FILE "groups.h5" int -main() +main(void) { hid_t file_id, group_id, dataset_id, dataspace_id; /* identifiers */ diff --git a/examples/h5_extlink.c b/examples/h5_extlink.c index 8d3fc8c6479..e1f02cf4cfd 100644 --- a/examples/h5_extlink.c +++ b/examples/h5_extlink.c @@ -411,7 +411,7 @@ UD_hard_create(const char *link_name, hid_t loc_group, const void *udata, size_t goto done; } - token = *((H5O_token_t *)udata); + token = *((const H5O_token_t *)udata); //! [H5Oopen_by_token_snip] @@ -459,7 +459,7 @@ UD_hard_delete(const char *link_name, hid_t loc_group, const void *udata, size_t goto done; } - token = *((H5O_token_t *)udata); + token = *((const H5O_token_t *)udata); /* Open the object this link points to */ target_obj = H5Oopen_by_token(loc_group, token); @@ -498,7 +498,7 @@ UD_hard_traverse(const char *link_name, hid_t cur_group, const void *udata, size if (udata_size != sizeof(H5O_token_t)) return H5I_INVALID_HID; - token = *((H5O_token_t *)udata); + token = *((const H5O_token_t *)udata); /* Open the object by token. If H5Oopen_by_token fails, ret_value will * be negative to indicate that the traversal function failed. 
diff --git a/examples/h5_rdwt.c b/examples/h5_rdwt.c index 679dc4668a6..9947f595199 100644 --- a/examples/h5_rdwt.c +++ b/examples/h5_rdwt.c @@ -19,7 +19,7 @@ #define FILE "dset.h5" int -main() +main(void) { hid_t file_id, dataset_id; /* identifiers */ diff --git a/examples/testh5cc.sh.in b/examples/testh5cc.sh.in index 88fc684dff5..d42e7f1ed27 100644 --- a/examples/testh5cc.sh.in +++ b/examples/testh5cc.sh.in @@ -11,14 +11,6 @@ # help@hdfgroup.org. # # Tests for the h5cc compiler tool -# Created: Albert Cheng, 2007/4/11 -# -# Modification: -# Albert Cheng, 2008/9/27 -# Added -shlib tests and verbose control. -# Albert Cheng, 2009/10/28 -# Added version compatibility tests. -# srcdir=@srcdir@ diff --git a/fortran/examples/CMakeLists.txt b/fortran/examples/CMakeLists.txt index 793df8d7447..3aaad085d8f 100644 --- a/fortran/examples/CMakeLists.txt +++ b/fortran/examples/CMakeLists.txt @@ -45,7 +45,7 @@ foreach (example ${examples}) target_include_directories (f90_ex_${example} PRIVATE "${CMAKE_Fortran_MODULE_DIRECTORY}/static" - "${HDF5_SRC_DIR}" + "${HDF5_SRC_INCLUDE_DIRS}" "${HDF5_SRC_BINARY_DIR}" "${HDF5_F90_BINARY_DIR}" "${HDF5_F90_BINARY_DIR}/static" @@ -60,7 +60,7 @@ foreach (example ${examples}) target_include_directories (f90_ex_${example} PRIVATE "${CMAKE_Fortran_MODULE_DIRECTORY}/shared" - "${HDF5_SRC_DIR}" + "${HDF5_SRC_INCLUDE_DIRS}" "${HDF5_SRC_BINARY_DIR}" "${HDF5_F90_BINARY_DIR}" "${HDF5_F90_BINARY_DIR}/shared" @@ -86,7 +86,7 @@ foreach (example ${F2003_examples}) target_include_directories (f03_ex_${example} PRIVATE "${CMAKE_Fortran_MODULE_DIRECTORY}/static" - "${HDF5_SRC_DIR}" + "${HDF5_SRC_INCLUDE_DIRS}" "${HDF5_SRC_BINARY_DIR}" "${HDF5_F90_BINARY_DIR}" "${HDF5_F90_BINARY_DIR}/static" @@ -101,7 +101,7 @@ foreach (example ${F2003_examples}) target_include_directories (f03_ex_${example} PRIVATE "${CMAKE_Fortran_MODULE_DIRECTORY}/shared" - "${HDF5_SRC_DIR}" + "${HDF5_SRC_INCLUDE_DIRS}" "${HDF5_SRC_BINARY_DIR}" "${HDF5_F90_BINARY_DIR}" 
"${HDF5_F90_BINARY_DIR}/shared" @@ -127,7 +127,7 @@ if (H5_HAVE_PARALLEL AND MPI_Fortran_FOUND) target_include_directories (f90_ex_ph5example PRIVATE "${CMAKE_Fortran_MODULE_DIRECTORY}/static" - "${HDF5_SRC_DIR}" + "${HDF5_SRC_INCLUDE_DIRS}" "${HDF5_SRC_BINARY_DIR}" "${HDF5_F90_BINARY_DIR}" "${HDF5_F90_BINARY_DIR}/static" @@ -148,7 +148,7 @@ if (H5_HAVE_PARALLEL AND MPI_Fortran_FOUND) target_include_directories (f90_ex_ph5example PRIVATE "${CMAKE_Fortran_MODULE_DIRECTORY}/shared" - "${HDF5_SRC_DIR}" + "${HDF5_SRC_INCLUDE_DIRS}" "${HDF5_SRC_BINARY_DIR}" "${HDF5_F90_BINARY_DIR}" "${HDF5_F90_BINARY_DIR}/shared" diff --git a/fortran/examples/CMakeTests.cmake b/fortran/examples/CMakeTests.cmake index 09b3fa0d08a..dabab8bb514 100644 --- a/fortran/examples/CMakeTests.cmake +++ b/fortran/examples/CMakeTests.cmake @@ -40,7 +40,18 @@ if (HDF5_TEST_SERIAL) NAME f90_ex-clear-objects COMMAND ${CMAKE_COMMAND} -E remove ${test_ex_fortran_CLEANFILES} ) - set_tests_properties (f90_ex-clear-objects PROPERTIES FIXTURES_SETUP clear_f90_ex) + set_tests_properties (f90_ex-clear-objects PROPERTIES + FIXTURES_SETUP clear_f90_ex + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} + ) + add_test ( + NAME f90_ex-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove ${test_ex_fortran_CLEANFILES} + ) + set_tests_properties (f90_ex-clean-objects PROPERTIES + FIXTURES_CLEANUP clear_f90_ex + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} + ) foreach (example ${examples}) if (HDF5_ENABLE_USING_MEMCHECKER) diff --git a/fortran/examples/testh5fc.sh.in b/fortran/examples/testh5fc.sh.in index 2f7c1768def..0328bbbaea7 100644 --- a/fortran/examples/testh5fc.sh.in +++ b/fortran/examples/testh5fc.sh.in @@ -11,10 +11,6 @@ # help@hdfgroup.org. 
# # Tests for the h5fc compiler tool -# Created: Albert Cheng, 2007/3/14 -# -# Modification: -# srcdir=@srcdir@ diff --git a/fortran/src/CMakeLists.txt b/fortran/src/CMakeLists.txt index c253c2c9b23..3dac0374873 100644 --- a/fortran/src/CMakeLists.txt +++ b/fortran/src/CMakeLists.txt @@ -10,6 +10,12 @@ if (WIN32) set (H5_NOPAREXP ";") endif () set (H5_F03EXP ";") + if (NOT H5_FORTRAN_C_LONG_DOUBLE_IS_UNIQUE) + set (H5_DBL_NOT_UNIQUE_EXP ";") + endif () + if (NOT H5_NO_DEPRECATED_SYMBOLS) + set (H5_NO_DEPRECATED_SYMBOLS ";") + endif () configure_file (${HDF5_F90_SRC_SOURCE_DIR}/hdf5_fortrandll.def.in ${HDF5_F90_SRC_BINARY_DIR}/hdf5_fortrandll.def @ONLY) endif () endif () @@ -71,7 +77,7 @@ add_executable (H5match_types ${HDF5_F90_BINARY_DIR}/H5fort_type_defines.h ${HDF5_F90_SRC_SOURCE_DIR}/H5match_types.c ) -target_include_directories (H5match_types PRIVATE "${HDF5_SRC_BINARY_DIR};${HDF5_SRC_DIR};${HDF5_F90_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (H5match_types PRIVATE "${HDF5_SRC_BINARY_DIR};${HDF5_SRC_INCLUDE_DIRS};${HDF5_F90_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") add_custom_command (TARGET H5match_types POST_BUILD BYPRODUCTS ${HDF5_F90_BINARY_DIR}/H5f90i_gen.h ${HDF5_F90_BINARY_DIR}/H5fortran_types.F90 @@ -159,7 +165,7 @@ endif () if (BUILD_STATIC_LIBS) add_library (${HDF5_F90_C_LIB_TARGET} STATIC ${f90CStub_C_SOURCES} ${f90CStub_C_HDRS} ${f90CStub_CGEN_HDRS}) target_include_directories (${HDF5_F90_C_LIB_TARGET} - PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};${HDF5_F90_BINARY_DIR};${HDF5_F90_BINARY_DIR}/static;$<$:${MPI_C_INCLUDE_DIRS}>" + PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};${HDF5_F90_BINARY_DIR};${HDF5_F90_BINARY_DIR}/static;$<$:${MPI_C_INCLUDE_DIRS}>" INTERFACE "$/include>" ) target_compile_options(${HDF5_F90_C_LIB_TARGET} PRIVATE "${HDF5_CMAKE_C_FLAGS}") @@ -178,7 +184,7 @@ endif () if (BUILD_SHARED_LIBS) add_library (${HDF5_F90_C_LIBSH_TARGET} SHARED ${f90CStub_C_SOURCES} ${f90CStub_C_HDRS} 
${f90CStub_CGEN_SHHDRS}) target_include_directories (${HDF5_F90_C_LIBSH_TARGET} - PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};${HDF5_F90_BINARY_DIR};${HDF5_F90_BINARY_DIR}/shared;$<$:${MPI_C_INCLUDE_DIRS}>" + PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};${HDF5_F90_BINARY_DIR};${HDF5_F90_BINARY_DIR}/shared;$<$:${MPI_C_INCLUDE_DIRS}>" INTERFACE "$/include>" ) target_compile_options(${HDF5_F90_C_LIBSH_TARGET} PRIVATE "${HDF5_CMAKE_C_FLAGS}") @@ -420,29 +426,33 @@ else () ) endif () +set (mod_export_files + h5fortran_types.mod + hdf5.mod + h5fortkit.mod + h5global.mod + h5a.mod + h5d.mod + h5e.mod + h5f.mod + h5g.mod + h5i.mod + h5l.mod + h5lib.mod + h5o.mod + h5p.mod + h5r.mod + h5s.mod + h5t.mod + h5vl.mod + h5z.mod + h5_gen.mod +) + if (BUILD_STATIC_LIBS) - set (mod_files - ${MOD_BUILD_DIR}/h5fortran_types.mod - ${MOD_BUILD_DIR}/hdf5.mod - ${MOD_BUILD_DIR}/h5fortkit.mod - ${MOD_BUILD_DIR}/h5global.mod - ${MOD_BUILD_DIR}/h5a.mod - ${MOD_BUILD_DIR}/h5d.mod - ${MOD_BUILD_DIR}/h5e.mod - ${MOD_BUILD_DIR}/h5f.mod - ${MOD_BUILD_DIR}/h5g.mod - ${MOD_BUILD_DIR}/h5i.mod - ${MOD_BUILD_DIR}/h5l.mod - ${MOD_BUILD_DIR}/h5lib.mod - ${MOD_BUILD_DIR}/h5o.mod - ${MOD_BUILD_DIR}/h5p.mod - ${MOD_BUILD_DIR}/h5r.mod - ${MOD_BUILD_DIR}/h5s.mod - ${MOD_BUILD_DIR}/h5t.mod - ${MOD_BUILD_DIR}/h5vl.mod - ${MOD_BUILD_DIR}/h5z.mod - ${MOD_BUILD_DIR}/h5_gen.mod - ) + foreach (mod_file ${mod_export_files}) + set (mod_files ${mod_files} ${MOD_BUILD_DIR}/${mod_file}) + endforeach () install ( FILES ${mod_files} @@ -464,28 +474,9 @@ if (BUILD_STATIC_LIBS) endif () if (BUILD_SHARED_LIBS) - set (modsh_files - ${MODSH_BUILD_DIR}/h5fortran_types.mod - ${MODSH_BUILD_DIR}/hdf5.mod - ${MODSH_BUILD_DIR}/h5fortkit.mod - ${MODSH_BUILD_DIR}/h5global.mod - ${MODSH_BUILD_DIR}/h5a.mod - ${MODSH_BUILD_DIR}/h5d.mod - ${MODSH_BUILD_DIR}/h5e.mod - ${MODSH_BUILD_DIR}/h5f.mod - ${MODSH_BUILD_DIR}/h5g.mod - ${MODSH_BUILD_DIR}/h5i.mod - ${MODSH_BUILD_DIR}/h5l.mod - ${MODSH_BUILD_DIR}/h5lib.mod - 
${MODSH_BUILD_DIR}/h5o.mod - ${MODSH_BUILD_DIR}/h5p.mod - ${MODSH_BUILD_DIR}/h5r.mod - ${MODSH_BUILD_DIR}/h5s.mod - ${MODSH_BUILD_DIR}/h5t.mod - ${MODSH_BUILD_DIR}/h5vl.mod - ${MODSH_BUILD_DIR}/h5z.mod - ${MODSH_BUILD_DIR}/h5_gen.mod - ) + foreach (mod_file ${mod_export_files}) + set (modsh_files ${modsh_files} ${MODSH_BUILD_DIR}/${mod_file}) + endforeach () install ( FILES ${modsh_files} diff --git a/fortran/src/H5config_f.inc.cmake b/fortran/src/H5config_f.inc.cmake index 565d6eb3385..3c9bf8839fc 100644 --- a/fortran/src/H5config_f.inc.cmake +++ b/fortran/src/H5config_f.inc.cmake @@ -11,7 +11,7 @@ ! * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * ! fortran/src/H5config_f.inc. Generated from fortran/src/H5config_f.inc.in by configure -! Define if we have parallel support +! Define if there is parallel support #cmakedefine01 H5_HAVE_PARALLEL #if H5_HAVE_PARALLEL == 0 #undef H5_HAVE_PARALLEL @@ -70,3 +70,9 @@ ! Fortran compiler id #define H5_Fortran_COMPILER_ID @CMAKE_Fortran_COMPILER_ID@ + +! Define if deprecated public API symbols are disabled +#cmakedefine01 H5_NO_DEPRECATED_SYMBOLS +#if H5_NO_DEPRECATED_SYMBOLS == 0 +#undef H5_NO_DEPRECATED_SYMBOLS +#endif \ No newline at end of file diff --git a/fortran/src/H5config_f.inc.in b/fortran/src/H5config_f.inc.in index 685b4d20dd3..bd32a8c09c8 100644 --- a/fortran/src/H5config_f.inc.in +++ b/fortran/src/H5config_f.inc.in @@ -71,3 +71,6 @@ ! Fortran compiler name #undef Fortran_COMPILER_ID +! 
Define if deprecated public API symbols are disabled +#undef NO_DEPRECATED_SYMBOLS + diff --git a/fortran/src/hdf5_fortrandll.def.in b/fortran/src/hdf5_fortrandll.def.in index 4207239f442..b5d1af3c355 100644 --- a/fortran/src/hdf5_fortrandll.def.in +++ b/fortran/src/hdf5_fortrandll.def.in @@ -14,7 +14,7 @@ H5A_mp_H5AWRITE_CHAR_SCALAR H5A_mp_H5AREAD_CHAR_SCALAR H5A_mp_H5ACREATE_F H5A_mp_H5AOPEN_NAME_F -H5A_mp_H5AOPEN_IDX_F +@H5_NO_DEPRECATED_SYMBOLS@H5A_mp_H5AOPEN_IDX_F H5A_mp_H5AGET_SPACE_F H5A_mp_H5AGET_TYPE_F H5A_mp_H5AGET_NAME_F diff --git a/fortran/test/CMakeLists.txt b/fortran/test/CMakeLists.txt index 2d29497ef26..bfad2837856 100644 --- a/fortran/test/CMakeLists.txt +++ b/fortran/test/CMakeLists.txt @@ -45,7 +45,7 @@ if (NOT BUILD_SHARED_LIBS) add_library (${HDF5_F90_C_TEST_LIB_TARGET} STATIC t.c t.h) set_source_files_properties (t.c PROPERTIES LANGUAGE C) target_include_directories (${HDF5_F90_C_TEST_LIB_TARGET} - PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};${HDF5_F90_BINARY_DIR};${HDF5_F90_BINARY_DIR}/static;$<$:${MPI_C_INCLUDE_DIRS}>" + PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};${HDF5_F90_BINARY_DIR};${HDF5_F90_BINARY_DIR}/static;$<$:${MPI_C_INCLUDE_DIRS}>" INTERFACE "$/include>" ) target_compile_options(${HDF5_F90_C_TEST_LIB_TARGET} PRIVATE "${HDF5_CMAKE_C_FLAGS}") @@ -62,7 +62,7 @@ if (NOT BUILD_SHARED_LIBS) else () add_library (${HDF5_F90_C_TEST_LIBSH_TARGET} SHARED t.c t.h) target_include_directories (${HDF5_F90_C_TEST_LIBSH_TARGET} - PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};${HDF5_F90_BINARY_DIR};${HDF5_F90_BINARY_DIR}/shared;$<$:${MPI_C_INCLUDE_DIRS}>" + PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};${HDF5_F90_BINARY_DIR};${HDF5_F90_BINARY_DIR}/shared;$<$:${MPI_C_INCLUDE_DIRS}>" INTERFACE "$/include>" ) target_compile_options(${HDF5_F90_C_TEST_LIBSH_TARGET} PRIVATE "${HDF5_CMAKE_C_FLAGS}") diff --git a/fortran/test/CMakeTests.cmake b/fortran/test/CMakeTests.cmake index 68d8a5ad8c1..93571e16895 100644 --- 
a/fortran/test/CMakeTests.cmake +++ b/fortran/test/CMakeTests.cmake @@ -57,7 +57,18 @@ add_test ( NAME FORTRAN_testhdf5-clear-objects COMMAND ${CMAKE_COMMAND} -E remove ${testhdf5_fortran_CLEANFILES} ) -set_tests_properties (FORTRAN_testhdf5-clear-objects PROPERTIES FIXTURES_SETUP clear_testhdf5_fortran) +set_tests_properties (FORTRAN_testhdf5-clear-objects PROPERTIES + FIXTURES_SETUP clear_testhdf5_fortran + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} +) +add_test ( + NAME FORTRAN_testhdf5-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove ${testhdf5_fortran_CLEANFILES} +) +set_tests_properties (FORTRAN_testhdf5-clean-objects PROPERTIES + FIXTURES_CLEANUP clear_testhdf5_fortran + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} +) if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME FORTRAN_testhdf5_fortran COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) @@ -147,6 +158,9 @@ else () -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () +set_tests_properties (FORTRAN_vol_connector PROPERTIES + FIXTURES_REQUIRED clear_testhdf5_fortran +) #-- Adding test for fflush1 add_test ( @@ -169,3 +183,10 @@ add_test ( set_tests_properties (FORTRAN_fflush2 PROPERTIES DEPENDS FORTRAN_fflush1 ) +add_test ( + NAME FORTRAN_flush1-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove flush.h5 +) +set_tests_properties (FORTRAN_flush1-clean-objects PROPERTIES + DEPENDS FORTRAN_fflush2 +) diff --git a/fortran/testpar/CMakeLists.txt b/fortran/testpar/CMakeLists.txt index d41343c36ed..f2e9201d396 100644 --- a/fortran/testpar/CMakeLists.txt +++ b/fortran/testpar/CMakeLists.txt @@ -53,8 +53,6 @@ else () ) endif () -#set_property(TARGET parallel_test APPEND PROPERTY LINK_FLAGS $<$:"-SUBSYSTEM:CONSOLE">) -#set_property(TARGET parallel_test APPEND PROPERTY LINK_FLAGS $<$:${WIN_LINK_FLAGS}>) if(MSVC) set_property(TARGET parallel_test PROPERTY LINK_FLAGS "/SUBSYSTEM:CONSOLE ${WIN_LINK_FLAGS}") endif() diff --git a/hl/CMakeLists.txt b/hl/CMakeLists.txt index 9391231304e..2dc80126ca0 100644 --- a/hl/CMakeLists.txt +++ 
b/hl/CMakeLists.txt @@ -17,15 +17,7 @@ add_subdirectory (src) # Build HDF5 Tools if (HDF5_BUILD_TOOLS) - #----------------------------------------------------------------------------- - #-- Option to build the High level Tools - #----------------------------------------------------------------------------- - if (EXISTS "${HDF5_HL_SOURCE_DIR}/tools" AND IS_DIRECTORY "${HDF5_HL_SOURCE_DIR}/tools") - option (HDF5_BUILD_HL_TOOLS "Build HDF5 HL Tools" ON) - if (HDF5_BUILD_HL_TOOLS) - add_subdirectory (tools) - endif () - endif () + add_subdirectory (tools) endif () #-- Add High Level Examples diff --git a/hl/Makefile.am b/hl/Makefile.am index c9050ae41f0..1b6b988acb2 100644 --- a/hl/Makefile.am +++ b/hl/Makefile.am @@ -36,14 +36,10 @@ else TEST_DIR = endif if BUILD_TOOLS_CONDITIONAL -if BUILD_TOOLS_HL_CONDITIONAL TOOLS_DIR = tools else TOOLS_DIR = endif -else - TOOLS_DIR = -endif ## Don't recurse into any subdirectories if HDF5 is not configured to ## use the HL library diff --git a/hl/c++/examples/CMakeLists.txt b/hl/c++/examples/CMakeLists.txt index bfad5389690..548dd003894 100644 --- a/hl/c++/examples/CMakeLists.txt +++ b/hl/c++/examples/CMakeLists.txt @@ -5,7 +5,7 @@ project (HDF5_HL_CPP_EXAMPLES CXX) # Add in the examples for the Packet Table codes # -------------------------------------------------------------------- add_executable (ptExampleFL ${HDF5_HL_CPP_EXAMPLES_SOURCE_DIR}/ptExampleFL.cpp) -target_include_directories (ptExampleFL PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (ptExampleFL PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (ptExampleFL STATIC) target_link_libraries (ptExampleFL PRIVATE diff --git a/hl/c++/examples/CMakeTests.cmake b/hl/c++/examples/CMakeTests.cmake index 7564fd7274a..8b2b4ce21d0 100644 --- a/hl/c++/examples/CMakeTests.cmake +++ b/hl/c++/examples/CMakeTests.cmake @@ -16,12 +16,16 @@ 
############################################################################## ############################################################################## # Remove any output file left over from previous test run +set (HL_CPP_EX_PT_CLEANFILES + PTcppexampleFL.h5 +) add_test ( NAME HL_CPP_ex_ptExampleFL-clear-objects COMMAND ${CMAKE_COMMAND} - -E remove - PTcppexampleFL.h5 - ptExampleFL.txt + -E remove ${HL_CPP_EX_PT_CLEANFILES} +) +set_tests_properties (HL_CPP_ex_ptExampleFL-clear-objects PROPERTIES + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} ) if (HDF5_ENABLE_USING_MEMCHECKER) @@ -40,3 +44,12 @@ else () ) endif () set_tests_properties (HL_CPP_ex_ptExampleFL PROPERTIES DEPENDS HL_CPP_ex_ptExampleFL-clear-objects) +add_test ( + NAME HL_CPP_ex_ptExampleFL-clean-objects + COMMAND ${CMAKE_COMMAND} + -E remove ${HL_CPP_EX_PT_CLEANFILES} +) +set_tests_properties (HL_CPP_ex_ptExampleFL-clean-objects PROPERTIES + DEPENDS HL_CPP_ex_ptExampleFL + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} +) diff --git a/hl/c++/src/CMakeLists.txt b/hl/c++/src/CMakeLists.txt index b8e11782297..71a4c9bcd00 100644 --- a/hl/c++/src/CMakeLists.txt +++ b/hl/c++/src/CMakeLists.txt @@ -11,7 +11,7 @@ set (HDF5_HL_CPP_HDRS ${HDF5_HL_CPP_SRC_SOURCE_DIR}/H5PacketTable.h) if (BUILD_STATIC_LIBS) add_library (${HDF5_HL_CPP_LIB_TARGET} STATIC ${HDF5_HL_CPP_SOURCES} ${HDF5_HL_CPP_HDRS}) target_include_directories (${HDF5_HL_CPP_LIB_TARGET} - PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" + PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" INTERFACE "$/include>" ) target_compile_options(${HDF5_HL_CPP_LIB_TARGET} PRIVATE "${HDF5_CMAKE_CXX_FLAGS}") @@ -26,7 +26,7 @@ endif () if (BUILD_SHARED_LIBS) add_library (${HDF5_HL_CPP_LIBSH_TARGET} SHARED ${HDF5_HL_CPP_SOURCES}) target_include_directories (${HDF5_HL_CPP_LIBSH_TARGET} - PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" + PRIVATE 
"${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" INTERFACE "$/include>" ) target_compile_options(${HDF5_HL_CPP_LIBSH_TARGET} PRIVATE "${HDF5_CMAKE_CXX_FLAGS}") diff --git a/hl/c++/test/CMakeLists.txt b/hl/c++/test/CMakeLists.txt index 9a1d6fd8bbd..fb750dd9ac1 100644 --- a/hl/c++/test/CMakeLists.txt +++ b/hl/c++/test/CMakeLists.txt @@ -3,7 +3,7 @@ project (HDF5_HL_CPP_TEST CXX) add_executable (hl_ptableTest ${HDF5_HL_CPP_TEST_SOURCE_DIR}/ptableTest.cpp) target_compile_options(hl_ptableTest PRIVATE "${HDF5_CMAKE_CXX_FLAGS}") -target_include_directories (hl_ptableTest PRIVATE "${HDF5_HL_SRC_DIR}/test;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (hl_ptableTest PRIVATE "${HDF5_HL_SRC_DIR}/test;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (hl_ptableTest STATIC) target_link_libraries (hl_ptableTest PRIVATE diff --git a/hl/c++/test/CMakeTests.cmake b/hl/c++/test/CMakeTests.cmake index 5abbd4793ca..4137270b9fe 100644 --- a/hl/c++/test/CMakeTests.cmake +++ b/hl/c++/test/CMakeTests.cmake @@ -15,12 +15,19 @@ ### T E S T I N G ### ############################################################################## ############################################################################## - +# Remove any output file left over from previous test run +set (HL_CPP_PT_CLEANFILES + packettest.h5 +) add_test ( NAME HL_CPP_ptableTest-clear-objects COMMAND ${CMAKE_COMMAND} - -E remove ${example}.txt + -E remove ${HL_CPP_PT_CLEANFILES} ) +set_tests_properties (HL_CPP_ptableTest-clear-objects PROPERTIES + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} +) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME HL_CPP_ptableTest COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) else () @@ -37,3 +44,12 @@ else () ) endif () set_tests_properties (HL_CPP_ptableTest PROPERTIES DEPENDS HL_CPP_ptableTest-clear-objects) +add_test ( + NAME 
HL_CPP_ptableTest-clean-objects + COMMAND ${CMAKE_COMMAND} + -E remove ${HL_CPP_PT_CLEANFILES} +) +set_tests_properties (HL_CPP_ptableTest-clean-objects PROPERTIES + DEPENDS HL_CPP_ptableTest + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} +) diff --git a/hl/examples/CMakeLists.txt b/hl/examples/CMakeLists.txt index 93ae9e857bf..f14d0307a4a 100644 --- a/hl/examples/CMakeLists.txt +++ b/hl/examples/CMakeLists.txt @@ -28,7 +28,7 @@ set (examples foreach (example ${examples}) add_executable (hl_ex_${example} ${HDF5_HL_EXAMPLES_SOURCE_DIR}/${example}.c) - target_include_directories (hl_ex_${example} PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (hl_ex_${example} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (hl_ex_${example} STATIC) target_link_libraries (hl_ex_${example} PRIVATE ${HDF5_HL_LIB_TARGET} ${HDF5_LIB_TARGET}) diff --git a/hl/examples/CMakeTests.cmake b/hl/examples/CMakeTests.cmake index 6014f6c77d3..6a0a6e78afd 100644 --- a/hl/examples/CMakeTests.cmake +++ b/hl/examples/CMakeTests.cmake @@ -27,31 +27,45 @@ endforeach () add_custom_target(hl_ex_ex_ds1_files ALL COMMENT "Copying files needed by hl_ex_ex_ds1 tests" DEPENDS ${hl_ex_ex_ds1_files_list}) # Remove any output file left over from previous test run +set (HL_EX_CLEANFILES + ex_lite1.h5 + ex_lite2.h5 + ex_lite3.h5 + packet_table_FLexample.h5 + ex_image1.h5 + ex_image2.h5 + ex_table_01.h5 + ex_table_02.h5 + ex_table_03.h5 + ex_table_04.h5 + ex_table_05.h5 + ex_table_06.h5 + ex_table_07.h5 + ex_table_08.h5 + ex_table_09.h5 + ex_table_10.h5 + ex_table_11.h5 + ex_table_12.h5 + ex_ds1.h5 +) add_test ( NAME HL_ex-clear-objects COMMAND ${CMAKE_COMMAND} - -E remove - ex_lite1.h5 - ex_lite2.h5 - ex_lite3.h5 - packet_table_FLexample.h5 - ex_image1.h5 - ex_image2.h5 - ex_table_01.h5 - ex_table_02.h5 - ex_table_03.h5 - ex_table_04.h5 - ex_table_05.h5 - ex_table_06.h5 - 
ex_table_07.h5 - ex_table_08.h5 - ex_table_09.h5 - ex_table_10.h5 - ex_table_11.h5 - ex_table_12.h5 - ex_ds1.h5 + -E remove ${HL_EX_CLEANFILES} +) +set_tests_properties (HL_ex-clear-objects PROPERTIES + FIXTURES_SETUP clear_HL_ex + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} +) +add_test ( + NAME HL_ex-clean-objects + COMMAND ${CMAKE_COMMAND} + -E remove ${HL_EX_CLEANFILES} +) +set_tests_properties (HL_ex-clean-objects PROPERTIES + FIXTURES_CLEANUP clear_HL_ex + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} ) -set_tests_properties (HL_ex-clear-objects PROPERTIES FIXTURES_SETUP clear_HL_ex) foreach (example ${examples}) if (HDF5_ENABLE_USING_MEMCHECKER) diff --git a/hl/fortran/examples/CMakeTests.cmake b/hl/fortran/examples/CMakeTests.cmake index 6389fbe8506..46827b5c60e 100644 --- a/hl/fortran/examples/CMakeTests.cmake +++ b/hl/fortran/examples/CMakeTests.cmake @@ -17,14 +17,28 @@ ############################################################################## # Remove any output file left over from previous test run +set (HL_FORTRAN_F90_EX_CLEANFILES + ex_ds1.h5 + exlite.h5 +) add_test ( NAME HL_FORTRAN_f90_ex-clear-objects COMMAND ${CMAKE_COMMAND} - -E remove - ex_ds1.h5 - exlite.h5 + -E remove ${HL_FORTRAN_F90_EX_CLEANFILES} +) +set_tests_properties (HL_FORTRAN_f90_ex-clear-objects PROPERTIES + FIXTURES_SETUP clear_HL_FORTRAN_f90_ex + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} +) +add_test ( + NAME HL_FORTRAN_f90_ex-clean-objects + COMMAND ${CMAKE_COMMAND} + -E remove ${HL_FORTRAN_F90_EX_CLEANFILES} +) +set_tests_properties (HL_FORTRAN_f90_ex-clean-objects PROPERTIES + FIXTURES_CLEANUP clear_HL_FORTRAN_f90_ex + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} ) -set_tests_properties (HL_FORTRAN_f90_ex-clear-objects PROPERTIES FIXTURES_SETUP clear_HL_FORTRAN_f90_ex) foreach (example ${examples}) if (HDF5_ENABLE_USING_MEMCHECKER) @@ -44,5 +58,6 @@ foreach (example ${examples}) endif () set_tests_properties (HL_FORTRAN_f90_ex_${example} PROPERTIES FIXTURES_REQUIRED clear_HL_FORTRAN_f90_ex 
+ WORKING_DIRECTORY ${PROJECT_BINARY_DIR} ) endforeach () diff --git a/hl/fortran/src/CMakeLists.txt b/hl/fortran/src/CMakeLists.txt index 538b6892e74..5455d777d2a 100644 --- a/hl/fortran/src/CMakeLists.txt +++ b/hl/fortran/src/CMakeLists.txt @@ -59,7 +59,7 @@ set (HDF5_HL_F90_HEADERS ${HDF5_HL_F90_SRC_SOURCE_DIR}/H5LTf90proto.h ${HDF5_HL_ if (BUILD_STATIC_LIBS) add_library (${HDF5_HL_F90_C_LIB_TARGET} STATIC ${HDF5_HL_F90_C_SOURCES} ${HDF5_HL_F90_HEADERS}) target_include_directories (${HDF5_HL_F90_C_LIB_TARGET} - PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};${HDF5_F90_BINARY_DIR}/static;$<$:${MPI_C_INCLUDE_DIRS}>" + PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};${HDF5_F90_BINARY_DIR}/static;$<$:${MPI_C_INCLUDE_DIRS}>" INTERFACE "$/include>" ) target_compile_options(${HDF5_HL_F90_C_LIB_TARGET} PRIVATE "${HDF5_CMAKE_C_FLAGS}") @@ -76,7 +76,7 @@ endif () if (BUILD_SHARED_LIBS) add_library (${HDF5_HL_F90_C_LIBSH_TARGET} SHARED ${HDF5_HL_F90_C_SOURCES} ${HDF5_HL_F90_HEADERS}) target_include_directories (${HDF5_HL_F90_C_LIBSH_TARGET} - PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};${HDF5_F90_BINARY_DIR}/shared;$<$:${MPI_C_INCLUDE_DIRS}>" + PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};${HDF5_F90_BINARY_DIR}/shared;$<$:${MPI_C_INCLUDE_DIRS}>" INTERFACE "$/include>" ) target_compile_options(${HDF5_HL_F90_C_LIBSH_TARGET} PRIVATE "${HDF5_CMAKE_C_FLAGS}") @@ -235,16 +235,19 @@ endif () # Add file(s) to CMake Install #----------------------------------------------------------------------------- +set (mod_export_files + h5ds.mod + h5tb.mod + h5tb_const.mod + h5lt.mod + h5lt_const.mod + h5im.mod +) if (BUILD_STATIC_LIBS) - set (mod_files - ${MOD_BUILD_DIR}/h5ds.mod - ${MOD_BUILD_DIR}/h5tb.mod - ${MOD_BUILD_DIR}/h5tb_const.mod - ${MOD_BUILD_DIR}/h5lt.mod - ${MOD_BUILD_DIR}/h5lt_const.mod - ${MOD_BUILD_DIR}/h5im.mod - ) + foreach (mod_file ${mod_export_files}) + set (mod_files ${mod_files} ${MOD_BUILD_DIR}/${mod_file}) + endforeach () install ( FILES 
${mod_files} @@ -265,14 +268,9 @@ if (BUILD_STATIC_LIBS) endif () endif () if (BUILD_SHARED_LIBS) - set (modsh_files - ${MODSH_BUILD_DIR}/h5ds.mod - ${MODSH_BUILD_DIR}/h5tb.mod - ${MODSH_BUILD_DIR}/h5tb_const.mod - ${MODSH_BUILD_DIR}/h5lt.mod - ${MODSH_BUILD_DIR}/h5lt_const.mod - ${MODSH_BUILD_DIR}/h5im.mod - ) + foreach (mod_file ${mod_export_files}) + set (modsh_files ${modsh_files} ${MODSH_BUILD_DIR}/${mod_file}) + endforeach () install ( FILES ${modsh_files} diff --git a/hl/fortran/test/CMakeTests.cmake b/hl/fortran/test/CMakeTests.cmake index e082f4c8162..209186b58f3 100644 --- a/hl/fortran/test/CMakeTests.cmake +++ b/hl/fortran/test/CMakeTests.cmake @@ -17,15 +17,15 @@ ############################################################################## set (test_hl_fortran_CLEANFILES - dsetf1.h5 - dsetf2.h5 - dsetf3.h5 - dsetf4.h5 - dsetf5.h5 - f1img.h5 - f1tab.h5 - f2tab.h5 - tstds.h5 + dsetf1.h5 + dsetf2.h5 + dsetf3.h5 + dsetf4.h5 + dsetf5.h5 + f1img.h5 + f1tab.h5 + f2tab.h5 + tstds.h5 ) # Remove any output file left over from previous test run @@ -34,7 +34,19 @@ add_test ( COMMAND ${CMAKE_COMMAND} -E remove ${test_hl_fortran_CLEANFILES} ) -set_tests_properties (HL_FORTRAN_test-clear-objects PROPERTIES FIXTURES_SETUP clear_HL_FORTRAN_test) +set_tests_properties (HL_FORTRAN_test-clear-objects PROPERTIES + FIXTURES_SETUP clear_HL_FORTRAN_test + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} +) +add_test ( + NAME HL_FORTRAN_test-clean-objects + COMMAND ${CMAKE_COMMAND} + -E remove ${test_hl_fortran_CLEANFILES} +) +set_tests_properties (HL_FORTRAN_test-clean-objects PROPERTIES + FIXTURES_CLEANUP clear_HL_FORTRAN_test + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} +) macro (ADD_H5_FORTRAN_TEST file) if (HDF5_ENABLE_USING_MEMCHECKER) @@ -54,6 +66,7 @@ macro (ADD_H5_FORTRAN_TEST file) endif () set_tests_properties (HL_FORTRAN_f90_${file} PROPERTIES FIXTURES_REQUIRED clear_HL_FORTRAN_test + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} ) endmacro () diff --git a/hl/src/CMakeLists.txt 
b/hl/src/CMakeLists.txt index 270018828fb..fafa0ef071a 100644 --- a/hl/src/CMakeLists.txt +++ b/hl/src/CMakeLists.txt @@ -43,7 +43,7 @@ set (HL_PRIVATE_HEADERS if (BUILD_STATIC_LIBS) add_library (${HDF5_HL_LIB_TARGET} STATIC ${HL_SOURCES} ${HL_HEADERS} ${HL_PRIVATE_HEADERS}) target_include_directories (${HDF5_HL_LIB_TARGET} - PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" + PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" INTERFACE "$/include>" ) target_compile_options(${HDF5_HL_LIB_TARGET} PRIVATE "${HDF5_CMAKE_C_FLAGS}") @@ -58,7 +58,7 @@ endif () if (BUILD_SHARED_LIBS) add_library (${HDF5_HL_LIBSH_TARGET} SHARED ${HL_SOURCES} ${HL_HEADERS} ${HL_PRIVATE_HEADERS}) target_include_directories (${HDF5_HL_LIBSH_TARGET} - PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" + PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" INTERFACE "$/include>" ) target_compile_options(${HDF5_HL_LIBSH_TARGET} PRIVATE "${HDF5_CMAKE_C_FLAGS}") diff --git a/hl/test/CMakeLists.txt b/hl/test/CMakeLists.txt index c619fc6acff..cec245893a7 100644 --- a/hl/test/CMakeLists.txt +++ b/hl/test/CMakeLists.txt @@ -20,7 +20,7 @@ configure_file (${HDF5_HL_TEST_SOURCE_DIR}/H5srcdir_str.h.in H5srcdir_str.h @ON macro (HL_ADD_EXE hl_name) add_executable (hl_${hl_name} ${hl_name}.c) target_compile_options(hl_${hl_name} PRIVATE "${HDF5_CMAKE_C_FLAGS}") - target_include_directories (hl_${hl_name} PRIVATE "${HDF5_TEST_SRC_DIR};${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (hl_${hl_name} PRIVATE "${HDF5_TEST_SRC_DIR};${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (hl_${hl_name} STATIC) target_link_libraries (hl_${hl_name} PRIVATE @@ -58,7 +58,7 @@ HL_ADD_EXE (test_h5do_compat) # test_packet has two source files add_executable (hl_test_packet test_packet.c 
test_packet_vlen.c) target_compile_options(hl_test_packet PRIVATE "${HDF5_CMAKE_C_FLAGS}") -target_include_directories (hl_test_packet PRIVATE "${HDF5_TEST_SRC_DIR};${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (hl_test_packet PRIVATE "${HDF5_TEST_SRC_DIR};${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (hl_test_packet STATIC) target_link_libraries (hl_test_packet PRIVATE @@ -90,7 +90,7 @@ endif () if (HDF5_BUILD_GENERATORS AND BUILD_STATIC_LIBS) add_executable (hl_gen_test_ds gen_test_ds.c) target_compile_options(hl_gen_test_ds PRIVATE "${HDF5_CMAKE_C_FLAGS}") - target_include_directories (hl_gen_test_ds PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (hl_gen_test_ds PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (hl_gen_test_ds STATIC) target_link_libraries (hl_gen_test_ds PRIVATE ${HDF5_HL_LIB_TARGET} @@ -108,7 +108,7 @@ if (HDF5_BUILD_GENERATORS AND BUILD_STATIC_LIBS) add_executable (hl_gen_test_ld gen_test_ld.c) target_compile_options(hl_gen_test_ld PRIVATE "${HDF5_CMAKE_C_FLAGS}") - target_include_directories (hl_gen_test_ld PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (hl_gen_test_ld PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (hl_gen_test_ld STATIC) target_link_libraries (hl_gen_test_ld PRIVATE ${HDF5_HL_LIB_TARGET} diff --git a/hl/test/CMakeTests.cmake b/hl/test/CMakeTests.cmake index cc4786e5806..9d3ff0c2221 100644 --- a/hl/test/CMakeTests.cmake +++ b/hl/test/CMakeTests.cmake @@ -81,7 +81,18 @@ add_test ( NAME HL_test-clear-objects COMMAND ${CMAKE_COMMAND} -E remove ${test_hl_CLEANFILES} ) -set_tests_properties (HL_test-clear-objects PROPERTIES FIXTURES_SETUP clear_test_hl) +set_tests_properties 
(HL_test-clear-objects PROPERTIES + FIXTURES_SETUP clear_test_hl + WORKING_DIRECTORY ${HDF5_HL_TEST_BINARY_DIR} +) +add_test ( + NAME HL_test-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove ${test_hl_CLEANFILES} +) +set_tests_properties (HL_test-clean-objects PROPERTIES + FIXTURES_CLEANUP clear_test_hl + WORKING_DIRECTORY ${HDF5_HL_TEST_BINARY_DIR} +) # -------------------------------------------------------------------- # Macro used to add a unit test diff --git a/hl/tools/CMakeLists.txt b/hl/tools/CMakeLists.txt index fd9b6d98ff8..a0bd744e601 100644 --- a/hl/tools/CMakeLists.txt +++ b/hl/tools/CMakeLists.txt @@ -1,6 +1,12 @@ cmake_minimum_required (VERSION 3.12) project (HDF5_HL_TOOLS C) -add_subdirectory (gif2h5) +#----------------------------------------------------------------------------- +#-- Option to build the High level GIF Tools +#----------------------------------------------------------------------------- +option (HDF5_BUILD_HL_GIF_TOOLS "Build HDF5 HL GIF Tools" OFF) +if (HDF5_BUILD_HL_GIF_TOOLS) + add_subdirectory (gif2h5) +endif () add_subdirectory (h5watch) diff --git a/hl/tools/Makefile.am b/hl/tools/Makefile.am index 6687f40f871..9f93f676dce 100644 --- a/hl/tools/Makefile.am +++ b/hl/tools/Makefile.am @@ -18,7 +18,13 @@ include $(top_srcdir)/config/commence.am +if BUILD_TOOLS_HL_GIF_CONDITIONAL + TOOLS_GIF_DIR = gif2h5 +else + TOOLS_GIF_DIR = +endif + # All subdirectories -SUBDIRS=gif2h5 h5watch +SUBDIRS=h5watch $(TOOLS_GIF_DIR) include $(top_srcdir)/config/conclude.am diff --git a/hl/tools/gif2h5/CMakeLists.txt b/hl/tools/gif2h5/CMakeLists.txt index a797c80bd5e..8f18b093681 100644 --- a/hl/tools/gif2h5/CMakeLists.txt +++ b/hl/tools/gif2h5/CMakeLists.txt @@ -17,7 +17,7 @@ set (GIF2H5_SOURCES if (BUILD_STATIC_LIBS) add_executable (gif2h5 ${GIF2H5_SOURCES}) target_compile_options(gif2h5 PRIVATE "${HDF5_CMAKE_C_FLAGS}") - target_include_directories (gif2h5 PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + 
target_include_directories (gif2h5 PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (gif2h5 STATIC) target_link_libraries (gif2h5 PRIVATE ${HDF5_HL_LIB_TARGET} ${HDF5_LIB_TARGET} ${HDF5_TOOLS_LIB_TARGET}) set_target_properties (gif2h5 PROPERTIES FOLDER tools/hl) @@ -30,7 +30,7 @@ endif () if (BUILD_SHARED_LIBS) add_executable (gif2h5-shared ${GIF2H5_SOURCES}) target_compile_options(gif2h5-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}") - target_include_directories (gif2h5-shared PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (gif2h5-shared PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (gif2h5-shared SHARED) target_link_libraries (gif2h5-shared PRIVATE ${HDF5_HL_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} ${HDF5_TOOLS_LIBSH_TARGET}) set_target_properties (gif2h5-shared PROPERTIES FOLDER tools/hl) @@ -59,7 +59,7 @@ set (hdf2gif_SOURCES if (BUILD_STATIC_LIBS) add_executable (h52gif ${hdf2gif_SOURCES}) target_compile_options(h52gif PRIVATE "${HDF5_CMAKE_C_FLAGS}") - target_include_directories (h52gif PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h52gif PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (h52gif STATIC) target_link_libraries (h52gif PRIVATE ${HDF5_HL_LIB_TARGET} ${HDF5_LIB_TARGET} ${HDF5_TOOLS_LIB_TARGET}) set_target_properties (h52gif PROPERTIES FOLDER tools/hl) @@ -72,7 +72,7 @@ endif () if (BUILD_SHARED_LIBS) add_executable (h52gif-shared ${hdf2gif_SOURCES}) target_compile_options(h52gif-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}") - target_include_directories (h52gif-shared PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h52gif-shared PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") 
TARGET_C_PROPERTIES (h52gif-shared SHARED) target_link_libraries (h52gif-shared PRIVATE ${HDF5_HL_LIBSH_TARGET} PRIVATE ${HDF5_LIBSH_TARGET} ${HDF5_TOOLS_LIBSH_TARGET}) set_target_properties (h52gif-shared PROPERTIES FOLDER tools/hl) @@ -101,7 +101,7 @@ if (BUILD_TESTING AND HDF5_TEST_SERIAL) # -------------------------------------------------------------------- if (HDF5_BUILD_GENERATORS AND BUILD_STATIC_LIBS) add_executable (hl_h52gifgentest ${HDF5_HL_TOOLS_GIF2H5_SOURCE_DIR}/h52gifgentst.c) - target_include_directories (hl_h52gifgentest PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (hl_h52gifgentest PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (hl_h52gifgentest STATIC) target_link_libraries (hl_h52gifgentest PRIVATE ${HDF5_HL_LIB_TARGET} ${HDF5_LIB_TARGET}) set_target_properties (hl_h52gifgentest PROPERTIES FOLDER generator/tools/hl) diff --git a/hl/tools/gif2h5/CMakeTests.cmake b/hl/tools/gif2h5/CMakeTests.cmake index 36ea5f94cb3..b2a8e27f26f 100644 --- a/hl/tools/gif2h5/CMakeTests.cmake +++ b/hl/tools/gif2h5/CMakeTests.cmake @@ -28,16 +28,24 @@ HDFTEST_COPY_FILE("${HDF5_HL_TOOLS_GIF2H5_SOURCE_DIR}/testfiles/ex_image2.h5" "$ add_custom_target(gif2h5_files ALL COMMENT "Copying files needed by gif2h5 tests" DEPENDS ${gif2h5_files_list}) # Remove any output file left over from previous test run +set (HL_TOOLS_CLEANFILES + image1.gif + image1.h5 + image.gif + image24.gif +) add_test ( NAME HL_TOOLS-clear-objects COMMAND ${CMAKE_COMMAND} - -E remove - image1.gif - image1.h5 - image.gif - image24.gif + -E remove ${HL_TOOLS_CLEANFILES} ) set_tests_properties (HL_TOOLS-clear-objects PROPERTIES FIXTURES_SETUP clear_tools_hl) +add_test ( + NAME HL_TOOLS-clean-objects + COMMAND ${CMAKE_COMMAND} + -E remove ${HL_TOOLS_CLEANFILES} +) +set_tests_properties (HL_TOOLS-clean-objects PROPERTIES FIXTURES_CLEANUP clear_tools_hl) add_test (NAME HL_TOOLS_gif2h5 
COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ testfiles/image1.gif image1.h5) set_tests_properties (HL_TOOLS_gif2h5 PROPERTIES diff --git a/hl/tools/h5watch/CMakeLists.txt b/hl/tools/h5watch/CMakeLists.txt index 6c667c7de43..9d07a1e689b 100644 --- a/hl/tools/h5watch/CMakeLists.txt +++ b/hl/tools/h5watch/CMakeLists.txt @@ -12,7 +12,7 @@ set (H5WATCH_SOURCES if (BUILD_STATIC_LIBS) add_executable (h5watch ${H5WATCH_SOURCES}) target_compile_options(h5watch PRIVATE "${HDF5_CMAKE_C_FLAGS}") - target_include_directories (h5watch PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5watch PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (h5watch STATIC) target_link_libraries (h5watch PRIVATE ${HDF5_HL_LIB_TARGET} ${HDF5_LIB_TARGET} ${HDF5_TOOLS_LIB_TARGET}) set_target_properties (h5watch PROPERTIES FOLDER tools/hl) @@ -21,7 +21,7 @@ endif () if (BUILD_SHARED_LIBS) add_executable (h5watch-shared ${H5WATCH_SOURCES}) target_compile_options(h5watch-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}") - target_include_directories (h5watch-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5watch-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (h5watch-shared SHARED) target_link_libraries (h5watch-shared PRIVATE ${HDF5_HL_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} ${HDF5_TOOLS_LIBSH_TARGET}) set_target_properties (h5watch-shared PROPERTIES FOLDER tools/hl) @@ -46,7 +46,7 @@ if (BUILD_TESTING AND HDF5_TEST_SWMR AND HDF5_TEST_SERIAL) ) add_executable (extend_dset ${extend_dset_SOURCES}) target_compile_options(extend_dset PRIVATE "${HDF5_CMAKE_C_FLAGS}") - target_include_directories (extend_dset PRIVATE 
"${HDF5_HL_SRC_DIR}/test;${HDF5_HL_SRC_DIR}/src;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (extend_dset PRIVATE "${HDF5_HL_SRC_DIR}/test;${HDF5_HL_SRC_DIR}/src;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (BUILD_STATIC_LIBS) TARGET_C_PROPERTIES (extend_dset STATIC) target_link_libraries (extend_dset PRIVATE ${HDF5_HL_LIB_TARGET} ${HDF5_TEST_LIB_TARGET} ${HDF5_LIB_TARGET} ${HDF5_TOOLS_LIB_TARGET}) @@ -65,7 +65,7 @@ if (BUILD_TESTING AND HDF5_TEST_SWMR AND HDF5_TEST_SERIAL) add_executable (h5watchgentest ${HDF5_HL_TOOLS_H5WATCH_SOURCE_DIR}/h5watchgentest.c) target_compile_options(h5watchgentest PRIVATE "${HDF5_CMAKE_C_FLAGS}") - target_include_directories (h5watchgentest PRIVATE "${HDF5_HL_SRC_DIR}/src;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5watchgentest PRIVATE "${HDF5_HL_SRC_DIR}/src;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (BUILD_STATIC_LIBS) TARGET_C_PROPERTIES (h5watchgentest STATIC) target_link_libraries (h5watchgentest PRIVATE ${HDF5_HL_LIB_TARGET} ${HDF5_LIB_TARGET}) diff --git a/hl/tools/h5watch/CMakeTests.cmake b/hl/tools/h5watch/CMakeTests.cmake index 32b2b95c4da..a7828b30fa6 100644 --- a/hl/tools/h5watch/CMakeTests.cmake +++ b/hl/tools/h5watch/CMakeTests.cmake @@ -115,12 +115,6 @@ add_custom_target(H5WATCH_files ALL COMMENT "Copying files needed by H5WATCH tes macro (ADD_H5_WATCH resultfile resultcode) if (NOT HDF5_ENABLE_USING_MEMCHECKER) - add_test ( - NAME H5WATCH-${resultfile}-clear-objects - COMMAND ${CMAKE_COMMAND} - -E remove ${resultfile}.h5 - ) - set_tests_properties (H5WATCH-${resultfile}-clear-objects PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles") add_test ( NAME H5WATCH-${resultfile} COMMAND "${CMAKE_COMMAND}" @@ -134,7 +128,7 @@ add_custom_target(H5WATCH_files ALL COMMENT "Copying files needed by H5WATCH tes -P 
"${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5WATCH-${resultfile} PROPERTIES - DEPENDS H5WATCH-${resultfile}-clear-objects + DEPENDS ${last_test} FIXTURES_REQUIRED gen_test_watch ) set (last_test "H5WATCH-${resultfile}") @@ -164,7 +158,12 @@ set_tests_properties (H5WATCH-clearall-objects PROPERTIES FIXTURES_REQUIRED swmr if (last_test) set_tests_properties (H5WATCH-clearall-objects PROPERTIES DEPENDS ${last_test}) endif () -set (last_test "H5WATCH-clearall-objects") +set (last_test "H5WATCH-cleanall-objects") +add_test ( + NAME H5WATCH-cleanall-objects + COMMAND ${CMAKE_COMMAND} -E remove WATCH.h5 +) +set_tests_properties (H5WATCH-cleanall-objects PROPERTIES FIXTURES_CLEANUP swmr_vfd_check_compat) ################################################################################################# # # @@ -189,7 +188,7 @@ set (last_test "H5WATCH-clearall-objects") add_test (NAME H5WATCH-h5watchgentest COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) set_tests_properties (H5WATCH-h5watchgentest PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles" - DEPENDS "H5WATCH-clearall-objects" + DEPENDS H5WATCH-clearall-objects ) set_tests_properties (H5WATCH-h5watchgentest PROPERTIES FIXTURES_SETUP gen_test_watch) set_tests_properties (H5WATCH-h5watchgentest PROPERTIES FIXTURES_REQUIRED swmr_vfd_check_compat) diff --git a/java/examples/datasets/JavaDatasetExample.sh.in b/java/examples/datasets/JavaDatasetExample.sh.in index b299ff207ad..96830763215 100644 --- a/java/examples/datasets/JavaDatasetExample.sh.in +++ b/java/examples/datasets/JavaDatasetExample.sh.in @@ -58,8 +58,8 @@ $top_builddir/java/src/jni/.libs/libhdf5_java.* $top_builddir/java/src/$JARFILE " LIST_JAR_TESTFILES=" -$HDFLIB_HOME/slf4j-api-1.7.33.jar -$HDFLIB_HOME/ext/slf4j-simple-1.7.33.jar +$HDFLIB_HOME/slf4j-api-2.0.6.jar +$HDFLIB_HOME/ext/slf4j-simple-2.0.6.jar " LIST_DATA_FILES=" $HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Alloc.txt @@ -222,7 +222,7 @@ JAVAEXEFLAGS=@H5_JAVAFLAGS@ 
COPY_LIBFILES_TO_BLDLIBDIR COPY_DATAFILES_TO_BLDDIR -CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/slf4j-api-1.7.33.jar:"$BLDLIBDIR"/slf4j-simple-1.7.33.jar:"$TESTJARFILE"" +CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/slf4j-api-2.0.6.jar:"$BLDLIBDIR"/slf4j-simple-2.0.6.jar:"$TESTJARFILE"" TEST=/usr/bin/test if [ ! -x /usr/bin/test ] diff --git a/java/examples/datasets/Makefile.am b/java/examples/datasets/Makefile.am index 8b71cedad4c..d4bb6662bb6 100644 --- a/java/examples/datasets/Makefile.am +++ b/java/examples/datasets/Makefile.am @@ -27,7 +27,7 @@ classes: pkgpath = examples/datasets hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar -CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/slf4j-api-1.7.33.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-1.7.33.jar:$$CLASSPATH +CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/slf4j-api-2.0.6.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-2.0.6.jar:$$CLASSPATH jarfile = jar$(PACKAGE_TARNAME)datasets.jar diff --git a/java/examples/datatypes/CMakeLists.txt b/java/examples/datatypes/CMakeLists.txt index 47df1f89001..c77a71580cf 100644 --- a/java/examples/datatypes/CMakeLists.txt +++ b/java/examples/datatypes/CMakeLists.txt @@ -65,7 +65,7 @@ endforeach () if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) get_property (target_name TARGET ${HDF5_JAVA_JNI_LIB_TARGET} PROPERTY OUTPUT_NAME) - set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$:${CMAKE_DEBUG_POSTFIX}>;") + set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$,$>:${CMAKE_DEBUG_POSTFIX}>;") set (last_test "") foreach (example ${HDF_JAVA_EXAMPLES}) diff --git a/java/examples/datatypes/JavaDatatypeExample.sh.in b/java/examples/datatypes/JavaDatatypeExample.sh.in index a82d883ebae..fc4a62706be 100644 --- a/java/examples/datatypes/JavaDatatypeExample.sh.in +++ b/java/examples/datatypes/JavaDatatypeExample.sh.in @@ -55,8 +55,8 @@ 
$top_builddir/java/src/jni/.libs/libhdf5_java.* $top_builddir/java/src/$JARFILE " LIST_JAR_TESTFILES=" -$HDFLIB_HOME/slf4j-api-1.7.33.jar -$HDFLIB_HOME/ext/slf4j-simple-1.7.33.jar +$HDFLIB_HOME/slf4j-api-2.0.6.jar +$HDFLIB_HOME/ext/slf4j-simple-2.0.6.jar " LIST_DATA_FILES=" $HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_Array.txt @@ -218,7 +218,7 @@ JAVAEXEFLAGS=@H5_JAVAFLAGS@ COPY_LIBFILES_TO_BLDLIBDIR COPY_DATAFILES_TO_BLDDIR -CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/slf4j-api-1.7.33.jar:"$BLDLIBDIR"/slf4j-simple-1.7.33.jar:"$TESTJARFILE"" +CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/slf4j-api-2.0.6.jar:"$BLDLIBDIR"/slf4j-simple-2.0.6.jar:"$TESTJARFILE"" TEST=/usr/bin/test if [ ! -x /usr/bin/test ] diff --git a/java/examples/datatypes/Makefile.am b/java/examples/datatypes/Makefile.am index 55ff91f7f42..12e5cb8fadd 100644 --- a/java/examples/datatypes/Makefile.am +++ b/java/examples/datatypes/Makefile.am @@ -27,7 +27,7 @@ classes: pkgpath = examples/datatypes hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar -CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/slf4j-api-1.7.33.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-1.7.33.jar:$$CLASSPATH +CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/slf4j-api-2.0.6.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-2.0.6.jar:$$CLASSPATH jarfile = jar$(PACKAGE_TARNAME)datatypes.jar diff --git a/java/examples/groups/CMakeLists.txt b/java/examples/groups/CMakeLists.txt index fdaf95cdbd5..01da53c20f3 100644 --- a/java/examples/groups/CMakeLists.txt +++ b/java/examples/groups/CMakeLists.txt @@ -64,7 +64,7 @@ add_custom_target(H5Ex_G_Visit_files ALL COMMENT "Copying files needed by H5Ex_G if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) get_property (target_name TARGET ${HDF5_JAVA_JNI_LIB_TARGET} PROPERTY OUTPUT_NAME) - set (CMD_ARGS 
"-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$:${CMAKE_DEBUG_POSTFIX}>;") + set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$,$>:${CMAKE_DEBUG_POSTFIX}>;") set (last_test "") foreach (example ${HDF_JAVA_EXAMPLES}) diff --git a/java/examples/groups/JavaGroupExample.sh.in b/java/examples/groups/JavaGroupExample.sh.in index 1b84ed36b3e..477357955b8 100644 --- a/java/examples/groups/JavaGroupExample.sh.in +++ b/java/examples/groups/JavaGroupExample.sh.in @@ -57,8 +57,8 @@ $top_builddir/java/src/jni/.libs/libhdf5_java.* $top_builddir/java/src/$JARFILE " LIST_JAR_TESTFILES=" -$HDFLIB_HOME/slf4j-api-1.7.33.jar -$HDFLIB_HOME/ext/slf4j-simple-1.7.33.jar +$HDFLIB_HOME/slf4j-api-2.0.6.jar +$HDFLIB_HOME/ext/slf4j-simple-2.0.6.jar " LIST_ITER_FILES=" $HDFTEST_HOME/h5ex_g_iterate.h5 @@ -257,7 +257,7 @@ COPY_LIBFILES_TO_BLDLIBDIR COPY_DATAFILES_TO_BLDDIR COPY_ITERFILES_TO_BLDITERDIR -CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/slf4j-api-1.7.33.jar:"$BLDLIBDIR"/slf4j-simple-1.7.33.jar:"$TESTJARFILE"" +CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/slf4j-api-2.0.6.jar:"$BLDLIBDIR"/slf4j-simple-2.0.6.jar:"$TESTJARFILE"" TEST=/usr/bin/test if [ ! 
-x /usr/bin/test ] diff --git a/java/examples/groups/Makefile.am b/java/examples/groups/Makefile.am index c5208608a95..a3fb774c5b7 100644 --- a/java/examples/groups/Makefile.am +++ b/java/examples/groups/Makefile.am @@ -27,7 +27,7 @@ classes: pkgpath = examples/groups hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar -CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/slf4j-api-1.7.33.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-1.7.33.jar:$$CLASSPATH +CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/slf4j-api-2.0.6.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-2.0.6.jar:$$CLASSPATH jarfile = jar$(PACKAGE_TARNAME)groups.jar diff --git a/java/examples/intro/CMakeLists.txt b/java/examples/intro/CMakeLists.txt index f74455984b9..518bbf8ceb1 100644 --- a/java/examples/intro/CMakeLists.txt +++ b/java/examples/intro/CMakeLists.txt @@ -54,7 +54,7 @@ endforeach () if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) get_property (target_name TARGET ${HDF5_JAVA_JNI_LIB_TARGET} PROPERTY OUTPUT_NAME) - set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$:${CMAKE_DEBUG_POSTFIX}>;") + set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$,$>:${CMAKE_DEBUG_POSTFIX}>;") set (last_test "") foreach (example ${HDF_JAVA_EXAMPLES}) diff --git a/java/examples/intro/JavaIntroExample.sh.in b/java/examples/intro/JavaIntroExample.sh.in index 41ed6940d98..56b6e572cd2 100644 --- a/java/examples/intro/JavaIntroExample.sh.in +++ b/java/examples/intro/JavaIntroExample.sh.in @@ -55,8 +55,8 @@ $top_builddir/java/src/jni/.libs/libhdf5_java.* $top_builddir/java/src/$JARFILE " LIST_JAR_TESTFILES=" -$HDFLIB_HOME/slf4j-api-1.7.33.jar -$HDFLIB_HOME/ext/slf4j-simple-1.7.33.jar +$HDFLIB_HOME/slf4j-api-2.0.6.jar +$HDFLIB_HOME/ext/slf4j-simple-2.0.6.jar " LIST_DATA_FILES=" $HDFTEST_HOME/../testfiles/examples.intro.H5_CreateDataset.txt @@ -207,7 +207,7 @@ 
JAVAEXEFLAGS=@H5_JAVAFLAGS@ COPY_LIBFILES_TO_BLDLIBDIR COPY_DATAFILES_TO_BLDDIR -CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/slf4j-api-1.7.33.jar:"$BLDLIBDIR"/slf4j-simple-1.7.33.jar:"$TESTJARFILE"" +CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/slf4j-api-2.0.6.jar:"$BLDLIBDIR"/slf4j-simple-2.0.6.jar:"$TESTJARFILE"" TEST=/usr/bin/test if [ ! -x /usr/bin/test ] diff --git a/java/examples/intro/Makefile.am b/java/examples/intro/Makefile.am index 741f122989f..b6d6a1f3082 100644 --- a/java/examples/intro/Makefile.am +++ b/java/examples/intro/Makefile.am @@ -27,7 +27,7 @@ classes: pkgpath = examples/intro hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar -CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/slf4j-api-1.7.33.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-1.7.33.jar:$$CLASSPATH +CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/slf4j-api-2.0.6.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-2.0.6.jar:$$CLASSPATH jarfile = jar$(PACKAGE_TARNAME)intro.jar diff --git a/java/lib/ext/slf4j-nop-1.7.33.jar b/java/lib/ext/slf4j-nop-1.7.33.jar deleted file mode 100644 index aa8fc0943b0919f97941745cd623d5c20a0799fb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 4020 zcma)9c|6qH8=fIMS&}7@5Rzdm*@bLlUuzVTl--OOV{FYO)YK)}msCW^awD3umWpI+ z2!$*Ok$s&BqhEeA30mL{~jm;uz0`WMLd`=1{_Oq8dY zzO4?($lO5I@<$jW;07g34v?q>1p)x|Gynki&tYad=0*nkHnuWm27d9)=CxoB{iV1C z_HTgwB%HR6G#L`6?f%Youe$SX56_A2S3Y;Y%o>p){XEI(3cjCPl0-$ixzLw*GknEy zF+FFR{m3_q?3l>5>F925RW+I}eotX~E{$$--UCJ$FcQL4<3~?7>((% zA2VNb|6_3elsmJ^{->S9P6OY&HejAg%QS`B7lx{iU#d%D3rh9~p7ZMT-^Y!_%Qo@{ zOT71ai#IGJr+%W*6CoX>MapA68(2e_?;T`P(!SOM6*j(?lNU^uoO%iTkUGn2;F{zU zDVoYnkjl4deaHzY;wP4`qA#{(B`pq2^QI|Fq&bwBjHz{|??s&$T%*e% z8;mwJ+9fl4R#^byx(#_2UecZ#_*##|g9`5Apd)?+D zvfYwflbsoNnxU3pcDwSx-OvOzJRff(m$K6fMKDABa+kH;v@!D`gxgSAj~;A@ODpt1 zQ=sGca!IQDTA=dm2keaM(kD{a>;Wwx{A1955h3H)IL>*j5lhLp#pZt8aeKm@=1S14bp 
z*4XrnqP&Eo`$PmPp}9fHGw(I8m?|wf9>>8}A#RX7SBnSf82ieOH>Y`A?&{;+F7&bv z!iySly$FJ*XLMt;PRvKaTQa)wdG9>(O>Br=FJ;c?N2R2Tesa`^+}nFn7*fzHVQw#b z%b})*RLHzv<~%(Ue?Y9Oo!}AG&cf=Ma^uIPT6MC+JfqIp;O625i-Cr;>owPEI+LTs zT=g{)#)Ci!oaYB0pEN{bZEE|f^G)N<>^6yVs%B>uS+RLnsdjP<)Rd760D4q&J^iF2 zk#l1defVCveEh^IwBH5mZ zXeuH*cwGc+)c<-RemUdH^MxWrxwL;ML^CI}*S128hgs~=;l489QPw~6@Qt-q@yTMI zM+jDy2&nNBNzO|i&H79cDIG4-Hlt&uBI$WUEX=a@FG;^5$Qd|G*6R)S58e?Ev`|63_ zu+{mI=gGBkUIDs7#<_%d`Ictmf-Iz)P9c1sv3*Rzq|{7Bo#`?Otm5tdxOCP5aJCmo zlpfKg#lyOiOw{`f@7^;JyW~*7ZC_p6FIwcBD4`G1hyz}F$Lj>W5DdLsU0-bV&ss_g zjnlM8{>Wisba&FJb6q1kZ`k(Ma>dbpi|CbEB0sW-6B0qIN8jR8tooLUewiMXC_8ZC z5&t01f+YBUjQ(xL05RLM%0VcFJJMy~7&;c;VvI(arEWy#g2nJB?ES?IWe#%{9G$3# zMPbE@h4a#-gp7`Xwn5!6W=%d?=eQ3YwGa+X{PFDQ7S?wSd;EA0dx@AizC}2MA1#U{ zqaug_?!2}pnGl~a@lcR^x0tuUk|e%}U%F0Npge}5F?gh$*4;5yH8-Iyzevyfkq%l z;89|5HAYtiK6Tx5Un2X#B#iYvBaO=ZafckEq`(Th@%g`Bp4WW1)W8mN3Wwm)X8kpi26}e|~qEFwtHs|04ldkcz3ac$X){GLr z8v#*hofT}o4T4v8*GSd&JiB_{E`4NzWGG@w%utX#;6w%-JHP^O?|M&s#_E@L*_=gk z{OskvtcgR@rdd^8XHzhegU1SYPd}Go)u`o4>pRTU9CS@hDC4xE`>1lL53XI`JDcPh zE%b)W@i@G~Li4ESK*raVeXr^YdlUPshn;!NiMn*KG2EQ=Fa>YjMZ-aw4{ znEx@(;y~JjzCxrVtfdaoMpi-EuhDKO$WF&Y?R%S!2jS%J;=NfjJA?Um22*;g%=X^8 z-BpSH3HBWf@ z=e;g(aGS4R6$y&`9H^pg+$@@ZV@Of5eL%4o6Ync7ol_e7^`<|Yo10~%#B&LZR`n{8Eqvrov5D(2 zI0#l@S3>d<`H%Sf(W|^+{8qCsP2?uiwMl-fq7kE~&fOKXHd7LYyX^Cw}6T{|ooS?rwm8ZFo1}l!a1k@Rq0k z4*s3}-CFyv&C3>PF=Z=$NxIX@Y$x5`*0x9|>3>K1?(T|XZ`#=4OKz6Xiol#=4UrTjV4plI0SG)QL(;a<=|1 zr^q#=7@E7|UQ(Tk#H8lm{I@!1Tb*P7o_&6w&u^bS&+|U-`+VQ;^Bo4oFCYVeKp+5d zA!9$lWio&OV252qo0oJ zS<7x6{gFsMnRPh0uLzAFQ}8kwu|J;J^eb5XdWM;L?aGHs1oNkY=Z;CVpEaSTYW*3< zKYFB)C={r&ZN_z~&f2Y75O*JzJ-I8d0>JI)lQ=&qYkZ}Y47aaM>(EZv>bpV8>C$Y) zX6wdeqP|0zg<=o%Owi1$@8ZS)UGwu2vP`Q^3d;>>X^pPV+uRXaF%1<}uH zgm;`po(UsZ@9eZaOx2{498W7QBp;A?HH0J2IG$TLa754BJG=Zf%A?SL_9nYMnjAws zUT>Oz9(7<`bO4fO?byaQO*Oc&Hg_+Fo=ly0EFE+!@st#~v7-BuZYEBNb(k?ucX7+A zl5SqyUtugb>>ir;PVP~P`kNxLh^W%MKO#dBK{e`X(Q80XAt*4z{6%|$7^(e_%Wnr@!Cm_yE$ZZyo-GrGDjc0zo$)pxCAEjm)aS$)-R 
zTj=?P|0Zh9^TAdbtt<8`8?|m2`n*ng{a-;0N*MHfXa}Ii- zh7ck{AkOH$`nJwr8i~Qa};ke6t?JKg^dvIGLO1(2tON*z{0T z@j58$5bPsKRoq*uG&Q3@IYXddBA}+NEhTq;^C@hhw@R<8x~58Ro9uKtZ1>!&J27xZ zMcs?}qQ^_)W>clB%d4vnfV`*1k>#FmVWp!YcSFa^em>a{KS6e>sq!x%&mD77RvS9QcM~SGf4o_;)k~~= z)BwB#|6)(Uv8f8E)ddsr~={u))py3)pSokineYb~GhMra`!OQ)A zSQ7Uau64alc+fijzV|s}-f_nf%=m zsgC+LH%Zml^G2=NkX!YU2{B!TmNO$y4+@Rx|918X2&WikJ5Rjn9PlZz7+V&iIX>9h zVyC0C=6O#j2b?ac(gMp4ROD97gOC|o`n~Ew|r_P;gB&7_~7-+K>j|UX= zDbMy@R>(}RKAJA)-t6~EL3^2h@zSJ)eMk% zEf*_p(^+9<5~`+Is5Qjyms0{G-g6I+|6GZObq5>MVP zsHHV{96r&$J)x~M@iVRyZ#n3uF!|D2kQc5snYd>2@9OE9Hx5pS zctvZge&Wl+;V^auJO_3N+bL1zQyL7c8Z<_?sx7Uuu}&vF_~MW-t3u@jxjr@f>aqKc^$TCvYCaP0$ar;L16#GnO4_FfK%M*n9WVDo9A|Hvt8M__KaM%<=8K~j^NV1C`Bk{OMEdFo^mVnaM>_DT?kay?`ozEoMO@U6^LOY>4 z!0BhQ*Iy?gYdf9NPs5j%!ZK;%w62=Zs7kRTNx_)Q88Dyq*;1XmAg=Kjjthtm!|H@Y z1meDsCDz_Eq6HzodC4|uF}Rfv!>F_Pkp}vRsLssYsO?M47y2pOs?o^Rd>3$P&n_Yz z;Z|uj^UiJep+*NFP)_QeXO(fuE9Bduc0Q@pH6cGWg3U|$U(>X`9Sr?~-a`~po|hM- z-L+jrbp(Y7oT5U|emmf^#oe^wH^rz0hWH-`S9?_pdepE4#d7LJuiLrNRJ9D;535@) zKbaUhP9=8TZ`Y)jsa8)?9*cYuO@T4a5<9aH66TH%H;Nv72tNY0BkMdb^uM}0|2NUb zR-+V3&Ha~8L|JV!yCq!I&pZP|X6rC0z6~+}XR2`P7?!a9+SqsT8=1|@A_}apc{o|h znRlL@g;y9$197a6JB&SKzdwEh6QS&7*)8W4&(cU7gZ*nfOEIm#@vjdrfSG|cqIpHI zG!(~D|BCoq!K}X$OH*--{8ii+@v?p_OJi|tlzX3lE3Wl}S(=Mu!pt4B5BXcR=CyA% zP7dbVZeD4)71`IwxT1iFKz@`p=i&-=JPUrG=IFX~bnj=;VGkdjk~nYG#xLt)&hO z1auAp1cdrmGiiPqaS>rfC0c0_M+J3RA9_?zvUR>INRs*JR7m;ubp`uZF)X(;Bul&1mgA&F-uD_6Tjlz z4_emxid#!@TihHA-Ky+``i?=?x7`mp%z2!`Bsgo$8+0+SCZ zmt4mlCp;}TH|gnQKxX-9HlP4*==#Jw3#|?BAa!JPz*3x@=wMV(6J;D$8qRk}yapdj zV*@bVRm|WUS$_yVY^ZEp(QNiY1dtOqFgS^(X(bYr$ZR-MTNUEnEuI4U5g3g zl@cc9iMfifs*isWHl@nc8PFl@rCkuk+cxVHFsCDU>?IT)Dp()6F2Ot14?z^$NHdt| z1-eY8b1`tq#@M9{Ceo`nosBlV`qsl4vX6_)nY$veFc|Fg#oAk5&jAO!lJoka-FC-n z4R+&=AiLNMy8luP7f|D8zvN5PuCTXbiJTQ^~8u&8SYCu zz?6-b7`Tb2MGr@Gq2^K?2&G5l!p=PaJ6#l=!4;HUhl4pAl4c%(lX%i+^S?>!numP!Hghiq$+8Eb8_DqNsh#BfX z_DsHMPX{v}6HzRk7Hywice%8(xa!sV0#felj*9Gn7OWD*K(Sd6#2(k6Do>vkW$!1- 
ztw=w5-rx<^hXEb+R-01H)}4fShyVvozpS}xgkA~$akHK7E$dNX&C~vz-Z-n~>;-h# zyoH*yzn?-^(Z2Mq&Yf7@VPDuvBEzLMCmKF^F~$no>WStx;&PC>`Z8?=fu%)U5Gq4e zlhy(SHtc4A=|FSc}C+?o37^F;vOXi+)$=H^%nmtHF<;wB>Ms--eavR}zo# z0U&f*wNSrmR$|J8Z) zTKpl=%orkXcj^Un3yQ3-8Sl*kr3=YR-gU!N##B(xpRg{LNIVihgGe zSJ|KgKmZD?XvRs#T+EmhQ#&qwlN#)@f%=~D1_kFrN3tr*eG;vu61%8kdj&@8`E8{U z7mk+bNvaRT-Z_vyLB;Cdf2YkfM85X+%B0 z?dF#(^IbYn8F!fx&4IXh4Zf4n2z5zVh(G*Il|U<*k$r-)4gU1kQkGc#!e+BH^WV9oc$fO;3p%BOY#Ss>~gI+6<00pP3^f7j+Dl|%$u{L!J`Ap zfu&k3qv=lbWeQ8anL|I>K5^f(9&iA{Sx~#O$pNAB(Qg& ztjQfL$=#QWcW{|?YQd@}`KO2)}>}ywP2Ah(gpE4!A{~go2krj+4>BLb;3hxUuGtx0HHY z8O>^C-WM2MZM6)>_Vjf#hSUj}*W!ybGWL{Skfcd6t0@XLne9PNO*Fgo;=3oY`}Dd` zMX}Y$_w%<-tIcfIE5nbwBgu6(Pw{jffnPtKf8xG!M0nEYZP-h9cX1a~TfLg#xq`!Y zoS-3mQN>%0>ND@Wu*713h_Q573D{)D|( z?FrXvu`4rHK#gW(?x>jcyJ>U-N@(Y!Jvhi4h_05%(Ojb|8*<2u$@YT)^_v=?Yq*IS4)j4E@%B!lf6&s(%36Rb9m#45S;FgD!ka^$oU|W6_Me%{t zOjM#~s7D$Mmp(GFj5RGvgcdl?6w9Z0+ek;wx+#?HZ>pSKqNWo}K3GU-clxrqF#7$ty=5fkoE)X_ym5?> zZjfFKj)l>6b0V5dAW+4hqEEXjeqcG}XqJRt530q(U@s#rNC=^KVl#}Qv|0LM1tjx% zz?Kk1qH};3>=PL%I>D{A@txbj@aZx6V_oHFr_K^0==j4Y@l23OB< z`T2*(eNjhUlIFuO{f4E+=1<~FV(@bE=;tmsEN8zUoi{cJ;{Wgb#L^aor; z624=XCY@FcK7@D(5! 
z_+=~MP%mQ*u{ayzMp!YhqC-C;rd)?IRw@)~%FjUUs4f$skBkv6DkDQx?hP?kzGn*U zRF#pltC&km^8JR<$(19CsDg)^xzzVLi%-ZJgtdYJ3>Ew9Dn2<%9H_||jRXDFI!PLZ z@*(i%SL{5w6G%bkSA?c;6P}^S%o!6X7q>ia3Y($L;+rGYt6l2MD}#&7RvX9cgSlc1 zy_Hncl~j{XJxIA!%vL+-p*}?QvCbyV(SrTad#iqid%|u|NoQ7}UE^Vk^_tnTFxE`Y zE-vA{H#T`9JkvBhn%JUyvKJhUihAOwg4*=vHNz0t`rZ=XX%HMdUf_p>Ydp7TNQ1`K*C|#Om z@zHNKi}z*Lm?hvz^~kQDDg7+Jc}~<~7~vtZd8uqdvLCGbVBgrb1vrL6j*I8*k1^xr z^l`h3ZDMwLcb=P$cZo7{cG&{Gm;K}-_SH8a8XF-vcx43WTHirpli8U0{;h`=UN#=y z#4S1g<(X}$>>GEoJN|XoH(YkPHRw~R(U+R)%#K2`eh?_);9kt3a;a?K-+R?$h&?7+ z1>O3@Am({K-BKref2iHA_wk?2Yn>kbVO6W!O?8yoX&%0^8} zGEY~acQX44Vl`^oSOY zpGXrNA+U?>%}5?JJ;UZtP~5r7ox1MSmoApw>IXW^6-7Ob9^3~Qgn(!gnkn5kK$C%; z*|5!{ZOnB3ST7!4RvoH!j{TlKxb{3UH5k|xLwcd2C)!!7qn_p?GX;(kdu)&XcN6hg zgCqnJeFYflMpK75e>NFjZm!-DTE7jX4-AXY8aZvnPtnl39PrCF!{u_eTeC}FZpH59 z;;h|GLG=SdRnKI)$+u6=WL5RzIeXw4>%H_;k1{wsNCenahx~$ zX#gO%rzs_>_O2^q)iPW1eV&B_wnMK-_-9_T?C>wVR@t(jc}=tN!JZ;QJrVX^LA61j z4q5L#GVp6B+JNubEU&D;cy^2MNnCFMZl?72?tt88)@feQQnOPOw|EbEWjQd;Kc<85 zecjaU~A%IusOU>ZtGAe&Uz8xwZH&!J?r*lERLasjo)#l@Q z*XXRg2I30R1E7mwYp0X*%=iv|0WMOIQF20w25~?e<)1mlOa(MBHbsqS9E(NdjF%B$ zrBkLgr_E(-mo#@>t0G)LCyis-c%xsbv5BMQXs0|Sv|zTR(vtlbqxhJR$+u_2SO5h) zKxOShFms*33i)!y_Eniy375X923#3Z3siV3xb|}d^F31RL^x8EHR#uLIhZFUQylt_ zY&9bQ(t8g;NkDDVg4whfzHq`$M(D-z_+l*N3imB9q6)plwk)v7M0x!I$cVKA6#^~0 zu=$7V4a#Nt6aNR@p%jnplZ`_8!X;-lPP)k~dnG{lq+XG>bOEn$@OXtbr?2?{`Ld}L zttzE;Pqh?7EHxVhYP45WPLiThMC`jl2yFYIHEWPLc8U8@ZiX;C55v#hcq~{wDO=mUAo`Zych~gf~FwV^kS(x*&Vw; zRYUTiD;>>-N44)M;KP#sqHht+@<%GRDzm^tO`FtoUSY4oC~(@1+}2OSOFKm^5-Vz8 zi!&7jR53`c=}YcF{4?f~(!A!l=~1JFB$JxAm!?!KJirobFdA8w?HLpnM>}cX<3k}d zDqBkMw9Z%>56fbj=!l=bVo;JapQ{(HlvYfSLLyx}oq5ZkaliM(t1d2Uzcn7-N`Mog zi<+xqhqhr8ZDTlyvL?>;Ox6wE;}!j$`ly8Gz(+4JqVGyC+jD_b6Jb=IyrFpk)roqo zn@`ZLtUg~{s-++ysc)t8#UMC?Ta9i!tO`Yn6CygStAnh-7eA-xaeS`S`(9O4S-s<9 z=;bhdRnn*){&xP1YxNTQTJXN>TiphBBfbFr;qhbLkTgJZh;h5p`hJ>`%%1H+iK` zQg%sQ9_kGfQ%0HCN};iuOKEHt3Tk|I-4Q&u`^Mv#2;Yv`sO9%A5;cmCoet+5Vs 
z){S9VxX2T!*&*o#wFURBjB_Y;Yn@K;WJs~U#bSVE5_5_Q{iH~0Nj~Q8?dLW#&=q}P zuMv>e0Wes%ol7WvX+htPBdHHXtu>?3)jDp^MbFv(#r~&}m!SY=pNZ6kyu5 zV8#&FU^6^j<3F{T#{Sb z>po)cXA_WKG3Udz5#BmU(z3fL9J^erDb_nT>G*uWO63;o9f%g()wXa|ta$4c46+dN zWrvwvIy1+u?IAf-VM$Y zMGYy{5ao;)!Cv~U3p4iBUrHf&q?)qgJ%Vw6G;T50uKYa+5^GoYGaDXCx0dE##oEfV>bo#Q02H`Fe@ z=o-1Y78KPhJ2cgT!1d5vO2Ms!^5x1g>+4Z(8`QDb@L?eG{rIxd#(Q7xwa}Wz%sM-Y z><^ta=JHtwk0tbq2@MzSFu7^D4YY^4s4!T~n{ISw>?%Tjnd65EZ(;SjJIE=~(>PuH zNg&70y?2oENOaXI#}(v*sc2j*)&V}sF2`%34ABW$4^vI#unoc4S&GkIA7P0J11ns6 zzxdK5;CutGiG?1Zm%kH-xB!Kl z0Be<}>P#Q~yb7pDAn)>!$`Q>pj>d!jbxWF!r)UBN1_)>j9SDf_|AEiHZO(j!EZx~@jgF}<=S0-AZlX2NPqvy=bEC?1X%$Ok@ z`jORyinW8GmT4_4JXAWHpFXsnKCccNINL+t9VfG$ep+o>eP~?=yzpLAfBrCNLAOzF z%t>ie_W6OEoqX&8oSR^c9~c43MWvk>1Rm-(;1Z?i1!qk`x@ouj#b3octXL};abOILL3 zA7ov48y930QT6miVxtboPdC#6S`w6+iDT&~Y6K78ckorxi7mJG-ZDQSvp=7GYCojA1)V4LeI)kQRK zp&~Koqvw)MSdkNz?DJx!jTOw%hAkMFIH)2kYDq(yOEmh| z=$rCr^ll`wRxi%x!8|<(aYn5hIdS4x9JbKz%&K5kG8t_GTzZ6Uh61$pS?bW^_9nO& z6K;FMMbC|Q;$b8&t3HBp*nHze)br=%&5ClUK%gTzR+Ib@4@%!44y zK-iWfrU^dZ3j?AI0E5(CYd(9lp zR((`6o0L{rk-!;AhH*$@IL>k)wbabYX!h*+2#!W$#1>i>l*8o4yjrI|){D~A!=>FV zMH*5PXrZ>Espt1CIrpqxMVSly0M;(ZQrF0BvcR2H7cc^g_VFt(I$0eZ)qY^P?p}c} z(s1sDhg&bSX7u#KGH#8&glBh$s(<3%ipGY4DyN{8sL6r_yfJh~cFa7*vYk_-`gbZ7 zMZQ+5KE;nx(~W3YO`goNtW96UV-uKErfXK1;dmk`D3h7KRN_)Ok+Yk7`$0L54$rGB zIdHW(Wre076|Vb>cbJM-h0BlYXPaH7KuNpCdefXTU~Pi?ok@I)M<-V`x>8ii)Cx|V z(|8wceol@gKV`;rnRqf8QiMSQS4Z8FYQ&p7orw^EO@hC9xpvp0 z4UM`dw7m+1hGnI4vKmo$6E`f|N=N0fMN%&-t#-9{>OoNNb&8xd`)ca@8O z3J19IzDv4UtFNR9M6&+_51%m#*XAk{J(x`F!YVJF|v+iM*gK?SuX8GUE5u%Nx+3h1F`Jd0hvKXD)A<#3bYedm~SFkd`S zOTqTZ%|PmCOw~;rC2;eh*df9-#!k9GDR+XcC!3;>v7|on%T&z1hGMfo8ryshm2Hhd zWnecJ2s7TZq6|B@eP7t>^V%N6@s& z_#_<80+1-!naLWTJK=GQ$~#&Tr!fcJY2zG8# z@2qC?TGO7I_g+pJK4^TZDUYCX%XHAAhQQm62?Fd2FPK_`P0+LF@S|P7kW!$ z3{&YqaMTbEqvom#{=GMSW^&I5xMjf?3F%xJ-y>CP#xO9>5;((40Rc}fRI5DA?Q;if`c>Yi<}juJx0 z;)(ja4G5f%!xjO&J|ZxiiaHi{Mrv5k-mMYDq1{IXGC>UE&$VkHcGZmO3?Hv#J~|-i 
z#>KkxP^}q#JcCrNfmAIEP)+qIp3B~1?+A){e~(q1o#2mihHj=mw-jOK0&87ko-1Wl zVP)7Xa3aAwn<}*F1W~^amXzK%k7T&rWs>E?LKRRE^xB(sZ664!Eu7D$zA6jjHeYlG z_3lDY0KyvMH#DXZ)klqau*Z*D(R3Xd*IT+@;)nZHY^|oiqjSfv((oMA`KE+Sm zZx^q+O=63glO9`4j&6^B%PAqWq%p;~bfsuPZrhz(DYb7Y>7;DNWm^#L`BZy&jJ8#< zR>k>#N?8;~+c$3=krF!`b#TsHvLW)kX&?^U<`%|KHPm^I0rkj8&v5H%16@xct2wJF z3Ar?PAuG30FyVx&%ocMOAw=F8#&1b2tF$)b$20E0Y5t%P`oM+6{+pquqoV{@@&?PW zjBxe3uv1BN2h8n?O}9)-0OxG|!tPu_Tc~wi^fPbiQpbviS7r!#nT~j>xi}WrJ^G^U zhgMCECxmm&;-#%7`uC8hLWXyrtxJ4lioV0&(qea*L%3svuLVQ6$A_$4gSJYFp4mh6 zfBZUemmuzfA-b?!eS(KR)RGYVV6x7SKJ-K#0jWZOFj*9!f{IXOi`N0ct;YC)b)Dy= zeAjO!mC}AcUFhhub!ztaD?eww8F8@rj>GWm?XNzy&<8M~`leKs7B=Idlf3KSqvxty z*4b%;2lX~>g~B3Gx9dahSP|@jE_V+JFh%l){gG*yi$hyd*EGm+3&JPPyI?yuQxJU| zZpm?Zd#zl&N8#=JqEnD%&)>2Sq{`aUatrpQ*Q9EXT=mWwo*J4B^)sXvJOM4QjI1;Q zJ1+p7Gi+Agmdm2!wWiP#PpyM+xl^|uBm`xcLVJ$(bMSq0{Qpj?#V+!1^?IOBGl`0@%6;}FPU}! z$(Z}=a#2KA-@)3>_4kRj;*aztRL_$c`582Rtr(157?4h_{Jr*2BxFH>1gu+Kf8TKD zZ3PJl$psl7uikLa9w8qwy~mtDzCjl&QC35kh}_*Yd$*HI?%kyy8B89K<)174d8LXH zCJ?(EkU7&#_IfO!?fdil3;PGy={8+WVbNqiQ0Ke!MmTT}AwQGy=_M56oz*Rw_}1zt zq?xn2Yu9I1%-9taN~xRGZeNtI!}u$+%~eho+t!<7)r{9^7HM3&54}lg>HH!edOK7E zwp69pgv4W1_Su36!tEeuG`2Sl=BytaD9%%sodP#5y7DVQ z)A6^j=u^CoCFc1hyG2w|fy%>COsd^|XTaGx;8&);@YLUW>5 zbkp0Ya(8v3MMwM6IiSikWK)a(0tO=WGc>?Gmfg&oz(ZH1myZiNF4AgB7H?W+fIukK5tForPbH09&``m%?3A zHxR6gXnV20)gLC8i`>&Lw}VP;A2v5iv7w#eF1(>wy$(OmPi|0UU=J3T=Yr=W}(4gv1}p6IASDlB8g2r9#f)`==_H*@eTlt?&PP>0TbMn z%*o7zpwT5*sHU-@CJJL%3b4mBHTh1U;z>$Ph9%!FHuLQceo5%;kHL53dF3+AYZ58h zzi`>%b)!T7x6};*Q!9f%5;#H?Ev+YCsZ49_rv}+IBAwERA>S^Zsu~h(5SUCK2)7sX z&QlG#s3xaca$o9EIXPcaV*}w#6l;tI*P1Eu$wu~sW0zyhK5gmw`SKFuGaR?{`*sf` z(Ym`YfX zP^-mtQtMEcSwlV8Es<;Kwg|hf(K_JAWjSJIRyr`mFWdcK@Eub@s6k0w>BcPsiC&1bJ7mbC_8od3k4OcU5;N=r zp25FWgF##=12JRD4b!}?UTaMcSm34t#&Nyq)A22X8Bov zO@j@9&0h`qk}Y_~Lk+p62ShqwVQS$4ivv||Te+tfxjQ}5)?@#sTCf=*PUea*w~NoF zBz$gv+`mKM5K%shVlhVZx?$!SD&h~-< z)4C>0Rg(g86)F{&1=fiVo4DPr)1$vf*pRQ?p}sqT$-gbZFE_ee*49B~oq?HaMGobz5eyXbg6n!mNyI 
zIa}95)Q!=-E25MQ9;>?6>m4RE?tb9Pem>na@M)70g`;~wQ-H<*xonV{3pFT&J%NC_ z8>q5xYhU>X>&N&}BHd-xf>^ApxVakFtm6xUa>1T*fvpIyjkuu};A6#{fUieko1_qz zwO)m9E2xOKi|WvL!fjB%QL$Lbh{F81&?(E+xa-qoE(ot!hY*Vag3)h zCzRHnw$%E}LKk^&^X1CfWLHTtQ^+=uG)uIwgXje9M9xTyK_$?7be;BuqEZEloh{w7-mRgEQ=^^ud z#`mk2$#%Q~*0NaLv_IjPeZGF^lLh)T=cJP@|0R$NJqdo$gxw{G1?CX`3mgm%Z0uV0 zOXkB#&MuRML$t%5;Y7SoBw1f{KMblenzOIz(6vSxfFg=12}a2Z!hN6pXvoY7c5T&R>0=k&;wfmpR_TrW2@VuIJ^{yW-N z7DPOu2)9Dp-Lc;6;G9TfmG1$tEK#Ai$|u6ycv1Wkz+bKEy7CkHp`tFCQBMJ3YRSQ8 z8s?hRosRNfy2^QnY)G>L#hAuUamP|&X<@m$kW-f^poUz7Iq@>o zm`!%rKHK_D3R~q|8-?MG-y_y|H^}m^n5||!-eLFllq|+9w{99>$Xp4k&|J|4P z`9EXWSXSN0hq9DP3nI5+KAcL* zPG#l0)M>Gtx?a3yyCXBMpt)8f^$se8Zr~mCM#?oy0R~0R9~R%ae(kHa7p$gG_HOFm z91Pn4*t>nT?O+uk%*E1%R!V|xYaKoVa-_l`2;ibpcWYYCvV2YGwyF!@GoET#RVf#u zJWSXiI$07L*>#4e@y#PKhCgUZVr>*UvQP_#vgt3=_bR2trx;Y)!goX<_l4q}1j$Q8 z8k}epiAuC9Nmn>njvT@8LF))bePfiOL4VQ=@?HT4(H-%PNdnMu=BMdXlx9<=2LdaU zu8Z!OL4np?vrMcz+Y)qxbTnsD z6(e*Kvs9zvOamL+64aCfQj$`%z(L-^4#7aWrev>u*tm?1Qr{8Yi zhtCg>ncqGB=)nKo@5eClTlwMl{5QYfWlw)N|GV(%5BHoeRIjt=pW3SbIR90>^2ZtB z&-?y&HOx;yoV*o z>Lb-ZQUA-L>{rlVXVyPKJzq}+e{Hk>fc|ry{VV9NbFQDD4X;P%{|@^18Q4$6e@5iL z_RE)-_E(wMpJT`$!TgVY`A_E`;rzFP2=wxD{7>iC+Wi^a|2xc&@ct9#9n(MR@Dt{j zVE+f^zlZxjP;j0sS-h$Vq}j{6Il{eIUMm17ZE~ G>;D7q5?_`8 diff --git a/java/lib/ext/slf4j-simple-2.0.6.jar b/java/lib/ext/slf4j-simple-2.0.6.jar new file mode 100644 index 0000000000000000000000000000000000000000..f8913c0a8caf34877b66c316f2c12d3a39cf4a9a GIT binary patch literal 15239 zcmb7r1yEht5-slT9^Bo6YjAgWF7ECSG`PD32<{NvU4jG%7Tnz-ID9hmev+9tllOm9 zRGq5Z-Mi1(YxlNZ@=~B+Xh2X37&Ga!MR)CXWZoYUz64PXk#Qi4u>`Y95mkY?lH8%wj z{M)nQy`>h{liM-fs+3AWa|cL8d*qfezi-x-#Ba@^hHm;Jw*X;*4MhU3= zR~j5==PfcD+m@%MP`%N@AQoVlc7S_fmIdDU6U7b#6~(0MDhnvv4zN)@e>P56#tWac z1-Po`SIi^e;0(Y=Hk_t7K(Ae5`+spu>&H$eUKT9VG3{sGmXmNNb$|O@qXqp%!7fjz zB=>5KSB29Tyd{7GZPMB2@$w+}n%F#>V1DM36In4H7lWs26w)iNjPg{|UC=UNU7=C8 zwA(qHVZlKfc~!vKRjww8QqW)X4O#27j&!o==J}m}(qUMZa%4>LAwVlx)SnaDL^hbV 
zDN5c*_Kof%JEKfM%G2EH^1NQ43nEu}+VJ84F4(YzzocU}O3soAK`Pg-b*yOG#tn(S z32>eC#7HC>HsV`p^=?(3bsUCpd0biaK{!`iQq^muU5VZVhaK&`SM5oSvW0b_DOqy? z+m<_+&SaHVk(W{+x|~|-m9Y_~s~)t^M>C!x{s!R8k;`JZM^95y-4JB{wBP#7N-8nv zY%7KB%uad`rL%sVsIi1s}&rezj>fnwRT&Z+S1%U&<|{6GWxCn<(w_b!$YH z*Q>>>W9q3U8ty`TAcAuzuq0YzsRZ4vZ>zj9X1~#hyaSEcA=4f&j>N-ne5r~b?_2cU z3M3?`_8h-4u?zI;*Sx-ggNF~p&QS-UWbwG4Rl4h+faRqip)4kqr@}yhfE>YrfaG8B z34)08mhH{3&7?#%Gm#{{1P<%{|brx zcV>SW)BO_XKW8e|Uw!=OvLCI}JKCAkD@lv8N-F)?o$?O0F6PDnhffTWvovE8EJKqt zV%dg)l*RVIW;gLax!8<296K2WfoVxzWj7BRT5(sbnH zv~ul5^qz3zD?jkvam6X!U?aVkQeyeAl=r_i%+9KJ|c8X3IBOr!V!7N%Jm)@s*xQdxzME|b}qYCt&K0LiR<-p-@>yLZ&Q5G?c)&RVCr`7 zzJ$Vr&PS$4H8FgUeip^3P3b{L6YqzaSFu_}o*$U&X4k;4G>DPO6=$BTP9c_Lr7&B} zul8HS4Q0l8PBmL-^*WzQBp(xkY(%zyNdEc1fAa{~c#w-CLySFL|sL^mjP> zj`rW<>{rk!yW0W&13Tr%@37;I_WoFk$R12gNC~nJAt@LfmR`jqN~obNWH7Xj_#uK> zvN`H7p!Zo)l=w~Fn&~s;hWB_FD%{Y_hu1sHj^n&--b>NhdhMX%`YM8mJyzwq6S!l7 z>*BU^(?$c6u;ttKrK!AS^8m{dR4ZSedLjK&M=&H~`G~k`t+{!VluxdOacIocV&a3B z0G9d4#jh2G9-l;i(8E$g8SLjCgdtYLb7HMYp@2TtX?NTTd{$isc1IU7{S zUQrZG)xbc)g;z%%ml?6+3`GS_lr7kh&2tJui9bq2QSlf+6#~Mh+>p^|Y)yKyQjk-5S{a__*h}P%m%!gr?>{`z2w}xlLnIDzs&ywmr ztm}$xyy>P0M~mwA0hUV`QFq)_wL+P1ZJR4pk}GE|D8*SKmbik69K7W{qL$~eyofqFRE${lYLy{vB{;yUP_P(iFc!1MKS+e9VINemagh|z7UH=&O_g>dyG zDDJ+vMwyzPN+4ZGy%$G96n`(lh@bFY%^zFDe-Jz?+hFz5B2Bd5EuXPj?zrbWQbp$W zo*HAuX4M!8#7B_tBfv|;_wfD82yp+;O#aWAmgIkpfd2qq>5n|?bIeYOi(l5+4+Q)+ zXQ9)2(CaM`Ln;Y}o#HaW5c>%_?u<<+81r#4hD>(1r! 
z)WvzmPR7&S;}N7EsVc8sCsij|E@&oPJLqjW-mGyO+R7ZSd>DrCPFzXp408Yr-n>H| ztPL;$@)8oZVX#J+THWkWfSU@P8Y@}5LM=5LO`C0z2-z18Dm+y*h*=NzWCQWd3d|Ig z&pnLp!1Sf&k`gmBBZ}o^yQy1%vAV1Lkda?A-XZGT_LSr&gpv*Sua3{_DU&=cDm&BIriN+D z+Usu++nmb}#f`=p)_H9MC`6>|l1Hs<&TFQKZO#M=4byA_k&%(mnF2I|&r0VrjMSmF zR*F^apNqAjV;u)w5ZE$=pHoveRAz)f8xpUJGeQM774FxVFuX}J;PYBC5Un>up6n;s zIvy=*FW8iOw&-FQPCB(2twWfhBH*1MQ14=nl2HV^FCtzj3a(| z@wRsWGf|eW9p};|&Pg8u&UH_j0`a&3hZj7ZGuSTh_~g_kJNMeuX&a2M<~DjYl7Pg` zzd9O=xaUm|QVqP0g6PS(%eMQZf}1#ECpiD(0PrP=_&8EOQv{S~%=;tVUMy88xi}us z$h*dndq3}WH*A8>O^6lz37td&C}vWt*G*QJi0Qj}tP@gryhJEajAGV3_H%ZW;Cm92 zN?;UkSfv{h630GCsuOx(Wn#oy6g&&?E!Mb1EKD$*diQ0{Q8W*Tr@(`y5T=rcWOzhRjt_kJ+WaV_m}^8N2@U(z zqnX>w6T(yLUbE#68vY4a(lT5`tUIr`QSoD65OX6dExFq=S0XHW(U+<>1Bw%+CTR{M z@1(KL-o&q9!6V4kBap?Nt=df*3)bE4Qq+ zrygYCpM@3Pw`J{Ww@8?9iwGGFkxtyTYo_GOOb``S5A_WP-x7QQ89-}4?}5)Y*BKR! zOG}7sG8S)LeHN-bDQ_sPDdcXwq#d;$&xKF$rPdL3pBAb1V`))lxZVIxa*o1)C_rHl z3%Y<e&7~fZY1>^T-5p^n}}Y~*Dy2ITrN3@D$ughSQAJ&Ov#y! zQM8=*erTsxZF;Mf)oZ5MX{`Mo*PVKF`J}oGkCFj3QOk&L-kJde=7)F!bSLie$XTBuADq*f!0#WhT5?#^MdI z<*xSepx$|YL*BAlr{UpD2KW`S#5)^%*NSyC*XN|D>vcYM(hT{dPF!>`yCBY#&G*6A z0G@o~y8ewW315y_r!^mCj1Az2sNj|9GtZk&I6Wws>u`NaR+=xVy=nm)!dwKs!h!pC ztQNi80Tc4QyBMe>Yp9~)nn0HG7!h`Qu+v{_(yt=O(-WHMo>GMzm3MU<`*x&Bhf=L? 
zK(TRNv5&4W#CoX@hyl(e3i-*~5dudEoKuo#F0L1@Rw{vpV|xQ`Z6Rt zrrSLQURDi2(^xFnd@1G&lC!*th@v>%9I@d^701yQ4lfHOwQvd|iM11A+ioMi`Vcj~ z?g*yl`@W8W5$n|1UE@wS)<^O~HpZUkPBQ&BABBM_L)*}~Ps>dHIW}x{W2zpoMax7w>$neKa+pH&Vgo51Og2usx zxIo>iIki%@Q|UYCiNDjGIU5lA$Vg1J7Jj@v%xg%BO5f^VX>o&^+D=Z5CsFH?#n=flNYY&b zY+DMZJz(sCqOvQE9lE|7g!a<46K5~Pl^qpBdYBd~+LYbx`u9RU@{~^s1Bj<`HJU{m zsj>}|Iz5OQkLCw0{%0hjtyq=o{;<#fuPkrW@SlTyDauk?sGYGx-`IUw?vV`f4?V52 z08btJNGHZUuHbrc`TqIyS@sQ;m~x$0cR|x4Gd7pp7RCnKgLV&t(*sJ!I%zhld2O;A z_^P_yw)%Zs`eR~CwfK2!#YP8!idwwiOX1kvcGZ_nd+N;Bjb%rZbE(xMz+M)}1yjvb zfz?rZcVuF~s=02it_fLfENQk;^4+@zFU$p_{)GI*(LwrYR1JHFU1dUzGP}{3;pze< zB6KDSKAXtsf_{zAz{|sCyfxI+KCZowoupNxqhz(=OE8hN8i|VPqC9vJho+EdahX=>Iu~X;Rg`nl$OAeZB0YMFwm4$Lqmh)u5diVOa*$q9e6s2sDQ&ST}Sn0P_ z&jP-k3jiYYNgQ^!tjOs&@0M$UB!G&RrBVvcAnF2oAM5$`>USRNqq6#GR|qq5YODi)a*14v8eZ-Vbs*-|(%g zW6DOK?7r;EMj~RL`OPC?6yZ11a2Rv}&KHD@ePb4YdS-G+ILQyBqSC`3ZB~fY2tB`H zoD;JeK;)5HEr&mNiC9DrY2!(r0Y34b0IhjbQr%{kYS_zCOh-~^S#YZPMt&Pt_ZvAL zv?vx&o#Mpd?&qj&aI)8#32(J8r4e=6*bZ)fHvK#B}x;tOmW&2U&qA?-`gFaP^V8ysU5ib#=7Er6RvF*370t>k^9p*okXKs%m zgGIv08~*WZgqAjw7kUeXodl7Bg1M>D2IKf!Q`(U-@JHs@rfB1OAItYOIFD-+W{jy} z7Z+y@Zh#@G-Pf=Je5Tk2SCtEFq{_ifC@M=>Y(@=ob{Dx`XXW0d;V`E8+A)f;gPZtN zKHMnmLUGZei)E|bM?J__wdvYghQ&8k24i6&-!Q5aX!|Ud(P`X>83vd&-;KX5O32~S z3Ta^q>o?Fd_Yq36M4o+TJMW;;A!sBPiPb?bC*nrtknLkzS zBI}5P$$HovT0ywIis?9s*Mq)YkeQyk{Ft^@$L_O$t!wJpk+lZT-k4+%4PRl}I00d6 zd_#`g8oEZueq$1BTivCFy=)SYj=gLX;L7Ng4!_)YYJKBMBoNwH}oOHyrxh zB)!aD5JX2G40t%G3$MOb2$6?dYO!N?j7yM#vk=OG=+$l5Ny!JSIxoSm5!o@()g?0a zACqm=(rn&_k#EqG@bhm`^kaK&#~o}Z9EL6YS{;L^5bo3bN39iQ@qPy6hb$mN-v z_R!2VGoL_PRgCMU_Afk??E|d1?SSb33sZAL9NbfLGUjeA`wZP`Gl`<(WAU>c%cs&a z32fBpzQcUa*q--nE{EeyRz6+Ow^CMpm7k+#6R`=DngOD~n#EG7Icw}wa0ONcHU*#5 zAm{{^(?82-afA>ivwun`J@0@rR#VM7k}~pr&5v*9(mFSYb0WK8BPVQxBmD4s-KEtD zrEe7c;fUUC4K7@xSGL$_pUX;lr2t!{42m*3*$WcmY^zU3wrpKnLn>(Q4ShOvdTs2S zH1C(vb5oWjLGO`;K3pGua9Y$7QgUZXd8YY-wW?rYoTGR&o(G6ej)S(*NivXSA04Lk zL=3!zan|;%sA6A#1VW$|L{9u#)15fom(NOgMT)Ztm~4B8%^x@FPXj06>_$)0Z{X9S 
z*Tm@a{$Awb?W60*(pa5s3m4Y*O_$crSd3&9dv37ja_h+NJ5q$+A+_ z_kIw{#@O{}biFvu@d#D9b!F=U?B15WDy3`k6=6gh`3HEWEcr&%`Khz7YwHh@E=aG) z^0wsi3pbWomv5!(u{pqlR4MbFP?r?@i1kue_k~m6w{7@}0BV!;|@%VOJW|S9*7eXZD8VHh2q7sb&C5Bj(kl&D>ILL%B0N z`4xDZ-s}!^Qa+TJ?(PWZZ!o^AkI0x~a7yw*H$=Gu7i|Dd{9#_%#9NregO$y<-7b2cjhsQ zOzMF>oJAZ0eObhHqWaYGp_bkR`WYH>`1+4`AU+-Q#TLgTN{!RB?WJ}tnLMg|{noCX zqqc>UZdR)wGIV<>#@DGxBFE)A0iG34ZGwY&16)qUHuZ|xC_OotXWY<2SL@TOKr`eXyd^+^oQ4m#|gVtLE zzad)`Vps@1p0TR4*}`=Wld;Q*J@5eMHl{+{H*#9UX_hT+@AF#N2^*IVgXpIK^d5Gd zs%Rk&e;T3Wiq%1MEwSm;bO6rkQOcj*VN4u{-2o)vHwuvLROx#d(JH0!4QKF=!fsRP z?#o`JyHbvwJotV;0GKEttg3psx}hQZ-6=BTKb@iddH^77Yhz+=>g-_P^m3u~=V?Hu zI;0z3ANrG6T-(l+X54HN-E|KXZKjG9V{bpXplbDa2B}yaJUi*|z6}|Enax%UZ#o9l zo`4Wx%x5X_cS>mM)YuuighXxE%iP6*fdWsdOElDlX!lb~Bc8U)t=HSWC!>q;7@ytts@o4H_XO<^wxWCT6I$BBvccUR_h#Nm$6D-Kn{@`azT1_Un)XfyHSq0 z)S0#)$vBC1zmo zX&klO%fz*rRS%#fM4OC1Hy--4amglMQcIlRyC~WKU%=s;N}A-;>4cIt1#pa+Y z{+)t{v(wkNPMA4kP5OiBqGd*tI8iz|K>(O_mrfE*n7ebN*2O$#1#$aW zt&eOO6jkCHiT0>27ZnLC7Ws>Ti3o-)l3SA(&aFrcxvqJfy2rEyWkf)rW)syixs}~6 zBW_uRIWqcNWk|5^g;%A+SgB$QoPaCMVZt_Lxt-or#g8FsRP01`EG2@Z?-vYsaOL}O zNl-`a@4mrAPEq~HCuU70&l@m#Cl1sCHiO300;H|z7#*d{C<2$(I%;qANqXB_mnY!P z`|2KKj-<*3<1vU3e%0{plqvF~rK$PJ*_DOacsdG2#sU(0a7^i$E+lY9bv))N2dBoy!-Q6- zRpz%UeU~1$ctAu7YHv(5`G%+D_U#y{s~EQl4t~^96i!8^*{jz>t`!B0BKjd&ubA`;pQ8x{NP1Jo z!nvEwXRX{4SCct08Y~c176d6KjgHxO!ZbmrLaS}yz%$I+(^ZfhU(h3Hu_qU%yVh1! 
zY2}wG2&I>sf3qTg$_Uu@v!1{YC_nYUuk0A^FJW``Nh>T@Cr`AiH13ncTLnhY zI7>=#ij;W9X;tkxaB3xZeY#2!-q35P$~6kHTYv`<0zA7Rt1I@j8D=Z5Zq}+h65kRk zF25N#g(rbQ2w+(*+AwJi1=q$-Kb zv=c4|hp@Kl3~+y-3Mx^)Hx@IoA5j&7KS5L1CEN&U>?|L3qx5r~@=XpMEAwIqv9~1b zA@zARDn|;L3A8*OJ8}jT6*G!(RONz4@+PB85T3%sLMF(a9TAnIcufI6dM!adH@XF@$wRdM)u^{-lD@Kt5@*n5h0nR-XZW4iAEegJwMR` z5$1tMymrc^Ta=SHcPC70`BT2AO7Y@K+2OY!yd{3#nFDHOqRAUB9I3`UkSOpgXzI-9 z5ev-F+&20CHxQDbHwe{p`^JZElzWXtxeVCNZ)(~LH|}qM!(%xmapJ4bIwT47I30Jz zz)q#ogshjrgUezL0?*Y%ng-;Y9?v1UtQ7b-;xITUp z9^A2igPT?G4O;~XWwCBRk~>v-#6rSTj7naQr$&i7dvO~Z|2f8!HgT38eQXaWa^|=q zYVg|1I!9?w(EU#g@GL+@}Y!-VXPKns~oPrNwr9-Xl8e zM;*HOTjG|lJ7QnY$KPA@#hm1ItAGXMUgQ*=DJc_2nai z4=bk)TP@aUEv0|e;<{J_oIjoLj(d=?OxE#^(^@*OU$!%+iEHYl#sbq!Q+O766>u<3 zEgKb}I2@yXT3iETc!Hmzk(*Fk$58P`JzVjJvB-PAjp{<5R-(U~_exngILDbakGlJo zKsxue)D$d)yrfpd^!&>28>!6g=P!>kUO)*F56t&gzQXlEf+1t{;TSYFtRKBt^@3pN zsAy%+kzZRHVb=)rq^Ci z<>nr$WHHKaWL5?OEU=%SQQ^x>#vN4s3&j~i=wUM+Vnrn$U|}5}c5ck===Z#G&$ub| z^d)_mk|^&k+}(ji6wb@u#b1>vU*ae*i|}oJN|ue~Un)$N$;sZE+CjUiPVznQYDufs zJ=>AhR3r9%rQlSqza;8oqj&&!yAgaYH~4B){WE9=ufL)=iE9uAVYgULX6J+59<*{$ zA2cmYa;89J27Pj#ii1;Hv5~urc`T?(H_zIR_A}Xv&kFvkM;O zQ%CgSU0Y7?(%e_mW`=0vT^3(e!|93&t!2z|>K5FFtaq#+zg8)MH7Q;nqHYOg=B8xV z>GT#W)fb4F--8?E=|mWLIxTHbB35#T!se40~sZ8IGv#ZV9+jfncn%JTE?# zcan+J(RW1Cw{AsoG03pWaS43kIPljzk=$%}DN+;GP45`CtPq06d?UCO&r13Iaq|q9 zddzY^Y0XA%z(2HXA*s3&bf3%Gw#IA>)7$dwwAS<^zRC&9mrg>m=>dtdxb=o0`n)<$ z*7zC;FP-B$w+8jR+;U1W83mHkk!@B_3VxRHtTHb+;ENrbv$ho7?EBb{SB+ts6$M$A z#d|p2r(9NjSJWusiO)Mb4=aRl)t|4Qwavzr>_db7`Y9;l_v%%jtkqTwCoVqAZ(nez zc%4P~n+3XfNF;uEV)juB+WV%}jFas*V+RF@43W6tfuV-g9qnuv)j<2USxdNMRxN4B zsoAXnO%{+FoTmuLkBT!@=u&F-^Ub(Md9}QJ|JYV~;aSNM=-!_hxzXrp6$boT{wNY0 zLU{+SXbf9k1{8xDlo}Qe$^_o*TDzlX<%!+*9z8c+;)Za%y{vQ%TKVRNqu0I1V4uza zFTicC!K7Mfdi2Ax(4F<>;;^Q@CqliV-b@X8JAkarX)+51Fz%4RI}XU-H#?3MTpQwv zfI-jpdx)KSeM*Tb8Sjd70>eV=Y9PCpaz@-b#M%2@e0}0EZ8)lv)=( zYbz*&(b{d=D~QVVjt~Pk22kdHkZ({+zWn&yBOiH$W~1%|Hl$^yf7%-(xpcTBO2Iwx$i_ z@$SzzJh1Pk93P$9zJ{1<)$L-z_A*`8CRRlc`cQsBK!+K(+%>`{Q6BfURH@_<&e%I5 
ziB@L~y;`@bR?txH=D^3?(UvC}Lr?E@SHH9UzS+XNr{zL~f2lXb{oQ7Z`hRV372X{U%UDY)ro>Hl3RDK%q;^$yhZF!B=+x zxFRexz=!Ab;GC(o69gJ%RhR}lt^TSo6{VKEpvODdoDKF}6o9ga$+@)&o!gpNF90gZ*Eib>Fw5+XR{Qh1TX zqV`KTQ3%J?-geyRlr$L*C}@*W)gtz0xe+P9;BCY*qJqF%D}K@%_Zo12>Pfd?+V@!@ zlj3BCE)R2K(3 z6!!~lYCM5{M?T0sXkMfB9r=GN#{3WV6MW^S1h57=1|XIskyHHqI*qB} zzMvzsff@Q&k)@mqhlM|hG#}%IEO9Le7-01TlnV{{#+jfnxxR}P^rkO3!ohzNXWukLzb-n~%`x7PUP?E(Swm+nlS}&{zT~x-*u_c-wB29m?#A1wq@QxF*}h$h&ac%UE1DXkG4Y zJBny8+(j4+Rf;nfa!{4|tqn+usxcH9qDhne^|0vDg>VCljZP7_R(fQcAGVSm>;_yO zpd(&n9angE$@(w;U;MMN?ZqwdOMr@_Y*E$8FL zeW@OF6G~`orS^OLV5h?S<7JzwS&vP`ASwQz^XJB22E!T(`#2=qgSJ1 za0vzG7qp*-nlR-aBPU|_4?g<&@@!QMe8P2B_}~H4($6TQ9n$)R-)8`}2PL~0B-DAz zq7_J^K@LA2Gf*#er~-WOcEBJBrxfx7*Sh%Kqeyi5fG_jg{22t=D{76iZyw zVr$ZBH%v*mb``(_9fqvKV-3}3Jd314yX?vqkGzh^f^;=ovQOzR?AjZD`lAS~ zjaKxv-wU!eev53n7i9k^C;ziJ`bWj_pEbb$w{Y3c)|%eU+Nw^e%L11PsqI9KR;rSQ zGnA@t5atre%uc-1dPrqj7O$x#E@pEs=W>N6QuUataZ@?lWxLIt$?C9qw$7bn}BS3a&W6eX#yS+bDjpx9PY^Si+ef)z-U|UC3`X2bD()*?6 z{@*LT|59)MKfB(}!PXAo;A9SPl%k~`l9rODKa;Me`=jif?vJwbElGx9209>^ZOr_G zw`7gOis4}_7Z)#V47kgY#y<+s5BtR5u)W}oyWbGgancm0}297#jp!4^F{i-g4pIr3&$xCL698^ypaBo zOX4T`q1lC75dAQsoHmvNqgz+YHGyq)zu*IL9bLU`>20x zSR%+lPs@u`evc|*MjOrmwS-b;d`V%HnB@oalotlUP{G(H2b5^e`k;Ov+wd1#_XRR{<=f_e(Cm8mhu0G!+*Gf`+K;*uEM|XGJlE` z&HwQJmp$j-{rh~X4^ndsK>lFKa4fRuAG5yy7{ej|tTv7eq^{><9 zU*U$s^8e%d=ZyXL7Ji*!{yIjge{JDMBIacHugOUsPF& zj6D>#+C>hZp6;*ynw`eIWGKe6XE_Y3>o?gt&NENWyL7|4c8hc`kqy0!&dCM5lxCI- zj>PlbZCEYQ8c3ueaNKzi483Ru^W4o-)-Ltj;tqS(^9=sAF`X3n8pWZgm1!c@*f>7o z;TYuQPD9N=q@AoU?`&L=lptt)y6uAf=KyjVm2Tr;-876Hhj+zEVN|lRZMXXIxbsL9 zpgJm`bXOu4bc%(y38{Ty_NRIkfmFex{JRq$)X3Ow9x?PYO-nGz zG6?mlwt%VwZ9P|UNUs3SHrekV7-_0Kn9)XxPWc6!u_l>kpq`|SJ&vO;PKNnwbsfVx z?VS0LuT1O`u=> zZ|h+CKO6%5i$nkB;lFl&vG*^8qm>DZ#s39|@DH4^i?NN<{{@Zo544%Fm7TGJDFaxmWQ{dS!fX^JJk-tywu z`E!!>?&aYR&^G8B=Q&!O0BJ}EP8m#4k+ti&&)6wb4O*Wz@ip;)_rjho&8)IhcWf7{ zwbYSW6+&}G`DCD4>4nesnJ+sNkTd8)Lo*RbCi_O zqawF#H~nz*u#}qjK57)k4(<~!PSIZe$79&iB@$y|4PwSx2~DgaV=Gd=)-0n7FOdHT 
z;ol7V#bfSo_$6BoWB>r={};m)jSX#$%xz4iZB0#$9scH8W!g_GEj84ksfVdS_am~f?Jr-;847QWig3>ojiB&T5T`u#sl;;8aBmAA0RvkSUQH;0s)JQEr8?c3DX zFt(GoW2WwRDw|%FXxRbKvd>%~TAH$yFkU*Q`}Gcvin2o)Z!D~F(anr@B+NH0znIjQ zk`(qPR`#Epm?w}W9TXMnC%ju{Dw3xi(q~CccDzy?R3$8j93|mU&!ECNY2&QWggEkw z=n>_{G-U_gtP7u9(K!(XN$D7h81QY@0wkR~b|&*X-sD`JoU`EI13h2gFls13TusTc z7WNz12*@47QKtg*@!FA30-Hj5CVkU{55YDWeBb8N!u01itAsUAMhZny8-L)*glCOK zj*I|l$Xr``PZ!CM)WIxHNl6$@mq!{Kq(;};KRB`$k(oM)mP0+Gxx^PAMpA3Wl=yQ4 z!z$ytA+7kR;3@T*%`=@KeCfKwH|0&;?-!i?s!&_7OaP_MPU)Gao%~ zLUIOE(ve<;cVzOm+h}y)$D~{}5|h6ET9>?v;LT>2%o(9mVm`5MX&!BC&@88HWFoJF zkSh>UIiLh@w`DDB4e@L2_<=bw5VhJg^h_F0633f~H-)j+1gaahBcLHO1((xDO^B?z z5XnWIe=P-?6b|2xg*>%(GJ$J>V_8Daa0PTBm|&$PPxeT%q*qfB=_Wy=1yzgFE>nww zJGo25t_7_tYFmxIdKFL9g)8cf^CR?(=|C;QRf&>xF)b~V3IOJKZ_Ye4Ls@JXmRdoG zZ5d%;z~GZ%NT~(QDoO)WV1#%Z=V|f-?)m%G*#11@0l)tk4Y{sPM$1n~KKD+-9#II6 z9d}60JQ@Fo8UcTB2&R^`e(Y&Tdhu%+jHGRrb@CpJeiC^-ZGR4F@oS>48zkG#fehO_ zGDymb`q__eBm%a6$gQ|-V>c0hP_L98aK{tw9TiC3fF)8lkWTU*5jQ3Ot3G&=mamDc z1;$q5T4?z!kQzxl=r`z`*E5!O%C@!J5F}sxHu!o!hG~cM3BJybZ_8j=JO2VJ}DVDv(2;Yl-Vejmd74csJU0PK2!5O5Y)?jNe- z8uSCpE~b;CfWB#&F1Z2*rLqBo`l(yj+nD@RV|o#M3z*!}Xs+hCj&KL5SHFLkCEeaI-8yw<4^`Kk^oG6#&G zYko&@q*FWLMvx+!0uXQ8JzN7?9q?*I^rS(F#x@1cT+Ag0s;*&ZSGIPhB*JbN<5J_byNVSIWR#?0w63NU_S?N2fj!^1t5_E?G^#uqO2CnfRIQ6 z_v?TYOl^;vmIK5SK|?dZhaJu6Q^EgD-)3Gh)gA^&1bEokAGsfe=Dsj#fZ=MY z|J+$U^JkESKUl)o&CK^5?c%Bo@E2H))At<@{sSHw_}{nk{rXV)pE^w&ewG3g6iWm4 zt4a7p#eLn(eZ71iZBYSCg24S!Ap2KlpZZt?#3cN`zgi=w{qps+e_z=$`!xLf3Jd?A zh)1D00H+EjL8J;`_=`*QkO(H<&>d63|9zkuFkAKaom$`o<568a9$$R=5(ix*f(p3$ zTNR*xA6QXxm3A5Xsz3j9b0V+nbZ!*5?|vSnrE}gDBc~2$t6C0&$!|lxoHS^u$GWuQ z-~wmd*Z`$L!uozW=>l>ptNGFNUDQnn(%DKSPAz9=!PM;mQ`b9KEr)~SQpL}H<1FHM z!LMn+_9wa*`t~H}=Eq}>FFdgu>buwacV?g2!{(I0!@c(L#crY|G}U_NngDAA!b{3w zY{Djt|7^#Ew|LdGaJw8fGK9|koZeBhiTXesiBdFKb zA=GXfD}XX-hCkJTrPLu*Ysu96*aV2zXnyeqIwo)5kkstEtrHSo#v-~7hjJVBltgNyMz##Q%lz&h2!^%-JSPafLFjdH(!!|kEg8|j0ZsXf?{ z?IKPLiFwAAaaSKXsv7OnaaWIm+}&X1nc}G!Xm#n@<65`&>seuGChGCC4#8>yIV%85 
zuFY~IXwUU)6lAsxxUgbxsOPndy$Hfv1X5_462}!{LBeDD&o+yE*wG-~0^XDCCn2yL z6J|F53G`1(wR#&30vH4Um;nO-u>XstD*feQSp99AIxA@@&B-JDfs;vR5vJH#{YBws z8KJ50+aF@le*uaacH8NmbUsvHkZZ1p=UM0-^}AU|baeF8ABv$3AtQvfai^}!>r0G->hp01+KX`} z+B1xk&-_9X{Isf6mZ*L+4jr{wwy*RiuD6%j*nuxhethJ)JQ`&cNQtT`OBA0zH=Bb{ zJ08ZR@+i~ZveaOCz7!wM7d3=>Nv@f^bx$3x}6R`G3#_7D;+uxpEG3p46{R} zAv1OSpMRsf?fCOIff$$0=S`3;1k>{>67swvl3Ceu+x4mjJc}~vSj^YZrC^R3xo7En ze@g37BwRGedv_j+nkmFt=i#vk zkwavV$=+~tnC}weLBhCN>F?r`V;-Pq4VX}eYto4a?bxA zLB_E%f5m-*a-_^>X9fLKA;LgNu!%@qD%yO(sA?d%Rc$cW1K)EcR>R9{w*fd%jIu=Y z{^&lp;~7U&e_sDQgWCDk1{i)L23wNnEJP)RD$eGcSOFEeb-?_lI5wJVp+o63;8OgO zq^{p^P+o3;xm>W0vFn+oXdUq&`7LZx8X+0SSk#I)Ls%pac7Zttn(RY+3g&=Ezh3&_ z60+V$4a4)#oakg9O_4h#r#ClNAxbirx6<;>`&v=UwpXQTSbvrk;X50RNGiR%EPGrd zuBKI9dz0MvW)LXTPalIm;@qEMTu@N7%Lh$h->mta(6c0QTasL`FId|`dmUOS62=b+oD^2X(9G6pPA;rbfuEBfuobTle3fYU(WJBJ2filOz}%C4Y7AL zg9}J%*+4-FLP^TYE6elm6D3A81x!9u2Bm^~>c^uDg*b?s8fD%Dz;6q2UXqz-WM^_T zFnc=4K6bs0U9x@LJuTS++yI3rp*2~7mNi0$tZ6z-o<>kck0Us-oFtAYA?)^U*df0F zGi4rms$j#YSixi&3o#LPr0v-#B}3yup62L|<4nr#aW^Zpkih?U`aRK>FWZZfPgB7Nn7 z24-i$d0vPFA3X_2HC{#8>LpT|9x}9=k{8+l{gg-4{2L8QHHFL@2+2xA1Fe~}wg*Aq znnXS{g1Q+Fk*iIAT{Zah#yX2c9NzD*VT!z}Fj6B^Z z^6hy~@zHOpJo-%=!5=fY^u00$E#A$XAUZdz7ZT~XDUth6xeUC*2PZ^9yZ5z2=PrGA z->cD7daPfR(HDNEXR~nsGC!fhl1%yq+yG8u?7`;kX1Xz*&RUVm&5B7mm6h>*?kGKp zBRf&Pzou|9b!UD%H_T*7UwGA8rEx#ah$_dGp6f{qIsJ^P%~e#Se#~Be_jToff-QY8 ztgSF%3*sl#kvL5+Ee7%q*M0c(H+JFUkOl~lSVTfGwEzZ^UWUlf*uo2n^?p@d>(Sb5 zs{>Gshlv_7pyrz-5@1z`_-8>Oid=Yy73yCr)Uzv=iyKGls2Ufj_7|x7GewMe--%c4 z*GF2QT0~8K{I7B81yfN7Oz;P{;WTILeIZl(ReakS4v;^6r`bg_55tQE90us8xrH~G z>r)U;3uIS{Ip<+k5$mE;u>Gn^;1W6DnQ>wnpqYBX(tBW6RG8QnJAOGe%O`*6ewjSQE z9t%b{Z0I9mKr@VFkqzp*zaCDL7lKbR=}+S*klo9SC!PtWAu^!}fcSu*ykV+n5D>JE z?NTQQ41=xss`134K;Oc#UV-o*dy9v$TTd+V{Zj#Yl4h&8fdc>wFaQAYKSu3; zLY{*9j^>6k`VN-=iEkyT=r|%9qxi5%WW>@fo9qjX5-|H{bM9ri>bmV^>E6KiA8lAQ6Ix?t z$!_s_KK?%9IpUmpJ6!tN`3cmG<%J;(i{wKRa81DYZOUL8p4C@K0ELirGX!|r-wQ4! 
zSDPjz4s9&-`lOesp9!lQafeYV){iu~0$(h^O~-B2UYf>m_n{>e3prwED2#wA4rvVg z*Sv&&2W5qlfxFc2PTcMjz$B>MVvOs%o0_1xC@U-JMsYgk#Pm~xbqYbFRO*_+F{ac^ z<#*C7l?b2aU67xjjh}g-IaQG*NP@V1qY9+Ss`ttjOH8c1jcFOu53v?y9*P&!OlXcr z{&mU8(U9;$hxZngI<<9VjulkT>&yD7rRrqfW*c@iRjDB=j(4gyH_pz@E|L@$C&#DO zN!u6J7s>MLB`4{PDuV74P9%-q>k9@jb%XD4GHTrQ#E=W?l_axl4#Xoqs4f$(LnrmY z4UvJU?hwKA_Y@LgtkjsaC*5(|a;2)p6WzV(t7#n9Q~?2OW#%0a%8uH>Rs=zW1$N>i z_tJw1am__8)qzMJTEPwX1RF@e>4oXx#;HhDWCF`1O`JZ5VdKyGXu%OO&ix!^lfW4d zW7^B@QF!ERu`EtfGfqJ9Ih+K2Sc(%GWW6K12Rcw2eDmQJ#)@ZamKk1ihRB%Tc%j#0Nsd% zfJ|Q~;Q|UD^@y83`c~yzUoxUrLnI7Of$5)5tQ2L#tWx%1BEjHPa#?Sp~e2 z6~dxN?Y$v3c^CaSsh6)5Zsma-&vL&a1sCKUURRczgi*uZ?z${G_!SsbgAy;Pg z=Z9^m)`l9~Y-nRAc$zNR{w0`Q#xEIOxy%$A|<#T5;yCi z%=G*gPP9d6Hz%WG#l5s?M`q3yY8p^YD1caq9JTPGVA?(nzF?CC-chPK-NJl=#(z6vAVp z;jqG}PM32BJ*#}N2Ay4bwNnAY)J`53No^O&>75#6*97k|iI?Zd_5`YoOyy&$a4&Y@ zYBfYhg`u-cL%;qRm05L)9r8riMX@YkYW)HMr+X;A>@?8?|@(_lRuYpNT5b2wg#> z!v>hq3W0{1efF}4ptU(9n@V(ltlgj6b>25FG4~G6xB2?0h|1WIK1AnDuthepXL1yL zCfGu0>Cy>j2auL$P-@m8ImJq|zEyinJ_6fDN2rHNk-RVo=-LC-`ac7d2x63_aUaQTr2wY^fv{Q^?k|F|r?nZ^tyQ|}sNL00!j7^{o^R)T|I`&9G0 zzLSN=uh_-%f|AGWeU!}l3mmhvDCKEI@07zow(!=~RHx0ffFRK5)iAz-9(8<=p{cj6CN_kCgP9EdanVK5= zfbD?llwWum92qRj0UHJ(P@hN;_@<}c0b`&_r!9K>M*%#K*)NS)VQ>F_zR~B#H7FZU z_v6-wH=WKqZ_chyuTLO7lo5a?LNWqmcR&sPM5rW2)sQ&AIh1vqP?VG>{+h#=-<;0` ze(&*GxK|0%9&!rwxqjShY64{p^7zMw={FkH8fDrdj5b9EnJTT?g7$(ENdJm*%q$JC z1ZX#R*kL0|Q9}>1CY!Xf>@;=OacP?Bi|C6X#ppl%EQoJpDmsltU~b<+*uX+13`0oL z)l{;oQR~cYI`z$?Wc|y7dmCg-${sttC^S7Sbi2F3PI;>iTGC=BYJZm{I-#W*U><(s zj(};FGzG<~Xyi~ni{lS&QUU^_hemKtcE(w}R`6J1njL~)_$^|xMmM*CMsQ^pLFtKF zs9MK$m1(EOTVsPF9YkN|a*5lB8NJ-kv^pbj@=+`*MI)!li7Bv-Fzsq%t}*8pY+i3t z6kFI5BouT+voH!~pDuC6(>{zPWr#}88=|lQM0<82jJ)Xo_W+KivSi47a zGfU|gB!mcyR!ie1%!op$BI2#QKQW)!k@Ds|a>y&ri zzs%weh4*2i$V=9sUc+O3g1ZDGaq>3{H%n{`DMd_ z`f+{=-z~QXQ%g216_0X+*b5w76Uw10UrwvAR>9HPi-H;g?@HP0l10JYpw@9r9K(A& z#QS=d=gfDRqEl>kmDif2z2qJrd34WbBhbR;GpnCn}<=`b#u%n&-z_s+9ZXp+$ z?;ic11FU1(Sds24Sv~WWf&XXz^PiTXu#kX}zMYe?0}12b<10%^#}-Kdg%>1|c1)9E 
z@wIYZalfx+ag`h*OfgD^AD@B5`&!>O4!4fZ$d&Qt$4&s-O#idVZVc0=MibOPVDQHI zd57s`rsw5&){m|(pKmoW-yqGFXKJH8v0R*m_1M$t`*zPE&V=>^-3unt!DUPgu~4R-1;zj=$c z+q#aH5iepqcW9r`DSbzXq~wGdtlfiXwsD+7-oK=sN?Id#_E?H{sh+rCR4|5)0{ZNp zsHL~A9}!F1<;)@E+^w>Ew-d0?_wm%-9hylS?|Yd(oo{TNW7q1W&hjxvEJf7+MfwK! zG(=~=9F!y;PF*yKZgRbp<#+sQs$GxJHv;2!uHKvEBj0fVwF%8u&=wC$DrgHkWTW;w z)&jYC5_dxsoI%Fg@xJ7T@R^sDk_yfVXSN~!N(FWD76x?%7`|g zBlvT_K#2G<31vS>Wp|+TVbn(0fPtV|MHe6Vm!s|x3x8vd@zxhA!cbygs zhcQUd6to4c^?brzf}14|PUNzQwxjXlkkV_3r6&}dooYJ`p`5vY(3Q3M@4Q#FRhu|3P#B}Hy^d2H z>&{6ATP#TD<=XA~YYgR^n0vTrdAz2GJuJ31vo&5&7;9h{+x5)_p(! z<(o>;*nt3u9Qgo;{;{PN8sjQT3@&>$nH$w+7m+ycbSs*2)rN|t3|g;SiKH~Stov?# z-r3DV*Zu>I($V_a`IpPgNbZfn&B1z|?Kl_0HBOg08y;GWU7_zlU{}{?3C>3!yqz#! zZumMl^^R^IzRB*O9Kf%GaCwoqJ!;$CpX9(@YnPO|2bLf-`o1DqyA-=L-3*hrRW-dw z8+u(ma`bvscTh9LPAI!sArOC6 zQ5!k;i(yd&{loM1)m@HNFJOEc1%6ys24{b%sy{yF4LQ=g^u!MyP!MN--6kVuzXleFBF?F~}ETtFfe4d-*fm4wf{UV8}T9$j0mydqDi8zfxM~BI-|DAskbL90y{+cO; zz7*GgS6ZUB4%YfkPG3P!WoJ7p9pRNS(|ueBZA)b?~z78J*yWW}bFA zFrF)CG=^O<@4w^2@#ZimZe%}=I11Wsa}U)Lz;w_J>L7YQt`Y)KsR1g-*z}>Yub+ah zI~gjxTKqXJp4`aE_f_RCObsR=X=Uj5lsy$^ zo7|=hA^J7o4AMPfF*fD~`Q^#!^OYTCGO0(}Fu*|O#O59BpB5PDic&w{S5B|+Yc1k` zcX9p>%So8p*gAa4)>owN-=bBRVy%R%hB7o26d20_B{E}?liP>@jDc@P3oha=A3Kc1 z4h_}VLL#Lu?s9S|YNax7P)=*z2+w>GnPqPJ+C+bv!7(>^4{|T$>@i7(d|jutdrS2B zW_QW?>FN42!|Kn=(^MDGHiZ|uzicX`0LpiQR>@n!5t+T@E_A*9x!& zqv_=7D68@Di6@#tw8V8Lk0H9z!FYp66V?s+#&cgQDtqvD6=vgMb7S@VcbI^~;i7p( z$=`-pu1U>rO|XR3LK-ev6P(lm* zXDiAJvL7xFYkY)^S8+<;RF23Qb{)khWO!^WJowm!Tc?0l>3|#isd2ZOpPb>icS_(m zxNmvzTCEa1nxFa+QEeVCGfaQ1=0Pk6$kT$}9ldcf+bZBmzON9>4tR42jh;P|hWyDu z%a9yB@6eU5bxw9uO;cXBNEheTYD&qjP1^)?XL^M?zf7A%Q0V0SaE9ld#p=|ieyp_;B^HaBSj_jf< zFj;0O^wDBWM;3@{@DDRP6Rm(h;R3*PtKfUlHS>2hFg73&+b~kGTZiuBn59@~Jz&WQ zKU`oEknr>nwe{V@nh_9^!*F|uTDp-AD}{MvMnP!&p=sLP0w`Hp&ViC4#nfHyYR0WV zq-HUb!F@97QynWU)Z^)DSZ&Ij|2lu8D&>}*`Ry$F_JodfwTt>R>00?acHTmQB0B+#HWwaim zj-Go^vu5Ai`V-(U5YXl}5EjopKxz~*Ie_QOChR8S?7OAvnh;C9Yq3x#L08w^8}o^g 
z;B)g8aR@qa-rN-nJShHpr`GOUaN7q^xx4yrf1x(_VSNMwe(-kt|MVUOM}GY1LEq`h zJs4#+6EWGO>Rl7zn7@aEf?F%uidGC9#>bKAvi9({p>}7_F6`2TyV}I{s=I4{LiJp@ z|M^cf?0)ZZ7Wb7N@xuiGDE_NYB4h06sBdcg?^*sod6GhPNH^sL)IYW+WC_xEgrMJ$ z;=6rAN2JjLd@E*RMf@QJkn(^ubeZT!`bWk&ppop+8ygqelo9*EtR%`&dRC1Rv0Tk6 zHK|w2bQ)|L2UaHvn^fMm*y#x&g)*0~GS=TN-H)>GavU!?pMLY)z4UnlBt_C;C6=v4 znqdvEy*c0c;hsi!zEeLg>kYsk)GUR+Sucltuq|KvqVh@h1UP&cwqC{wRj(r_^$9t= z)S!N|sxfRXco(kw9`FDCEs+K~@>*UcpYpLI<{(a_dko03a zYLw57nLi|Y%Hv;HIp0O$x{~^?_OCsub>(`n*=PoJvH$q8gUwQvTUeehva=*to1q|} z0vl4jcgm|j1gVv6hMN2mV~1XTT{%sW8XW*G{YXRwPHV(H6>aPJ^Bowgw_q zV^vP1uh#rgTi_M6?pVctD;P6{Ooz;HfL?*Zp!U3x-o30DBd3LSVzuZWWl^t|aQjYM z=30n#Kinw560*ii=y%fiM^8&)c#W$(zd+{>yg{#|Ko&Y^o~?2f|6I9GpH}>0r1DiT zm+VvBE`Z`;0#{SYO+WyrG?>H2EE4oLR#1`aAPVhLL8~9iHSi&-SZ<`n`SZ38nfy}6n zE7;}8o!5SZLDYB*tC^lItn67JCv8s%f_WTs)Zy=YAJVY;tQb`{t&K7~i$Fis?}B*C*vZ0CwTj#c%w2xS^Ma*>&3=4z{XaOHYf{w?2kJF1#y@)I9`~Hqbm+X&1GqiHQX_p7J zXc*wnZCC8yUaLf>ky7kqZk=+w8Q-@Cj2yEkM7^nSP4|alWfM8|@c}PKd zDo)QE)E|F%l;>?Vgv36?_ar}_^k}Jhd2S^|0f>fu+h=ZDv>*LvNtvJW*K%HF17c9$ z2g#x6fJuJ@m7{v$ZpmNEcuDmzu(jqHI#X0Y{*iy&>2i%JIV#A89vt)^h9iD=KDe>-aVDq`=b6GE zmgtdMO;5&{b)s1d$jJ2AJ7!&|biIMe4)Z0);k3S>5<`kFj@fX4ryn#>WG@f{=|z?5 zh3ToW`9J`&#Di=5#AoRV^N9=E-3-+!4KTUUUr&~$xkF9_69s6yspzGaLUnGYA)+xI zK5M-ipS|@L_Mo#0ona$ve?Ac5C=m#XIPU$%Z8d{K;r73{w=Ljg-Zc+xCT9QnApU)F z0{;eP|BxQ{#ra{lnp(_*_qo9}s8^HR7L<2J^PamKbErR^gMjqdrfZmUCP*#+~o5y?S{>qTJbYqVUe|BbXf@hYU_RClUQBM_Ov0 zP?YbQ$(#{T`QqQp41g5?+#H)g)R)3omxACfr&L*th#*wtf`tj7?#wg5?4;@=<#8sn zNzH?a5o*Kw2bt$Qf%hOaY>;aD^oozIZ~}2sTzt+srRM-EnMu3?dRkr#bkBo^jQrvCV2=;| zcNgik`1hdw=8OrOksRxh<@Eq$Rr`*-3hV`;t=97qX87*S`3Oh_>4ps2cvH>DqZS;k zc#$V`R$^bQYiER39M`$>4U7m+M!cuEI`o8E`~f!v$C*AMw1XG#RPr|fydJ*y8^z=h zatngot|U)T`3;LruYm4ONDcIq!;|B_M|L9m4KUmu?C(HViZck^0dhB)m^~!X{LI(Y zX*y!6_&4IwVs+tz1hJT7qu(KLw`rlrsF~mB4K>13W+XJ3>Z78YOf?|nvDa*t z&EC=y<4HYs_tgtl4U>w_1Y;PuH;eQ6FcOm=fka${;$+y=yvX6N zO!_MNcq+{&$ItPkH5<1U0y6RTZODT(5|^TMm-bSBN1f`#=rjFPkUkltL&620>G^@8 
z)NYR(HV3oI^5+*vEp~=@rVT>z9r}R8h8dxKut4S)g51>*GwcPNF{g_Hs*N#5xLCmH z7CopOdrpFeB{b+=M4{i1Ol{!ZUV{iaTf%jc>yYW1koXpn`H$gnl{N5p<9oh7iD#VR zUFh+nT6H4OQxT*^DCG6t$lRLT2-5>B{ecG}Fm7S0OGLJFbn{Rsux}z|4WEm&RFL}%|9uhi7?qU zLaC;`RBAucBo>=~>!hAKtxPaOKWN6Ax@>`EFgtdFb_|S{-L|d&CV*^6xetoJ99O3< z#DhKXS_NYSQ{Cz2U_?#mXW0)!W)zYZ3*&$+4#T9_Yg0tYRpr2xMOO_c3T{wuc0`-u zV$hNEV7_$|(akIF;vxBPsWLp783WA;4zbyYowm-Se+DuLyoB3BOSgXRU1%OwPEKiu zR?4j68MK}$`@K#mnqhXI5g;8IWC^ZlZa^uM37Nr>WHKq(lnNV?8j#RRhvdj8HI5CD z;a@5Wn|}&KY%&*Wd9T;%gq-l)KUDylmmCFtDk!KUTw)yP!IhUSP;wBcDO!RWsF?dDy+0s8Vf9he%uSKB^?-*%aog03VK5}Mxn>>wcvH=yYEuZlP&wB5g zos~B0?TQwJXSnTc_)FVUD_9R0)q2aD?oY1?)O5%lnfm@-Q9SrL4pW(ejZahMQ)vjN zXRl-;LQjRCG>U$B&0RznvV|b4lt)P(Ue=orFi^X+0X`mdv!}1?$J15vWK>y?0lw+Z zjqWk_9X42NlCqt;g*d{SjemaE(b;|a^X;E*jbapO{p{DWSN5+Y`hSmF%IaGi8~y!d z&f~AAOZLLG4F~Il4YR8Duwk;o=0zGxWE18`|14#w@qi`Ub1U}i(27{!p5WBR}?{u zzNBN--qKT(3Qg<2iprHj#qCx%)Sb{{Adgd@RO;&N3nsXs$&VWnvefsHA!Q$1$$_-VE-( zf*qxnF!t$Lg$53GgWGkcz)>v9ndb38ST3L8;jzFkMug^tBLEDax@DW_dZt$WL7&Op6YxVpLk39LW7%5uGC4PN9^Z4a|H_b#oaQ+M|z&DE-w3)&143u zu*hk2y=--1IFj}}g{~kj-mtQVGU>A&8TfRkbi{iu{a6DuCG%*~B6>{>nn$w10vZSl z#j6rva)-y5t=2C39kLJ}>^2K|7>+K!DxP#JBhIpNisEOG(*D&)U-iUuiZy(&il!yn z&ja>58}nbgD2*Huvgb&m*0%}p=N*Hcp~4uYi+MMaYWwH|3AbOl&cQ%Muf z5}_6;Q%j(^w(JlhgS*^i$$18lp* zJYWx@Wg^SI5Yv=+8Tp*I*OFk3rV(rn@CdM%#`Ral0UMK;9;CT85o$MPFvD!oJkwR# z*G0psj&Gwehj!{ll)|blaIQl4OjJ@CZ40kM-pZ@MF|;qKYN3&4&=+%`K}yf3J04tY z;4smUuKi_2bC@J(Gm< z7k$Yo>beNj_SSSvy|FWscr zQy*g+2mXd!$npF0*4-6)Y03hO-7#)U@{a(|Z#-)>&P%G@E~(b6xBqJ~#(+=4y3Cid z7XL~!{r4Eu|BT9itaS=fn2`ilK;TtVW1pC%nW)1Hhd{Lm^MHudB_dU4Ve*PPbJ41< zXt&o1_*3Qq>2VD5T4Ls+#sqbKkj>0|G!EnX{rlD?5RM)(EbJI%MQ(0>iJ^*E7@<3n z&PfhaKlK7#fXm?qRB#2Q&I}g3r=ZSVwouyd6OHu~2hsqy`1$uv9LDq1?7FBpM^_ex z*WOli3!{eBlp@nLLDrT7&(qq;*t#J60}L1HLtj3-zMU#-CofW+cHNH|ve`HZ#}@{i zPqE3^OE*K~uwWP$%J=LRd=4gY!KK>d9W#APgj@Dhxtv*()58kibX=1y3+fxG_qPxj_CPY1UnSG9>+M zj8J+#``PVOhG`4>P+sr1gghVmd$j!5e(V|=hn?j@`J7MSe=5qYyjNw2FLvyFW&KG1 zzZ9j+|6Zt|sPI?L@Bg9e9m8yCnl<6xZQHhO+qT`iZQHhOcki}s+qQPw_Vjz^%y-T6 
zd}n^ts?1t-Wo4{fnNb-L_Z_^h#+lM~^uP)VY52$t%$TLb_`{PY6Z)h~ojb`=r_|Pt z8?;w?r-|V|)FQq~x92(|DTW2t&RKh=h$4~hp1-CqB)Or*71i{>C7le zux889&?si__n!va@Fhi*~Vee6YABA<1 z*JJ#Vfu&uoCXNh<`<^W7eQx;bVfz@d2JeRxE#3z6)Rl*%ot2p+Q>wqR{Q2GlqAerV z7ivJ16}fbYWp1}swH%FMT#QDcQOXC+9`UFK%xt*xwk{!%_&nXAA-y)xU6ryS)vc_E zn>(RnF=~yopxr?Ayza7B*?>XXe`zU7WS1D2hQKK2XWl>`Be;oWQ$TG>{8}TiOR*v8 zO}z<5ALy3FcSUh9SC@&iS1#r#cFM7U7{T(=+`c)EFtk?hEYgP%!KT5d?A<9D3aZR! zcNoQ_;L%{bK;06cDRIl_{QV^BS-yh4Wv(&g8s8!Ij1&TAj-VN_`=}>eISaHJT3)J~ zud+(;idoPef&2L{0AIUeKJFeA008RG)c&7m_dmfi|DT0f!&6^L4dYujk=fc~LlEjn zBL_$lQW!EuEdOB1u%(%M+$dP4Q-;nQUj`pe7Z!N)Z`#$bpg5C-=u`eAnO>BjG3*6@ zA+{~2vyx1k_R|ujpW;IPj_cK>bw*-l(2bbS^S0y6wP*JA$5q#5@q}qEfZv!^5R7Vn zxK|n+8AP|tO&Uyo1%OS65^yO*w~$@PE_L4l@D2&P*bQ*sGk6X$yW9a1Wtd_^xq)9@0n1 zE=qtJgpbq>SHKFmF4+rDA0GHNzFXpMEU+)3TjZ{F03Xsv@Gee358_Ak?lthWBEn8X zKcCf&oLlZL(4T{zyFJnuHk5mz_K#7>@4CLn8@=|IRnXKuUw6C3@!s(?7}Pp!Y||32 z*2zg#lYApuhxDcKJ1gqkwaUugabhZ}#(F|3Dii&uaU96Ngb?RD(zU(#E?P0i+3TnF zKNyEK=1SZVQm}Urygd^LHJz znuoV-n92GbHP+o1RwF<9wB$9p33mv?sSL@^rCbm+l~Ps@E2QAfh@O-dl($cTN>->j zdPf^kQShX+=8O#4$rIZv4AIi1qtk(2cu$pVJD0@G9JUEG;7XsoEDq{na7%S1uUJ5~ zC*ZY(<_RR=QP*{r#&dJyFmL17xL1{7H2Y|H4iof%au59>?kxEPX2}?TO1!gTO5qV*lS{igRnj^AH;*DMs*X zNXP-Cy7nY}r0Z%Ql!L@awRo=DNjk1%O*)Xd;a+71TuT#(dHldH2}8n;k{CZK6@k~$ zf@SAb1uvf~&DFQ?GB?*Jqbe~fM=Pmdik~6%h3Mopg=o9-X{?>b@@8?$o~s{E4@}r}Z}*bxcQ%-! 
zAkU6WEIxg5dq$!Je~_Ew!*Lf7#8%N)FZLYiu~dL8p1K{_W@^R2Eh;>)4`CdaBg788 z0Bn`jWnXSbG~ncYAe>!lOy*XSJ$S`*yzuB>qQgTQhnBD$8!A;bTl>naaoPo*NW6r0 zCc*ejU4y~c8ZYdD2IZv|Pz9ok$aijTLy!IT0+_MCI1a9I1Rkbn!GyOEcT;MK9o1%7LTc3p4TQYWVeOml0HfcYFE3*GO{dXNK42!@fs1;#f^rvw3>%UI_UH6P@RoE8|$0D&w`YGTN|JUii z>)C_R2yCL8==MfGg_guthW+6%EFznzR))j+?#iIPvIjW|DyrmN-d4Q;uw4NNUZDEH zx4JJxuVl(@)LIcy(;H0w3RO}8D)uTXzd_J`R|SqXW|ePoD|jqw0W5Mv zEI?V%frW?K!H@u@fPupbx&5!kdWUcrqXCSllQxy>eBFmhXU=4P{+2*z#8R`^h*pW@ zeJJHFS0vuu(d!_t6 z7MMs>+De`AJnpf~@GJD!1u8~&X9V43^5*XTNK&A36&@`D&y!0!ya6?}@V+c&ITMz` z7wq6?#ryi=2}=dvv%jp&2ljb$%1IV%v_l>wdsBjmXTRm}?7>C%xxOUzU+ndR5fSk} z)-%Mo0Ao28*FVBO$6{MqUJzIyInG~RJr47=_g)3D?*rgQSa+9TxifB8XM8Zzy6T}E zE|A)Lf74w9-a}xs^@$EL*5XeOrZ2_8HROSt2vIEh;fY;R;sL?~@0udx9hf2u!1598 zkb>{E742xnJ};iI?yZ`v28c6o+=x3;KaJoYv*+>)9NzV{ZI)XM7H}9B^my1WHUHuIrHc3`sifz`jg}R%j$S7 zu@5Jsk92ZVn6|AC!W4goSlvq41gV8xFuOu4Y1dJchh!C^!;W=ryK2p*?@Z|vN@W9S zhQC{bz8kn)>HxJuo=vw5i!Nx!@xaN2DkuFWRF1w7*~$=df*}~iT&WeOJFnO-H2xBB z&IM{+*I3`P!^RVCbIkhSUbVqnr(aW&Ly&9Jsy0S1prg^bbAxy zYNTE49w%z}c^LrDb3%<#i@meRlJn-+O8n+#MVo@%%&aQy@+LH6uD@aA?r1cZ7mh2l zN_I6D#> zXk6PG1LoHaU9J67J2DOGNq&fh1@wrlhYl~k4g=y>kfzc=ZO7P`VoydxHAf6 z<<`7NcT`Z9@TY58z1Xy=ob>ej3M^m~hIc(I7ysE%^C)QUrBP!c;CJRcOWpbq$B_7?q^+`AXzd z>N*Fbj*2xKdBCmVrqy*hVUCPb@aZbKqyKZ=xEcLK9NJ0wr*~2WbS(RvF@4V1nZccd zisKW`KTo=qAG}Z_p{ql_V-%aTA~a#8fN~Enj!SJT$Q(NFJ|^4-ND z*WwP|gRJre8O?|M>WtQ!-hKf7SGG1MD0t+ubWe%L7v$JC!ZaG)7YWsgA*r5(-j8ME z75&faXufaK>BdOGdsz>7@zcoO8!DG>n(P3Kj=bCe4{rpyuM5$enZQOx;;P9F%Lk&0 z^@`a$cj}k7d}`~3Y^96VChN=+Ra_)XocmSiBbhjKrPneyblD!DBp9x&zySkf6KrZ* z(U=!3YEzmdDqC&{9& zK5v;J4t=n@0qI8G2nWakJD}bV+H}E@HzIe+BFi8u$D;`F-ByGwO0-mpLO9CCIIrlgbGjq~gV%x^lSy3+nZRFl^2`fWQjN=gl@`&kcL1 zxz~(4##qY$yMv@U0mXxaYB$p^O6?AzD`BCrnzE>tHee~rAPrH=(R6bj^~{|24_3JC zW&6FI65;nUan;nWme=y__rDnFjN*R*Wd4}uQo#WL=>EqY=fAfJ|F8x82e-Q+jLc&> zqFP94bCD7!_^Xtr>M|{Y{{&1H$Xt9KnJb??NMZ+Bja^cif=DVIg6@MPjo`PS3Ho;0 zo)|+`GIR4qYt{Si($$vd42S3S)??52=PQ~Y$4A~i2Hg5Z)bKwUp(O`=Q2+4-0b5NtYx*DCE7fRCcfan( 
zUp7BM;Jh(9IQ~8k4oZu&;fMTGE#g2<(y2{L6Y+N{?dF%JNw=W}%f* z9#f?<#PWr|PYGtHfY$DF>NM#C2IhPfzIC%u=P0ooYkK2;^ssDDYyv5NyssDx$twh110uXm<3!69mOFh_3 zVU&3C7F%qFqIP#prrG}ACB^>6i*;6`m!xH4$EVp^AotXgcR(cFv^^{z5Wwpj=TIxy zn8bQCGtbz2YWFzet znyBvSIzNV(25i1QcBi|UJy06Wpuo(Rv>l8e%thJ(+HXN`$K;*(be|H}5r}7*@se_TT8l8CxK|h}40W@U0)SzeaPkdrp(OrhRtYmpo=a>!@ zNEv(G;#Fnx5>z~P-;i+YF_(y}UI^O9(fa&>fx+@S2DhXEuEx_Epe$_2)(6R6IrGc` zQ--|h)G1-F|&h}_a`uuXr{ z_58)H2jCC|9KVzAIF z5BB@^UuI0wWI9mKA2a6XkKK&@|1Fu&f5=aNxKN#p-JFz+jhr3Ko!n&%Y<@!Q{+nVm zIetbKkO6+UhZYJdEFvuL<$3oPd#ipcI(Rq%1A_ov8JZl*xnjdr#+Bhi)x+=ozW;Ee zE(8k)>Kno_Eu`;A_cY_>+Fk>ktx60GYGU5TQ)sN#){e!QNyvr#fW+Dy`t)5sEc>vFIQMW}gA7Np zJ#*v{2*GWfypLA>Jr`P=!1}w;C~IegPOrWgiX_#oepWPdN+AjvctEhceXu`7^v&M&#ua7D*I^CB9Oqv zlV)Q4N*e%lSJI0t8~eL@FYIV4d9OHlSf4Q#i-H};HY~L21QLuuk6TcPGJe`@I9Owt z;}t`Qbnf&UVghr5wC7)>FM~8!IR~_DEA0R0fc`f~OI1tlCn)_ZgjgC~w9gJf z*lJx9OB^cMqukJ%A)o*PFkIE6P>4OK&ww;Ly{xS1`JC^>=zU)bZ?+nwx;>?Psp$>u z4e5<;-N%%XJY;prr7q2JI;X40`}K)8edp`rh}ZA)MOFw=iL&Hi)Y6P!V8C5l{x&59 zrxkr9Dx~FB776Zpe8f}nqdP(aX5+@68{L7ew4lrYSI~m4jC`oBEFUMBv>+Wb#f}12 zYJkxSB_;${dPpJVJoIkw&yDg6b zszpQUkP#$bzNEgw&G?9&WoU8yF>9e8?G@zMoit=ch+L-<2 z&)l1C7ClE;nFzuz1jSb#o+3|yP>MD!OQ>nU{C(6rCTBFq1N56Zs zV+6_(AGadp?L|gZuovsl=uTEc(VVgSEFnX_Je9`aRh_VhEjg?AI4gAVgoa>n@b*e# zVsFC(Oq+jaJg+5X&Udn#)QaCr>`2h2J$WV60{yz+2j{{!ppebzPW_##9-zf_N*7Ud zj5Q=RAhD(_VLMl@elM|cn;0qDMy(z$cbgtD+{qD+X2I_%`H68osMR7zeAwIh) zD5orKx%0x5B^QUw?ys5R$jXc8NK5Nj$6ei1TN{bO?;0nb;-dH5+3Iab$P5nM*z1sG z&12+nII57N=v1o_6pR5Qw8xSh7qknwb~^Mm8DL5?xyL1}{uE3E$0BoQ{0${%y53<6 zc9tr=DW2A0!hUBwn5a`|+4*YgbyV9|~-F0D%Qlw5=H;?3sewO7P*drJHIM^ht zm(56W8NGek8p=ImXkd8kZunk-AYXpDZy~FW(2#WD-#Bw%lFi?=hHF}3f9ETdtwq4v zXVLqw{l`rsZoF4zC8+$lz}G}xY=&udTtEpPZkPv~P^O}s!^CCS{Q~pv&CkTEfV0cj z9)t!7`HGQlS(rZ`$diG){7@ON3od_H)z*S?!EcQ|a5<27EggP{?8fk|@!i4JYvfKuKLdM9Gd56Mm~>`zrVBnWpvvx_Ki67| zYK%NBHH=%}hcnGbYg)sNd&VbVjKUSea1x7%F<{TyL)Y9BGx(M^Pr9DbYECC0imvyB zEL$0lU?R4S-flm9;>~?3w!4NMvow0g4r`gc5f*H|oyYF#INDwW{(MWEddU{2$ed4AFAs^{XA5LlV<+N5EwVoo&&r+7gZtU&}}w` 
zhTWg-h7{>k|Ak1{x>uIe2@vCr1A|m!3G>K?8vg-#tl?rm;pA_I z?m+bh3yfX#pf=eyOyljg&nCbnI})?sW;ThrDV#6PMr@d)EHp*~buTl*n3d8@{vbVScX+`eVRZ32wBY9Y51z7t1W3 zcAy!fm7Rb3&Aj$q1R{P^MdQ!$&$kC@2d|ZslOT@MqRwr_<+dLcwIGlch}VM}gDleim~gwRae&r6_3i?#Fz= zp>oS)ma-_Qfm~RtmZ5`$k3uo-D7YvnL=da8yC~?dAd)W$jq))Pt0?FwXlZEt-qHTi z-pO7-Lxx&F2~tqje<{AivE9$M9|_=p-e&$K#s3Cn{X+`t=sw5*e)y1YiiO4i!hjt7 zvjpI+&JcKHoOb7pWe0Hj80W@=^#7xKk~ zvB0>XAGO(lF|APTAb7nq5;6-NWa#zHaCH)= zAt8KYFwMy&N&7*L5Onns>^-bPN_WtoH2W?3eFbr8dtreERV@veA5Z`tC?A{;CJ19v zzN~c)vXa_-g~BfP&!NL(wy3}Q5m@g>pV+y5gVBvJ1_0U<-z{89)-N%P}QK>}aI zjl8RAF99ckbCOZPq_g^faKAM}oX^`crCaVMSMK`Yc0(Drb!5wYRh7IsImn*B{%&uj z*X;265)_Oy>f}WOT0>IeICE~@yO{d+I6L{g?B|h`ZQ`cMm0+aJTH=mO!*%fpK+zT2 zp=Z+bGW3m3&!^-Cm>vk-&q4lK#M0mq@m+N;vDL~y1$h#jb?UOGB*!Dm&$IY^E*TEb z%Z~a8En2`@^~qAbl2uOM(waHpnV|K4c{Xal)^SR3 za%H@`_;e-vGl~0n>HB1|kRvPnKA`6LR8#ozf%AITS>F}!>gj%m7Jig@0=E;CI%l18 zzgB$9b=7n;+5r3@lOpXz-mdZJD!$JT-fd&4-XdV%R0*SU$a=a{|~n>M_+0t|$13=!MY^U6t1j z=}WhSazpZh=mpUYtQXJ&_@7U@zSLc*8_OT|c9a)LFOY7)9?)(;J^z|swVzAZfjy94 z7UMl8Ize=TQe{7P3#I0xJgDs1zF6*B-mv^#Nz*&PhWWyL!SsUZ2Gt9!LE`6Xc4WU#uXpEAiIRsG>Y%}Pa^Kf;atL$lfT4-0ZKtfdx`IC(LT;5w zBa~=0E(@R9r~8zrY7>Q`xJ@>?1J3h*Qb$l^byfjLi}g z**eT|YaDM;(QKV>SnDBj&%DOzgv zaI0i(9Fx7BWa(mfKR z9wpCB%rWLUclYzem&kAyhCXR=?QZiTg=o8n#=w54NK!l4sjms#g|)GhpKBxa_V?8x zDv{855DmMFB7gc8mI-%Trj8*Tn*E_jw)1yCmE+ZgZL4=;>%*a2sHC%hs^*aE$i67) zdcWhyJ~r{1=BUssH&O3s|Gl_PEy%kmQjaKmi`J)#`kNDtZ+CyC;|0brifw$~*5Ni& z=L7Y2May`+PLUTj)1b*k| z&X5u9mtP_ud5P}TAza(3F2itW@WO!XS6C$9*?w%>3j%3XUDp`a$>qrd>wC4)-HR=Q z4a?bp=KQsAOwjEmVSxVH((_ZEl=CLiu{R{6Wg7(g^jj>mF2_B zi_5Erju+yEtYsVD)S7|o%VfCj=ImeR3lt^u%{DzfD?3&&URmW{H7!iLmbT~Ck06>@ z?U=4@FG#!aC))@och1hP?w(a7l@JHg_m@6eLKSR0V{aZAV_T<}H*62;pAhtCHc(D% ztR6uAe{FngYNl3t|&1{K4K0ZXH)n+gM%N zUK>z8h#dw!JJ|brY*~cM*fU^4t$`zk8$`+%W;A#!1OhUtTmvU9s8e8fby;pVC#AZh z|7Co2fg-a_Yip!ttDEZN)a>fU)zNGHbQRJC)8*M^EpwTTF#tu!h-K1)i)wpv^T^uV z;=+G`!`o9+QMz&s<;0-lBE(yys-7mz(5MhTrmxn?f(${@%!Hz zXe9-ULvN%r>|aYk01tz?*Ck3#?Cm)lj}AT5_d{mYRze<;ZqM`!y;t>fZZ!_BA~?$K 
zq&6O0gh<*lWYTRDogP6Nf(SXhWdszkf0T_G8KLoU(~#;ubtmD*RYLfE;pV4Ay?8aFzjmwWkJ}cai13Z0NuWx`A@CODV!+hpC4PZi z3D_19zbHsQv>y_R(t|GlFj=8Ri3W_ROQ!)0f)O~fV{(JTdIq6lOD-1ipU{({g6!#n z?_x5-QdGZG0^fx0qtnjpQW2$Xv0N56%c@b_UjEHW)?GX#`72L8S5CcwD>e7a<~>SX z*uBDK9rP(2%v3`cQlHt%g9dF$Iy2bLP7B}&%@!jgoi={5C)T+?jk0J&y z6O}4h0pUdmvSf`3znHR-)Q&cc?ur$~gnxBXCpDcj(2U|g9bJV{Q91JD=DvCnrH8SK zhK3;Mdq<11Lo*#x#kZ2FrqqB3U+x%ejtsFOhvj?&7fnLS8& zNEeEod}4a*YWkLpz!N7f5+vb0Dh2W%h`m;dsF#AD2i4Nz8BkbaerA8=*F{QYb!r~S z-5Ft~+J}qo#4V+0cW%zUD0#^i&deoBXJ#y2Rge{Uplb-iK{jINp`Ad_nU~ zH~!j$32z+ry9pOw5e~iF%vUQ2shY3SE{39W1RquPW+tf{PnQpmJWA?x;UM#t{b)lzLPSpl~b`^VVb zoi1SS?mEcLYz2z&L4D~l_4rzHE;Ijpx^ zn*AXX-2+U0m-LYmqFiqiHWZ~$ZbSU>Rd(z8(m{XxFb)~*Bi6HW((>R-xc&W9%_WyA!1B{WLGd$x56SdOUxs}FygUeU_xN@^Gxy_amnaUf81 zvvR>Y8DEJVdu8F!A!0pW`~iUpRDj@GpBXVa-P`%C`UIRBsX`?|+!#rMT!Qqqmr0#5 zhPsuLWM3ToPmHh;m@^TqOC14x%o&CDuk1|X%4j=okFnL5V*p@t7FW;Bc<5jil>-~U z_@mV_HH!}iV%KoNec}jY118$S3429FNjGItrNmLO1s>P|$!De3B$nRMzOry; zP9eBUFK7wv$B-rSk7k@=^h>8;qE*NRtHz{ww>~!g4tI{2-wD1$hncdx%`2xkkV7iJ zve>W+<;P?OrwEv`#s@H7sk}o5czhrme?vzdMa5ZJ4S^GlT}t30#HpMcb!%KX`;n|vixBC>KOip-{gu` zw(pqzW5l#aREvwlDpKn!Nvby~mGwavExQmj{QdXatkVyT(LHlA=tz`HPLiRb6hNu; z8rr~`rBy?(nX%g{vcCwq5F;?I!3&Y}l5#WaLofP!CMPk2(nRd!3I$+GG@46|w%GSJlWk~`Z%GFqN*5(4$dcm} zGgH&-fcc~fGtsMrv>HdtAXP@g^HS>m=39|9Q0dFvCjkDlsTo9Co;5ufO9zoM(Fv8YkB^rk&GEtDpb`ueRu` zmfHUeVK6yo#(B?hyHbX9#9!GzHOFkCwRR2JcX7_VfY71a&x^3cC{sV@w>^(tJ#g^S zK7*-nJX^%oQ+ooJK#?5c6Kid6RzmQ4YEKU;iNyyils2Y^B03=C>8`od)f13%5hOaJ zs77Qks^m~wN^|%`1~Bp0L~QdcKblylD5rMrR%X%C9L7{w&i0Q};tgITV$D{Q|L{Ol zrHCRA&I;M&dI>gDLNTDVG3DSylsc)bGe@C2ps*M@pkv6|$dSH-8;S?6#$#1bpv5Rv zV?;*p87R?ytQ>aLwecsni(H|5p}k*73M-{M%9gsywXn?|S)V1Ji@vLI>_THnQ6uc2 zC3yDSWiiEU_F>sW=}=MGrbUEv*Jhbs# zFYmDWGf}JRZnFwRcXyEXtHElV=q4B;sY=M5d>cM?qKeMJm55gIJM`uvoC^-fFNM~TL5}={I z`pAf0tTNrQ(MZWMaRNXpC{Gl3Yua%l4ty$3EC0fmJ$R2%VKx8;aQD(gy z-b&9J=d9hO1&*L6`?=XXa)@l>AKrhm$j#4V$^|EQ+Qdt1;y*wg)q%&JIo7XlHZ&HE zG$7;7&omi&T(%+22IGG-o)BI0gh9E-&Ko}RMjSI*C|vXOhIB$D^IU}tgy{mEa|5t- 
z8u5ET$n!)l0_2I-C0X{AByi(c z*DInku);g69p*G=@lNj!s2iAlt$1K*%&GG>y?tpH++v#-*Rljc`fBqENXafbI%p(B zGdrN4Fb}f_jlvyzuq`R|1+a>|*$xipp^2?HrTXFb$IzTQtlp~4&$a1 zF5ZDz*9;QhjC6zMI0f>V9CxbqU@fi_IQs(Yjb=pKw%A;}Z4xS04yc6FtHg(~Xbd@# z8@jLO_eaiDzbDCupHKPeT>B(EntN2mh++y+>6;sQVLiXVZbhsv}=7AF(=kjx~DWuhC&hj@B^zoQ3{50|PSSPP4@Iq>7y< zs+Ug$^dpm<4{Y6AMdLXLdBLyp6p|_}o8n6rynv$poy*av7^M1`9#A){A)|MOjC_Yt zmeSL8N0#!(<7M(WNlX4sdeLdk-&qpwXMT_Yvan_* zqM0Xnep@|BS?whLG^Lr#RAB}50g;*Go?b6YcaDRIXDS7m`8KOa5dZ+*5;LV zI(dxSz64|6y$9S=gNtiX8A-v%P!D36$I&hTm7G#sp0W$Hv9?(*bm*n1v3Sw;7AVWs zcV;v5s>MLG1;N_VoLuH49gO=%D4!CSeNcy?oLs!vlzHL(WaWj$c2x8FmY1c?Xf}3} z_S8ObG-cTb>Md<$?FD98oW@G)9)o>G$G&Yh>`SG>Q2c~xJ}sqCaFe-tSN7{%6n}Y} ztdpAX!yDbITMRYqYVkUDozJ$!c$z<^dZFt?VQC8ubAB?2VslOkVow*baAz*xVc*?q zpAElr=7_z&I<%)#jrMlLrpAqN;v5=F9$Ty$B6NPExmAK+;<{zLE^yHW53#&8i z--iA{e&PDi%cI`!*GjP1&}jb!xxKo-XTN@3;~rs>Rg_6u*5VAUFXWhIV^hTw*i?)y zbl~?pL|^MMgK1HX|9yCx4=njJ$A@^~?M$pT2db?%Ql+Z1_E&kV8%$ek&bS+myBp4f zjvSR6kkghp)~iK(2-_U&wxsU1^i*f?M<-3!v9jL`pZ_Z0I{5T=$#x^`bUing|44Zc>&;EoNy@eR5=4*>1cOh6_eB$BHVS#8QfNN@-H8kSVg`K~fxlR%36`UyGGPtn#F1C75Ruw^Ek~ zpC*pCsh;tfqfCe51#t5x!QbR!#Z{#3zzVY$U z@;dmG5ve~oB<^m7RVS|ix6AC1mExg};%U~7L!M81f}%Ls_OT?lm^E+{iriDN)GSQ7 zR;31!D9=3L0Vz@Abx6GR8n$j5JL(hpHg>i}!b7bZ;VpdjFhsw%{{Y(pi%#eRX7?$p zf2qS9iC4rCbx+!pnsWONZ*H{Mk5}aKMgMp+64xi!DdZ1395BPEf zwH}joF&T#L`(Hso0=vIo;(mNB89$*i|Ma!|-?=!P6*puz_>r@Y5Ho}9U|Ila)F}Ld zRLX6Y#T6t;gy|IvL<{K5>|v)tM?xMs;k-2Q-hO?S{TY#p`vU;ialpi7>JroB`}Oq( zv&-~^W5u!RQcEOX`MScE+VpowLrFh>E)nQdgb}tYWsVY74tV^LST;IJMoScy%0A{V<61Ty1o z%x@dv1DV{}So8W%Ss{fvBPNRW))P6s>aN4C4bN#D z&T-a=IY~O~Nk>E)CC`paRtw~0bcw#jV$yeKvGRVCI2A@YnR}*IxR9>cPiEL*%dBS> zY5jyAs1M5FUAxOKHMxBDK6`mU^iqXp_CSqD$28L;K&pF-_zG+)bwii6ry9F1PUV_< z*BHE78+)M4N`x1))x}N;ldfR96&k za{;N8CJd$ex_wk3&5QkS7FM4+UIguR&D~RWT&H1PMOK%S7ovz1I-SRa)mYU;yi<%X z$I=^46jkn&jYGSUoE+1$In35M)yNInRwo*f)H)I)vsE-YX?NNzZ<_%cCao%JyVPI8 zE_7$eQtg(C!;G$2ANG65Xfj+QAOpQWNf>f?JG6@;0>^Yd199t|xbP&n(jyea?OS#a z4F%Z)`WIKl0#mgY>cZ9VeHCNfq?e3Mv<=o@*Je4kbqTso9zAKZ@)H44TPv_Ya=3n_ 
zG>bMv(>R8x;7}1&>I0pY6{k)}d68WyUtnHmIukUZ{^r9dC4|Oh>*6vaXQE+Z-t3hD zsbT5t3kVT`rhm|5AcRq%Ha`(;2}ldU7iEqbE7miC)tO2(xNy=3u}N+v7ejrlH_tll zc@Ho_FJZI(svCq!Jr3Y%H&+3MyFw{z&`)9JM}lSTB@eSyABGgVCb=^_VW(+K*&;Cu ziC@rfP_I9^6ZcdJAk$npnPya;-LEEQ-Nx^dsy|`myfb}}LYyF;pBP&m*32+?=;42( zMdA>ec|EmhFAtrZF&`0V4c^E}4HEoG|EK_B#wWPo%6wgeyPL7e zQ-}vCfGyOVpT6u~-$_VoYf(${8)FbYJLL45C^Eut@ptkb^pOxfBcPgKL^S`{Ej~e-2W3PsS+vhpGZlO zZ!;Da$|rDIZ7j&`JjzybT5f^LI@TcIjI%v$Hvl*iS^FcezrH4J-=FT?0EcOi;QkOl z8JfDPCcEuJ;!-vYtsjMuP`UKl;IZ^lOFDuh{SHmdEK#XB&$(D&MgP~Rl$52ou+=G;k|l!4VhK>gEyYLt8V^5J z$!OxgX^>(@!}<8(8E0)FDl9T*XEQney)bv(;nUp#IIJ_k_mBF8;s}&9g{9tG5JCbo zfvJja$o7##pkuR+v*BP}W02hfIAVFhR0bAv+z5*?%;wwDpc3NN*K-IxR)27N@MIkg zhUPMLBsg8OVY38_?cXuvcD@dQF)3LmZ}%A|rgA@e?{vp~3i z=E|rcxd?*4lDYOLL?z^rukumiuvqQLMl6VXV6=ue(Eh3=kduz1KS6fMI^UwdHxJy1 z)Ct;5ovAKHASC@;hOx*aMAkw^*LE(Y{_tg)O|yOmWMDQH^7Gqo=M5)@gp&;g8nesX zAPANfR3xo1LZVDqJb!7FM`+BNhv11{ybpfJU?0$UP$7TJsJ~ZuP&DBIpDBO*1mkDZ z<|t>#Kf^=#6yPQ|7q?J)cr5)V9f3|!D*Q`Wtx#OLt|Mc^1N5At%wb<#mQFN^j*gPF z7%g7LK+|U}Dl2OUyvlqW(J5SM5EBiE;uGngqK^UI1YZu)SY$m)&nYe>%5kqA z=zT(WV@az6x?;&fXT2~%XSQOY_P$vC)TZNocIGO|h$z@str5w@`5!EKS5Mar@BH=G z`@J3jd&sjgw_i=!3#D${fFd^C)1CQXU6x-tVTdB#li*+-q8NvN1SCpvS8eT|`EJY+ zDN0a%DJd1$7;J5h#X~!V>bw1$18`+%i9Z5C5Za1$Arlal6|DiONH3azda(ys5MD}{ z-6$he_MLzDGhMLvj;scrgJ(`B+q9=$ybY)~1O@aAcysj%ti8;?1a$=%dn~jW>zntI z8islsvhE~k$J=7?&}D1&SM$YpHy}Zw8l;BWZre_?HaneM+U;!i1A@eNxgZxU#<1xy zn)uxyL+-29rr(~%ZRMbVHS4mT0I&Stb?$b*)Q_okLU zrT^+I)S^MpB;1G8k(H@;Yi|+bvn#~X)*NkGOo3BZXCewDI?Ov(n9`II<8WnRZcR1e zj?^R^=Aeo|+AuIvF%g!v7DLQ99x*!a*Hc*di-e<#&_E+|C#{$7%SJ}Eypz;T-zFF^ zF>P)nq&KvH)$=^PYc)t6w@(Coaj*WeXjCdN$|-A475B+ofPL#2X$%%PeL`S6US2j; zQ^)jg#hyfG0dXzGEHltw)5ri{(`p=#K?uTC$TjozJ#!J{CZd{6^^z(1{U=(SvowQ- z>dn>7!Ojz+YQ-jH&dBv-F|nL$om}UVss*(;V31;=+a3Ooj|OTk1zbRP)k}epmu#hU zdwzq8Z-lhkM17S*Qj><0&pIwc zi*}gEUM~xBaZWE{m=5fv27>Lp!D7tZk>)V&lV)y;lCHm>kr@_A1w>X^_FmWeFC3@L zm_N{MYYmG5QY1XP1cq*rU~KnkKw{-EC-o)Zw6|QM6Ko_Ay@P6_@vAJRrW>vC#s+Vj 
zLtN>{AWOwW&A_PnA@*D0I-CaLIL~SRNTL%UVRntlJCvW(kT|D)gn5MP_?mMNuBd5$ zh3d-h9VdXF^vHF3Zj`&u+en8pf*PZyrDGSN1WqX~u{9 zqG5yeS>IY}+iTH59yy~?N>H*YoCK}oTe?+5H*2X&9+kCJzZ>RSluWKb*HzGl4mOHN zESzaLzM}qgcbPaTDR09?O5gs>$gFU?(sW9XrrqWBLGbX~X={H4F+(;dM?a{9%02qn zOK}HnPCd)INqXx0lUyjLTMTW zQNR7yY)^Q#<%!StN9Zb1ZahN>JHc6;>zr6`QL(U|YO_94V^*)kwCrt9Xo|qYIO`&Q z>!|m9`rHHANr(TcA!E1y;0Rs;TSi5sgf)=2Lk@07Oh@}%lwlgIUv?3gZQPg4#fTPr z|9xVA2f}9`Cw)$deAxeL?#$z%>fQi8DDk3*5+Y0XZLFoTC1MORWGU2GBaE127;BOt z6_O=ymXvG>iAu?m<(H)pZ`O#CC~qm*e`k`exrSeV{;toRnUBwXzUMjT-g}nkoaZ@e zs_c5+t>9eyNs@RzDg1xi94NXGF7FXV5Xn zWK2<8-rKT^_MAy&8|oQmV-m`36Uwm(4}& zRHP-inBS&DWhAy>q80?(${D}yAgV=t*w^n9S?pk6TC{n?M-+aSwaIt>Th++aoyd%d z`(nrow2o6m_iIB;#CC%0Y6q z3YMyXQV!mb=#H|Hzhu9QA2vmB5<6enuy^e@vRrjkh( z%1%UF!US^`=sZk;lUmU_0e_ia%TV__yW1}nEwrT8-J86sRz`j*>sc+ zJr~RJb+kqtmS7<*Bc8WOh4k!Mg|PMoWkSb3YeaoI}SRBw*Q zhKU66ENx$YgA2?N3`ZlGxm-Co_~U^>`}+pNrMgRAG3VnF=|x$Ub?A8ix-|IO|AQJb=j;FVzm0|CnQ#pKs! zMOdNB&8R93;18AOW}8#Iko#j@l8sepNQj_rb?nXaL6b`ET9rwmjkkt#2;(%YH`+HviZxi-_NS>eQ%h!`=A!u>Nj8R5iv!xFvjpPOUS($U1Rfta{9# zdRKpnYy7tD(tU+D?YDg%;kS`A#he)MRpw#qY5sC)sxbrogpS!k#nf7VN>5AtuJTl zd!DilsZIobrO~dE;P(IRHH$!8B1)Trm*e-b3H&n9Hr15YGt`lF!+JZo%PzfAD=+3+ zc`?q@Nyf|7QPE`$F`>1@ehN!t!7W@$mjUt7Pui5GQiGcBHBJ7-Tjg3Ht5MMhPSs<`r)iJ*Si&OYIV zIkBm?vvJ*+)PMT<3bJav} zebsShlb4T#csMti^PJ34K;_^@Gf8y{D6N^k8wp<;$x(#@7G(E~Kt8~nHE zbeP<;rr_3b$4kAyvjOb>G&3|rqh z5LF^KRvmEpU3#2bME(>8k!%fHvoOB@^M zEec@J@~mR2cg?G~d_^xy>H%F$@Oo=cMlP(p&|aB!G_(gKJIOxa`>kN_gDE z7bGgAP!>vx1L{o&(?{tiF7oiBPaJ>EE}j^m5xo7Ii`c8HjFrNg+r<6bb>ABwsPrJ7 zE-S9wD{5yVO*rds;Z=8o5&BX=eBb^fk7g2`rH)SL&K3>cxtbXC!uHU95dTwI`!J6qzq7Bh zW1fm3@?7N=kye?3dV}YaG?7=X58`496vBIaetv8{>;JF4vVAMlOq3CT$xdoPLSf( zeDm%>N(~F09a8>77~X9*NTg=qwm^zWg=BK=kY9Jm(<97j0;q`0aH}IBN}3s!q~u8M zGmhKeG`iY6n%)^OB+$*xN@z2B&%aGQ9Kl#MiHHJLGQ*VMvm3Lm#<#a*ay-(j&;AQ{ zf1EzKE}&xK2Wd-0^VjAD_p1WDY>=T!pVo$MjQyXM{HxIF=HdI&9yww-e` zPsrzzNp*|7m6A;feEb@WeYic$s(`HR?;Fr1T7o zb+b*!4W#rN-wt6m){7k;M>Uur70r-}#q!Dp@^WS#-YmwTb09x3B%v=Y(bUjfq|~O+ 
zp`iS32qpU;S?O_fL8C0HL$XOvp?+WpB_&ah)z#8&SxUdohsgVkxVBnP$KN z9Wmg_olBzNJ0sYSLUvJeZLA zoPrb}Hc z)44QP0LQ{NCWWwRpg2}j_cxaM?SgL{2tmz27|4}Ss@n*{HHGgQ2SM*hP=hYsq?LQf z!MX5lkRYxSk}8+#rbuuqJPJ8PWmcs|r5ckQ4u!|ChM+>~RH2j+t>HL$@LdS^VE>x9 zH6K)X{8)&JKClWEKJ0@l3XdZSA>(zZAt@_>CHnxJ3J*I3QJ-M`gt}%az+;_2@LOio z;L9VQz!il@7lBwo=Bu*)XRHx;D)5jOP%6fjtEEEy^@n>xL!{7S%SqJyqTyP>{gELS z>No|9!can8FSwg8#8SsDXRXyl!W}^&2G(vRW6ehdz7hsP<@hc~t<~DVS7Ja69C0OM zO?BW?%@FE!AO#AVc!u9B_$)7kNDG1^R?hjtY4Ev3h(-#5(Wu{VxQP}5c#)O^)->3{ zwSpTbA!t*?O6V%4N;nj51B9Suk#H!*gTCA%2#3N=Z4fm3!V2iBE&9sw1=-lNc(5WC z7AB&W4HTA4Pf*VtZmA+0pCDm@iz|dd@8Gv7YIO|MRU_+Qu@(lhEG;h}YR0K0%`$=$ u{|xxcRJEK-Z5#=4bz;}ZFQh1$3ymrd>8}SJR0Ki>{Ef085DNf;K>QbV@y^5m diff --git a/java/lib/slf4j-api-2.0.6.jar b/java/lib/slf4j-api-2.0.6.jar new file mode 100644 index 0000000000000000000000000000000000000000..a2cb8020a5afda869b487e2f9d172dcd1e9795bf GIT binary patch literal 62531 zcmbrlV{~oZwk;al=8A3Gwr$%^R+1Ilwr$(CZ97@9le@mX&$;jJd-nd`E9K2n#{4l$ z?Q`_nTPwY_(MPTFQotZ601yxm0Q_l@Dggh-2L%8GKt@DafJQ=AlukxKRzg%nNtsqg z^lKadASlt!Zk--sc!%f~x3e=fG;k`%RR?>pa9*)2(z&wMa$dJ>3{Y3*avjd5=aZHI z3UK*u-fiadBXh=e6qy11;uIKzN*O5@&T7R<4XiaF6>b~IvCN|p1nG{2*f^H74umo8 zOpy2x`l9yU(R?`*?wXW}1L5wY%yEs^_Zx z9PUJhsdj|oJ%B`|G{4VRsw&r3yOE8nY7MMj6JxZj`@B9{axZ zAj-KF-gAA3-{6f-h8f)PKdq^yi;&&xNPb^xa3t)z&|PzM?WglWQQ&lYo1pvmRSaxE z6|AAJQK08L_YtYRz zh9JJWY|oitt#+NbW8>a}AQi0wmHb&#LLCn$*>%tqMDnW&k~nEg$oKwj;;FX7iH~bH zO_32lfVkM~5`=HewqwI4r>VzPmeZZiXMw6|p75RW4lgpgx(EEQGdx->Hmbenm9UH*x5SK{bxMN zzr}O>%?OY`BkUZ_{s!@fz&{X9)~3vse}hB#7o3T!iLLYBp#Ng_*M0x_ujVG!_J7#@ z4Ib`a@J{v?e}jepgZHKV@vdR!~QXt#^Apu`R_NZAOAu~ zLfX#E%*0XDz{uIo(Sz2=+Q7+)>(Oq77Yqyx8BEp{j8qgX>?7a#b3c8CkrASRQx)Er9GeK32$%^Nq84Cl zdc1cS;0~Az7(Deja9$hVAy?8Ce{8T9_#cb#@mA5$_GbYQ0RL_g(EcF)pWOauWd9k` ztmJnmWEGT=o($gkkz_IGBLYJYlzB8P0p;8yh4n_!`NCq7CgnjJ&kWm+E!d8XX0S;A z1HRk?_Q-=5@Kg9mzF(t})2A;Hz5&rauWIB6ZEPh)iLl61lhtaT-)ib;=heT4z({rNN9K#J7h1M8ze}HifUfw(5tw zgelM&p}kzTf3b)#wxqTn6O9R`-s5Txo)&kxxEz8uxLmj;e6m`_#Hb7A+zH$04o(+W 
z)1G%bW&ag`)ttH=Xk`p)CPz{FfU`U|1|?)z9BTy1;i4=iEZdZ-yP568I_Z3Ix|b#~ z(8M;0vobevZCHKL%8fcafdxm4F(JChc&W{7t3ha{4s2UpD?B%%+CX@d5sf=6QND=W z<*-vETxnN}%ud3#T0!|C4XnF@pt_5@;CL4q-Edm4Wp;SqPda)KH)yJ-ya;vuUKrst zKlCb-T$TnJ_n2d}_A8>1t;S_FdS@@ZGNEBAKlLgJj_wR&Fg6%$hhoEg713iEndZk( zZ6J2uK3GteMIkQ2$}K6aQX1JjZ~%j(N>CP+i>(zX2Md=erDWAOnEha$EdT*+Z? zYcwPaJx8qUxL;(ngbA`#iP9N;@hOV!G%#$DQd!tV*y5--wKoq3y`*0wFE)csOj8E2 zkvizyFE;`Ex^@blJQnAvXCEL~l(u-_<=y1Yupg?)aC-+} zx;7>>M-9>BY5XRbYYd`)1|`;5#4A>Gb(|1j3AIL;uB~JBciqg9elC;uXo&;4jgE{M zzmI`^UtN4{toh1nSX8B+{E`Oc_XWb(r2~q+wtdjATpU#K^qb07IZ#Gl$GY`rK+mg8nMtU?|B4bxi;Db zG~}yFz!e7W;wu{B8?g8p4gCT%kQk!yQ=;WeEitbK)kcit&}BVCS}VkaT0hQ_pe;k; zn@!g0@lI^0Acp0zhgNF1`VdJf#m*3PqXrQV4 zroB^zzkc!`3t3HuNlvcMm@oPe9P3q>Mbk^gX@pNI8YdGrprn!XQ;V2VF#h*X8Gh7a z%j$|r>Vg!Ufkx)gMHOae2FY5Jj#71-q-XX!rfFuM*qhIP-QT)p@{@4_+>ctviux>^ zx(9gzZ}86Sk-BSA(J&clrD)KFf<6vAn8tHu zZ8lrX)9937Pb~-1qF6{0$dFz&GD%z#Q_zUF%Gk!|C8W{26_f1xcl0_v^H9*jqeej+ zqQjPr=98DU`gL@l+-YZ%pL<;!=H2*qdzr}y2E^d?OnzQ}cHd`xe$2Sp^?YoW;1TEB zXvWB5EyN2umX1Zk+IDnPBw}FI$6uNHQpVxj$Aw6`P%?>=(Jg#jOc|LfO-91AG!08{ zn5MHC7cH8Q8^Ebc6rZQB%g<4tC^W>R{sc=(=u`GHL8S7aUbizrvn@+A$daPV3Lj2~ zT4ugrjwz#jZX`)#9uZXFIx1*WHkbt}H>TXcj1p7oNR_m-^zv?uMLTO;%_V6>F`VH~ z8^e~-kU*x?*VIm!5t!}T(UzSsZ*%)V;JCV6gXyiJM1;`}X^?7E@ z%!0;xa2y-jSl`Pi5|cN|Jfx9v^0;P%6s2sy|C=X?!=BOH;f| zk!$aYiH(S7>ryfI?yy)+gmE=cbkw{-^Hdn-`T)U;4kHVyeW=TxE$&&JNeLl z2r4J%#qfX?nb%HEE>!YTeW%spBu?1#d~_nrUa5gv(2p8*En3_)dPDh^$){qZX&t=B zd4a5I##01bQTOAYuB+iuni3sl;Ol@P5?0J{(x?_p0|fb}Smm>4B%pwYr=D;?O*6R{ zqab>H(fV$+Bq-dQ^SGCnrxDIA&fux+<#`wXf5*ZRc zM>fG}eTp49EaqOEk%z(=`Y7lVq^Y7AVHml)Wg9XIL7d({WVqnTdjddjoSa5=c1Rx5F^jT!8JlGcq7&#*^?zrSO zu&tu8_cro6?=b_4)fA;*JuuD%%Q4z7MvBf78kwV&hunSa0ufYi^)*7%)MDo@p?Xm!J3PYW^pyM1Mdjq@z>iK&fJXN=HJkS?q z{pp#gFJn}8M8&X+?}g$)}QBz%>3f5^`P&Pt4ULEQPnP z(M{C{%VpEVO;qq!h3PuvOF;LmQ=vo&RG$nb3a<(5DZSafC`MLN;Ds1KoVJ?8GQHSS z->AoYRSelVM>YX0t4NN>Lt3ju2PGe23~)lWFrl$cE7r^2y3o$$%5jpG!Ek8_7bLUi|a{15g z-0!?^lk|k53Fo=SFRe{p7U(fPtOv3;Che(aMVsV8Nr6{>A|=z6C(pAQdD(a8)`=;7 
z5%0B_(;F}zp>KXd5Vvt}-OT3s)!e+?VSIR?Vba^xsJE>;;co@|D${@&vonsY=>jq% z)AF<{LH!@tN7^i`zXYAyK-w~ST49O(;vc9}Fe{lnMZNLExHqFbd#HO{o1k7?cb#il zOB>+Jr_P3uJi;#ntCCV{VX(nwOqhM4Gf%^IH6|71RD#oR2})cYgZiK`rklpG!U%^T zx~7dyL0`DcNe{|tStrx&XZv*co^Ek;LCE%p_SqXCu^n)9E$y2Qn}QU$4%otNCmfWZ z$nG5j)I#gg57k}U5|1iibZI6|j+22A#@boHYLeJB2`}okPK*2+2KeO+z9JdMN9|e4*6POwC@x7a;KguK znX6uK9yc$T(1(6+3IVx@m2$^7n=Rf>l_J9u)F9t}S>j&NxAB6+PPQt1oo(uvhwc8V zSgf-(;7@N;B4Ej0o$QX zsL)TxW?~|xAl}TK=SPr6G+DgZXEAK}7~&-Ey;-eBK}rq40ua+=YE~=#a&5%s(gclo z2TkqQC}OJ+tUY`S&@(;f5^4~S2;!tcV=2Jdti82olyprm_mJ2D1r-Gg7UlxVW>Vm# zS6X!xS+r5GQZWg$u=sqC*K;rcq2iywdBH!f4pM$MaXw@I#Ld7gb}Vsj&ct7x zP8$hMuP_rHZBtc6vK*WMmVP${jnFxM7tMbPO*_ZSZVp-SKo}!#@em`7zs9qT(X0bJ zAr1bCy6$*MqBJS<9M5s^9q(;a3fswsv75`nN2?(Z-E(%6=%C)%^}dSotdmQFOX zBqxQ_HNUHNTQOJA1lkTG`<@7HZlk7RphD^jeF5~1r;)mL+{QlJ%E zV10-mdh)|oO@?P8*(b}6<%P8y*Pb~$G-RlSQL3! zQ{Cmj_ZVO@=jg~cqiTR12r?G#WJE7-q*wcANFUjY?C)rdTAPP@B%Les@4 z9h=`?FI@o3wd&Ot6d>v|ZuCQLm>;~iec{x#Gn@$b;4s}<{PTnW%ySwBJp|j21OX$j z1JYE06>sZWf_YmsRqyYFe!yLQ%lx_{%gnI5N>aNfVv}D5LzNz{1=1}BQ56e7%knpj zC-rchcW$HwmH%$xfL7;j7dv$4BqKZkw5PBU#&<8fXywu7R)mDiG3?ve4aIS@xV&x=g8~V7@Sm zq0d3?+?-8iu+9<&@O&V;dZ9UZ6O|5maI?UHY`i-HaBcdcd1&)y03|wS?{Rxg?TJTb z`&7xQAZ3WzL~fy_wdlkh&fDNRc{Y*H;ar1&M5uk@EGD4o;6taPv*k z!*kM6{ifhR+68Gsa42^`opVU?1MPWm|iEwa%dq;*dP~TNT-}8Cp+3ZRVvO z<;=DbfK(NwKsc0ltHWruD%K#EG3AWPblv8}I{chAc3XFFuoC?`HF!ybE<3__*_YWm zy4`Uy3?f(@Fo3DceEhvpBY}qLJ&VcsJ4hF7faY>v?P?1nd&@ff`;F+j^SgN{rP{pgJA-4yU%6}{S7=BC?J@$Er8W`w!sH{`;4Iy5!v ztE5R?))c4Gc5QCq(A^P@yRQTI8!FsSyIy!GCF;kXgV>_(po!M&};HMA9s-==93qTz!wniSTUkmFv$a61sg9;TC zg{~t|PV@zzcX1zcYIj8r=p5LWEC;|06QVC<(Zyo-pqXz|{C$Fc9N(KlW)mlRT3nB1 z>U>Bt8dIo^$eL?6M9YG~t{j-Cn&7(vQcnf zMBb|14eJfPTzDMV>3e2-&)!yM$CMEMGM9SzWF(A4^7A*{jS*!QgYNl%iV zRPgih{bG}wQ;2GmO3%+=%Fh0zqV=TD&YMI|e(bAVA|1Is=lDl^%yGyCS!ub^f=0P1 zr@#?x2C2p4%PSvqrZUg$!b?ZE^GB2tCP`_gXj!-|z=njT5bKg+x^F~@3%;Hx_KVwx zSLFEE%=f@I^nsedFTUS+b)SmbbbEYxk8ez-FJ*&Sk13axJ*H-#unk|)8W9p)Ff z10Ol&lKm|FfABanJv=Iw7p5q&bOGgeDTXygNm}rxIs}#lKh4}W5MS+asx7jtZhM^P 
zEq&nRjIa~sHr&yrC1JhEy769909HtNp=F$7El(97smb?X?FtIJb#{k64(z_(1jMdr>~i&UObwp7+jx;HDO4E9v{y&JbGCAbhyw zetB;8z{+g{*z7>@Y-Kl<#B`>EkI(%s)TCM(w-4${;yLPxNaE25>tKJstXVVjbqZRP z=>E2?auw;9?@k*QLbRY#M?RLkxGt!Wqv(M0EVi1s&aaoKcf?ksBzq%Y{9u@E4LP3g zk_<)KZ<}4`4@Ouq%8$iLFEoHGRo^ODly^%OO}j31aUn4qTOJfNJAJX>^hvfARRsx2 zeZ6zgEIO#aOQM%KsyLdlx;2Z}#VgM#+#oxfI#4Dd-f`gDawxr%H9y{h=OB%gr|X(x zF#s0~I%QsE2j2N4mVTjNANQu;B4(M_Xb-$DnK)%?gtlFfgeNrOmL_+i_cCmJFl2h5K=HX6SxFU!t9J7G)2G#tD<^LOdR8dYIn@7UyG(-PmLsA(rKRMY7 zbsflLq!wu%arU;k7HJl7A{S|ggHr6v15Edt-I2t#9ks4ta39w8&!aI9q^;bIS{bYI zHwI>s8R?nQp2OXeT6S9Q`vAlmxX55ptGjU6szM0JM`_^gtDe4Y>7CT?z>y1Ce|S0_ z$a|h+$rfBVi3g+2PYR^rncXP9lA{MH@lUGqk`ny~y@jI(?8;YMRUX(fU#a)gXUTn? z(AOg6d=(4mz6+92!niIF=VtSW*hzpw^Q8C2sPV?3b)+U=t!&dl0URtThKV#fwdu;p z=JK)!jaM^hH-fyZ0>-Q#Z(LQIRSQNY)#v2~=`E&4jv3hfs0j~C5^uv6Z^t0sZvJ0P z=u@)FUx*pskb)neqMuacKY66~r1S+ktAMTGufDZ|`+ZGleH@Z~l1{X!n7SX8F}@x$ zB;Ity28toy23}rs7I}a^Z+aC0fysRX>P`cma7UC&ZM>&=^*>J)`3e^0hy$E;V zNRF@^mm<;m0HxTw)h`E9PE-P9ucxWl8Y@b}(^=v~zz01V)Y~md| zqS7t6&0;Rm)Z-nyADQumw`vWy`1m-}w!SWgs@(_fIAYE@(yDH23_Y2{s?G(|<#4NKExoP$E<=du?KAwaituq3Kno0ExD|Xnag% z$N_7F1!|SIj5bH#lMLvuSz_In4F3&S1r|1yAY(Q?uBjsTTUzK->CeN1$%#XSa*0`5 z+=MG)rOw#ND0GF*&)`P0So5EE7AaQs*tJE42~94@o%a4InfQ1p^)Q%QZFP5n=8e|z zpC$F%jp{#B)cmI@ONG13HKCI#XFL%~DHJ4hDC$t0ij?bn0dDS+G+7ZOl^wU#LLwa` zn_&QVKEd!~R_G9SR$8aS54tK1-0C%bf!1Rds7r+=_b%n?GM9u3#e2O=&ctkmw+2N z_vmvZvkGq$r{g%50C?|E)2Nnx*n}#p_pf%#!u1NjuH}8*%6mGrbvZ)p+{uy{)(N&j z6LbJ0JxG){fcmLc50JX2qiQ)@hLvxVN@*rY-Q7^Z>s5-wpG$iL++w&(du44 zzu-Tjy;(j=-bq-Yhj61?d*CvwgWTe7WFmwNLU zG^LoFZg*u6@`(5rE%Gg~>2PV*j6-utrJjaF=fiQ9QB`nDjsf!7lxa81=QMx*+8G-SpWzgd7aZaVc4Tj-JgN{WS30`v-?2QH(BmyQZ+UG+%g2Y;H)I>{;w zO4C(!W{i~2pdD5G8kPLMP|0gDa-(;=9jgB)S3ge#h)|SIqJ4dz>2YuS`ca$`$Z4qz~5n5C8z)_$jZX z#X_C(F7djJa-Co*XSz`@APC2-1STsn*JU_0EnUGPi@DdS>hW|$h?d){>R$?KCilcT zJ@?a?{ksPqP#GtV_-g$sJ~ym0Yb)N(L|TT-MA%qdad@`-84*}D90?!Q4KT#Mj2Yfp zSu=uCH+#?R3q7DVqr3s9?bn4%NtO0kwBA2{Wk~q+;M3WaUdO$K*uNKqKe*nIXFH#-i4|r8uCxxsj3Ic9^(1SDtx*|-C26QRmah_(!sLjRzXL&C 
z7xdzv^M@_p9V`y(6?J{M1nwX^$M(pm-MeiK4|_!?166bIPc;-54yq~cB^fgdIa;S50#L< zPGWWs-2;fhB`MzuAHujDT4wtrI@63k5b)Lj%i;ybZ38y?1TpR;X+{kEYQ}!{U+1N% zbSd98tdio&by0f!2K?)NA#MP!&k;5NKpoHDy)XPrCG>wQLH_G~p{S#UiLLQJWg*`3 zGExKl2;No;!^7qm&ur%+<)s`i4hkS;Wl$2Jusjcn&1afQ^C0o$FAakM`+a!3F-T?! z;SlFPOkGWPI@#X0`E+*xE)66G5F9B{a6M2k#(8{ZU~|BA4(hj%ZTscf?||qyOh;8% zv9N*_l=IFo;(o8upJUbeHmopJ)^4CGM0H4D@X#L;Ax0F6|5m^?B4?n^PLI}b(Of=S zOY>MN&sjB{=x%M6%RJ`*wkpRkQ~wI6Me`s$r6*siCBl$QMh|_{>7gMEY4JXnR?nOG z1dHL;fKQinycmL^TJEU~X~e$rq(pkNd(u7eHt=%DA-krTQ%=W*1yccuPi~&A9_ScF zWswV?{1j{>vKho0Kdk2Fpg*GZ(} zRC19Wi@UkB;<P9_G1)+YZX=%TnG z+s}`Z32H4>1@-8IE}|@5Nji)kNr!<BV|1FA-y%xW=rYL;fs%=-f9lX!cX4B=nA z#=~VcBYkpteX{Zn{!6FwER>*{&_EvmY9JDCv1+z!+eW=4WQaN{{~~0wx;;~1&5gUn zrI)^2*K+l_`Dk5ti?Ri1qyn9ll9QQO+8KWL0k`B`R*X6c98=7)wU#GWP6Bm~6*4N)FH zs1`qtp(Duz8Kmd9rJH&&k=VtM&{Ij75mk)98h&X*s)+93fNi9EApvWCVGd&};)>|h zPslq^EJc>mDoXkNoN8f#8`>x+J!I4CGOXeRE=n);+zmaW>h6l zXG{=HZr4B!OnidfL=Dw{GWzOoGaC8-j{Scr(p0sSa70kPupm0DgCmFuAj3?Z zh&l!@v4oh}3TpWU>j5W$!dqO_(@py6nWm@XcOOGeJLoi%PJ#QBEi965@A8XgG{1mp zyc-mg_23h>0MP;HInUOgCtXrH-X%njVtAq+wuJ zkpv2YA@YH2RvtikY|d2~coYRouI7cQ*S=*hi0Q;2BSmY7hmTufzvCwb@s4%On)Xw8N3AYLy0CjWuF z+7y?PI8ULpM#R!zO(GmAh~UOYq}SvY^1^4dPEsb9#u&^FN67;Z>AVD_+x5Wikkq4t z6^$rwAzntju1H^WU7kk9j5Cq|v=N$Jmo*fZ z7HQ*g7wjhD*4h>Z&~b@rny$oa9}}G59`2#3XjQ&^6~;151(-Q+uUWQrm?+iY9g%XB zpUzXUmkC96zA9^S*BNYftG$}x^D3(GwgU=hhtnZ5Lxf-)T4ULT5 zY_8@D5e8cOdcEmWlZ66psM-3MwZ=u2tRW#1(@CykWt#r)`0!oO~4CLAD9Ta5d2QjA~uVDY0(1MmJ~cT}$2tE^cnd z5*jt-ro(!CC628=0w+(*4O^(k z>V$py>P7bsWy8x#7Fa;u85aU;QP}Wf8IGcJ^Ii2)y(KGXk8M*3nPifc2<1=9EJM@Q z+EpX@PI?Im@MCV2xAM>l$d~reloKGbcEx6ky($lLPM}Qe8U?aN>F_qiGV&FfI+;3~K<-*=@E>Hc^z2`@nY3B}vuOqchSY^S+v7b&+3I^P*;(QQE`8k@S; zAd!{YC=Xzlx7{M+>Kky(Wu9)m(;j$C$19M*x6Qk@T^Fchx#{hU*>RC4e(oJF%Y4@& z9S_>L3pR5o(%&5fvi&5%rTR<380Ytn>1QYm-&cFGs`mX(>fA?00A{PRfcH^D)BCB- zWktfSnT0NmyavVx5r#F0C4ntpSm}VuK&4 zfIG&$Ef~K`2!fFP;G2XFA%DE=e&FH7yIcr=$EWJeh18r4=)C==Ie52S@F}D`V-i<> zv~zaOo1Dq4ftJ#^||KNcsN8}n+X?gIRj?$;M^MdQ%?jLO9aM7P^iQl&g3y$QuW49 
zjUtENS+^Z%`1Tv^M4COydud1(>6*~sgwmkg;y?+)B?$Uq23%k%UtfBrvgDL-2vLz( z=++sG=ae^1y}~1HtW92GwuPgBTWVYM=IFZP5^|yyU){+D&(Q2WX#>Lo?_dU3TAFrG zL{`nkJJJTpItSLex-+j<@oq9;g`-BPS&-xs45#0vxhAQ$CI9+IvzYy@o$CgtEA!Dp z#-*iY!MXbd`a>DyO)12B{x{HH#YaY)6`ULFPx1L%-Q2$_qW`H1|L=A~M-Ev4g-5Be zqmd8-v`X(6o;tmQp8G&5vkJ1IMm#HP96F;l-A364X4drGuH5}!%`f9NrX~Kb4C(7l zu9M6zv*WL?`+M|XI-TM2i0X{2a>YYu*X$gzPgH9VEv=l%W{ebt(+L9!3?{UKD!GW| z;;q*NcK_*|#Ywo=y|hnTf!L z$K#Tw)sH_K6gX9g-y1lcj^8O)=97{;w%QOMp4x-lq$8OMvXr|CTs^MBI}0ei=}a6@ z?Z1iv5u64JqA?X)1H8Kfx+!c#@sfPk-(GjN-6`P#jNG?GL1lzPJ@{q{lm(*hxl-;x z7u`eA>OlCUpKEdUp|A+54Wk&I_jEaQa5B1pQ-1Fq!2QY~w=#H3$%dJ1^_GCv>sa*QKyFPj?^1s@E|PBy!5 zw2wbN?jN~+G5;uoz%Ruw%!9px+2Z>6b%92Q`nWWOYtRmJg{cvLZ4|t#?8xI!PY9(9 zt6&Zj(5$pbwXEtXrnfg;ZC}6XY;%GubTTez`)YA*<7(hw!_v}6Z^-;JhmB)KY%Yuz zbPLN9!3d*$G>$+K#a3mAp2LJWeySsy8ucV$crEbfZ(i8XACB-rbdV_-8~O4q6@Rqf z{syo_t&7Sr2IvtbsRbx+<8|=HdE?&cWs1q3+oQ#Lhe1yGG7kG4;K6hb>(e@Z1o-8#a`4zWm6wpJTJNd)_WjW^eh_m`d4 z=vd4};wm)bV!3{M$mqZyfDH`BukODmOHsz0K-P#iP$Xf3vSA`s;Z#*q79$!%(5!>@ z&6x(Xjx!EddL7k3dw z$G0%_8<1;1+GQ zaB~Ia#((qR<$V(dZmR}vwn03XwNmz%+5rj%+Ml9tgnA1F@s*~$cMm6V5BEBrgSUs; zC+;yN!ed10aEKBn=O17fvqjSjS=NvU;MwAY=U9iH4hV0DgkTI)pPif*nVnMv$Kp>= zOE^7fdlb1_`Hsfjl-QFKsx(cPz_f@YV zt~xnXcU0J2oN{@n;jK`S?%J+Rj#1Rlu}uV$klbFBOSaXTgXTp6cBxv2%Y_0W1H^y| zQ}|hH!HeMLr7S#Gfv>$-iMmyeaMZY~-2}^&#!Bf*8R-K$H+)=ZVsY^kNdOOwV^#-> z(0E_ab3m2M0I-S%WN_3iFz!T^q9E0o8`DpWO8@`a!ADh>&-?M_%E)0|4x z$#>~ZKZZMn7mj%^#xa88P1R`foqE+M4zjb;5vvl?#mB0f$Y)lfJAtZLI{J-?my08` z%=o#bKG{{Kk*}^S3JPabBcR(^PKe`7!6@REr~_86O-s6r}j+k`wx^IWd)Nw5>NeRF>`rHN^dN~e41mH z2o(`&Il-dg!AN*w7Q>yiSc1}76U4VT5vh+5AB~7*9A6RsLHmT-W#x%+U&foiPc-{P zZqvZfn~q-R;qkIvqf+_^+-{@|7Z#DCe`Jah_uGf#Mtg_-T_%1dAJlH<9VKU z!fUU^+hPZZa(!YqRp*~3nQBD6jR4-Oy_?lxlQPE3y~!KAS$$>m@CxgG-Gzp+)+e)r zwcBq3u&v*Ki`f`}Sb*jX2l=GKK(l?vO>N&Gpw8!)O%5zVD#`(?gMp}#&}1mU?{L?J zryyoc0FrZ|ZFP?ho@i_VS}5{Kc3mxzgu1%@?OaQ^e~A&qeY{t){8~nyFDwqPFkBwt z-Y2q2i}2m+Cqqn}J0fv`{6oHG`B@)P{nz(nYn`b$J)B9bA#N1~4iZ$%-ST~V;})S4 
zMZ9ZfVbFWl0PZHaA;0{^-qp>yE-!)I1xV*9D-t#q2Rn3-6qa`nsD#raZtq9Lg z7Cm5YqfgZ((}FIrf-jkj;~?F=DYVHcv#fl1nSrtXHRQh0x2aY*1P8xKr#aW22&p{~ z2weh0TtI5xEqWXj&c6_UOgN`LUD(H1SF&;ziqMe1VC{&9W&2PP=lUSE_?ym@@_et^smJfcBOzxdHRC-_{)FyD_}hWQmsb>{U(1s}M_6%7F; zZp08Fji_;mu`K#?k|o6}dKouzP@chuJVfj{PR~pnJ>rTUi zsu$%i-PE@(?~TSXsj*a8+X{ZqX%9Fr7!#$HOLGLuGC~%OG=i-=JdlBM0v-d=8DPYR zmtIr{%=Xxg6Te6g!s4Zb!lLrwZ>t<86l*@H;C;>EnQ`9%v)f;#1ajcujH^RP;f;fg zb3m%0YA5bd?%vT-gGU_O_}iHWH2XG}A9u7K)L)AA?}g@&z!4=k;$0*Z;A0qJp+faT zVS0TEj$4-TD)0X|;I3#f{g8nPP7m#nSO zK0XwXs3sK(i!gnYZuxLNgg_o3RiApTjb}VgaNihARs`C{&g{t-1m1d+<)um0Qx&}T z)s96+`^+Q)Eo4QbM`~un%-7S`(^2N<`wrhH&@NQBB!Pdae`+{863hv;)&$4!dQ#M8 zJ3VXA)nT~cK~q&(`N4YUF&d2wGON&jvhDdbu@Ezps%$>A1SkcnST1AaH3s5L;Mp4)wYK}6?FTs zy%aHods5KT9v0&I5%MWIgP5R#RU&t7f{i*d?KrBG1?DGe(Ltm zl(UE2!;}+!+{aH~XP`)$`czWSj3^sB_~k`RM+^?>vnNqpmz?Ar0uKg;M~pHMlMqVp zzfKyABk$6cxud6GwrjoMe}1Mtb%KRfP*(5p+>Ks5g4*&W{djltf(guff<(=iR-87tkG8-ve{fq=s$AN`pp@)CTq~TvjOeLOgHr)j=n%?+AIVyZ47Z|tUAQA zNrvvsXkw)sU@g1Gx?_kj%DrZCC*UHqxEXub84fQG6VYg^HRG|A8<{zId4w!tt zp1UiA3ST!k+fXx7`j%Mcjk)F8dY;hWVw+ed-fqI1o?)PWk22M0!b!`v#mL5u_?P zyE3|b+?;p*ZTNx5NNU7Y&NcB+9_~+(wftGpbsk|RytJUXZ@ZPov{JeCKC;QGEhrM%D$mtE$!l_j;b69=)yW2APDxn97RE1qcXsbnRG{$jd{D#WZ{ z$}_O)d8(kh;;fK@x#e@LNHWo2WvH4`ekP0Srhqg^AC(B96Ui={7idDyov#C8@;1-x z|1n4W_-%1L`zrSJ3MFfZDyR8|MXVGt9mY=9WM&y1J{ITb)`iH!bK$>2*eJ}g)C0(T5b0?5-P ze$KwQhB%?Xw+wm4C#KuZ8}y8G4|?XU8i?q27B!$(6XFMaU)<`1_h_BYs5o>PE)d5Y zVw(@dVNtaBC&Fk2ZX9+9|Muyx$UTZkfU$gK(jeoc9EQd_EW2&#LS(*OXXJu25LBK7 zS^4cG(&0}LJ>;9n@BhWuJ3vX+WKpBlWxJ}&wr$(C?W!)T%eHOXwr$(Cy3AMKH#6`3 z^ZuDx>)w?&Z^p`uj1@P|Is5FhBglw@erq7n(I7AMT!D9cKnclZKGd-y6!2p(J%#Y2 z*KtpjO6bP5*t9A)y(@l!wz+i7rR4ED0Cp+Gn@`Y4OHCo3dop7dMf!y!NrpHp%p39c zV^Kv`dB*27Sj9I``inv7hW$ZrL3h=bOs&;^SKgoK)PgJjQDz4!&e``PbQ}_98Ldg< zV@ZN|ecMVBJN%|^+wJN_#-h zWCIn3G{gvXOj)U!?8g^V9$&W)m_CA7Y9si+a`X%#h5NL0p#yP4YL2rT-s@=eSKGr|%bNs3zsFh-xYkY)(Bw3PfpMBruse8qCWv9T` zr`Y^CtEBgN8f*PbJzVVU@cU`>x6x^_t|t=~rMXBCqAW?4Ae81qFj-h?h3Vo?lIz?| 
z0Xg+e0T+RHsReeG7Y>6dS(V7PVR@R2-*p=N@5%F{z%>X|IHuwP5B%GRDp)>Q8IPLN zX7R2@mL*t~KY6%jC~Bta$8YZro>e-4FO=``=AmVg)~#IFI2O|fzv^7T?wo(=x&CHN zdC5TKuqr33VO~vouBRKfr7>L+vA(H+D#$9zU6>b23)*JLH!q^!*~+(f4;I~Gj-XBH z9*jSuz=SoevyGgjw#fE^A~VTJa}?S!DG2Cnu=pbV8$X)Ui|+fjt>i%eYesx$*#D0N z`ai;JsqYu7f1-y<){5&Q@LsUpBdCyQi@>(NY%xqt5NxzD1~(~p1`U9`9I$4?VGa(Mvawmx;H)?V3QE~8o15!C8)0x?80J$W&U zsZalne8tzCCZeCNPh!{0X?Yqcas()6e_o_Lt`*S(0sJKmlD&&6XuH%P?Gm+Xu~%1| zq;+gef>J=&3t#6-o_5JV7fCBcks|Q=0mx2YlB`a&VT|?$Zkc4f;AokvV|gX-Z{MvH z>ACD^1@UA)ud(?vy{OT6!FZz|div#uKW(fQ+%AG(^@;UHgLb~j!anB42^0wWgo6I- zXP5pjdZ{LIDmRF`J|nh4tYX&*x{MhP%%GZ{nI{GtPMZ!JrOY(zPYWw)yRQmA(h+%0 zt>cYir|eEDqm5pM>QnMar*^7R3GpHx@5#tJ=uzk21&uBju(_lGUGRAsxg|LP4B1Nb zoG?LZ1gqvMrEb@i@9L)GJCQ8bhK;Sw<_qJmMZzOO>s8KsihpF3o0lHiMmxD7yDP8l z4zE&>LsN+g`ibjJ&TZ;#Zc?o@v9NKM5e&uRbLdBy0E|Tk_MFT!|FkIBKb*-@zIG)G zm^E3r{n>VxFyV}uK#0_`PkShrW5M{(2w?dL2SzAKpJJisHm@jIZ?$@4z zKH2LZ23Q3YZpQSz7^J#gZTJ)iy0j%j4j072cl*D4Z-Ior5ZaO*(wL@6V%+@)uBlI> zw8U6<5Q1w%%Z@!H3OuYyAJf5GsV=6n-|~cZ@Wm5|1Q{gmQi%BDxc%H+19T7riKGzO zxcW24(65wb=qh=*hK_3uJxZtMhv`veYwlTJUWB~z8)kc)EMC}fc1cX^p#F%>!O-~@ zr*yA!6e?zb+}e~--mlP&tc5k1eiQJH=^_@H4qjU+(pRhO0=6+8lb8$2RDTb~*+VJ$Xk zO+&2=V0PG$=Di7Du$^3Z!(zUi5UjoTtm|xa+4slKJ4gV{8U-I5&_A)LIu$leYdGFJ zxwr3P;OJq3l#nroau)kQ?vR4JnXGFa=4kg&HXeN_{;3v|*An`I>(D{eBBw1p{XUFn zq>r5Hn`UK#D|q_FTv77yibS==!iaKtDg1}?acHGnxWi1A2KH?N>pZFu7g(9G~qyPriy5+ zXD0}lVX$luhGx3HvT~puHgqkfGn$py#f+pOhLo9ZsrNr91|Pf<0;m@puQvD>@@7hC z1-}tbm@3+N4HRCByzlCs$}6j*iOPa{D(A1G3F%tJa?wzmJ3@2mLo3_<$81wRZa>eU z6O{*koQq2ezHUu0j&nR{1XZFkupsg1o|1K1FOCQ(A3oVEHsrF3flwue9$UZ0Pwh_sy{Y_H3m>d z>0%6-?ht5e+8cnXPaNN`3OEna zAcPV6;#|~yX|BBRB7f7y{KHxX30FBvS(q=N#w49M&K&S5rbW;{j|Ioce~A7i%b>b zXDKz!thXY+>Y9-l$H}l*hleqIp!GdUT}m}n_ZH*znQ$#Qn@^G6X0=0~I_)>((&qlL- zq1=|&JaW>2mVbsS+v`HOZKfmpxTAHuG~MP}U|gT(c@g}JBdSb+H!Ch2<$|v4;)pcb zFt|wS(8B5Wbc^Yo$IpXKr%cg7qD=+_I*(xFkL?p&kk6T4fmWFJD7{bjMd{irr%R~& zb-dZPqHW%TEjT@!xvZO!eKM*zoxx_2UTODTlJ!)P5ko5*U^P#Gb>i18bVat&TVcmO 
zxVG6ML0~HuKwrKCcHmqjAiM!b{>!7BeVxZgvN2{wG7#tAE;%AX5H~F^;MW;~JbF{D zprt-Rr#?YEB0&~g^vEL`?`uF5JW6TNkt~AeG+}0*_th_Go^a2eR-}9T`S)zJs2zyn zbz=JnMO3}C8b)MokT001IoT_UDv%Rd<`dan(gQNXtS)k0D(P+Bd^~wa0<950rCCx} z3PnePiW36;Bo4AMSyb6~=xJi1s({c6vCxhlN%UAseFDpX)4JQ)07oczJm;-Zz}HZyRUKf9<-T?ArLVLHqcFw7I&_n+iSWeW?F zkvs4k`POCCV0YColMbiFT#P0t@yeiWmtguwkhr3Ou-901rjjWqj(~88`nI6|l9T4K# zq-{MGLk(G*_O%j>tfH6GFZ4~8?KTR)#H;1fE@tHv0)Peb%7Fb)#T4KJC9o6_5Wyh= zlmdVOePhC6_{6mKzP2JvU7WYZwklBEOr77mp5D5ix?V0)`0JDxF;G0k z<;4Y4pjGnZXU?znYcQ?waM_7uOVO3BAcX}X{zgw^ZH`ctCsuYV)hNVJK(CBO?|pQ2 zbstWBMCaxz$Gm;43U%$9Awlx5)stgIBbTSnW4wbgF~yNa%QdU1$8e}rD2-BKyaRn0 zh(TMK*&x6?%Xg<`E?E$Bp>N|1XQuZA7>WQ9QBE<9eV42MotM=6E*Cqbk@)Tim#>`r zj25^2JAX|`EgO+JyCe^8wo^x=c+RsSOWx2K^dkdDx&dwv-sAus^YrXW*xrBcho3{Y z1l)|j+7UW62g;JH^(lkjtqpa=&Fm4N9eXmI`H;HN1r6o#*h&OK`thnuQ-s5TAcb|_ z5tY3q>dOiw{-jK;>yJc@(n$Ht(e#tUVBA~%L}v|{rF_{5kV??~$_Io*=i9Ek`}-^2xoI$$yWaJeRYHo( zL;c3wXFf-cB%}xV#5UIi2D$~-iD2hHxdbJug*=oeB_=`+emKhJe8AY380dR~aCf-` zCovL&Dt?_o2#of5IEtq=Vl^&CMy>&HIEVhiD+q~0r)2pg4zL@of*ayd~WNgK=zw4kO>v!1?IzW8@G(p6`mHfzZP7GZ+& zUT3t71XzbALOuLz#V7LhSvs0&C_+=^uqfGD5}rBb>vAm>3EjC+d5-VelyUOUw*JJw zS^<_0bvj1Pc7`jb63zg%q1F!PK(Gt`Y);d>+J|>0ifSyymNttwg)^^LSxYC%H)dtz zxhxWf(j%Az=iqI+$Z(;yWO;dqEZVfdX}aZ6D>A+r4GL)mx51|J?lgkFo-p>N%ORCB z$iHtlXiT!x4O;<5-KT@&-Y=U93^!cyh}r4%m3|~9dNlV-d_C&h4eWPlIV={0hlDQ2JgQfGFteIU zS$GrZI6lv0J?Q5mHwMbk4)RuHid_C}79=om0-XgienLh5UJ84h*%CC&-yw2Sg)JJC zPih;!l5SvzT#d44;A7ybJo$r3GP@`C)|5l_hIRDqp<|~qPA?ET%n%uF%LGIf)JEab zS|c8JI~_^ae2{biE8^&eS@yV+DazoJ0n0Px^a@i6(=|Lzjj~-B)0O|wU`m+{5gU&d zyMWnIY{D}c-FR|x8Ws2&0%33od;|n4?}x&bU&CfzJmS4r(uy2KuL zFS)ce%4Q~Cb`$}!MRJiOQQcFsCTZ_m3vH_=(?My#wMqbNdWX1WX%F^w8f=kJt9WMI zTmmeTK$23%BXvV8ZjysL6^M}_&!G6^j{6gHB^kvo6!>#WK z_Zyzha=aoW^$76o&&wQ3k~LS3yEb3XxG`C0m-esCZQjn;M#8IE zFvL~N+X;onD~1L~qZTf4Axg_wxo;Q1%}@E061sYW+l&?~2%EwU|9@z!W(O|I7HE-QOi&n*?Xw z8Q1v*8=|mvmc`xxcX0flcH}$~;+_q|z}N9_C)^G9vNLnM+EvjC3b@Aj47eZAG1)+O 
z;d`D5aj$04r}>^B9`}+rUV*D#+F^gSVTW{Lo_PcK0uB6Za>Xqy;IZG)N`KI_o&a0Ay>FV|zdh>Nuzf$>1%BUYjG~l9S^1D35=Svbe_p;7+4ob1O1# z;UN!)-fKy>PxK0EO|t^fde?V{$N$k`&R9XPvu)+D=(+DI%EO~|_0NjmPc!az0nXGH%Z|>Y zd*?9h|MyS{dyi<&7qx;>(Af@&y=)kic(a~38?2qI-Y7z9|E}tZHlya{?QHimJ*&R? zEwt6Rm$^6LgdJ_Hil1j$P)D>_k6dYJzShIAFjf`bk?Y+c3M2KWq9kXs>i;_;s|W@7 z??qmrSyn4@!p1%YF*_V9B;-$&H6qiHKf7NCaL;)uAD@Q)QR##*G3M3&tyfL|ovZ%u zaLK>(p#J3!3g|n0%l8Qh@*C;fIvU&mtB593QTrF-H|ewAR6R++a`0DS(jFj%byydl zSc1hG@V#$PL3BTZ@z3SJ3mTG7N{<eYX*~B^g%peO!Cd7FR|0$z3jK#s!by0HYLZyXa|T z;R_0gWr%QYNr<5F$o&hLPF>3;CH8udtyHuNm7M{bp9>~XrY5j1v-SKpRMhCf8RirO zqt0|ERqi;Toj)23i756Gg9)9kvCkk=8>Z_NiBEs5EOzRIV!oj#=h)xSW^EG*Os_%K zU1u_4{)Nx9Z34asrtK%iBBj}Z-gymvJdiAkRoKMhB1P`92mNw9Qqzvb)h>V6k5ZMmTuma;dXVKRNRHG;oBrvf-x zEk;~C`MmA?D)2;8W1G>b0P<4y~*|r)kS-kK(N^li87C|$rvb=8+?98~ z%3EGzDR{&AO##IcESynwK^=D}CY~gMRN%tvzhe}&$-aD^@2vdUcbxLyCnf2Bj8Xo7 zlky*#gv_M%Z)Ikz;cilxO_TP{m^s84$(FR5`E?+#ov;Vw(OtHV5 zXK#N3*rHJ~5SYcoE7$zEgzgMK3rnL{s!vGG(p(6G6?|*4Ih!&MYFd!v7XTA1Ge~4; zs| zl%vOp2(9T@U!mTXKZUMA2-$z76*D!O)DP}J8Ci~Lf~FzPOj4m>tW_*I7?{) z)_E`p?Zm{%G`?GkA{ryqu&XSLi&j=+m8rV z6I%~%bq;CXAhRY0S$ucWEq+_OYishm*@LfNPQ-UPR2oB@S$87HN6`T_PWY@C2 z$=K8RFg3k#50?`NlyrPIR}Ms!L<_w?0zn-0fh4NnYj~RU z*q2F!EKNgQ&b#ZBLmkg-sGlufTIpa=eItWy$!4*1ev!MpVQ?sBqY8JH34#0P=6j|G zf?1^t6?x7&$EO>}WgkDT`f{YwicE}Q7MqyUSwdtu4ACJSBmXeQz;3bea*jNbaEsDG z1!kST7oKYxr3xfu`B?W`>h;6nPgdbO!!d9_PQ_7s!F_atPdA^gMY6O-#Z3R0;|Xy& zk+%D|rP<2o%#VDCEB@e&jGFVL`T*93t2Ji%oWp4PVt=Q6y(WAC_gBo9oq2*M4`b&V zoXmfU#gUHY;wyq#c3UX{Z^y)T?Bx6P6Ji$;TQIp6oBbkx(iC}~EsPS-cm?J-_on#C zPH-oZzpxmbw}&p6z<2WZNtH0+IH0;gh~rlzHllS+#Nz|`FA|q$-k)ux-x59}h6D4_ zv)plBKB@$+0r@Vg+PLgAa*#^V=O_6a*Lc`bDUMDa0|mh=p5!1clW66Nw2}9KYT?6G#FO;csbR}#?bHC6riUvE zBzdq;QNxkWh7cl6Swc@9wn>-a!jFFAa`_x#0kX4;LbE7EXgkPP29)vI!+D|(=?asJ zO)PU{9zP0}ZJSb$73aPpT(97JkDa(QJ+pAA0$p1QeMu2+(2R74+@g=6_7Db5U)&?O zhB0#k(W>T?51vpGUoFFKVVGx{r_Jyb;e|)a_gH7oOgMGJI|iN*b$Ka8Nz$I}~oXY2G_d>aHQ-kEY%*dZLJg+x@PjNqWEq{G{?#lvP3>KmZfK!H) 
znDLkM4R>^vm`b!UC!?olu|ihyS$3PJUU~W2y2$E$?+q}lspK@NP^70BShkOrl#X6E zemqKh}X+9xiV?e_qvP01%JL2M{P3&Y{C2m*-Cqe?ecWv(f21%HCI zfXX$TR!EDc@mGE2ViC+_S&{_3qh7pGm%5(f(@EGk#4>$lYgbmsfLC)^OVfj6FFJ05EUQ-HG431d?=%h*NGa<=}I=!Z$k|^2WehhSj>(hJj!~u);Kt!HE1i3!|y0rQ27xn9hXM(Mj2OH@O z0}}@e{lr>28c9INL~f6Eev$D(8Pt4`gH|?;8;r%z5K&T!uD78OO(ylGr--fFd;N+g z2vypQ(KBQ3{j|sw(#V{^&X7q-;JpR6k`X2QSiXxKdFKefD-2|t^V+h*G$@g+iWF!D zGD_WKQB(vQn;Ao6xDVoGtMYXtD^H)R7SD-u=AuXzlG&uoA=cmSUJv?5Pr!{V#@!W* z{Tby_NMF{LB73@tAAb(p05^GYJgxewI6(Ge$e)~;PUb1K^?v>)0p9vyHpr5aa{LTk zT@NoIHd3FqAJAcC*38u|t(n^W+BT1sZS9)9yJGH|9W>^>1=XO$=8mGT;rly&8841C zM@qZDtvsZVM{pg+QHmIGx#l)|bV1qbC1kwM_GLrFl@9}<0KD)gW=t@xx1pgn;z zk$UI#noc~am-nce|3*%mcJ@I{&1%Ws6BRHEVqJ|Hbd4Eg^Ex$_AAtkiUiBVf>HD(E zIWcq+eoLA`Y$aHw#Yf7Sy6S@Unx@W;_s943&ZSJ2d8u>wI84`NG2#{{+4(@|C5QaP z!sm`|m&fx#zwhtCpQsZEXB*>5h+)1uH-+^Q&e4aU1jrt&_D3`mm3(Wvp2^kIJ8srt z{ZsS+;dzFH@rz)t`{c%Z_7`JQhJ|-7RWuCiUDP0XE(c0_v>%5`R?NB0CI| zEe0H0Br8+pK|$5t!1d4UdE%GM$*(M^NMarXO>$iktf{9ne^UCA1 z6Lum%(rx{b!n>fLvZy~{p&D;Aut%p!A=&lZBtw+<$Z+T^=7{WQv!yFX^3_ZdeIi?D z#4(9tms+Wkc7;{j6jw^;i!cX0_4ap`Mwf%0Q1sMzVk|4%_2kjD_?}a03{lj3!2Lbc z^yoLyr|Hy3pz^b77Ns}gRQEp7R&W#?7m4`Q^{~t}U)&}0xr0VDk}GyMaV~a+57d_Y zOVM-(%acluquK{fCURQuAGo>KrsVo?-tQ0z417j-G<@&7Qd9kZn#_MEGyeyZ`ET1D zL@LeN&wnSrDcMo&69()c;Kf1&@gp;VQ6Q}~NJ|9;Bzl6-H%v&-aoSW+MfsB$;wE~2 zJ_LfLm=6mw&fftudMF0(R#8!wN}J;pEN*o^Wz-*EoqYsLB45B=VZK6Y}RQ_sU-^NLZhyvV&jC`VFZ|mvLQlsJ(^k7T#X<^XQ!5x(SQa zpK7S!rzzTy38kn{<4p6WNNEjohy&WP?N$$nlp{MYWmJYGID`EWo6hj2LN0H?F2Td@ zNO6=Jj9bv?bX#&S@=!qsEsWL>N>H!AO3RCDBTfu#(4g#0wpdKS`&bVZB(Qiobn7MP z)4(5`;5GnG2Jfk!jt0`p@S-Orvz9U2E3prgU)`3@{fGMFKcQV@XRZrBrsmz51%g_9Ne;*rd1stC z{UFRu5+BDPsfpsU$@hVw+9Rdfg;lYool`ET5MEm+=!#ZKHVnKw2TKs!2i9&U)=;jI zlE)sq;iY4inwx`+b^K;rjzdP~H-!Fp_!her=?xcB9!?A%P$lfrC3s}a%nNW}I?{?N zOKIrHu)Rq!eAmSi2Kkj+=p`s|dALSWg}vwQ7-7k^i@`gT7?d`Zgg&Vf-$jKKR58=u z)h&gQ5ta6GPOrx8yJ)B;0w#P^?FszX!y3zdoTmKEQAzrKVE;S5LG~Z<4M8(wLrVo? 
zM<;umznQcDaB>5q+rODg$U)ve*Z5jdh(JM=^iD#OZKOg#eTB#&bD`ofVN3ljRYMg^ ztA~1kZsbCcKq+#?)Hr^=eN7E!>t5pmyt0AMz_U5HQ9%7!n?6XG$hZ^GktTNwA4zV- zrc_k$aq^WZLY2TUR%A~zN_YmK_Psh&RzL5!TT^#?Nr!B})lMaKtb~EU`n2}cS%kt8 zE_`qoG1NW@%~kfqw{=(LG~9`^7g?Gg5A3nyMk?nbo_2SThuNQAa>&gsQ|`z>>aWyC z+l~MQBz<72o~ARMc>LtY$(S9D>&0_#i~bd?FUzMFSs8l91podFV`YC)H+R%B`1;5Q ztPj?{Qf?Cf``G<==rBKNPE|ERAtT(&c*dB$&I>Cqe zA_;%aSVm3*CdX|43HG{AyD77H%9M>lVH2F@hyoT&BURN9&For_OjG5k%mU}-%Vlv8*(c%WnL2}Qen%5dY{m89syhtCMPvlKz0RUP|M@FM8uVLlbak)p z5^l5qHfKLQoC zmB``n4}y26zDU@0hdOB+K&C@xpCJ$PVl_Tk{9>TX%LLTH z#4B)HD>whBm6YB@qH5|7CbZT`8yB@0DZFF$1p}$AJ>~=1uh0ds zsjo-Y4Ng^{BMeNBG>PWVpMO2faTDV-*xz8K;Qs_G|DAO9FUu=sY^rbQ_Md1Kk+IV< z>wNG--ILo(Xo%lLGro69X(ZUmA2cY+g<-%DMlJAu1J+2W^*gS!Yy4;9_Ym&%@K8(eR%0X zN(OaC=smJZAV5UtFrafb6eOBlFAd>1IJc<0-*!9X zGaK#X;g+ZPx~S>zi3+Kye#blTS~=cQJLtx_;Ck$$M6`c9Q=m)0It`D%HN$hHC7@Tr zVkUwm#n{|8py^2ZAvpii?Ri13MA|v5wXM+T#J`S{x-lVs2E9~@ah}T-Gdtss@%jPN zecTG8Aspnz){l60Pfs;Z?T>;?Hl6N}_??7^Fz|Xqxad_|I-yF*3v}YRGzDU*G529~ zMPl{NNui(Qm3vPzqe(v+G!@*yGS+mpG)YpRuNRkcf-H&)BPXSoRHR!}nh-#&X8}=^ z9Hb(l*Hol~+4p_|`)ek!1*gl_e6Kr=|C4_BpM9`@h$o1f+WfNrUVpwp?0*V&6(+wa zVt(%_I337`XM-64s4$zP<^~~Mq9iG3=%zdeDsea~pgAr;qi_av8cFf9*&#`GVhMD+LP0-FC>0+KZl{)bde@fW^OJ6L?I4G=HSBsgWRaN zAS6#JO#NL3wICrM4cJN4COJOzfRZo7Siu*-)hXovveSZurEMTwY+p_tZVvb=FdPEL_nCBz|N5&|Q zj?W<+3-1|BfX^W)Uygf(#x!2en=wBp9z{?uhwkI1cGK?$LenD*>U7s`Bp1K8B&IQG zg3jdVoY=t@8v(k5NcGCEtOt>V4qSK){jMg98P=Qim9S1UL)8LE8PjK7R8{SO{UlO7 zkFye2_ZG6`GvxCvG_6{c^Z6i}qVOzim^*^{Qw^l2=Z$1%|1Sv@mN|H!=FBWEja2 zkR(;3RbW(US0Ca7$no;JLF7Jr6VaLgExps2N|Ty&b-2H;<6x$8dpp3?+_ClDq=OFo zSZaha^x4T|9S|%R>hwcfdge&SNoqrW`w;Izj-S^JQuO6rf+vMQQ3V^9GdaiK0!R*k zB(WO%J2AEF{YWYyqyl1~T4Jdg$k0bmD0aiPv`K)GoB&B`Hd;+ra~ubvqE}T5G79Ge zA0LX+3vKOTAkf_Gx4HWM`Xj*c6ssEm7#w#W;w$j+bq1>FCST%R=y7qkG+l$q{W^W| zmL3NDaW8-2?e8YLkmKg7dz=A>Z`i$Yw!ViERu6>Ef8xQ^5Wh307ymmt?|_^;F#H=Dn; zX`ElLw0-qvALu=Vwcr2V9q`B5MX}yD+}!fLZSjBi8UM38E^X}Kpl@pYPuJ1-pG9ni z>JYBThv;5k-2EnE0(iav5WDd4n=t^O2N6L5Vt5c@bO?RSqcU-WlQtWh06$8>Tq(oW 
zC_?;J(p1O=%Jmb&M5%EWm+FJL8=4;LTq;#4=AN=Msn}Wc^LaQp|4cr0Z@*=J@!lRz zXLG^k$U(dG0M}!qf~c$jE3lMcE&^7cSyHHJbn{z+L#!ORd1T4O`mmEW5BUN^*Y@Y) zA%yh+d*_tmomf+C$jwx^X*TlJQ*|34HlB}(M4Zro`Mq%1={xZLJ zhsvlE3M9p0n~`kI!uAjvW_e+5VcZ`E-&%I!55_vf)*iYKYt4~;YPAB;bjn2)Fq$BB zj20n&7>iV@=AuUT^>C-Bd}fS#thSstO2|!E>ytw|Y)X_00c^qb9x~Kmf>a9e9*nJ* zwGN%rF^fIF3ePRdO?Hgmw&4h z^LM7+_st1jE6)JphSQa>SRjCfNA8W_O34|GHrG*{J1+S6;h_aAxOZ=tm-1|y{j^c} zYH=zjni2`E=i9SVnTevlFX-Yf7=)?g^V7oTmmnHZMb-)}2hS@e(TPEXu#W^TO~T_u z)(Ss2%1Hb0mD?a&L_7oyRiKf#QLtsSK@hup-Ww)MM&l$NG6=P6_tS3izjFQPDr(_J zm5ga85=#YsN%L#BgWEK}e_eboO%PAuM*LQ=zfiWty)r#+eW;4WEK=&bFok(y{DdQO zh1Vsm;#G**vVlCfzT zZl2xF3F7k7W%*KeTa-3_!>kv)tkiJMyy)|X?4lmd&xFw&B#-wI1sT8qi`wCI_E7CL zbj={Fy+(D1D76m*Zcs@|<;uLrM)|OkfMBB_)KJmVsjd0mJCZrn6;O7~POA6mCZT5r z2Y*+*sP`JG6i3%KbZN_y^Ze{m8F9}XWO_$kCTfWVF&#zl*N*lIJn_z3MRtc3#0wAz z>5bo&O}IB;{=a7z?wgT7HJ}2Vvu;g*(@>c%D=S^rY`@rF1 z<;KPZOJ9__A|e3FP0f5zuDexu4S`7<8{&9_48GsEm*!JTW*O^aU{weF-#WxRngl^f zrz$PKQy2LZWxZjzeA`+syyQtxF|tr!Ql~Ccd51pJFv4ytV^Qm!2*=Uqau@L%cNtg; z5zG|{_gUy^aQ@gElnio24RfH)$3^?&*?^-G$^OjCSVVn@o5tZltoXH$z?8p@QLF({ zoX~!*CKe5pYwzZ0{L%O-_)d3IvNduW-R31TlT-9^&!4zqnd0E}SRpH;OK8vdyKD;c z+3c5CSO{Ce+Wdi{p!E;GmhR_VIvxL3W~NhGSm(?)5G0$U)ohnCpGV@Rwq810rMGoJ z7>!iXAeu@Sq~merlp*WY0XzL(vUkAlfS&4SqrU2wcbn$pr{{Of4qVSH|Ji5j=BI8! 
zj_b;r+t(k&Anz#ejU3SeShLDr)8*1Xm-gX0C=fv25xn<)fqq87MGg)oNGJ$qCKc;x zm^&*6>+^0v9YA6eap#3}$6(&Kckli3pxynn>7gdhzzXfpPywN^~~nHvRcNnY-( z`Qwoo@)qm&(I##ROkiP8^f}1NkX8E3%9AI~>GUO-ijMoolVE_nVC6Oj`)wp>djT|^ zfwbb{4j;QeLC}a3&uZ#tj3YEUWgz7>nLS*)jt>E@I<;Hyr0}p@ zO&n@bp_HyS^6_+Sn(5k{Rn+ajyYA+tY7K1 zI_}8S*eK_yR7x4~^X99EVAD_7w0QtZ&Fp~bl9q?nK%)uK^`Vc+EvTlQ)Lk)NTw&{P zxZ3xCY5Ky|{F0v+-yqvhl(nLkYe1?EIk$S8T#;OMs4sfg*?e4VLts2JsNMZ0-!Rgj zxJ_Pw3_M4$Jo~ERQ#A&xkn0I}`pMP(`j(WTI>HzHI^MKsd#S3}rMVf=Zzl+i++_CY zM}<}P8XiyMNPfC&MWya%9@~7<-Wje$Ttr^PP z7gD%u(%?9TM=G>9ysgI0K58^z)8It*+0vwX5cKr3*H)UvFIEuli866AN}uIt!*Gcw zjHctku=@JcQXP3O=vH|I>v9Z?69y<~5-;OfhF$?gN72d;W|H~iY<3l=!r~|POgDp+ zG!`t<^dAg86VaxID)^}r%|wA=O#3fFg-p-RWGX7T_s#;=Agtktv1pQ=^s3L*pIrz? zPu)R3>1eKdq!Sv@FeiH$BTu)5|N`D6u6QhChBW{5rGG17#1AHcr()@ zdWn4oW^~u1zyDI=_+v!CL06~%Tb%!Fa@I^1a&U;SRfw|}7&!@#xHC`5|0*(MHK3wW)e>?oe{3yhrS7QkbQuH8Xnq@mn{D(Mw_!LPiu;)gE6Qv zI5bM^;-!TtoFL|J->!ZxRT?iyJ^cXOM-mmZ$Cc~~I6PV6giXsZPM_umu%B7QG=9(J zX=SXj@wQ`I#6!xxqQ499Tb0IZ3z$RpC0yZ3N}W)Cy1*SaGav=9ePozU-e=Z;f;YZc zzb<9UnJ3zuMhGEWa`2@DzP?6jLL@RJoY^4j$`OCn#6c)y=~#|rndme=2uCPWXPjY{ z6!F@<*x9;yfu!FZa#g@6E8u|$i-oY299j-O*UX-9{SlgwuWJ9#q;RSsoL%aKO?Ghz19Dh+UZ^tNR!S}t z_Jb8Dwm{!|1iD5izw!w7B?7e^W8vDuq{^yxq3oXeL@qrHiRpNn;@Rj6fr{Bi+ zokKg@ZC92n)w>jAuqOd>^l#qwNN3Q7<>-fky{`CG`ABEdhHCs<8e5C59n00SyjB>Ev3pGYyQWzJ5Tlv8EmFvJ=hul8+(*^ZZ&8AD3@N zTbYyr5!dRJMOEfwhZ2hB4pEFSaNsHEw4WlFW?*k^xF4SQ@o`LI zd#hB#i=S-k`=TEXOj(ewKENSvW;f|iW54UHRt7` zjryctpD(Y^!=GxU^x_ko=YJsK&}r7SEEbleT?f_54WxW#DGa0i%-wLANzXEphkqzD zoWGOOV~j+hG)JC`c3G?>y*q3+UW^e3@Q!QRN7ZrI-nTj18|s+ER@{q%1+;dG5S6q4 zK2rX}6+#RblGxCgxYRV^pdILB&2BSUYM8!zB?&FckQyrqJuRnKvDY2RFcL(p4NWI) zI4H8rV#COe#TeLo(rMaQcgo53=L5wYhHqETth(jH@kpa!uH$3W**h%8?~rBcg@yVvegr$&EH0AyL* zuIo!{XydN=apBz4_nb2)$}jY*q2nS2rb|xm{W--$i<%*$fUJ$Rl4)0+=J}P(c!_v^1a8FSJh+KNdpXy?GtJ8* zS*Y-XkOhiJ*YbW1dGF?vcWht7K)tA=;q?>6uwx2KM@R$-CkhWhcGuJFC5G;p#`Ghr z_4Wq`)QG%G1qNxO>KAyMnyJYK-qs^>NA!bZa7j+yVsQ+U-Cn&qatj4vz6Ba*=$jJI 
z^<~HBuEP{5nsH|20^a1(W%EwALitN(oNOiMDVR6c=bQ|YoUViC%9uCbK(Zv~e+h3T zYw?%NIW-f`0M1#Pb0!tP38z>65O`K6&k~)tMs~uIc}`*meB6xUFPU}PmYg?d-gKRF z+DCHY3!FP;-joB$lAZq`lqs2a>K4tNH*ec6JatOHay4%waN~r)9$r=>-^{TFsx{Go zVdFjS*|wJciTrAr1Nnl;rFic%T0-vU^Mq&?l!c_v)g5S$_$H0e4&xQl%$Cf6sCmN` zwvrjTNgx|~IpJ=wMZMVD!O_HbZBE&g5;@6g$l+*Rx~N9Fz?#&FGg&r!(g1OZGI@p@sG7ZCBdW1ve$zx;875iTCRlM!|KKrq$ujw( z2WpqC-{wRBy!+tzX_u_>U8eHDqGxgFmCCv|eE z6_q_Y76x;!x>BXU-lMn1MrLQpX@bE@R&Svd7wk?zEIE(;DC;Af(TuD&N-wopOP4w1 zBRV#vV?U0}6qNP3s>LY@m#eYn91f$2Gzu-HilGn}(YvxqyJ;xJB4q9{OXE;1&5g!l z_uhNq6F=)}9xiwe%~^Fa{%hS{*H{m@{()dEbgL+~OW47$0$5Nqb>r7}&+tBu__Mqo zIA*09@pJ}4eMon~V4&oEppd}uavmX{_bEA%_fGO=`tadaNh$*pfS&)YRL)i363vQuC#B**VWtKYJk@{#Q7Pa z%YX_Agm&~}p2{i|I*HiUc#7FU7(;$naVi5zl+BIxq4<&XpN%xRG|nfqOEaYzj~_=( zn+`X|q%1h4CI#X64fc$(Ds$8zi7!Sj;a|U&Ih(gpT1?kuedg3n9#Q=-+=%TZ%fUy>+&F-4-a(Kp!+QJ$!{NX(|c|JK7 zD~u7^*uM^V@kPFG9>wZS(t}5p0~%s4G8{!^fa>Grrk#~!L{X7aZGPisaaIMHU5j3Z zB+UFK9E+U12ThZl6DO3VED7WeHRm|Jl` zHQy>Bdcy$;Cz;T5<1N$ibi$h9cTD+?P#icOlzrD6%4flOK zc91QUWjA1AKk6D`n)`Fq4Y^|;o6ql^pWr(vHXRVB2~$*$)gFV2rWiJA^Vc`#v))+Qd48A+M}V3v$P3j5i9j>Muk6@=*TD0MF=D^@2ie(X`O z;RI*HDj-fd28%n?{vaBoH(fPuH0jdhQkH8`KXj4VT<4pqa7bI{*tDa56M|tR>}&xQ z5zX;mKQUFi3dKEknx{S+QqNvb>DvKsa;A{nFgan;WMU+4F!9ggZ;Xr-2==^u(LG_& zzc_nRACG`t(6v(Qg%B+pb3b<}mHbvjxeqgs+il?LX<1*Wifc1WuR>?1+6zL8X>d%^ zzYT3ttsJy=z(~tfsRvdMLlm;AT3!EAaPf3|)H`KQ;F-F;x@{P0Df8w2$aJX`w?&?}ps zadB7QkiHuuF||QSaCXGtW3IlKnzMd${A6B3$_Y|UwX$Ab8)nzhjCn^?pm~}f_r2sF z^k2S;TYxUA<1W3ku?UN~4`$w5M&6IV4UG_as%OFgGu;w^xcv9U`9CvsWc96$jsBH6 z{2z!s=%uz@euMBQ-N|I;LQ&Z;&nn@3Y*#RNG zPTlVcdqC#h$p=xo3zIir3cxBaNH5S(_Y+#3j=1ja99|7zj_;_6*3Kg2IKgxp(OgTAW z$s6Kqji{N|L8;p$Y_!~CMAvc3P&?LQ?3$C+6_o@zi^2fQG$5L)0XaX3@T^jn-r7n$8ksRAJCx z|FCsz4?GkPpM}Oe-!aN_eSu~y{X9q_i$R`ZVbw>j`k*5v?t^Ir%tk*%f#Rh%Q|r=eXiPPe?(X?(SM~UPJq;{x zvOHn;`W@`AI43A!#i_zQC_Q8UAPpJIV}`C2hyJus3}pgZ~ceM5Er| zqW)L}|C;i9{Fg15xc*t+;rIrZ>lBA)j^h*0D8A3vJ4z3Gmv}xT1FSvpq#5FgTJx2r z`|{n`i)Q(`3j=~#>z70u6b>MKN7z@5VLORYZe@hJds+kF 
z9jh!;Xk003fIMFXX15?Rf2Z`Q`&FnuidrzD!hRB39ce#b^CRoGZTrlu9RPl@16j`lzr!5HZy zwON6`xy!2Sj8ZUAVAAH>tfnyQVGv=_VvX!O`}G!hq*X&3ijI-ys)1;v85J3%6r(&# z6RQoR8Szp^u##p)FP>jGP;CupV$ou{VP8XJa_pFHn{wR+S+q(Yo4*jo8VMXd2YOK^ z@IVpn%V4zlORxqlE%$fnq)f(6MjFs&ZL!$FE`vr6r9)>l_ zeu*6f?Qb4I`u$OA6mCEU9j-@br7CaoX`fd_#{{!O=05wm-~-Bb9eMrrdQ;wh2{`7A2jML`AVxGbD>>JfwU!0V_= zNLIPg!N6gW;DGtSG1!*2a$id>tsTL{20WdkBY@S>2%HT6t^<0(G7nxDvK^H$!GFW3k^EZEzM4#?){KT?$;z$)j+m6eicCm>OSx0`T zES$P9*in@L*!t0#rC)y`R+?C`FMUBAhrmO?))?-{l6jFutO@nh2Xe)TkhU$%jBAPi z4snV26T4$7aZR)+B~hL9@X$R&!rXT1FckFuNz6wvxVmd<=Hm}&Mi#oXIu>UWYop8L z>9d3;nw}YyxpL6@bs7~jBIjJL-C@t~9m?L5f^w*evPfBmYX99eP9Tj>&k`djpuD9d z9)m1I;pvVQQtEbk&(L}CKo;s}=_Vt|w(u7-ufkq685&=lIImca=(n_$5`BZ^g-gBp ziD8W6>vKab6}it-kHBZRh|_a024_Ri914;x1(lqyzxRUp<#4~V02$^QpfCTwN67!2 zVg7F+yZ_k$7HL4bDJ`P%k}-{r-^Hg!)m%%Z`T}F1NW&mN#Ks4b5JM~-8sCY7kz%?r z0ozy?RW?=Cs#w-usUYBL){tV9g)7%yp)EI|ZD=&AtpBdmo_WfA(;-c0y(itz)xDFw z0)(Bf*5i$@t@Ha+I(#pTZj&%ylb zrOp=+q0Yf;GW>&^%RA#|@03PDkP+%0tvyujoB`$t+d%cU=?;7 zk+155Nezp|J6MJwF?J9wZnl$Sd(I4^r;&|$U4g0{n%HF^Bbbo0h{PS!66L_mqlO#9 zbO|`R&3%EW8mL-!!T7x7CR?hJ%&wtifCs)`%f-x~;5&*`X1W1lfd2{K-l{oZXfeP| z%rHb-ND}!oqJ~feg=@n&vK?o4q0r9S^xYDsp3nrAScr}l9F~%~PqG$lDP5VPhY@&_tZsAtQ$Hr@M5F%(TLI--fq)vR@*dje1+y6<+H(~N=h{%p zt|N!0w>CupbQrt^pk)?I(|em@1sM%rPr{C8BxU!W1Qnwmh8F0C{7W!M=!V#uHrU7WW^oE+7&8(XR9U8RC7S_CrKa%&Et{WES#vh8V&vryL z1nora!gfArWce2FB_4{dp9(_V6yEAr(A`i9Xm#&7)VoU1cJDQ!=;)i<{+RNIrylF7 z>2kGJQ-up`U+OE!FDm!@j{qn!w|knRG=vD3sH#vP9V*N4v+ssGl(SbD2+a)i+I{DiAVG z3QBzz@7_68&a;tTAox@_s;HksELuMv`QoKn&Z)Pi z$k-^qco0H`^L}KhlZ|yoKFO2KMxU|4xyd}j z=WejNueUeog+|EzUZFHcgr&XvG$^a6l6G2%&;Y_NZkB(Q?j5aub~%m8n^M_S8+V{K z8Y+0vVD2CH(TtkX3rT6Pa$uB#(_hgGy>EKxuaNcMQVmrF$)C|;4KJ6t{gKzSQw9k7 z0DH%y4H>Bhrk?Q5Sz9LyCuIkDZ!le_3c)=0T-Ex?VVtoH6OtU)F!n~#XAFO;Fi$%| zpRBz?7PXbidGx4<2z685RvCfG`c{E;gGmEfet6n2E9ii!P8BFOo!BjzQhWpR_yS1# zE27!?3ANLFTz#v(swILcqa}>HmjQd?xf%ylYeAXM*xlI_502RQl{JpI(>Z6#1Q!D& zLiyZS+ft|D%H5CHZ1alsJ&8S%?1t~N^|94CzG`FE>^M+=q9vXr`y_@D7g*yzUr8Tb 
z=jvA)@n@8`fWs|C;Sbt|9{78CEnIiVUwI-~F?=b{+_8)VSPpuD1T{ko$`*~IhZ)$> z{^S_+!Uqk)%mog1;j%|xjBxEGn$%*;ag*%`zfNXuECjq2wqJo8SD*9{GYan`?Jj^w zG-ncSJ5=^Uj}Wv^*gpzNxZ_8P2dIT|erK4tK-~soP+${^Y2Wf;_9MM!-~!!(4^ZWOJ^|pn}}_O|U5)*0Sqwz!ufP zF_4hrk?3@t-f~wrB%7-;3grAtE6_yK&mtc5Ow?lwkcG3XLn}pStu!m>Z)Z2~vydT_ z&L4JM5c+OXKq%FB@>|CWwS)*lkkQCbJ^?*CV|vo*UrTA^F0DW<{@$2R!^deQ-sD(xX72MN;U6ePVezoRq)s9pzvohqy2gxVIya-&yP? zx7;_n!XNUyTz+fB&zBKop|WIHK?UDb^N$_$Fl?=V>RfdzFI5S^7`Ew z4g8ppAe6+%hrfXS`~dZ1ya2Um$LSM8=^K`SPSmH9KOSAlL3k29&tGsKRL^bf5?jh z`L(IJI0Cg&hzsP>HX@f^O8^Ct*dHE3arw1LBa!2vqUWuv6LKng)9HFxrB z)1b0OjrEpEKr6WdzArizAqCN|ie75t>DQ(L*DFIXA*qqub_seW#^?g7KP{t?TLl;? zz7$Z{fl$|Z9hk+~lf;F;%)yWh^vOrd~KTG7eB9UwIQgmC%F5=Seo>E!aOQfhgw{ju@-o6I0PVEucoR_(*~ zADfu8d?4;1Pi`C^8<3~bb}s@WRy)-&OeZDi-oJp^I9shIuknxx5)f>IojO2KI#7dm zt?6;DeHq-y68C9S!(2sBbGeZ6u}u{n{?rQ-OyxB(bB96WW=i7W1sDYB#eKwj0A^mzV+G zad^A1BXIXb?(RM@1Z~`vxI<-Eby&FeGXdt4)S11hB|9&Y8=Hg_GcIj9*W5j*B!I6- zQ_VGi(1V7+5sJ^A7bK_B73H(O4E=Kvcv$P1;GyTOouDLp z!in(>9MOe$A{fdE=64)Ac@Mg|LKg_F7pBR+<-$H{YDBdlY8Re9XNpHwWQp$!f#r{P zX}g1FiWB*h$rASIz0Q#Q-8M?cM}s5TRAH282IIZ-rY(i7g$Rywmw>gJjBqzLcI+nw zTTsznHEM+EFg4APSm=Qee8ifh{(R?vSS4dvFWfsq|*7vN?y>#yX^{0nrLaO zjF~YdNK8i=@Kg0D)M^!ah#!eJQT2?Bn=C3UM~us40`^F3!#(MYQm>t*f+JDsVNT-X z>LaJ+;X5ML(fu)fmbK;k{chy}(bXH}uYc1TFx|>m$N){$h=8)ve^0*X{$nBj|DAmO zFO)Ay#oBgV34^Cu;*8X}o)p{D@B1ZdTZeVAd=s3jv9?*W;2a~Pf19n1$I8%b0%B z@)Tr{;#=`fUS9h_(3b{h&A$H>* zPG=XR2OfNxLH)YOTNB#-prdrl-{h39pAk$&)lsS!mf?Y^jc+zA-L*Sl_IAqwC^U-HM1Fn+IG zE!~O{g%;7*tP?C!M^~?!XI`p59RsCrh!JKG`-wO<+_|O438dDAs)lPOOs9sf?p5Vm zj>>w^i8RhODBN-=aD-3{#N?q$Qr9?#f{3IY-NVMm#aiwxYdnLNsrNe2+&cq^nZ5Jw z-J~PddId0b?|CDXD}r#kW9f$8ymr~7(dBM>F+TgO8Xwq9WEt9(3tymo#u#i=fK%om zj|~3QKBQ+Dd_Of797&*?RF4NZYYqJR$p;Z?%DF>OtHsl+__O4+G1`{=O;dyH#}>aL zxpb~SW#!Lwyx<%T5q&9#oLpkNNI6~CeO!1PJW+#*K&7lmo0+B;AKx5!P%U?=9oGh) z{%3;RA+pniSamEA^(bHBSAL5I8JF@71Sd#W)V&J3>f}2gkkmqu`2`^?__#ovK>|UJ z#JJM|r<`M|$Mr$1s0Wffv<@~e{2p}(WR@hAV2C0t%(m@{lh(zu^Ahb*QG$>l 
zPI&t*FS2BlajlL(ed+D`eag`>+wtrD;{}=@Ge&s;K{Zy~5Jn<+RH(DuvUtnJ!t+tc zrUhn{ET3kh1m;%IUbmNBXf|GEl7TN7z7g1FO^Z{bQqfgw*F{UGFBJ4(y3 z(RH?>xz;fAXp{di%!}`ZECflCdhnnE<#cm1R@H zmVM$ETK`xY-SwL6a1+hx-4Gt+X;sY{T^Gj26DcJcH&+Tqav!ASqU6hA>ltZNC79`m zl1N7PS;LuD*Wx+prBQiuUdCG+%2r!_d2$W4^((qfCrGw}e`K-Ha~IAg8nJH5CyGO#n&fw)H<KF@8i8&5*J5!I zk?h3lIoY<9C~qoeMiluADsyHJQ^O*B`EGb5l8;I0Z+O@}Bo#VfQ?NnThSm_75qb3U9Q#V7Nf)vK%;Quq|Bv9e)byUJLuuh7ggQd?eP zs3=_E7O2pnaA?;`S0{`@0p|mkhtN$5fj$$Z_*pj!yOv&ZA@X_6N|fffdm?pzXg!2j0|ZQKQM)G?!AzT#@Heqg zPBaWX_G{bmxEIJao-kX@8{J$&=nx8GAUbbAcK72D1Gu~2mAC-^_B_EIXa->bNwp22 zmdl;|GVf+^oUd+dvg7;?v!-yt_J^0?2D2bzlqoXPAhN958p`nwS#b0Zq>bAR=B}8A zI9GsoI2TL{nM)Y5v`1`YNCUF4QU|IYGo(rMp`YbOkeY=*y-jEc!m>T|^Pis+JG6KX zNkb^v_p2U3p&&fAz>fS8i;5NEgw?fUNg-(}HNmEZ^g_4ZJLa%_b9_gyOWNDeuip97 z@Qv3vY@HZ9Vk6^Z;=NKvIL2Q`0y6x?Sii=Gi*c|a&Ba@QAby#NXaC5v%XvMS%j)~q zZGVMyvBDf2Yk+P-AFX~%J@@4ca!fLqsy$i6ek_keXe2PvkNh3bqaL=}<-Gjt&l{rl z#Q_wr)8)Ir$_h?ZoOHxhkszy+ISdjAT!puxxlIi9Cw@niIcl0bQ-0 zvRpAvb#Zs6yMyZD{_+)(@{6J=^$L}&*JeXkb{`4Xkov@2?DC?4$i-Smz){e!gFaUS z>+#HR>Dztu9_*>Hx@|`N3lnK=e{q*}UW(l*36972sRkLki8R|1)3+sUU1*-E0^Pa$ zb!$%|NRxl9J?G&wSU&(rhuJ?O9eDq7?NN3zxB4H+Ve3d9D1d=(;BndssRDhHnuc0C zM5`aUT4<1)FAMK|e_h<>$u2dwZwM0G8fU-PJj}fZU-maf7*LNub3^FU2ap5&GCj(p7Y1J6Kz7w2Z@Xn~fdOLLn2r zo&#r>Fq)_;GYwOw_#ti-eY;HIfg!dD2&WIQ@17(J{uZ=db#v0p1dL(;@cQpb1^a&= zr;wGtqa(mI*vZ(e&T}Iyq(0Tgrj?<){+4!onci1Xf#qs|}~y4>^Gq_!LngqkH?}8+YJJFoQ^657wifpxa2Zu2&0|Jdl#P;jVg0KcVS3CZ@jRlmTD^?BPu9^fM}cb5 z{){V1BSnwL1{rPG5TaFfW|_ymF!Hd%+%`=D z?R7l&{(3we^WACp4dl0cB|$z>IF{dQ`H(Y_C8i65=hc|9`VFW>bXwCXKA@sx-CjyS zT4Vb1v)gzyU;nXX@Gi39qbmbc{Z3{$nt&j-W;{8G@$%=Y9DhKS*8(JS!%;G9J0T>sD26mfa^G;pY}>6 zLD9bZg?bbpzhInKwUwHV&#a5qs-@c`VC={=$i*!4&s@YZB*oue8t5D0>9)X zQ+HmB1r=nSC3`i{SR+(xfDTi%s=bmPQ;g~>%F5c>(^5D!XviicPQN8q^tl_Zt-R%A ztFI`R&`>1I!!nt=@;D@J=Z{{Nq&780*rX>z7BhB|8c*%OK%r6y~OkcXs&DWYq( zs;JSZ{V&3lh+7<)9m1^zJBi`cbScf`{b{-ODcJpR5eqf|odJ^P8Vj^+m&UKS(I|&J zW2tcLj{UcxTgsG}V@cZ-o(#pPF)mmYJ)}Tx0?_4`MO7ecds3JL?oU*K30E{tp1E^r 
zN-vK2WTC?}s<}y31TsfB_inS6Vt`?BsDZZLI+J8ehL>(ay`(!XBamy6YLi-t^LO|F zQHq8(CLA>~oZ&d7>Hg0hjZ6e* zms#{H2Lpc4*`ZPgDdeQ0}p(5;g1&$Q{@wZk21bmi=rri{69ZM~E*-hw? zpGl;#TR+BIVJ#y211_fBCdzJ7D7`0fi$B$Rp>#(FN)=t8?-I*yhLk@!6Qg~%Y}e;t zd&H4e-r*Bu`dz6*OktUE4%o~0?zMvgJ&0Q*OM9nmGa0+~5mNWA5W4-h%{;#EmGI|b zXsL{OBKGo1I~J+Z_6{uefNb~9^stp4!OvuRhqsB%48`AqF7P6@J_uQjZHgGtGYywN zn)R?+9V#1pqvv?&@l7@xAS*2Hf19rFn2o-4qp(}W;5Qp$QF~u1Kx5K^T-lhND(+Sy z#&WsE4-`dwhI!0?Qa2EKyr?)xFxCTthv zS3lAJjph(z`v*@p5pe%X*u7sh#BU@lx%HIyNQBnALOwwkY3iMw4c}B~Vg}Kgk*ne# z1hzjN!qyC>AeAJ3!PB~qaZ=^#&Z32Y=a7wkUN0zRX8MFyeJ4%)Gk@?|PU;DvdT$J$U{;(l1sXbt;8qx0aftI=0BurOH+_luM;w8UNGA1>jod0?n0H=a7qvC~z60hVPw$@O=I}?OA?NhqS zJr%>i*X&szbvbyGR_JLX?p%lT)BVQB!P`0*c&ng2J}($UVGP6)szHg*uSb+$3m zcW@VQ)pz)}91+)}?J_SI7#K2`tP2>aDA?e8zSGBECUXMy$DY6Ruj7Ht%mu=d_qmS5 zfqco~yMcTq7cewYFu#g{I^mDZfexo%UxgJ52S}n|y?YBtN{5j!`B=OVLA@j*Y3`>> z>H3i{TD>ntBBNzCX&hydFi2|Dv%uzthDLfudM4k#i{@rkz*zl21Lle~GT8ZRD+LY# zH5rr`4G62PfY*QDTG9Wn!28z_g^f-0ovr@1G6M)MiMSZsI0-tNTLC!J|Ffx#`Z*48 zqDJ9KNI~A8lh;7f$o(#zfl^9AX~i#YzLy`$Ek8G$UmnIJg9FxHmvtjiq4o_)5aRGJ zFKS$>pRAREhiOd56P=C#qWb#>3vf*g4=yAzK=ZONPG9XlAP;ry{-c(pxG$oD)X!){ zx-~O2R=-n=!%_$9fii^J0m}(^S&#mLI=>Hk`FDgPZ=AwBKAg6#S$Ri57!^1n{WFH7 z$#nuYM*&(lJ;ik7{e+vdqYZ)m(n#FuIaqR>sW`rKIZH;KT@ZF-Eh;cu?O5(hZ_gt; zO+2+K7M9H{?~vCR&1h%d;*%$`snZD*M@>Kvyi!uW-@1KL+b_F!6$)&yD{_)WGkljs zyXhVi>P(e(YmyDNn;%oP?VGZy0i&_HN~aIQePI}N)wrh~HV>q%j)E^NK0g2w)>UL{ z2{FGAac;Yajf+mXw6bq{l)Y$a9|^C#at+fRJCEgn#Bkcet#p2kw2wh5*Yb$H638?O zGv9$@Q3v0BZJ`9r-f$x>5W@>09!WXv!=K{TdMRwSV#VD-Xa=qin266ouzy0bzE%2! 
zJjr}W@!1hAzC4mb0`$^iqI?8ag=P)atuh2(IKuAF;Kmt-T z3mP}`BXn`pq>kFkW&eT}BSwR!$)fVH^zpZ8tvn4>AV{o7Bh*s%Q%wp_MQ1rrL&@XG z^Om%UYkj@`I%)T&`~6Yp_18!D*Vo5(#;@JY;T#Zv8t%9}NDNeUu)!Y`CF>cs(@F&% zG#j20*8oIG${U){0m9LFR5vWs$HaV)BFZu5L?Oi)47UEtFf*U^;H3e*Lg(7q>XQ6X zJ#L)nfj%i#%-Vx|CE_*H)WEPhRb~tSQwf39<*IzaUi(uUp$R7;s(!>$B&BjxW;U?l zX^1-{>*>K}H@P`_Ohpv4Sr}o^;p3hb3>)<15_5JWZ_pwS-EGFr4f2k(_|d+zLJo#k za#*o~>z zQCLU$Zj?(U8AAINu4FQjgsaK9BK=kkNSXeWrLJLO#s0}5j?9_F)Jd~*j6`uW{lVc) z6$ABFp}qIanURX*`oTDmpo-YjuE@gj>2%~QVPmHqXKjsAXGb#0$7uYc9!2Hn>!6WA>z5 z)%jBW>g<}mv&fJ_+nAXQ&z#bVDf!b5w+cP9^EEif@(Fe-8ZzTh)|}M?iHpkh(*xnhGBu*do`F7m-h5{CQ-T|Y3w;p2^ZNa)(N+S)2sGOP7*X>+lFgE3hv z8skqwAG54s9#3Wgijgs?kym^D2XRuYM=nWwf?9=w%QF!OX=oZHSV!cgmM2>{62VJ! z^WfO*)dEwNvC>#ZK??FtT`;wIdHB*FNKlr1w~8vybxOeytfecZ1W-^MgU*RSEH+Of z1|CXFni^q-@`Hm|(()O8qXC1?W31;`B z2445`_ZeYR#^H{37=p9CvRb-m*PXShjt%x)EGK z^X2of!yyt0ic1ZKeMKxAHFg}aXV7w)WBLVZ^*`p@_K z7{}UdhQc6oL|9$ExRKAKemUCQS(S#+c=I)iC@Jwq+}bU!mKZ~LmiX9)rFzBCfF^X_ z@J`An^A);Ss(B&K0uO(V;-49Zuispwfj3<5maW%uQRNO`#PZS(0J47ua!{qPdI?H1uF3`Dr~Gcro~=59j7d7biR_ zVNQK3=vyhUA(_8qdlH)AR3MHc4HTItsmp}*PkmvYw$aL`1%HcBP31flVfNX!iCS8% zm;^0W3DJA{QIQ6swge9|B6vs_poS0lXlM+cH+r{;ffD>G>ANPmFkaRGu>NYNuvfy2 zY(Eg=-*AKA9_q<+!zQTJrGJn=oYf-?+E69^Dgs+>0;R3n_)qcrxsAbKN)ZtXGTqst)CVziAvz5j1XDDN z6Zv^$w?zE;V()^0U-!b?!|qn?02-&--geJ|v0-$J-_bWy$^#f)% zka3*XQHstrLB~hkHTPad_?Fs#ogg3Hni|f^-Ji{A$=MZ12V6%oL0^=&`b z+9r+|A`$c{x7Zv-5ts`X+W2l1lekNsP_9vwbHtcE@Hnhshsuore(M3JgJeK69sIid zPx?T4|ML9~ZbE=7>#e{0O_v`o>lWLK^iE~%EhWj8H%_@8%tFF<4_bJ6Dkgp^+!;wP zXgZ|3bt}!f@a*fFM{S7SI{464Rcwkxb*ZKj8lD5Rs$_u?J~(S@7}9{1v}z!?h?(S_$$9qA2)a)wtJ zHpRa`N(I@0n}>QUd*O%6C_TInN?#2r`cr?8TSY8JgL612jVL1&5dkw}jnivfVpO_h zrbY3vI*Grd4VQ2G9W|;Kibq3QeotqP%MBw&SEe-l{))@wy&h@SIT1)559VWljkDL; zQ#Q>oVi{4sTSkp&U;s&5v^#G0#{sKT5DQ*SFZ$MF*p0`VGAd$edgX41a!wp)==%Ba z{*5xdJN>A6=A9+v@$J}c@aYZRWeH}Lp~9LoFx;1r>1e^a6w1hGrmU}&t^dUNy(=5~ z3JhnGZP-cL61Sj{BTUJXY_1%sy!2ZMwH6>SE7umi5}aUm_1mhN=|Pd7lTAY z_K@J5hEkw4Ib;oz>y-oJI@HjlygfAXnM$}PwrtNO=79<4fytigALn#Q9=<(O>InZk 
z>Nv?i2PEklzxCAb2}-lA$!)yvoSr~lkaiwDD|xG3br{}MYNl1WivHx2$usL(^Fmav z?yK@NzSWFf^MwBF);NE50Z)JL>Qv*}ZpOwxDCAWDm`8i7F=8b)exD23|C2T@N5;2F zND_GLouj)MlNypuC%sKJ_z`ycHo4bD`^%@SrCM@ZcUM58ws8T}i%7+ut=|K_cwdLo zg)iXt2lR$Y!#)A^(L1fxFo#>C(z&1wRphYyj63tBq1Cfuvr&wL|-JPXn$nJ4X#3wKtnJdAipbFNR_lLYFUp zxRFL?ubU;tAh$o%oLYVx){8CBD?PFDF^xM+XqL zm-KjrNSK0ODO!_#g>t+WM~Ob8VYa*5@uB|EMT7BQf9GSVzUUI;P~X0JWBpS;_Ww;e z{{j)6%x!J{PB#Ca;vm*>ld=K)C_&fO%9I+F8egoWS7)NEo3(wCu#kd+fOwGV7YQMY z!6g4?L+kYof6T3n0D&bz-HYtmXo~0l}{5WxG76Ygsj)@S{{__6_O|lQBlg0 zC3}eMdt@t;wMYvg3h6gji_hn}?(=;A`_1#Zy*z)+`*Y^Ze9mX)%$YMD|LD;e1YT)( zD@!w-z54CV=Cb(b4Q7-999>mZcXwF^A|qb&KNHmxH;!|<=6*a~T1R_4dv0ju&4W){ zZpRou)^M3UbLGzSF_qeL@td!>%;+fJh`H8xN8i2o{jETXayy;P{UnKA8MXa2KbWVG zQ-xkiWxXT5N{azxtXWU?7-Q52ackdU)J;(q;B>ABzm3SIS8hCM`qqCs;6dnXA#?g5MW z3B_A(^xTBtM)Z$6#EnLGfh3zxvH8g^EPHNptr%5NL;ys9@S1xfeiZ;(PKdHWSXEN6>n7HyerP_@bm$xb878$vqs% zx{)4~N|lY$_gTI`8D>v+%Xlv56YJ%Z?q^@0vYamHD~`PFf&P|gDwydkvy&-%_ss)e z`x@;XG$(!{m{&dyEl@sS-yViJ-|-%7t*8qr1y zizdib7z#7aWjPhFwpia+UuiwZz&CcoxWU0mCg^MujmgkE45|mG89a);K zu^UQsiIGknb@Vx>PwE8y_JCu3EEtwol9$8^>96ZOb-mdzjaDH^Z7}qIe=w8OS@+xK zvhwN*tr}}H*^t|)7k;-d`TCccXs#*gK5@5u(yU;9byra+DZZf= z4EPzMksI*cQ=CU)U49E)EuA1RARf&t7tC7~?9UP|Z@3@I77<$bB^gbNF3T=fqpEr$ z60s~pyMpk$?&_%;hLOaKRZ0w=n{5sHzPR#@+|Lch+c(#TEn^1rxIY`E3=h4F z!!{m7^-xRiRZNN1A**LQ;N22S-s5$7ieZ*(3;Lc5**F&UejoRsdBcIW;e}W(Qod)W zt5H1djm=8s$)9!$AMMpYy}g@~OQ)_=nN?$6O#M28ZG4)ih{>P|RY*kW2Xg&7(t_vA zqlOvUZV|!-8r{vp1uY>e>1l4Wrjx?mL#}h!gx7h&N+B2b{%EOVe_V9){zS*1R`-1w zmV~D#LT+^4V=K<;{ZVS?L0?y8{ZCe8LZ%I+Tl$O^`E;+pPrBCuHNk)2tdZB7*%0qboXP4`p&$Z(6kGv+j z8Mxjq|4IV?zuV=(n&glFKkf45T>iwr+hur4eB2QIohXN}{fb#l$5Q{iFhnKIc$2t4 z_5tI4PR<+L8o9>#j63XXSnDqyRiO&Lca2|u?_z`!hl_h1$YudDvYb3)6fHYK_Hq8B zLdEQ0o=slGdB303h}}7;00en8GE8Mh7lZ9S=CUcGJ!{C0q0!Bz3-}r9n&l zgQesohIyv^e4G5hJ7b$nQY%U%E!i9S82Q-Q9kwh|Z)&M!j~B6c5veWmSx7y9kQZrN z#h&rRGU?3LW?gT^{drfp(Jnk-|DFe}dYt}Zs*!{x8vQZ8Hko_rjcQ(@ zFyWcNgDf*;MH7@4B8;Ezl3Af+u0B0)?g-l#lSM7&{pHsA)2{}T7EGLzqI&k2mir9^ 
zE>nMU??1D1$}r-?6|vn#?ZM_LD`GT06q|PFSU$T^v4<--SkwL{H`0yEcY0fm_&*n$ zr98)Um4|6fEDd*m4J;Pxbk;3eB9l?3Ymc1LMoeEA9rRt~9ZBiCzdRg`~(u(%~)a)Zy7SJ?CTRNfvI^ zN9n2#4z-0OHqNk{pL~+(krdLlQ$>#9C0Y4?!O@-9)u(UjExJVLo;ExhxUK0P<$Vqc z7vJ_Qih`}f4mZSDQ;rCqD&i;cX`m}(VlM>$nn>%UE9sd1dN@t0>`^bCxYbs z*&ocfdWA7-eJAguFaFT?)#S^=7RJ36UlR-a?QVIH%AaadDZ}P73{#bLpJ3cIt)Rnk z$&i20otO4yBN;25O{2}n>H!f8d99c)W-SKnq6IDe>ZO&1g$+z?QVGpHZxn`VE-;)L zlIAU`q!Fk}n<}SF*5wfDEUFn2FMi5Y=iX5?K5;E!al#>PVL)0%j4UM^DeTNPazYaxOQISwSl8 zpXKx+2ieG_klyW`%^V{=)3ZNo>hv#ZYKdes$EEF^netj1IWPQ$A@smdGn;+ntK?)R zEILcyMa1}LP|fa`<1-e*jn{-GXqX&Y-|fn!FM7wwBV42SBO@~0vqK{Q*`9Q_Yj-eR zzRaea&i7)M_Y4I6I3nP}sk!MQmey=>HFwGr){`XjU>+DkbefAZr)V{OWl_x|hr}Jt z!Pb_zGuB4T&B0pQPJH!WQFEYR7iUK?FGq(019W)51hRa|khQZj@X<%v>-3DGXQ>@D zZ8D!)s8a6goXOVy|&5YO_iZj z$Y{2jv8Xs)!5=9Y7{RE1*xlh}fe}CDql8Teru`j?)inH+@zf^+KP}MQwAp>CR+5FK zmSX<H=Fwy?OS9JT19wBA7=}-=&p>)cW_1H!!v_ioa8*@@Zhm>B_7;xV`eN zp7=#h4KHgs*EusbzkoZmj#};D_REY*+hxVZbu>@AVnkHcMH~b6=Qpw$GJfjG_+Io4 zv&8Ov@C|lzK#=492VojLFEvws1k`0k-d91U8mAn`G8Vso=-GyRdHFy%BaO^>N_wf? 
zTej3JQ;8DfeDAhkW-knCs=j-==k{ zq;6J|N7|1ni3b-c7;t@~NGaVjUGqIjIB~}pYSnfDrx!(^g*qbn$XMq>h6WT+341=Q zWXD+3?)S_cEY_)b5SXd;1NG>w1lwVwvs^N9XDM~~8?kdQdP=;<`?MuI`Wt7c(-h3q zqD+k}vT5tD>#}~RKXEa+s7j3eLuE|6kB3JOLSOFIWVMrma#3Pb|EVQ=r4Ccvv}d5_ zk@6uh<-zSZZ~Ug?0^|SV=ybt4yI5h}F;;F`BEpT@TH0b?v~xwcMcGe+cZtGl2@v7<3y~?g81s*2@ej5vfTNVBl$P=WA0bU3< zaRqTlXG;$UD^ZM-wezoqlR+d}msAoNV%q$?k~2vD8CkKM>d6VZ6DN$+x^HoAr6E^W z;ghm|{SA}ThBbtZQ^z_BhhgOg9+2!z$M!bL3*eJWg2Ik4J)E+HfK^lh-G55el=KGMYM z(!%O)1uFZir~x-Y+=oB6zDHEZAD`m;J$KDe+0E#)4Y+z;Q+9 zHKD*?mw^RdV?n*C_>3JOBg5P62t@8akkJ&zum*bVTV!)FlfF=I|Y{|SIm1+S_QtUX>J7JUu%%gzvpm?A7; z%XQDgSkOvcASEvhTeN>Yb~W@0gtoi6n>)GxK0Ac3c&5=!^RDvA`rD|>$zH1-m6!x9yQ1O_Lt+!8NIS8LlVfz z0lszMrGk`R_4Pb$pjQXNDy$2)AIJoPNvsp>m^J|r#EVUs_rp8Gs|g6E21c$3L@Z#F zkF4jx&oTdT!Vo6YwKSbC1>bQ=(9`@dva24H{O4;!=(0_FV8A z54{{1KGroH{{=|&WryC3i(754m9H*@xT*v%8pf9vdiyC{r$@1bs08oW#fL&K(u9xJ zDtAKAjjvI~Cqu7#gDbbilMwlrAzptiCq5T?83J6n!RH8a30@nAPlcu$=UVG8@Yj=2 zLe#(M#R(5pXj*KzkdHwG?V7p&^Q!*E5gk|1%*A{#Aq}o>KwW!P2@48M5evSHkV18B z10?YT$oPtdCKiQ{+L;)F4t+Xo-DJ8lw$ZkVAIpA5}O0q;1*L_%b^{ocsX zK+mz`@>#5vsPz9rB|0jgr;2ea{JDOch{2~0Ba!t(Ly(0~*8P)M)(?{vcIfCgB=)zQ(4 z&)zVA1z%ujtO6Xm@A-P@29X;0P-v6@+zP9=6N3Jk)IowC@R88%Ww@N&9mJ5ZU;}(8 zbZZp;a_#NJhyL4V0=l^f7qGi)9cy*Q_1k1YLtgL&gl<>BX9$Dc_)ucIl=xhz>mF{y zM_%D`H{SZdr$Sw>aB0Q**ZEt3S=?w70G|u>oWjj_%fNr;uDNgV@lY=#e6*Upf#4y| bxOL9hqgtE5N*sYu0skcehu+me-2eR_v{?`y literal 0 HcmV?d00001 diff --git a/java/src/Makefile.am b/java/src/Makefile.am index 67d21f81cfb..35232ffc3aa 100644 --- a/java/src/Makefile.am +++ b/java/src/Makefile.am @@ -36,7 +36,7 @@ jarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar hdf5_javadir = $(libdir) pkgpath = hdf/hdf5lib -CLASSPATH_ENV=CLASSPATH=.:$(top_srcdir)/java/lib/slf4j-api-1.7.33.jar:$$CLASSPATH +CLASSPATH_ENV=CLASSPATH=.:$(top_srcdir)/java/lib/slf4j-api-2.0.6.jar:$$CLASSPATH AM_JAVACFLAGS = $(H5_JAVACFLAGS) -deprecation diff --git a/java/src/hdf/hdf5lib/H5.java b/java/src/hdf/hdf5lib/H5.java index 90c4575003d..78ce2a0aa08 100644 --- 
a/java/src/hdf/hdf5lib/H5.java +++ b/java/src/hdf/hdf5lib/H5.java @@ -57,6 +57,9 @@ import hdf.hdf5lib.structs.H5O_native_info_t; import hdf.hdf5lib.structs.H5O_token_t; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + /** * @page HDF5LIB HDF5 Java API Package * This class is the Java interface for the HDF5 library. @@ -257,7 +260,7 @@ public class H5 implements java.io.Serializable { */ private static final long serialVersionUID = 6129888282117053288L; - private final static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5.class); + private final static Logger log = LoggerFactory.getLogger(H5.class); /** * @ingroup JH5 diff --git a/java/src/jni/CMakeLists.txt b/java/src/jni/CMakeLists.txt index ab306ef1893..fc11c1bed2e 100644 --- a/java/src/jni/CMakeLists.txt +++ b/java/src/jni/CMakeLists.txt @@ -78,7 +78,7 @@ set (CMAKE_INCLUDE_CURRENT_DIR_IN_INTERFACE ON) ########### JNI libraries always must be built shared ############### add_library (${HDF5_JAVA_JNI_LIB_TARGET} SHARED ${HDF5_JAVA_JNI_CSRCS} ${HDF5_JAVA_JNI_CHDRS}) target_include_directories (${HDF5_JAVA_JNI_LIB_TARGET} - PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};${HDF5_JAVA_JNI_SOURCE_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" + PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};${HDF5_JAVA_JNI_SOURCE_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" ) target_compile_options(${HDF5_JAVA_JNI_LIB_TARGET} PRIVATE "${HDF5_CMAKE_C_FLAGS}") TARGET_C_PROPERTIES (${HDF5_JAVA_JNI_LIB_TARGET} SHARED) @@ -106,7 +106,7 @@ if (HDF5_EXPORTED_TARGETS) ${HDF5_EXPORTED_TARGETS} LIBRARY DESTINATION ${HDF5_INSTALL_LIB_DIR} COMPONENT libraries ARCHIVE DESTINATION ${HDF5_INSTALL_LIB_DIR} COMPONENT libraries - RUNTIME DESTINATION ${HDF5_INSTALL_LIB_DIR} COMPONENT libraries + RUNTIME DESTINATION ${HDF5_INSTALL_BIN_DIR} COMPONENT libraries FRAMEWORK DESTINATION ${HDF5_INSTALL_FWRK_DIR} COMPONENT libraries INCLUDES DESTINATION include ) diff --git a/java/src/jni/h5aImp.c b/java/src/jni/h5aImp.c index 4b531a66b07..00c39da84b0 
100644 --- a/java/src/jni/h5aImp.c +++ b/java/src/jni/h5aImp.c @@ -155,13 +155,15 @@ JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Aread(JNIEnv *env, jclass clss, jlong attr_id, jlong mem_type_id, jbyteArray buf, jboolean isCriticalPinning) { - jboolean readBufIsCopy; - jbyte *readBuf = NULL; - hsize_t dims[H5S_MAX_RANK]; - hid_t sid = H5I_INVALID_HID; - jsize n; - htri_t vl_data_class; - herr_t status = FAIL; + jboolean readBufIsCopy; + jbyte *readBuf = NULL; + hsize_t dims[H5S_MAX_RANK]; + hid_t sid = H5I_INVALID_HID; + size_t typeSize; + H5T_class_t type_class; + jsize vl_array_len = 0; // Only used by vl_data_class types + htri_t vl_data_class; + herr_t status = FAIL; UNUSED(clss); @@ -173,40 +175,58 @@ Java_hdf_hdf5lib_H5_H5Aread(JNIEnv *env, jclass clss, jlong attr_id, jlong mem_t if (vl_data_class) { /* Get size of data array */ - if ((n = ENVPTR->GetArrayLength(ENVONLY, buf)) < 0) { - CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + if ((vl_array_len = ENVPTR->GetArrayLength(ENVONLY, buf)) < 0) H5_BAD_ARGUMENT_ERROR(ENVONLY, "H5Aread: readBuf length < 0"); - } - dims[0] = (hsize_t)n; - if ((sid = H5Screate_simple(1, dims, NULL)) < 0) + if (!(typeSize = H5Tget_size(mem_type_id))) H5_LIBRARY_ERROR(ENVONLY); - } - if (isCriticalPinning) { - PIN_BYTE_ARRAY_CRITICAL(ENVONLY, buf, readBuf, &readBufIsCopy, - "H5Aread: read buffer not critically pinned"); + if (NULL == (readBuf = HDcalloc((size_t)vl_array_len, typeSize))) + H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Aread: failed to allocate raw VL read buffer"); } else { - PIN_BYTE_ARRAY(ENVONLY, buf, readBuf, &readBufIsCopy, "H5Aread: read buffer not pinned"); + if (isCriticalPinning) { + PIN_BYTE_ARRAY_CRITICAL(ENVONLY, buf, readBuf, &readBufIsCopy, + "H5Aread: read buffer not critically pinned"); + } + else { + PIN_BYTE_ARRAY(ENVONLY, buf, readBuf, &readBufIsCopy, "H5Aread: read buffer not pinned"); + } } if ((status = H5Aread((hid_t)attr_id, (hid_t)mem_type_id, (void *)readBuf)) < 0) H5_LIBRARY_ERROR(ENVONLY); + if 
(vl_data_class) { + if ((type_class = H5Tget_class((hid_t)mem_type_id)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + + translate_rbuf(env, buf, mem_type_id, type_class, vl_array_len, readBuf); + } + done: if (readBuf) { if ((status >= 0) && vl_data_class) { + dims[0] = (hsize_t)vl_array_len; + if ((sid = H5Screate_simple(1, dims, NULL)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + H5Treclaim(attr_id, sid, H5P_DEFAULT, readBuf); + if (sid >= 0) H5Sclose(sid); } - if (isCriticalPinning) { - UNPIN_ARRAY_CRITICAL(ENVONLY, buf, readBuf, (status < 0) ? JNI_ABORT : 0); + if (vl_data_class) { + HDfree(readBuf); } else { - UNPIN_BYTE_ARRAY(ENVONLY, buf, readBuf, (status < 0) ? JNI_ABORT : 0); + if (isCriticalPinning) { + UNPIN_ARRAY_CRITICAL(ENVONLY, buf, readBuf, (status < 0) ? JNI_ABORT : 0); + } + else { + UNPIN_BYTE_ARRAY(ENVONLY, buf, readBuf, (status < 0) ? JNI_ABORT : 0); + } } } @@ -222,38 +242,52 @@ JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Awrite(JNIEnv *env, jclass clss, jlong attr_id, jlong mem_type_id, jbyteArray buf, jboolean isCriticalPinning) { - jboolean writeBufIsCopy; - jbyte *writeBuf = NULL; - hsize_t dims[H5S_MAX_RANK]; - hid_t sid = H5I_INVALID_HID; - jsize n; - htri_t vl_data_class; - herr_t status = FAIL; + jboolean writeBufIsCopy; + jbyte *writeBuf = NULL; + hsize_t dims[H5S_MAX_RANK]; + hid_t sid = H5I_INVALID_HID; + size_t typeSize; + H5T_class_t type_class; + jsize vl_array_len = 0; // Only used by vl_data_class types + htri_t vl_data_class; + herr_t status = FAIL; UNUSED(clss); if (NULL == buf) H5_NULL_ARGUMENT_ERROR(ENVONLY, "H5Awrite: write buffer is NULL"); - /* Get size of data array */ - if ((n = ENVPTR->GetArrayLength(ENVONLY, buf)) < 0) { - CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); - H5_BAD_ARGUMENT_ERROR(ENVONLY, "H5Aread: readBuf length < 0"); - } - - dims[0] = (hsize_t)n; - if ((sid = H5Screate_simple(1, dims, NULL)) < 0) - H5_LIBRARY_ERROR(ENVONLY); - if ((vl_data_class = h5str_detect_vlen(mem_type_id)) < 0) H5_LIBRARY_ERROR(ENVONLY); - if 
(isCriticalPinning) { - PIN_BYTE_ARRAY_CRITICAL(ENVONLY, buf, writeBuf, &writeBufIsCopy, - "H5Awrite: write buffer not critically pinned"); + if (vl_data_class) { + /* Get size of data array */ + if ((vl_array_len = ENVPTR->GetArrayLength(ENVONLY, buf)) < 0) { + CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + H5_BAD_ARGUMENT_ERROR(ENVONLY, "H5Awrite: write buffer length < 0"); + } + + if (!(typeSize = H5Tget_size(mem_type_id))) + H5_LIBRARY_ERROR(ENVONLY); + + if (NULL == (writeBuf = HDcalloc((size_t)vl_array_len, typeSize))) + H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Awrite: failed to allocate raw VL write buffer"); } else { - PIN_BYTE_ARRAY(ENVONLY, buf, writeBuf, &writeBufIsCopy, "H5Awrite: write buffer not pinned"); + if (isCriticalPinning) { + PIN_BYTE_ARRAY_CRITICAL(ENVONLY, buf, writeBuf, &writeBufIsCopy, + "H5Awrite: write buffer not critically pinned"); + } + else { + PIN_BYTE_ARRAY(ENVONLY, buf, writeBuf, &writeBufIsCopy, "H5Awrite: write buffer not pinned"); + } + } + + if (vl_data_class) { + if ((type_class = H5Tget_class((hid_t)mem_type_id)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + + translate_wbuf(ENVONLY, buf, mem_type_id, type_class, vl_array_len, writeBuf); } if ((status = H5Awrite((hid_t)attr_id, (hid_t)mem_type_id, writeBuf)) < 0) @@ -261,14 +295,24 @@ Java_hdf_hdf5lib_H5_H5Awrite(JNIEnv *env, jclass clss, jlong attr_id, jlong mem_ done: if (writeBuf) { - if ((status >= 0) && vl_data_class) + if ((status >= 0) && vl_data_class) { + dims[0] = (hsize_t)vl_array_len; + if ((sid = H5Screate_simple(1, dims, NULL)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + H5Treclaim(attr_id, sid, H5P_DEFAULT, writeBuf); + } - if (isCriticalPinning) { - UNPIN_ARRAY_CRITICAL(ENVONLY, buf, writeBuf, (status < 0) ? JNI_ABORT : 0); + if (vl_data_class) { + HDfree(writeBuf); } else { - UNPIN_BYTE_ARRAY(ENVONLY, buf, writeBuf, (status < 0) ? JNI_ABORT : 0); + if (isCriticalPinning) { + UNPIN_ARRAY_CRITICAL(ENVONLY, buf, writeBuf, (status < 0) ? 
JNI_ABORT : 0); + } + else { + UNPIN_BYTE_ARRAY(ENVONLY, buf, writeBuf, (status < 0) ? JNI_ABORT : 0); + } } } @@ -1063,13 +1107,15 @@ Java_hdf_hdf5lib_H5_H5Awrite_1string(JNIEnv *env, jclass clss, jlong attr_id, jl JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5AreadVL(JNIEnv *env, jclass clss, jlong attr_id, jlong mem_type_id, jobjectArray buf) { - H5T_class_t type_class; + jbyte *readBuf = NULL; hsize_t dims[H5S_MAX_RANK]; hid_t sid = H5I_INVALID_HID; - jsize n = 0; + size_t typeSize; + H5T_class_t type_class; + jsize vl_array_len; htri_t vl_data_class; - herr_t status = FAIL; - jbyteArray *readBuf = NULL; + herr_t status = FAIL; + htri_t is_variable = 0; UNUSED(clss); @@ -1078,228 +1124,42 @@ Java_hdf_hdf5lib_H5_H5AreadVL(JNIEnv *env, jclass clss, jlong attr_id, jlong mem if ((vl_data_class = h5str_detect_vlen(mem_type_id)) < 0) H5_LIBRARY_ERROR(ENVONLY); + /* Get size of data array */ + if ((vl_array_len = ENVPTR->GetArrayLength(ENVONLY, buf)) < 0) + H5_BAD_ARGUMENT_ERROR(ENVONLY, "H5Aread: readBuf length < 0"); + if ((is_variable = H5Tis_variable_str(mem_type_id)) < 0) + H5_LIBRARY_ERROR(ENVONLY); - if (vl_data_class) { - /* Get size of data array */ - if ((n = ENVPTR->GetArrayLength(ENVONLY, buf)) < 0) { - CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); - H5_BAD_ARGUMENT_ERROR(ENVONLY, "H5AreadVL: readBuf length < 0"); - } + if (!(typeSize = H5Tget_size(mem_type_id))) + H5_LIBRARY_ERROR(ENVONLY); - dims[0] = (hsize_t)n; - if ((sid = H5Screate_simple(1, dims, NULL)) < 0) - H5_LIBRARY_ERROR(ENVONLY); - } + if (NULL == (readBuf = HDcalloc((size_t)vl_array_len, typeSize))) + H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Aread: failed to allocate raw VL read buffer"); + if ((status = H5Aread((hid_t)attr_id, (hid_t)mem_type_id, (void *)readBuf)) < 0) + H5_LIBRARY_ERROR(ENVONLY); if ((type_class = H5Tget_class((hid_t)mem_type_id)) < 0) H5_LIBRARY_ERROR(ENVONLY); - if (type_class == H5T_VLEN) { - size_t typeSize; - hid_t memb = H5I_INVALID_HID; - H5T_class_t vlClass; - size_t vlSize; - 
void *rawBuf = NULL; - jobjectArray jList = NULL; - - size_t i, j, x; - - if (!(typeSize = H5Tget_size(mem_type_id))) - H5_LIBRARY_ERROR(ENVONLY); - - if (!(memb = H5Tget_super(mem_type_id))) - H5_LIBRARY_ERROR(ENVONLY); - if ((vlClass = H5Tget_class((hid_t)memb)) < 0) - H5_LIBRARY_ERROR(ENVONLY); - if (!(vlSize = H5Tget_size(memb))) - H5_LIBRARY_ERROR(ENVONLY); - if (NULL == (rawBuf = HDcalloc((size_t)n, typeSize))) - H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5AreadVL: failed to allocate raw VL read buffer"); - - if ((status = H5Aread((hid_t)attr_id, (hid_t)mem_type_id, (void *)rawBuf)) < 0) - H5_LIBRARY_ERROR(ENVONLY); - - /* Cache class types */ - /* jclass cBool = ENVPTR->FindClass(ENVONLY, "java/lang/Boolean"); */ - jclass cByte = ENVPTR->FindClass(ENVONLY, "java/lang/Byte"); - jclass cShort = ENVPTR->FindClass(ENVONLY, "java/lang/Short"); - jclass cInt = ENVPTR->FindClass(ENVONLY, "java/lang/Integer"); - jclass cLong = ENVPTR->FindClass(ENVONLY, "java/lang/Long"); - jclass cFloat = ENVPTR->FindClass(ENVONLY, "java/lang/Float"); - jclass cDouble = ENVPTR->FindClass(ENVONLY, "java/lang/Double"); - - /* - jmethodID boolValueMid = - ENVPTR->GetStaticMethodID(ENVONLY, cBool, "valueOf", "(Z)Ljava/lang/Boolean;"); - */ - jmethodID byteValueMid = ENVPTR->GetStaticMethodID(ENVONLY, cByte, "valueOf", "(B)Ljava/lang/Byte;"); - jmethodID shortValueMid = - ENVPTR->GetStaticMethodID(ENVONLY, cShort, "valueOf", "(S)Ljava/lang/Short;"); - jmethodID intValueMid = ENVPTR->GetStaticMethodID(ENVONLY, cInt, "valueOf", "(I)Ljava/lang/Integer;"); - jmethodID longValueMid = ENVPTR->GetStaticMethodID(ENVONLY, cLong, "valueOf", "(J)Ljava/lang/Long;"); - jmethodID floatValueMid = - ENVPTR->GetStaticMethodID(ENVONLY, cFloat, "valueOf", "(F)Ljava/lang/Float;"); - jmethodID doubleValueMid = - ENVPTR->GetStaticMethodID(ENVONLY, cDouble, "valueOf", "(D)Ljava/lang/Double;"); - - // retrieve the java.util.List interface class - jclass cList = ENVPTR->FindClass(ENVONLY, "java/util/List"); - jmethodID 
addMethod = ENVPTR->GetMethodID(ENVONLY, cList, "add", "(Ljava/lang/Object;)Z"); - - /* Convert each element to a list */ - for (i = 0; i < (size_t)n; i++) { - hvl_t vl_elem; - - // The list we're going to return: - if (NULL == (jList = ENVPTR->GetObjectArrayElement(ENVONLY, (jobjectArray)buf, (jsize)i))) - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - - /* Get the number of sequence elements */ - HDmemcpy(&vl_elem, (char *)rawBuf + i * typeSize, sizeof(hvl_t)); - - jsize nelmts = (jsize)vl_elem.len; - if (vl_elem.len != (size_t)nelmts) - H5_JNI_FATAL_ERROR(ENVONLY, "H5AreadVL: overflow of number of VL elements"); - if (nelmts < 0) - H5_BAD_ARGUMENT_ERROR(ENVONLY, "H5AreadVL: number of VL elements < 0"); - - jobject jobj = NULL; - for (j = 0; j < (size_t)nelmts; j++) { - switch (vlClass) { - /* case H5T_BOOL: { - jboolean boolValue; - for (x = 0; x < vlSize; x++) { - ((char *)&boolValue)[x] = ((char *)vl_elem.p)[j*vlSize+x]; - } - - jobj = ENVPTR->CallStaticObjectMethod(ENVONLY, cBool, boolValueMid, boolValue); - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - break; - } */ - case H5T_INTEGER: { - switch (vlSize) { - case sizeof(jbyte): { - jbyte byteValue; - for (x = 0; x < vlSize; x++) { - ((char *)&byteValue)[x] = ((char *)vl_elem.p)[j * vlSize + x]; - } - - jobj = - ENVPTR->CallStaticObjectMethod(ENVONLY, cByte, byteValueMid, byteValue); - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - break; - } - case sizeof(jshort): { - jshort shortValue; - for (x = 0; x < vlSize; x++) { - ((char *)&shortValue)[x] = ((char *)vl_elem.p)[j * vlSize + x]; - } - - jobj = ENVPTR->CallStaticObjectMethod(ENVONLY, cShort, shortValueMid, - shortValue); - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - break; - } - case sizeof(jint): { - jint intValue; - for (x = 0; x < vlSize; x++) { - ((char *)&intValue)[x] = ((char *)vl_elem.p)[j * vlSize + x]; - } - - jobj = ENVPTR->CallStaticObjectMethod(ENVONLY, cInt, intValueMid, intValue); - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - break; - } - case 
sizeof(jlong): { - jlong longValue; - for (x = 0; x < vlSize; x++) { - ((char *)&longValue)[x] = ((char *)vl_elem.p)[j * vlSize + x]; - } - - jobj = - ENVPTR->CallStaticObjectMethod(ENVONLY, cLong, longValueMid, longValue); - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - break; - } - } - break; - } - case H5T_FLOAT: { - switch (vlSize) { - case sizeof(jfloat): { - jfloat floatValue; - for (x = 0; x < vlSize; x++) { - ((char *)&floatValue)[x] = ((char *)vl_elem.p)[j * vlSize + x]; - } - - jobj = ENVPTR->CallStaticObjectMethod(ENVONLY, cFloat, floatValueMid, - (double)floatValue); - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - break; - } - case sizeof(jdouble): { - jdouble doubleValue; - for (x = 0; x < vlSize; x++) { - ((char *)&doubleValue)[x] = ((char *)vl_elem.p)[j * vlSize + x]; - } - - jobj = ENVPTR->CallStaticObjectMethod(ENVONLY, cDouble, doubleValueMid, - doubleValue); - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - break; - } - } - break; - } - case H5T_REFERENCE: { - jboolean bb; - jbyte *barray = NULL; - - jsize byteArraySize = (jsize)vlSize; - if (vlSize != (size_t)byteArraySize) - H5_JNI_FATAL_ERROR(ENVONLY, "H5AreadVL: overflow of byteArraySize"); - - if (NULL == (jobj = ENVPTR->NewByteArray(ENVONLY, byteArraySize))) - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - - PIN_BYTE_ARRAY(ENVONLY, (jbyteArray)jobj, barray, &bb, - "readVL reference: byte array not pinned"); - - for (x = 0; x < vlSize; x++) { - barray[x] = ((jbyte *)vl_elem.p)[j * vlSize + x]; - } - if (barray) - UNPIN_BYTE_ARRAY(ENVONLY, (jbyteArray)jobj, barray, jobj ? 
0 : JNI_ABORT); - break; - } - default: - H5_UNIMPLEMENTED(ENVONLY, "H5AreadVL: invalid class type"); - break; - } - - // Add it to the list - ENVPTR->CallBooleanMethod(ENVONLY, jList, addMethod, jobj); - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - } - } /* end for */ - - if (rawBuf) - HDfree(rawBuf); - } - else { - if ((status = H5Aread((hid_t)attr_id, (hid_t)mem_type_id, (void *)readBuf)) < 0) - H5_LIBRARY_ERROR(ENVONLY); - } + translate_rbuf(env, buf, mem_type_id, type_class, vl_array_len, readBuf); done: if (readBuf) { if ((status >= 0) && vl_data_class) { + dims[0] = (hsize_t)vl_array_len; + if ((sid = H5Screate_simple(1, dims, NULL)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + H5Treclaim(attr_id, sid, H5P_DEFAULT, readBuf); + if (sid >= 0) H5Sclose(sid); } - - UNPIN_BYTE_ARRAY(ENVONLY, buf, readBuf, (status < 0) ? JNI_ABORT : 0); + if (is_variable) { + for (size_t i = 0; i < (size_t)vl_array_len; i++) + HDfree(((char **)readBuf)[i]); + } + HDfree(readBuf); } return (jint)status; @@ -1313,205 +1173,61 @@ Java_hdf_hdf5lib_H5_H5AreadVL(JNIEnv *env, jclass clss, jlong attr_id, jlong mem JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5AwriteVL(JNIEnv *env, jclass clss, jlong attr_id, jlong mem_type_id, jobjectArray buf) { - H5T_class_t type_class; + jbyte *writeBuf = NULL; hsize_t dims[H5S_MAX_RANK]; hid_t sid = H5I_INVALID_HID; - jsize n; + size_t typeSize; + H5T_class_t type_class; + jsize vl_array_len; htri_t vl_data_class; - herr_t status = FAIL; - jboolean writeBufIsCopy; - jbyteArray writeBuf = NULL; + herr_t status = FAIL; + htri_t is_variable = 0; UNUSED(clss); if (NULL == buf) H5_NULL_ARGUMENT_ERROR(ENVONLY, "H5AwriteVL: write buffer is NULL"); + if ((vl_data_class = h5str_detect_vlen(mem_type_id)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + /* Get size of data array */ - if ((n = ENVPTR->GetArrayLength(ENVONLY, buf)) < 0) { + if ((vl_array_len = ENVPTR->GetArrayLength(ENVONLY, buf)) < 0) { CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); - H5_BAD_ARGUMENT_ERROR(ENVONLY, "H5AwriteVL: 
readBuf length < 0"); + H5_BAD_ARGUMENT_ERROR(ENVONLY, "H5Awrite: write buffer length < 0"); } - - dims[0] = (hsize_t)n; - if ((sid = H5Screate_simple(1, dims, NULL)) < 0) + if ((is_variable = H5Tis_variable_str(mem_type_id)) < 0) H5_LIBRARY_ERROR(ENVONLY); - if ((vl_data_class = h5str_detect_vlen(mem_type_id)) < 0) + if (!(typeSize = H5Tget_size(mem_type_id))) H5_LIBRARY_ERROR(ENVONLY); + if (NULL == (writeBuf = HDcalloc((size_t)vl_array_len, typeSize))) + H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Awrite: failed to allocate raw VL write buffer"); + if ((type_class = H5Tget_class((hid_t)mem_type_id)) < 0) H5_LIBRARY_ERROR(ENVONLY); - if (type_class == H5T_VLEN) { - size_t typeSize; - hid_t memb = H5I_INVALID_HID; - H5T_class_t vlClass; - size_t vlSize; - void *rawBuf = NULL; - jobjectArray jList = NULL; - - size_t i, j, x; - if (!(typeSize = H5Tget_size(mem_type_id))) - H5_LIBRARY_ERROR(ENVONLY); - - if (!(memb = H5Tget_super(mem_type_id))) - H5_LIBRARY_ERROR(ENVONLY); - if ((vlClass = H5Tget_class((hid_t)memb)) < 0) - H5_LIBRARY_ERROR(ENVONLY); - if (!(vlSize = H5Tget_size(memb))) - H5_LIBRARY_ERROR(ENVONLY); - - if (NULL == (rawBuf = HDcalloc((size_t)n, typeSize))) - H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5AwriteVL: failed to allocate raw VL write buffer"); - - /* Cache class types */ - /* jclass cBool = ENVPTR->FindClass(ENVONLY, "java/lang/Boolean"); */ - jclass cByte = ENVPTR->FindClass(ENVONLY, "java/lang/Byte"); - jclass cShort = ENVPTR->FindClass(ENVONLY, "java/lang/Short"); - jclass cInt = ENVPTR->FindClass(ENVONLY, "java/lang/Integer"); - jclass cLong = ENVPTR->FindClass(ENVONLY, "java/lang/Long"); - jclass cFloat = ENVPTR->FindClass(ENVONLY, "java/lang/Float"); - jclass cDouble = ENVPTR->FindClass(ENVONLY, "java/lang/Double"); - - /* jmethodID boolValueMid = ENVPTR->GetMethodID(ENVONLY, cBool, "booleanValue", "()Z"); */ - jmethodID byteValueMid = ENVPTR->GetMethodID(ENVONLY, cByte, "byteValue", "()B"); - jmethodID shortValueMid = ENVPTR->GetMethodID(ENVONLY, cShort, 
"shortValue", "()S"); - jmethodID intValueMid = ENVPTR->GetMethodID(ENVONLY, cInt, "intValue", "()I"); - jmethodID longValueMid = ENVPTR->GetMethodID(ENVONLY, cLong, "longValue", "()J"); - jmethodID floatValueMid = ENVPTR->GetMethodID(ENVONLY, cFloat, "floatValue", "()F"); - jmethodID doubleValueMid = ENVPTR->GetMethodID(ENVONLY, cDouble, "doubleValue", "()D"); - - /* Convert each list to a vlen element */ - for (i = 0; i < (size_t)n; i++) { - hvl_t vl_elem; - - if (NULL == (jList = ENVPTR->GetObjectArrayElement(ENVONLY, (jobjectArray)buf, (jsize)i))) - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - - // retrieve the java.util.List interface class - jclass cList = ENVPTR->FindClass(ENVONLY, "java/util/List"); - - // retrieve the toArray method and invoke it - jmethodID mToArray = ENVPTR->GetMethodID(ENVONLY, cList, "toArray", "()[Ljava/lang/Object;"); - if (mToArray == NULL) - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - jobjectArray array = (jobjectArray)ENVPTR->CallObjectMethod(ENVONLY, jList, mToArray); - jsize jnelmts = ENVPTR->GetArrayLength(ENVONLY, array); - - if (jnelmts < 0) - H5_BAD_ARGUMENT_ERROR(ENVONLY, "H5AwriteVL: number of VL elements < 0"); - - HDmemcpy(&vl_elem, (char *)rawBuf + i * typeSize, sizeof(hvl_t)); - vl_elem.len = (size_t)jnelmts; - - if (NULL == (vl_elem.p = HDmalloc((size_t)jnelmts * vlSize))) - H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5AwriteVL: failed to allocate vlen ptr buffer"); - - jobject jobj = NULL; - for (j = 0; j < (size_t)jnelmts; j++) { - if (NULL == (jobj = ENVPTR->GetObjectArrayElement(ENVONLY, (jobjectArray)array, (jsize)j))) - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - - switch (vlClass) { - /* case H5T_BOOL: { - jboolean boolValue = ENVPTR->CallBooleanMethod(ENVONLY, jobj, boolValueMid); - for (x = 0; x < vlSize; x++) { - ((char *)vl_elem.p)[j * vlSize + x] = ((char *)&boolValue)[x]; - } - break; - } */ - case H5T_INTEGER: { - switch (vlSize) { - case sizeof(jbyte): { - jbyte byteValue = ENVPTR->CallByteMethod(ENVONLY, jobj, 
byteValueMid); - for (x = 0; x < vlSize; x++) { - ((char *)vl_elem.p)[j * vlSize + x] = ((char *)&byteValue)[x]; - } - break; - } - case sizeof(jshort): { - jshort shortValue = ENVPTR->CallShortMethod(ENVONLY, jobj, shortValueMid); - for (x = 0; x < vlSize; x++) { - ((char *)vl_elem.p)[j * vlSize + x] = ((char *)&shortValue)[x]; - } - break; - } - case sizeof(jint): { - jint intValue = ENVPTR->CallIntMethod(ENVONLY, jobj, intValueMid); - for (x = 0; x < vlSize; x++) { - ((char *)vl_elem.p)[j * vlSize + x] = ((char *)&intValue)[x]; - } - break; - } - case sizeof(jlong): { - jlong longValue = ENVPTR->CallLongMethod(ENVONLY, jobj, longValueMid); - for (x = 0; x < vlSize; x++) { - ((char *)vl_elem.p)[j * vlSize + x] = ((char *)&longValue)[x]; - } - break; - } - } - break; - } - case H5T_FLOAT: { - switch (vlSize) { - case sizeof(jfloat): { - jfloat floatValue = ENVPTR->CallFloatMethod(ENVONLY, jobj, floatValueMid); - for (x = 0; x < vlSize; x++) { - ((char *)vl_elem.p)[j * vlSize + x] = ((char *)&floatValue)[x]; - } - break; - } - case sizeof(jdouble): { - jdouble doubleValue = ENVPTR->CallDoubleMethod(ENVONLY, jobj, doubleValueMid); - for (x = 0; x < vlSize; x++) { - ((char *)vl_elem.p)[j * vlSize + x] = ((char *)&doubleValue)[x]; - } - break; - } - } - break; - } - case H5T_REFERENCE: { - jbyte *barray = (jbyte *)ENVPTR->GetByteArrayElements(ENVONLY, jobj, 0); - for (x = 0; x < vlSize; x++) { - ((char *)vl_elem.p)[j * vlSize + x] = ((char *)barray)[x]; - } - ENVPTR->ReleaseByteArrayElements(ENVONLY, jobj, barray, 0); - break; - } - default: - H5_UNIMPLEMENTED(ENVONLY, "H5AwriteVL: invalid class type"); - break; - } - ENVPTR->DeleteLocalRef(ENVONLY, jobj); - } - - HDmemcpy((char *)rawBuf + i * typeSize, &vl_elem, sizeof(hvl_t)); - - ENVPTR->DeleteLocalRef(ENVONLY, jList); - } /* end for (i = 0; i < n; i++) */ - - if ((status = H5Awrite((hid_t)attr_id, (hid_t)mem_type_id, rawBuf)) < 0) - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + translate_wbuf(ENVONLY, buf, 
mem_type_id, type_class, vl_array_len, writeBuf); - if (rawBuf) - HDfree(rawBuf); - } - else { - PIN_BYTE_ARRAY(ENVONLY, buf, writeBuf, &writeBufIsCopy, "H5AwriteVL: write buffer not pinned"); - if ((status = H5Awrite((hid_t)attr_id, (hid_t)mem_type_id, writeBuf)) < 0) - H5_LIBRARY_ERROR(ENVONLY); - } + if ((status = H5Awrite((hid_t)attr_id, (hid_t)mem_type_id, writeBuf)) < 0) + H5_LIBRARY_ERROR(ENVONLY); done: if (writeBuf) { - if ((status >= 0) && vl_data_class) + if ((status >= 0) && vl_data_class) { + dims[0] = (hsize_t)vl_array_len; + if ((sid = H5Screate_simple(1, dims, NULL)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + H5Treclaim(attr_id, sid, H5P_DEFAULT, writeBuf); + } + if (is_variable) { + for (size_t i = 0; i < (size_t)vl_array_len; i++) + HDfree(((char **)writeBuf)[i]); + } - if (type_class != H5T_VLEN) - UNPIN_BYTE_ARRAY(ENVONLY, buf, writeBuf, (status < 0) ? JNI_ABORT : 0); + HDfree(writeBuf); } return (jint)status; @@ -1914,21 +1630,24 @@ H5AwriteVL_asstr(JNIEnv *env, hid_t aid, hid_t tid, jobjectArray buf) continue; } - /* - * length = ENVPTR->GetStringUTFLength(ENVONLY, jstr); - * CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - */ - PIN_JAVA_STRING(ENVONLY, jstr, utf8, NULL, "H5AwriteVL_asstr: failed to pin string buffer"); /* - * TODO: If the string isn't a copy, we should probably make - * one before destroying it with h5str_convert. + * Make a copy of the string since h5str_convert uses strtok. 
*/ + char *utf8_copy = NULL; - if (!h5str_convert(ENVONLY, (char **)&utf8, aid, tid, &(((char *)writeBuf)[i * typeSize]), 0)) + jsize length = ENVPTR->GetStringUTFLength(ENVONLY, jstr); + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (NULL == (utf8_copy = HDstrndup(utf8, (size_t)length))) + H5_LIBRARY_ERROR(ENVONLY); + + if (!h5str_convert(ENVONLY, &utf8_copy, aid, tid, &(((char *)writeBuf)[i * typeSize]), 0)) CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + free(utf8_copy); + UNPIN_JAVA_STRING(ENVONLY, jstr, utf8); utf8 = NULL; diff --git a/java/src/jni/h5dImp.c b/java/src/jni/h5dImp.c index c8dc0c32f2f..de7fb21c808 100644 --- a/java/src/jni/h5dImp.c +++ b/java/src/jni/h5dImp.c @@ -181,47 +181,71 @@ Java_hdf_hdf5lib_H5_H5Dread(JNIEnv *env, jclass clss, jlong dataset_id, jlong me jlong file_space_id, jlong xfer_plist_id, jbyteArray buf, jboolean isCriticalPinning) { - jboolean readBufIsCopy; - jbyte *readBuf = NULL; - htri_t vl_data_class; - herr_t status = FAIL; + jboolean readBufIsCopy; + jbyte *readBuf = NULL; + size_t typeSize; + H5T_class_t type_class; + jsize vl_array_len; // Only used by vl_data_class types + htri_t vl_data_class; + herr_t status = FAIL; UNUSED(clss); if (NULL == buf) H5_NULL_ARGUMENT_ERROR(ENVONLY, "H5Dread: read buffer is NULL"); - /* Get size of data array */ - if (ENVPTR->GetArrayLength(ENVONLY, buf) < 0) { - CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); - H5_BAD_ARGUMENT_ERROR(ENVONLY, "H5Dread: readBuf length < 0"); - } - if ((vl_data_class = h5str_detect_vlen(mem_type_id)) < 0) H5_LIBRARY_ERROR(ENVONLY); - if (isCriticalPinning) { - PIN_BYTE_ARRAY_CRITICAL(ENVONLY, buf, readBuf, &readBufIsCopy, - "H5Dread: read buffer not critically pinned"); + if (vl_data_class) { + /* Get size of data array */ + if ((vl_array_len = ENVPTR->GetArrayLength(ENVONLY, buf)) < 0) { + CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + H5_BAD_ARGUMENT_ERROR(ENVONLY, "H5Dread: readBuf length < 0"); + } + + if (!(typeSize = H5Tget_size(mem_type_id))) + 
H5_LIBRARY_ERROR(ENVONLY); + + if (NULL == (readBuf = HDcalloc((size_t)vl_array_len, typeSize))) + H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Dread: failed to allocate raw VL read buffer"); } else { - PIN_BYTE_ARRAY(ENVONLY, buf, readBuf, &readBufIsCopy, "H5Dread: read buffer not pinned"); + if (isCriticalPinning) { + PIN_BYTE_ARRAY_CRITICAL(ENVONLY, buf, readBuf, &readBufIsCopy, + "H5Dread: read buffer not critically pinned"); + } + else { + PIN_BYTE_ARRAY(ENVONLY, buf, readBuf, &readBufIsCopy, "H5Dread: read buffer not pinned"); + } } if ((status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id, (hid_t)file_space_id, - (hid_t)xfer_plist_id, readBuf)) < 0) + (hid_t)xfer_plist_id, (void *)readBuf)) < 0) H5_LIBRARY_ERROR(ENVONLY); + if (vl_data_class) { + if ((type_class = H5Tget_class((hid_t)mem_type_id)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + + translate_rbuf(env, buf, mem_type_id, type_class, vl_array_len, readBuf); + } + done: if (readBuf) { if ((status >= 0) && vl_data_class) H5Treclaim(dataset_id, mem_space_id, H5P_DEFAULT, readBuf); - if (isCriticalPinning) { - UNPIN_ARRAY_CRITICAL(ENVONLY, buf, readBuf, (status < 0) ? JNI_ABORT : 0); + if (vl_data_class) { + HDfree(readBuf); } else { - UNPIN_BYTE_ARRAY(ENVONLY, buf, readBuf, (status < 0) ? JNI_ABORT : 0); + if (isCriticalPinning) { + UNPIN_ARRAY_CRITICAL(ENVONLY, buf, readBuf, (status < 0) ? JNI_ABORT : 0); + } + else { + UNPIN_BYTE_ARRAY(ENVONLY, buf, readBuf, (status < 0) ? 
JNI_ABORT : 0); + } } } @@ -238,31 +262,50 @@ Java_hdf_hdf5lib_H5_H5Dwrite(JNIEnv *env, jclass clss, jlong dataset_id, jlong m jlong mem_space_id, jlong file_space_id, jlong xfer_plist_id, jbyteArray buf, jboolean isCriticalPinning) { - jboolean writeBufIsCopy; - jbyte *writeBuf = NULL; - htri_t vl_data_class; - herr_t status = FAIL; + jboolean writeBufIsCopy; + jbyte *writeBuf = NULL; + size_t typeSize; + H5T_class_t type_class; + jsize vl_array_len; // Only used by vl_data_class types + htri_t vl_data_class; + herr_t status = FAIL; UNUSED(clss); if (NULL == buf) H5_NULL_ARGUMENT_ERROR(ENVONLY, "H5Dwrite: write buffer is NULL"); - /* Get size of data array */ - if (ENVPTR->GetArrayLength(ENVONLY, buf) < 0) { - CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); - H5_BAD_ARGUMENT_ERROR(ENVONLY, "H5Dread: readBuf length < 0"); - } - if ((vl_data_class = h5str_detect_vlen(mem_type_id)) < 0) H5_LIBRARY_ERROR(ENVONLY); - if (isCriticalPinning) { - PIN_BYTE_ARRAY_CRITICAL(ENVONLY, buf, writeBuf, &writeBufIsCopy, - "H5Dwrite: write buffer not critically pinned"); + if (vl_data_class) { + /* Get size of data array */ + if ((vl_array_len = ENVPTR->GetArrayLength(ENVONLY, buf)) < 0) { + CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + H5_BAD_ARGUMENT_ERROR(ENVONLY, "H5Dwrite: write buffer length < 0"); + } + + if (!(typeSize = H5Tget_size(mem_type_id))) + H5_LIBRARY_ERROR(ENVONLY); + + if (NULL == (writeBuf = HDcalloc((size_t)vl_array_len, typeSize))) + H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Dwrite: failed to allocate raw VL write buffer"); } else { - PIN_BYTE_ARRAY(ENVONLY, buf, writeBuf, &writeBufIsCopy, "H5Dwrite: write buffer not pinned"); + if (isCriticalPinning) { + PIN_BYTE_ARRAY_CRITICAL(ENVONLY, buf, writeBuf, &writeBufIsCopy, + "H5Dwrite: write buffer not critically pinned"); + } + else { + PIN_BYTE_ARRAY(ENVONLY, buf, writeBuf, &writeBufIsCopy, "H5Dwrite: write buffer not pinned"); + } + } + + if (vl_data_class) { + if ((type_class = H5Tget_class((hid_t)mem_type_id)) < 0) + 
H5_LIBRARY_ERROR(ENVONLY); + + translate_wbuf(ENVONLY, buf, mem_type_id, type_class, vl_array_len, writeBuf); } if ((status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id, (hid_t)file_space_id, @@ -274,11 +317,16 @@ Java_hdf_hdf5lib_H5_H5Dwrite(JNIEnv *env, jclass clss, jlong dataset_id, jlong m if ((status >= 0) && vl_data_class) H5Treclaim(dataset_id, mem_space_id, H5P_DEFAULT, writeBuf); - if (isCriticalPinning) { - UNPIN_ARRAY_CRITICAL(ENVONLY, buf, writeBuf, (status < 0) ? JNI_ABORT : 0); + if (vl_data_class) { + HDfree(writeBuf); } else { - UNPIN_BYTE_ARRAY(ENVONLY, buf, writeBuf, (status < 0) ? JNI_ABORT : 0); + if (isCriticalPinning) { + UNPIN_ARRAY_CRITICAL(ENVONLY, buf, writeBuf, (status < 0) ? JNI_ABORT : 0); + } + else { + UNPIN_BYTE_ARRAY(ENVONLY, buf, writeBuf, (status < 0) ? JNI_ABORT : 0); + } } } @@ -1086,234 +1134,52 @@ JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5DreadVL(JNIEnv *env, jclass clss, jlong dataset_id, jlong mem_type_id, jlong mem_space_id, jlong file_space_id, jlong xfer_plist_id, jobjectArray buf) { + jbyte *readBuf = NULL; + size_t typeSize; H5T_class_t type_class; - jsize n; + jsize vl_array_len; htri_t vl_data_class; - herr_t status = FAIL; - jbyteArray *readBuf = NULL; + herr_t status = FAIL; + htri_t is_variable = 0; UNUSED(clss); if (NULL == buf) H5_NULL_ARGUMENT_ERROR(ENVONLY, "H5DreadVL: read buffer is NULL"); + if ((vl_data_class = h5str_detect_vlen(mem_type_id)) < 0) + H5_LIBRARY_ERROR(ENVONLY); /* Get size of data array */ - if ((n = ENVPTR->GetArrayLength(ENVONLY, buf)) < 0) { + if ((vl_array_len = ENVPTR->GetArrayLength(ENVONLY, buf)) < 0) { CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); H5_BAD_ARGUMENT_ERROR(ENVONLY, "H5DreadVL: readBuf length < 0"); } - - if ((vl_data_class = h5str_detect_vlen(mem_type_id)) < 0) + if ((is_variable = H5Tis_variable_str(mem_type_id)) < 0) H5_LIBRARY_ERROR(ENVONLY); - if ((type_class = H5Tget_class((hid_t)mem_type_id)) < 0) + if (!(typeSize = H5Tget_size(mem_type_id))) 
H5_LIBRARY_ERROR(ENVONLY); - if (type_class == H5T_VLEN) { - size_t typeSize; - hid_t memb = H5I_INVALID_HID; - H5T_class_t vlClass; - size_t vlSize; - void *rawBuf = NULL; - jobjectArray jList = NULL; - - size_t i, j, x; - - if (!(typeSize = H5Tget_size(mem_type_id))) - H5_LIBRARY_ERROR(ENVONLY); - - if (!(memb = H5Tget_super(mem_type_id))) - H5_LIBRARY_ERROR(ENVONLY); - if ((vlClass = H5Tget_class((hid_t)memb)) < 0) - H5_LIBRARY_ERROR(ENVONLY); - if (!(vlSize = H5Tget_size(memb))) - H5_LIBRARY_ERROR(ENVONLY); - if (NULL == (rawBuf = HDcalloc((size_t)n, typeSize))) - H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5DreadVL: failed to allocate raw VL read buffer"); - - if ((status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id, - (hid_t)file_space_id, (hid_t)xfer_plist_id, (void *)rawBuf)) < 0) - H5_LIBRARY_ERROR(ENVONLY); + if (NULL == (readBuf = HDcalloc((size_t)vl_array_len, typeSize))) + H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5DreadVL: failed to allocate raw VL read buffer"); - /* Cache class types */ - /* jclass cBool = ENVPTR->FindClass(ENVONLY, "java/lang/Boolean"); */ - jclass cByte = ENVPTR->FindClass(ENVONLY, "java/lang/Byte"); - jclass cShort = ENVPTR->FindClass(ENVONLY, "java/lang/Short"); - jclass cInt = ENVPTR->FindClass(ENVONLY, "java/lang/Integer"); - jclass cLong = ENVPTR->FindClass(ENVONLY, "java/lang/Long"); - jclass cFloat = ENVPTR->FindClass(ENVONLY, "java/lang/Float"); - jclass cDouble = ENVPTR->FindClass(ENVONLY, "java/lang/Double"); - - /* - jmethodID boolValueMid = - ENVPTR->GetStaticMethodID(ENVONLY, cBool, "valueOf", "(Z)Ljava/lang/Boolean;"); - */ - jmethodID byteValueMid = ENVPTR->GetStaticMethodID(ENVONLY, cByte, "valueOf", "(B)Ljava/lang/Byte;"); - jmethodID shortValueMid = - ENVPTR->GetStaticMethodID(ENVONLY, cShort, "valueOf", "(S)Ljava/lang/Short;"); - jmethodID intValueMid = ENVPTR->GetStaticMethodID(ENVONLY, cInt, "valueOf", "(I)Ljava/lang/Integer;"); - jmethodID longValueMid = ENVPTR->GetStaticMethodID(ENVONLY, cLong, 
"valueOf", "(J)Ljava/lang/Long;"); - jmethodID floatValueMid = - ENVPTR->GetStaticMethodID(ENVONLY, cFloat, "valueOf", "(F)Ljava/lang/Float;"); - jmethodID doubleValueMid = - ENVPTR->GetStaticMethodID(ENVONLY, cDouble, "valueOf", "(D)Ljava/lang/Double;"); - - // retrieve the java.util.List interface class - jclass cList = ENVPTR->FindClass(ENVONLY, "java/util/List"); - jmethodID addMethod = ENVPTR->GetMethodID(ENVONLY, cList, "add", "(Ljava/lang/Object;)Z"); - - /* Convert each element to a list */ - for (i = 0; i < (size_t)n; i++) { - hvl_t vl_elem; - - // The list we're going to return: - if (NULL == (jList = ENVPTR->GetObjectArrayElement(ENVONLY, (jobjectArray)buf, (jsize)i))) - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - - /* Get the number of sequence elements */ - HDmemcpy(&vl_elem, (char *)rawBuf + i * typeSize, sizeof(hvl_t)); - - jsize nelmts = (jsize)vl_elem.len; - if (vl_elem.len != (size_t)nelmts) - H5_JNI_FATAL_ERROR(ENVONLY, "H5DreadVL: overflow of number of VL elements"); - if (nelmts < 0) - H5_BAD_ARGUMENT_ERROR(ENVONLY, "H5DreadVL: number of VL elements < 0"); - - jobject jobj = NULL; - for (j = 0; j < (size_t)nelmts; j++) { - switch (vlClass) { - /*case H5T_BOOL: { - jboolean boolValue; - for (x = 0; x < vlSize; x++) { - ((char *)&boolValue)[x] = ((char *)vl_elem.p)[j*vlSize+x]; - } - - jobj = ENVPTR->CallStaticObjectMethod(ENVONLY, cBool, boolValueMid, boolValue); - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - break; - } */ - case H5T_INTEGER: { - switch (vlSize) { - case sizeof(jbyte): { - jbyte byteValue; - for (x = 0; x < vlSize; x++) { - ((char *)&byteValue)[x] = ((char *)vl_elem.p)[j * vlSize + x]; - } - - jobj = - ENVPTR->CallStaticObjectMethod(ENVONLY, cByte, byteValueMid, byteValue); - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - break; - } - case sizeof(jshort): { - jshort shortValue; - for (x = 0; x < vlSize; x++) { - ((char *)&shortValue)[x] = ((char *)vl_elem.p)[j * vlSize + x]; - } - - jobj = ENVPTR->CallStaticObjectMethod(ENVONLY, 
cShort, shortValueMid, - shortValue); - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - break; - } - case sizeof(jint): { - jint intValue; - for (x = 0; x < vlSize; x++) { - ((char *)&intValue)[x] = ((char *)vl_elem.p)[j * vlSize + x]; - } - - jobj = ENVPTR->CallStaticObjectMethod(ENVONLY, cInt, intValueMid, intValue); - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - break; - } - case sizeof(jlong): { - jlong longValue; - for (x = 0; x < vlSize; x++) { - ((char *)&longValue)[x] = ((char *)vl_elem.p)[j * vlSize + x]; - } - - jobj = - ENVPTR->CallStaticObjectMethod(ENVONLY, cLong, longValueMid, longValue); - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - break; - } - } - break; - } - case H5T_FLOAT: { - switch (vlSize) { - case sizeof(jfloat): { - jfloat floatValue; - for (x = 0; x < vlSize; x++) { - ((char *)&floatValue)[x] = ((char *)vl_elem.p)[j * vlSize + x]; - } - - jobj = ENVPTR->CallStaticObjectMethod(ENVONLY, cFloat, floatValueMid, - (double)floatValue); - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - break; - } - case sizeof(jdouble): { - jdouble doubleValue; - for (x = 0; x < vlSize; x++) { - ((char *)&doubleValue)[x] = ((char *)vl_elem.p)[j * vlSize + x]; - } - - jobj = ENVPTR->CallStaticObjectMethod(ENVONLY, cDouble, doubleValueMid, - doubleValue); - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - break; - } - } - break; - } - case H5T_REFERENCE: { - jboolean bb; - jbyte *barray = NULL; - - jsize byteArraySize = (jsize)vlSize; - if (vlSize != (size_t)byteArraySize) - H5_JNI_FATAL_ERROR(ENVONLY, "H5DreadVL: overflow of byteArraySize"); - - if (NULL == (jobj = ENVPTR->NewByteArray(ENVONLY, byteArraySize))) - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - - PIN_BYTE_ARRAY(ENVONLY, (jbyteArray)jobj, barray, &bb, - "readVL reference: byte array not pinned"); - - for (x = 0; x < vlSize; x++) { - barray[x] = ((jbyte *)vl_elem.p)[j * vlSize + x]; - } - if (barray) - UNPIN_BYTE_ARRAY(ENVONLY, (jbyteArray)jobj, barray, jobj ? 
0 : JNI_ABORT); - break; - } - default: - H5_UNIMPLEMENTED(ENVONLY, "H5DreadVL: invalid class type"); - break; - } - - // Add it to the list - ENVPTR->CallBooleanMethod(ENVONLY, jList, addMethod, jobj); - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - } - } /* end for */ + if ((status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id, (hid_t)file_space_id, + (hid_t)xfer_plist_id, (void *)readBuf)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + if ((type_class = H5Tget_class((hid_t)mem_type_id)) < 0) + H5_LIBRARY_ERROR(ENVONLY); - if (rawBuf) - HDfree(rawBuf); - } - else { - if ((status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id, - (hid_t)file_space_id, (hid_t)xfer_plist_id, (void *)readBuf)) < 0) - H5_LIBRARY_ERROR(ENVONLY); - } + translate_rbuf(env, buf, mem_type_id, type_class, vl_array_len, readBuf); done: if (readBuf) { if ((status >= 0) && vl_data_class) H5Treclaim(dataset_id, mem_space_id, H5P_DEFAULT, readBuf); - - UNPIN_BYTE_ARRAY(ENVONLY, buf, readBuf, (status < 0) ? 
JNI_ABORT : 0); + if (is_variable) { + for (size_t i = 0; i < (size_t)vl_array_len; i++) + HDfree(((char **)readBuf)[i]); + } + HDfree(readBuf); } return (jint)status; @@ -1328,201 +1194,55 @@ JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5DwriteVL(JNIEnv *env, jclass clss, jlong dataset_id, jlong mem_type_id, jlong mem_space_id, jlong file_space_id, jlong xfer_plist_id, jobjectArray buf) { + jbyte *writeBuf = NULL; + size_t typeSize; H5T_class_t type_class; - jsize n; + jsize vl_array_len; // Only used by vl_data_class types htri_t vl_data_class; - herr_t status = FAIL; - jboolean writeBufIsCopy; - jbyteArray writeBuf = NULL; + herr_t status = FAIL; + htri_t is_variable = 0; UNUSED(clss); if (NULL == buf) H5_NULL_ARGUMENT_ERROR(ENVONLY, "H5DwriteVL: write buffer is NULL"); + if ((vl_data_class = h5str_detect_vlen(mem_type_id)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + /* Get size of data array */ - if ((n = ENVPTR->GetArrayLength(ENVONLY, buf)) < 0) { + if ((vl_array_len = ENVPTR->GetArrayLength(ENVONLY, buf)) < 0) { CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); - H5_BAD_ARGUMENT_ERROR(ENVONLY, "H5DwriteVL: readBuf length < 0"); + H5_BAD_ARGUMENT_ERROR(ENVONLY, "H5DwriteVL: write buffer length < 0"); } - - if ((vl_data_class = h5str_detect_vlen(mem_type_id)) < 0) + if ((is_variable = H5Tis_variable_str(mem_type_id)) < 0) H5_LIBRARY_ERROR(ENVONLY); - if ((type_class = H5Tget_class((hid_t)mem_type_id)) < 0) + if (!(typeSize = H5Tget_size(mem_type_id))) H5_LIBRARY_ERROR(ENVONLY); - if (type_class == H5T_VLEN) { - size_t typeSize; - hid_t memb = H5I_INVALID_HID; - H5T_class_t vlClass; - size_t vlSize; - void *rawBuf = NULL; - jobjectArray jList = NULL; - - size_t i, j, x; - - if (!(typeSize = H5Tget_size(mem_type_id))) - H5_LIBRARY_ERROR(ENVONLY); - - if (!(memb = H5Tget_super(mem_type_id))) - H5_LIBRARY_ERROR(ENVONLY); - if ((vlClass = H5Tget_class((hid_t)memb)) < 0) - H5_LIBRARY_ERROR(ENVONLY); - if (!(vlSize = H5Tget_size(memb))) - H5_LIBRARY_ERROR(ENVONLY); - - if (NULL == 
(rawBuf = HDcalloc((size_t)n, typeSize))) - H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5DwriteVL: failed to allocate raw VL write buffer"); - - /* Cache class types */ - /* jclass cBool = ENVPTR->FindClass(ENVONLY, "java/lang/Boolean"); */ - jclass cByte = ENVPTR->FindClass(ENVONLY, "java/lang/Byte"); - jclass cShort = ENVPTR->FindClass(ENVONLY, "java/lang/Short"); - jclass cInt = ENVPTR->FindClass(ENVONLY, "java/lang/Integer"); - jclass cLong = ENVPTR->FindClass(ENVONLY, "java/lang/Long"); - jclass cFloat = ENVPTR->FindClass(ENVONLY, "java/lang/Float"); - jclass cDouble = ENVPTR->FindClass(ENVONLY, "java/lang/Double"); - - /* jmethodID boolValueMid = ENVPTR->GetMethodID(ENVONLY, cBool, "booleanValue", "()Z"); */ - jmethodID byteValueMid = ENVPTR->GetMethodID(ENVONLY, cByte, "byteValue", "()B"); - jmethodID shortValueMid = ENVPTR->GetMethodID(ENVONLY, cShort, "shortValue", "()S"); - jmethodID intValueMid = ENVPTR->GetMethodID(ENVONLY, cInt, "intValue", "()I"); - jmethodID longValueMid = ENVPTR->GetMethodID(ENVONLY, cLong, "longValue", "()J"); - jmethodID floatValueMid = ENVPTR->GetMethodID(ENVONLY, cFloat, "floatValue", "()F"); - jmethodID doubleValueMid = ENVPTR->GetMethodID(ENVONLY, cDouble, "doubleValue", "()D"); - - /* Convert each list to a vlen element */ - for (i = 0; i < (size_t)n; i++) { - hvl_t vl_elem; - - if (NULL == (jList = ENVPTR->GetObjectArrayElement(ENVONLY, (jobjectArray)buf, (jsize)i))) - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - - // retrieve the java.util.List interface class - jclass cList = ENVPTR->FindClass(ENVONLY, "java/util/List"); - - // retrieve the toArray method and invoke it - jmethodID mToArray = ENVPTR->GetMethodID(ENVONLY, cList, "toArray", "()[Ljava/lang/Object;"); - if (mToArray == NULL) - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - jobjectArray array = (jobjectArray)ENVPTR->CallObjectMethod(ENVONLY, jList, mToArray); - jsize jnelmts = ENVPTR->GetArrayLength(ENVONLY, array); - - if (jnelmts < 0) - H5_BAD_ARGUMENT_ERROR(ENVONLY, 
"H5DwriteVL: number of VL elements < 0"); - - HDmemcpy(&vl_elem, (char *)rawBuf + i * typeSize, sizeof(hvl_t)); - vl_elem.len = (size_t)jnelmts; - - if (NULL == (vl_elem.p = HDmalloc((size_t)jnelmts * vlSize))) - H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5DwriteVL: failed to allocate vlen ptr buffer"); - - jobject jobj = NULL; - for (j = 0; j < (size_t)jnelmts; j++) { - if (NULL == (jobj = ENVPTR->GetObjectArrayElement(ENVONLY, (jobjectArray)array, (jsize)j))) - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - - switch (vlClass) { - /* case H5T_BOOL: { - jboolean boolValue = ENVPTR->CallBooleanMethod(ENVONLY, jobj, boolValueMid); - for (x = 0; x < vlSize; x++) { - ((char *)vl_elem.p)[j * vlSize + x] = ((char *)&boolValue)[x]; - } - break; - } */ - case H5T_INTEGER: { - switch (vlSize) { - case sizeof(jbyte): { - jbyte byteValue = ENVPTR->CallByteMethod(ENVONLY, jobj, byteValueMid); - for (x = 0; x < vlSize; x++) { - ((char *)vl_elem.p)[j * vlSize + x] = ((char *)&byteValue)[x]; - } - break; - } - case sizeof(jshort): { - jshort shortValue = ENVPTR->CallShortMethod(ENVONLY, jobj, shortValueMid); - for (x = 0; x < vlSize; x++) { - ((char *)vl_elem.p)[j * vlSize + x] = ((char *)&shortValue)[x]; - } - break; - } - case sizeof(jint): { - jint intValue = ENVPTR->CallIntMethod(ENVONLY, jobj, intValueMid); - for (x = 0; x < vlSize; x++) { - ((char *)vl_elem.p)[j * vlSize + x] = ((char *)&intValue)[x]; - } - break; - } - case sizeof(jlong): { - jlong longValue = ENVPTR->CallLongMethod(ENVONLY, jobj, longValueMid); - for (x = 0; x < vlSize; x++) { - ((char *)vl_elem.p)[j * vlSize + x] = ((char *)&longValue)[x]; - } - break; - } - } - break; - } - case H5T_FLOAT: { - switch (vlSize) { - case sizeof(jfloat): { - jfloat floatValue = ENVPTR->CallFloatMethod(ENVONLY, jobj, floatValueMid); - for (x = 0; x < vlSize; x++) { - ((char *)vl_elem.p)[j * vlSize + x] = ((char *)&floatValue)[x]; - } - break; - } - case sizeof(jdouble): { - jdouble doubleValue = ENVPTR->CallDoubleMethod(ENVONLY, jobj, 
doubleValueMid); - for (x = 0; x < vlSize; x++) { - ((char *)vl_elem.p)[j * vlSize + x] = ((char *)&doubleValue)[x]; - } - break; - } - } - break; - } - case H5T_REFERENCE: { - jbyte *barray = (jbyte *)ENVPTR->GetByteArrayElements(ENVONLY, jobj, 0); - for (x = 0; x < vlSize; x++) { - ((char *)vl_elem.p)[j * vlSize + x] = ((char *)barray)[x]; - } - ENVPTR->ReleaseByteArrayElements(ENVONLY, jobj, barray, 0); - break; - } - default: - H5_UNIMPLEMENTED(ENVONLY, "H5DwriteVL: invalid class type"); - break; - } - ENVPTR->DeleteLocalRef(ENVONLY, jobj); - } - HDmemcpy((char *)rawBuf + i * typeSize, &vl_elem, sizeof(hvl_t)); + if (NULL == (writeBuf = HDcalloc((size_t)vl_array_len, typeSize))) + H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5DwriteVL: failed to allocate raw VL write buffer"); - ENVPTR->DeleteLocalRef(ENVONLY, jList); - } /* end for (i = 0; i < n; i++) */ + if ((type_class = H5Tget_class((hid_t)mem_type_id)) < 0) + H5_LIBRARY_ERROR(ENVONLY); - if ((status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id, - (hid_t)file_space_id, (hid_t)xfer_plist_id, rawBuf)) < 0) - CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + translate_wbuf(ENVONLY, buf, mem_type_id, type_class, vl_array_len, writeBuf); - if (rawBuf) - HDfree(rawBuf); - } - else { - PIN_BYTE_ARRAY(ENVONLY, buf, writeBuf, &writeBufIsCopy, "H5DwriteVL: write buffer not pinned"); - if ((status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id, - (hid_t)file_space_id, (hid_t)xfer_plist_id, writeBuf)) < 0) - H5_LIBRARY_ERROR(ENVONLY); - } + if ((status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id, (hid_t)file_space_id, + (hid_t)xfer_plist_id, writeBuf)) < 0) + H5_LIBRARY_ERROR(ENVONLY); done: if (writeBuf) { if ((status >= 0) && vl_data_class) H5Treclaim(dataset_id, mem_space_id, H5P_DEFAULT, writeBuf); + if (is_variable) { + for (size_t i = 0; i < (size_t)vl_array_len; i++) + HDfree(((char **)writeBuf)[i]); + } - if (type_class != H5T_VLEN) - 
UNPIN_BYTE_ARRAY(ENVONLY, buf, writeBuf, (status < 0) ? JNI_ABORT : 0); + HDfree(writeBuf); } return (jint)status; @@ -1958,21 +1678,24 @@ H5DwriteVL_asstr(JNIEnv *env, hid_t did, hid_t tid, hid_t mem_sid, hid_t file_si continue; } - /* - * length = ENVPTR->GetStringUTFLength(ENVONLY, jstr); - * CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); - */ - PIN_JAVA_STRING(ENVONLY, jstr, utf8, NULL, "H5DwriteVL_asstr: failed to pin string buffer"); /* - * TODO: If the string isn't a copy, we should probably make - * one before destroying it with h5str_convert. + * Make a copy of the string since h5str_convert uses strtok. */ + char *utf8_copy = NULL; + + jsize length = ENVPTR->GetStringUTFLength(ENVONLY, jstr); + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (NULL == (utf8_copy = HDstrndup(utf8, (size_t)length))) + H5_LIBRARY_ERROR(ENVONLY); - if (!h5str_convert(ENVONLY, (char **)&utf8, did, tid, &(((char *)writeBuf)[i * typeSize]), 0)) + if (!h5str_convert(ENVONLY, &utf8_copy, did, tid, &(((char *)writeBuf)[i * typeSize]), 0)) CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + HDfree(utf8_copy); + UNPIN_JAVA_STRING(ENVONLY, jstr, utf8); utf8 = NULL; diff --git a/java/src/jni/h5util.c b/java/src/jni/h5util.c index 06087a71c59..152f019ac0c 100644 --- a/java/src/jni/h5util.c +++ b/java/src/jni/h5util.c @@ -77,6 +77,9 @@ static int render_bin_output_region_data_points(FILE *stream, hid_t region_sp hsize_t *ptdata); static int render_bin_output_region_points(FILE *stream, hid_t region_space, hid_t region_id, hid_t container); +jobject translate_atomic_rbuf(JNIEnv *env, jlong mem_type_id, H5T_class_t type_class, void *raw_buf); +void translate_atomic_wbuf(JNIEnv *env, jobject in_obj, jlong mem_type_id, H5T_class_t type_class, + void *raw_buf); /* Strings for output */ #define H5_TOOLS_GROUP "GROUP" @@ -435,40 +438,43 @@ h5str_convert(JNIEnv *env, char **in_str, hid_t container, hid_t tid, void *out_ break; case H5T_ENUM: { - void *value = NULL; + void *value = NULL; + unsigned char 
tmp_uchar = 0; + unsigned short tmp_ushort = 0; + unsigned int tmp_uint = 0; +#if H5_SIZEOF_LONG != H5_SIZEOF_INT + unsigned long tmp_ulong = 0; +#endif +#if H5_SIZEOF_LONG_LONG != H5_SIZEOF_LONG + unsigned long long tmp_ullong = 0; +#endif token = HDstrtok(this_str, delimiter); switch (typeSize) { case sizeof(char): { - unsigned char tmp_uchar = 0; - value = &tmp_uchar; + value = &tmp_uchar; break; } case sizeof(short): { - unsigned short tmp_ushort = 0; - value = &tmp_ushort; + value = &tmp_ushort; break; } #if H5_SIZEOF_LONG != H5_SIZEOF_INT case sizeof(long): { - unsigned long tmp_ulong = 0; - value = &tmp_ulong; + value = &tmp_ulong; break; } #endif #if H5_SIZEOF_LONG_LONG != H5_SIZEOF_LONG case sizeof(long long): { - unsigned long long tmp_ullong = 0; - value = &tmp_ullong; + value = &tmp_ullong; break; } #endif - default: { - unsigned int tmp_uint = 0; - value = &tmp_uint; + value = &tmp_uint; break; } } @@ -3426,10 +3432,8 @@ Java_hdf_hdf5lib_H5_H5AreadComplex(JNIEnv *env, jclass clss, jlong attr_id, jlon size = (((H5Tget_size(mem_type_id)) > (H5Tget_size(p_type))) ? 
(H5Tget_size(mem_type_id)) : (H5Tget_size(p_type))); - if ((n = ENVPTR->GetArrayLength(ENVONLY, buf)) <= 0) { - CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + if ((n = ENVPTR->GetArrayLength(ENVONLY, buf)) <= 0) H5_BAD_ARGUMENT_ERROR(ENVONLY, "H5AreadComplex: read buffer length <= 0"); - } if (NULL == (readBuf = (char *)HDmalloc((size_t)n * size))) H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5AreadComplex: failed to allocate read buffer"); @@ -4042,6 +4046,854 @@ Java_hdf_hdf5lib_H5_H5export_1attribute(JNIEnv *env, jclass clss, jstring file_e H5Aclose(attr_id); } /* end Java_hdf_hdf5lib_H5_H5export_1attribute */ +jobject +translate_atomic_rbuf(JNIEnv *env, jlong mem_type_id, H5T_class_t type_class, void *raw_buf) +{ + jobject jobj = NULL; + hid_t memb = H5I_INVALID_HID; + jobjectArray jList = NULL; + H5T_class_t vlClass; + size_t vlSize; + size_t i; + size_t typeSize; + // raw_buf is normally bytes except when used for variable length strings + char *char_buf = (char *)raw_buf; + + /* retrieve the java.util.ArrayList interface class */ + jclass arrCList = ENVPTR->FindClass(ENVONLY, "java/util/ArrayList"); + jmethodID arrListMethod = ENVPTR->GetMethodID(ENVONLY, arrCList, "", "(I)V"); + + /* Cache class types */ + /* jclass cBool = ENVPTR->FindClass(ENVONLY, "java/lang/Boolean"); */ + jclass cByte = ENVPTR->FindClass(ENVONLY, "java/lang/Byte"); + jclass cShort = ENVPTR->FindClass(ENVONLY, "java/lang/Short"); + jclass cInt = ENVPTR->FindClass(ENVONLY, "java/lang/Integer"); + jclass cLong = ENVPTR->FindClass(ENVONLY, "java/lang/Long"); + jclass cFloat = ENVPTR->FindClass(ENVONLY, "java/lang/Float"); + jclass cDouble = ENVPTR->FindClass(ENVONLY, "java/lang/Double"); + /*jobjectArray + jmethodID boolValueMid = + ENVPTR->GetStaticMethodID(ENVONLY, cBool, "valueOf", "(Z)Ljava/lang/Boolean;"); + */ + jmethodID byteValueMid = ENVPTR->GetStaticMethodID(ENVONLY, cByte, "valueOf", "(B)Ljava/lang/Byte;"); + jmethodID shortValueMid = ENVPTR->GetStaticMethodID(ENVONLY, cShort, "valueOf", 
"(S)Ljava/lang/Short;"); + jmethodID intValueMid = ENVPTR->GetStaticMethodID(ENVONLY, cInt, "valueOf", "(I)Ljava/lang/Integer;"); + jmethodID longValueMid = ENVPTR->GetStaticMethodID(ENVONLY, cLong, "valueOf", "(J)Ljava/lang/Long;"); + jmethodID floatValueMid = ENVPTR->GetStaticMethodID(ENVONLY, cFloat, "valueOf", "(F)Ljava/lang/Float;"); + jmethodID doubleValueMid = + ENVPTR->GetStaticMethodID(ENVONLY, cDouble, "valueOf", "(D)Ljava/lang/Double;"); + + if (!(typeSize = H5Tget_size(mem_type_id))) + H5_LIBRARY_ERROR(ENVONLY); + + switch (type_class) { + case H5T_VLEN: { + if (!(memb = H5Tget_super(mem_type_id))) + H5_LIBRARY_ERROR(ENVONLY); + if ((vlClass = H5Tget_class(memb)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + if (!(vlSize = H5Tget_size(memb))) + H5_LIBRARY_ERROR(ENVONLY); + + /* Convert element to a list */ + hvl_t vl_elem; + + /* Get the number of sequence elements */ + memcpy(&vl_elem, char_buf, sizeof(hvl_t)); + jsize nelmts = (jsize)vl_elem.len; + if (vl_elem.len != (size_t)nelmts) + H5_JNI_FATAL_ERROR(ENVONLY, "translate_atomic_rbuf: overflow of number of VL elements"); + + if (nelmts < 0) + H5_BAD_ARGUMENT_ERROR(ENVONLY, "translate_atomic_rbuf: number of VL elements < 0"); + + /* The list we're going to return: */ + if (NULL == (jList = (jobjectArray)ENVPTR->NewObject(ENVONLY, arrCList, arrListMethod, 0))) + H5_OUT_OF_MEMORY_ERROR(ENVONLY, "translate_atomic_rbuf: failed to allocate list read buffer"); + + translate_rbuf(ENVONLY, jList, memb, vlClass, (jsize)nelmts, vl_elem.p); + jobj = jList; + break; + } /* H5T_VLEN */ + case H5T_COMPOUND: { + int nmembs = H5Tget_nmembers(mem_type_id); + + /* The list we're going to return: */ + if (NULL == (jList = (jobjectArray)ENVPTR->NewObject(ENVONLY, arrCList, arrListMethod, 0))) + H5_OUT_OF_MEMORY_ERROR(ENVONLY, "translate_atomic_rbuf: failed to allocate list read buffer"); + + /* Convert each element to a compound object */ + for (i = 0; i < (size_t)nmembs; i++) { + H5T_class_t memb_vlClass; + size_t memb_vlSize; + 
size_t memb_offset; + + if ((memb = H5Tget_member_type(mem_type_id, (unsigned int)i)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + memb_offset = H5Tget_member_offset(mem_type_id, (unsigned int)i); + if ((memb_vlClass = H5Tget_class(memb)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + if (!(memb_vlSize = H5Tget_size(memb))) + H5_LIBRARY_ERROR(ENVONLY); + + translate_atomic_rbuf(ENVONLY, memb, memb_vlClass, char_buf + i * typeSize + memb_offset); + H5Tclose(memb); + } + jobj = jList; + break; + } /* H5T_COMPOUND */ + case H5T_ARRAY: { + void *objBuf = NULL; + size_t typeCount; + + if (!(memb = H5Tget_super(mem_type_id))) + H5_LIBRARY_ERROR(ENVONLY); + if ((vlClass = H5Tget_class(memb)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + if (!(vlSize = H5Tget_size(memb))) + H5_LIBRARY_ERROR(ENVONLY); + if (!(typeSize = H5Tget_size(mem_type_id))) + H5_LIBRARY_ERROR(ENVONLY); + + typeCount = typeSize / vlSize; + + if (NULL == (objBuf = HDmalloc(typeSize))) + H5_OUT_OF_MEMORY_ERROR(ENVONLY, "translate_atomic_rbuf: failed to allocate buffer"); + + /* Convert each element */ + /* Get the object element */ + memcpy((char *)objBuf, char_buf, typeSize); + + /* The list we're going to return: */ + if (NULL == (jList = (jobjectArray)ENVPTR->NewObject(ENVONLY, arrCList, arrListMethod, 0))) + H5_OUT_OF_MEMORY_ERROR(ENVONLY, "translate_atomic_rbuf: failed to allocate list read buffer"); + + translate_rbuf(ENVONLY, jList, memb, vlClass, (jsize)typeCount, objBuf); + jobj = jList; + + if (objBuf) + HDfree(objBuf); + + break; + } /* H5T_ARRAY */ + case H5T_ENUM: + case H5T_BITFIELD: + case H5T_OPAQUE: + case H5T_INTEGER: { + /* Convert each element */ + switch (typeSize) { + case sizeof(jbyte): { + jbyte byteValue; + HDmemcpy(((char *)&byteValue), char_buf, typeSize); + + if (NULL == + (jobj = ENVPTR->CallStaticObjectMethod(ENVONLY, cByte, byteValueMid, byteValue))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + break; + } + case sizeof(jshort): { + jshort shortValue; + HDmemcpy(((char *)&shortValue), char_buf, 
typeSize); + + if (NULL == + (jobj = ENVPTR->CallStaticObjectMethod(ENVONLY, cShort, shortValueMid, shortValue))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + break; + } + case sizeof(jint): { + jint intValue; + + HDmemcpy(((char *)&intValue), char_buf, typeSize); + + if (NULL == (jobj = ENVPTR->CallStaticObjectMethod(ENVONLY, cInt, intValueMid, intValue))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + break; + } + case sizeof(jlong): { + jlong longValue; + HDmemcpy(((char *)&longValue), char_buf, typeSize); + + if (NULL == + (jobj = ENVPTR->CallStaticObjectMethod(ENVONLY, cLong, longValueMid, longValue))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + break; + } + } + break; + } /* H5T_INTEGER */ + case H5T_FLOAT: { + /* Convert each element to a list */ + switch (typeSize) { + case sizeof(jfloat): { + jfloat floatValue; + HDmemcpy(((char *)&floatValue), char_buf, typeSize); + + if (NULL == (jobj = ENVPTR->CallStaticObjectMethod(ENVONLY, cFloat, floatValueMid, + (double)floatValue))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + break; + } + case sizeof(jdouble): { + jdouble doubleValue; + HDmemcpy(((char *)&doubleValue), char_buf, typeSize); + + if (NULL == (jobj = ENVPTR->CallStaticObjectMethod(ENVONLY, cDouble, doubleValueMid, + doubleValue))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + break; + } + } + break; + } /* H5T_FLOAT */ + case H5T_REFERENCE: { + /* Convert each element to a list */ + jboolean bb; + jbyte *barray = NULL; + + jsize byteArraySize = (jsize)typeSize; + if (typeSize != (size_t)byteArraySize) + H5_JNI_FATAL_ERROR(ENVONLY, "translate_atomic_rbuf: overflow of byteArraySize"); + + if (NULL == (jobj = ENVPTR->NewByteArray(ENVONLY, byteArraySize))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + PIN_BYTE_ARRAY(ENVONLY, (jbyteArray)jobj, barray, &bb, + "translate_atomic_rbuf reference: byte array not pinned"); + + HDmemcpy(barray, ((jbyte *)raw_buf), typeSize); + if (barray) + UNPIN_BYTE_ARRAY(ENVONLY, (jbyteArray)jobj, barray, jobj ? 
0 : JNI_ABORT); + + break; + } /* H5T_REFERENCE */ + case H5T_STRING: { + htri_t is_variable = 0; + + if ((is_variable = H5Tis_variable_str(mem_type_id)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + + /* Convert each element */ + if (is_variable) { + char **var_str_buf = (char **)raw_buf; + if (NULL == (jobj = ENVPTR->NewStringUTF(ENVONLY, *var_str_buf))) { + CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + H5_OUT_OF_MEMORY_ERROR(ENVONLY, "translate_atomic_rbuf: out of memory - unable to " + "construct string from UTF characters"); + } + } + else { + if (NULL == (jobj = ENVPTR->NewStringUTF(ENVONLY, char_buf))) { + CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + H5_OUT_OF_MEMORY_ERROR(ENVONLY, "translate_atomic_rbuf: out of memory - unable to " + "construct string from UTF characters"); + } + } + + break; + } /* H5T_STRING */ + default: + H5_UNIMPLEMENTED(ENVONLY, "translate_atomic_rbuf: invalid class type"); + break; + } /* switch(type_class) */ + +done: + + return jobj; +} + +void +translate_atomic_wbuf(JNIEnv *env, jobject in_obj, jlong mem_type_id, H5T_class_t type_class, void *raw_buf) +{ + hid_t memb = H5I_INVALID_HID; + H5T_class_t vlClass; + size_t vlSize; + size_t i; + size_t typeSize; + // raw_buf is normally bytes except when used for variable length strings + char *char_buf = (char *)raw_buf; + + /* retrieve the java.util.ArrayList interface class */ + jclass arrCList = ENVPTR->FindClass(ENVONLY, "java/util/ArrayList"); + /* retrieve the toArray method */ + jmethodID mToArray = ENVPTR->GetMethodID(ENVONLY, arrCList, "toArray", "()[Ljava/lang/Object;"); + + /* Cache class types */ + jclass cByte = ENVPTR->FindClass(ENVONLY, "java/lang/Byte"); + jclass cShort = ENVPTR->FindClass(ENVONLY, "java/lang/Short"); + jclass cInt = ENVPTR->FindClass(ENVONLY, "java/lang/Integer"); + jclass cLong = ENVPTR->FindClass(ENVONLY, "java/lang/Long"); + jclass cFloat = ENVPTR->FindClass(ENVONLY, "java/lang/Float"); + jclass cDouble = ENVPTR->FindClass(ENVONLY, "java/lang/Double"); + + jmethodID 
byteValueMid = ENVPTR->GetMethodID(ENVONLY, cByte, "byteValue", "()B"); + jmethodID shortValueMid = ENVPTR->GetMethodID(ENVONLY, cShort, "shortValue", "()S"); + jmethodID intValueMid = ENVPTR->GetMethodID(ENVONLY, cInt, "intValue", "()I"); + jmethodID longValueMid = ENVPTR->GetMethodID(ENVONLY, cLong, "longValue", "()J"); + jmethodID floatValueMid = ENVPTR->GetMethodID(ENVONLY, cFloat, "floatValue", "()F"); + jmethodID doubleValueMid = ENVPTR->GetMethodID(ENVONLY, cDouble, "doubleValue", "()D"); + + if (!(typeSize = H5Tget_size(mem_type_id))) + H5_LIBRARY_ERROR(ENVONLY); + + switch (type_class) { + case H5T_VLEN: { + if (!(memb = H5Tget_super(mem_type_id))) + H5_LIBRARY_ERROR(ENVONLY); + if ((vlClass = H5Tget_class((hid_t)memb)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + if (!(vlSize = H5Tget_size(memb))) + H5_LIBRARY_ERROR(ENVONLY); + + /* Convert element to a vlen element */ + hvl_t vl_elem; + + jsize jnelmts = ENVPTR->GetArrayLength(ENVONLY, in_obj); + + if (jnelmts < 0) + H5_BAD_ARGUMENT_ERROR(ENVONLY, "translate_atomic_wbuf: number of VL elements < 0"); + + vl_elem.len = (size_t)jnelmts; + + if (NULL == (vl_elem.p = HDmalloc((size_t)jnelmts * vlSize))) + H5_OUT_OF_MEMORY_ERROR(ENVONLY, "translate_atomic_wbuf: failed to allocate vlen ptr buffer"); + + translate_wbuf(ENVONLY, (jobjectArray)in_obj, memb, vlClass, (jsize)jnelmts, vl_elem.p); + + HDmemcpy(char_buf, &vl_elem, sizeof(hvl_t)); + break; + } /* H5T_VLEN */ + case H5T_COMPOUND: { + /* Convert each compound element */ + int nmembs = H5Tget_nmembers(mem_type_id); + + /* invoke the toArray method */ + if (mToArray == NULL) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + jobjectArray array = (jobjectArray)ENVPTR->CallObjectMethod(ENVONLY, in_obj, mToArray); + jsize jnelmts = ENVPTR->GetArrayLength(ENVONLY, array); + + if (jnelmts != nmembs) + H5_BAD_ARGUMENT_ERROR( + ENVONLY, "translate_atomic_wbuf: number of elements not equal to number of members"); + + /* Convert each compound object to an element */ + for (i = 0; 
i < (size_t)nmembs; i++) { + H5T_class_t memb_vlClass; + size_t memb_vlSize; + size_t memb_offset; + + if ((memb = H5Tget_member_type(mem_type_id, (unsigned int)i)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + memb_offset = H5Tget_member_offset(mem_type_id, (unsigned int)i); + if ((memb_vlClass = H5Tget_class(memb)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + if (!(memb_vlSize = H5Tget_size(memb))) + H5_LIBRARY_ERROR(ENVONLY); + + jobject arr_obj = ENVPTR->GetObjectArrayElement(ENVONLY, array, (jsize)i); + translate_atomic_wbuf(ENVONLY, arr_obj, memb, memb_vlClass, + char_buf + i * typeSize + memb_offset); + ENVPTR->DeleteLocalRef(ENVONLY, arr_obj); + H5Tclose(memb); + } + break; + } /* H5T_COMPOUND */ + case H5T_ARRAY: { + void *objBuf = NULL; + + if (!(memb = H5Tget_super(mem_type_id))) + H5_LIBRARY_ERROR(ENVONLY); + if ((vlClass = H5Tget_class(memb)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + if (!(vlSize = H5Tget_size(memb))) + H5_LIBRARY_ERROR(ENVONLY); + + /* Convert each array element */ + /* invoke the toArray method */ + if (mToArray == NULL) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + jobjectArray array = (jobjectArray)ENVPTR->CallObjectMethod(ENVONLY, in_obj, mToArray); + jsize jnelmts = ENVPTR->GetArrayLength(ENVONLY, array); + + if (jnelmts < 0) + H5_BAD_ARGUMENT_ERROR(ENVONLY, "translate_atomic_wbuf: number of array elements < 0"); + + if (NULL == (objBuf = HDmalloc((size_t)jnelmts * vlSize))) + H5_OUT_OF_MEMORY_ERROR(ENVONLY, "translate_atomic_wbuf: failed to allocate buffer"); + + translate_wbuf(ENVONLY, array, memb, vlClass, (jsize)jnelmts, objBuf); + + HDmemcpy(char_buf, (char *)objBuf, vlSize * (size_t)jnelmts); + break; + } /* H5T_ARRAY */ + case H5T_ENUM: + case H5T_BITFIELD: + case H5T_OPAQUE: + case H5T_INTEGER: { + /* Convert each element */ + switch (typeSize) { + case sizeof(jbyte): { + jbyte byteValue = ENVPTR->CallByteMethod(ENVONLY, in_obj, byteValueMid); + HDmemcpy(char_buf, ((char *)&byteValue), typeSize); + break; + } + case sizeof(jshort): { + jshort 
shortValue = ENVPTR->CallShortMethod(ENVONLY, in_obj, shortValueMid); + HDmemcpy(char_buf, ((char *)&shortValue), typeSize); + break; + } + case sizeof(jint): { + jint intValue = ENVPTR->CallIntMethod(ENVONLY, in_obj, intValueMid); + HDmemcpy(char_buf, ((char *)&intValue), typeSize); + break; + } + case sizeof(jlong): { + jlong longValue = ENVPTR->CallLongMethod(ENVONLY, in_obj, longValueMid); + HDmemcpy(char_buf, ((char *)&longValue), typeSize); + break; + } + } + break; + } /* H5T_INTEGER */ + case H5T_FLOAT: { + /* Convert each element */ + switch (typeSize) { + case sizeof(jfloat): { + jfloat floatValue = ENVPTR->CallFloatMethod(ENVONLY, in_obj, floatValueMid); + HDmemcpy(char_buf, ((char *)&floatValue), typeSize); + break; + } + case sizeof(jdouble): { + jdouble doubleValue = ENVPTR->CallDoubleMethod(ENVONLY, in_obj, doubleValueMid); + HDmemcpy(char_buf, ((char *)&doubleValue), typeSize); + break; + } + } + break; + } /* H5T_FLOAT */ + case H5T_REFERENCE: { + /* Convert each array element */ + jbyte *barray = (jbyte *)ENVPTR->GetByteArrayElements(ENVONLY, in_obj, 0); + HDmemcpy(char_buf, ((char *)barray), typeSize); + ENVPTR->ReleaseByteArrayElements(ENVONLY, in_obj, barray, 0); + break; + } /* H5T_REFERENCE */ + case H5T_STRING: { + htri_t is_variable = 0; + + if ((is_variable = H5Tis_variable_str(mem_type_id)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + + /* Convert each pointer element */ + jsize length; + const char *utf8 = NULL; + + HDmemset(char_buf, 0, typeSize); + if (NULL != in_obj) { + PIN_JAVA_STRING(ENVONLY, in_obj, utf8, NULL, "translate_atomic_wbuf jobj not pinned"); + length = ENVPTR->GetStringUTFLength(ENVONLY, in_obj); + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + if (is_variable) { + char *new_buf = (char *)calloc(1, (size_t)(length + 1)); + HDmemcpy(((char *)new_buf), utf8, (size_t)length); + HDmemcpy(char_buf, &new_buf, typeSize); + } + else { + HDmemcpy(char_buf, utf8, (size_t)length); + } + UNPIN_JAVA_STRING(ENVONLY, in_obj, utf8); + utf8 = NULL; 
+ } + break; + } /* H5T_STRING */ + default: + H5_UNIMPLEMENTED(ENVONLY, "translate_atomic_wbuf: invalid class type"); + break; + } /* switch(type_class) */ + +done: + + return; +} + +void +translate_rbuf(JNIEnv *env, jobjectArray ret_buf, jlong mem_type_id, H5T_class_t type_class, jsize count, + void *raw_buf) +{ + hid_t memb = H5I_INVALID_HID; + int ret_buflen = -1; + jboolean found_jList = JNI_TRUE; + jobjectArray jList = NULL; + jobject jobj = NULL; + H5T_class_t vlClass; + size_t vlSize; + size_t i, x; + size_t typeSize; + // raw_buf is normally bytes except when used for variable length strings + char *char_buf = (char *)raw_buf; + + /* retrieve the java.util.ArrayList interface class */ + jclass arrCList = ENVPTR->FindClass(ENVONLY, "java/util/ArrayList"); + jmethodID arrListMethod = ENVPTR->GetMethodID(ENVONLY, arrCList, "", "(I)V"); + jmethodID arrAddMethod = ENVPTR->GetMethodID(ENVONLY, arrCList, "add", "(Ljava/lang/Object;)Z"); + + if (!(typeSize = H5Tget_size(mem_type_id))) + H5_LIBRARY_ERROR(ENVONLY); + + ret_buflen = ENVPTR->GetArrayLength(ENVONLY, ret_buf); + if (ret_buflen < 0) + H5_JNI_FATAL_ERROR(ENVONLY, "ret_buflen: Array length cannot be negative"); + + switch (type_class) { + case H5T_VLEN: { + if (!(memb = H5Tget_super(mem_type_id))) + H5_LIBRARY_ERROR(ENVONLY); + if ((vlClass = H5Tget_class(memb)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + if (!(vlSize = H5Tget_size(memb))) + H5_LIBRARY_ERROR(ENVONLY); + + /* Convert each element to a list */ + for (i = 0; i < (size_t)count; i++) { + hvl_t vl_elem; + + found_jList = JNI_TRUE; + jList = NULL; + + /* Get the number of sequence elements */ + HDmemcpy(&vl_elem, char_buf + i * sizeof(hvl_t), sizeof(hvl_t)); + jsize nelmts = (jsize)vl_elem.len; + if (vl_elem.len != (size_t)nelmts) + H5_JNI_FATAL_ERROR(ENVONLY, "translate_rbuf: overflow of number of VL elements"); + + if (nelmts < 0) + H5_BAD_ARGUMENT_ERROR(ENVONLY, "translate_rbuf: number of VL elements < 0"); + + /* The list we're going to return: */ + 
if (i < (size_t)ret_buflen) { + jList = ENVPTR->GetObjectArrayElement(ENVONLY, (jobjectArray)ret_buf, (jsize)i); + } + if (NULL == jList) { + found_jList = JNI_FALSE; + if (NULL == + (jList = (jobjectArray)ENVPTR->NewObject(ENVONLY, arrCList, arrListMethod, 0))) + H5_OUT_OF_MEMORY_ERROR(ENVONLY, + "translate_rbuf: failed to allocate list read buffer"); + } + + translate_rbuf(ENVONLY, jList, memb, vlClass, (jsize)nelmts, vl_elem.p); + if (found_jList == JNI_FALSE) { + jboolean addResult = + ENVPTR->CallBooleanMethod(ENVONLY, ret_buf, arrAddMethod, (jobject)jList); + if (!addResult) + H5_JNI_FATAL_ERROR(ENVONLY, "translate_rbuf: cannot add VL element"); + } + else { + ENVPTR->SetObjectArrayElement(ENVONLY, ret_buf, (jsize)i, (jobject)jList); + CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + } + ENVPTR->DeleteLocalRef(ENVONLY, jList); + } + break; + } /* H5T_VLEN */ + case H5T_COMPOUND: { + /* Convert each compound element to a list */ + for (i = 0; i < (size_t)count; i++) { + found_jList = JNI_TRUE; + jList = NULL; + + /* The list we're going to return: */ + if (i < (size_t)ret_buflen) { + jList = ENVPTR->GetObjectArrayElement(ENVONLY, (jobjectArray)ret_buf, (jsize)i); + } + if (NULL == jList) { + found_jList = JNI_FALSE; + if (NULL == + (jList = (jobjectArray)ENVPTR->NewObject(ENVONLY, arrCList, arrListMethod, 0))) + H5_OUT_OF_MEMORY_ERROR(ENVONLY, + "translate_rbuf: failed to allocate list read buffer"); + } + int nmembs = H5Tget_nmembers(mem_type_id); + /* Convert each element to a list */ + for (x = 0; x < (size_t)nmembs; x++) { + H5T_class_t memb_vlClass; + size_t memb_vlSize; + size_t memb_offset; + + if ((memb = H5Tget_member_type(mem_type_id, (unsigned int)x)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + memb_offset = H5Tget_member_offset(mem_type_id, (unsigned int)x); + + if ((memb_vlClass = H5Tget_class(memb)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + if (!(memb_vlSize = H5Tget_size(memb))) + H5_LIBRARY_ERROR(ENVONLY); + + jobj = translate_atomic_rbuf(ENVONLY, memb, 
memb_vlClass, + char_buf + i * typeSize + memb_offset); + if (jobj) { + if (found_jList == JNI_FALSE) + ENVPTR->CallBooleanMethod(ENVONLY, jList, arrAddMethod, (jobject)jobj); + else + ENVPTR->SetObjectArrayElement(ENVONLY, jList, (jsize)i, (jobject)jobj); + CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + ENVPTR->DeleteLocalRef(ENVONLY, jobj); + } + + H5Tclose(memb); + } + if (ret_buflen == 0) + ENVPTR->CallBooleanMethod(ENVONLY, ret_buf, arrAddMethod, jList); + else + ENVPTR->SetObjectArrayElement(ENVONLY, ret_buf, (jsize)i, jList); + CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + ENVPTR->DeleteLocalRef(ENVONLY, jList); + } + break; + } /* H5T_COMPOUND */ + case H5T_ARRAY: { + void *objBuf = NULL; + size_t typeCount; + + if (!(memb = H5Tget_super(mem_type_id))) + H5_LIBRARY_ERROR(ENVONLY); + if ((vlClass = H5Tget_class(memb)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + if (!(vlSize = H5Tget_size(memb))) + H5_LIBRARY_ERROR(ENVONLY); + if (!(typeSize = H5Tget_size(mem_type_id))) + H5_LIBRARY_ERROR(ENVONLY); + + typeCount = typeSize / vlSize; + + if (NULL == (objBuf = HDmalloc(typeSize))) + H5_OUT_OF_MEMORY_ERROR(ENVONLY, "translate_rbuf: failed to allocate buffer"); + + /* Convert each element to a list */ + for (i = 0; i < (size_t)count; i++) { + found_jList = JNI_TRUE; + jList = NULL; + + /* Get the object element */ + HDmemcpy((char *)objBuf, char_buf + i * typeSize, typeSize); + + /* The list we're going to return: */ + if (i < (size_t)ret_buflen) { + if (NULL == + (jList = ENVPTR->GetObjectArrayElement(ENVONLY, (jobjectArray)ret_buf, (jsize)i))) + found_jList = JNI_FALSE; + } + if (NULL == jList) { + if (NULL == + (jList = (jobjectArray)ENVPTR->NewObject(ENVONLY, arrCList, arrListMethod, 0))) + H5_OUT_OF_MEMORY_ERROR(ENVONLY, + "translate_rbuf: failed to allocate list read buffer"); + } + + translate_rbuf(ENVONLY, jList, memb, vlClass, (jsize)typeCount, objBuf); + if (found_jList == JNI_FALSE) + ENVPTR->CallBooleanMethod(ENVONLY, ret_buf, arrAddMethod, jList); + else + 
ENVPTR->SetObjectArrayElement(ENVONLY, ret_buf, (jsize)i, jList); + CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + ENVPTR->DeleteLocalRef(ENVONLY, jList); + } + + if (objBuf) + HDfree(objBuf); + + break; + } /* H5T_ARRAY */ + case H5T_ENUM: + case H5T_BITFIELD: + case H5T_OPAQUE: + case H5T_INTEGER: + case H5T_FLOAT: + case H5T_REFERENCE: + case H5T_STRING: { + /* Convert each element to a list */ + for (i = 0; i < (size_t)count; i++) { + jobj = translate_atomic_rbuf(ENVONLY, mem_type_id, type_class, char_buf + i * typeSize); + if (jobj) { + if (ret_buflen == 0) + ENVPTR->CallBooleanMethod(ENVONLY, ret_buf, arrAddMethod, (jobject)jobj); + else + ENVPTR->SetObjectArrayElement(ENVONLY, ret_buf, (jsize)i, (jobject)jobj); + CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + ENVPTR->DeleteLocalRef(ENVONLY, jobj); + } + } + break; + } + default: + H5_UNIMPLEMENTED(ENVONLY, "translate_rbuf: invalid class type"); + break; + } /* switch(type_class) */ + +done: + + return; +} + +void +translate_wbuf(JNIEnv *env, jobjectArray in_buf, jlong mem_type_id, H5T_class_t type_class, jsize count, + void *raw_buf) +{ + hid_t memb = H5I_INVALID_HID; + jobjectArray jList = NULL; + jobject jobj = NULL; + H5T_class_t vlClass; + size_t vlSize; + size_t i, x; + size_t typeSize; + // raw_buf is normally bytes except when used for variable length strings + char *char_buf = (char *)raw_buf; + + /* retrieve the java.util.ArrayList interface class */ + jclass arrCList = ENVPTR->FindClass(ENVONLY, "java/util/ArrayList"); + /* retrieve the toArray method */ + jmethodID mToArray = ENVPTR->GetMethodID(ENVONLY, arrCList, "toArray", "()[Ljava/lang/Object;"); + + if (!(typeSize = H5Tget_size(mem_type_id))) + H5_LIBRARY_ERROR(ENVONLY); + + switch (type_class) { + case H5T_VLEN: { + if (!(memb = H5Tget_super(mem_type_id))) + H5_LIBRARY_ERROR(ENVONLY); + if ((vlClass = H5Tget_class((hid_t)memb)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + if (!(vlSize = H5Tget_size(memb))) + H5_LIBRARY_ERROR(ENVONLY); + + /* Convert each list 
to a vlen element */ + for (i = 0; i < (size_t)count; i++) { + hvl_t vl_elem; + + if (NULL == (jList = ENVPTR->GetObjectArrayElement(ENVONLY, (jobjectArray)in_buf, (jsize)i))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + /* invoke the toArray method */ + if (mToArray == NULL) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + jobjectArray array = (jobjectArray)ENVPTR->CallObjectMethod(ENVONLY, jList, mToArray); + jsize jnelmts = ENVPTR->GetArrayLength(ENVONLY, array); + + if (jnelmts < 0) + H5_BAD_ARGUMENT_ERROR(ENVONLY, "translate_wbuf: number of VL elements < 0"); + + vl_elem.len = (size_t)jnelmts; + + if (NULL == (vl_elem.p = HDmalloc((size_t)jnelmts * vlSize))) + H5_OUT_OF_MEMORY_ERROR(ENVONLY, "translate_wbuf: failed to allocate vlen ptr buffer"); + + translate_wbuf(ENVONLY, array, memb, vlClass, (jsize)jnelmts, vl_elem.p); + + HDmemcpy(char_buf + i * sizeof(hvl_t), &vl_elem, sizeof(hvl_t)); + + ENVPTR->DeleteLocalRef(ENVONLY, jList); + } /* end for (i = 0; i < count; i++) */ + break; + } /* H5T_VLEN */ + case H5T_COMPOUND: { + /* Convert each list to a compound element */ + for (i = 0; i < (size_t)count; i++) { + if (NULL == (jList = ENVPTR->GetObjectArrayElement(ENVONLY, (jobjectArray)in_buf, (jsize)i))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + int nmembs = H5Tget_nmembers(mem_type_id); + + /* invoke the toArray method */ + if (mToArray == NULL) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + jobjectArray array = (jobjectArray)ENVPTR->CallObjectMethod(ENVONLY, jList, mToArray); + jsize jnelmts = ENVPTR->GetArrayLength(ENVONLY, array); + + if (jnelmts != nmembs) + H5_BAD_ARGUMENT_ERROR( + ENVONLY, "translate_wbuf: number of elements not equal to number of members"); + + /* Convert each compound object to an element */ + for (x = 0; x < (size_t)nmembs; x++) { + H5T_class_t memb_vlClass; + size_t memb_vlSize; + size_t memb_offset; + + if ((memb = H5Tget_member_type(mem_type_id, (unsigned int)x)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + memb_offset = 
H5Tget_member_offset(mem_type_id, (unsigned int)x); + + if ((memb_vlClass = H5Tget_class(memb)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + if (!(memb_vlSize = H5Tget_size(memb))) + H5_LIBRARY_ERROR(ENVONLY); + + jobject arr_obj = ENVPTR->GetObjectArrayElement(ENVONLY, array, (jsize)x); + translate_atomic_wbuf(ENVONLY, arr_obj, memb, memb_vlClass, + char_buf + i * typeSize + memb_offset); + ENVPTR->DeleteLocalRef(ENVONLY, arr_obj); + H5Tclose(memb); + } + + ENVPTR->DeleteLocalRef(ENVONLY, jList); + } /* end for (i = 0; i < count; i++) */ + break; + } /* H5T_COMPOUND */ + case H5T_ARRAY: { + if (!(memb = H5Tget_super(mem_type_id))) + H5_LIBRARY_ERROR(ENVONLY); + if ((vlClass = H5Tget_class(memb)) < 0) + H5_LIBRARY_ERROR(ENVONLY); + if (!(vlSize = H5Tget_size(memb))) + H5_LIBRARY_ERROR(ENVONLY); + + /* Convert each list to an array element */ + for (i = 0; i < (size_t)count; i++) { + if (NULL == (jList = ENVPTR->GetObjectArrayElement(ENVONLY, (jobjectArray)in_buf, (jsize)i))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + /* invoke the toArray method */ + if (mToArray == NULL) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + jobjectArray array = (jobjectArray)ENVPTR->CallObjectMethod(ENVONLY, jList, mToArray); + jsize jnelmts = ENVPTR->GetArrayLength(ENVONLY, array); + + if (jnelmts < 0) + H5_BAD_ARGUMENT_ERROR(ENVONLY, "translate_wbuf: number of array elements < 0"); + + translate_wbuf(ENVONLY, array, memb, vlClass, jnelmts, + char_buf + i * vlSize * (size_t)jnelmts); + + ENVPTR->DeleteLocalRef(ENVONLY, jList); + } /* end for (i = 0; i < count; i++) */ + break; + } /* H5T_ARRAY */ + case H5T_ENUM: + case H5T_BITFIELD: + case H5T_OPAQUE: + case H5T_INTEGER: + case H5T_FLOAT: + case H5T_REFERENCE: + case H5T_STRING: { + /* Convert each list to an array element */ + for (i = 0; i < (size_t)count; i++) { + if (NULL == (jobj = ENVPTR->GetObjectArrayElement(ENVONLY, (jobjectArray)in_buf, (jsize)i))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + translate_atomic_wbuf(ENVONLY, jobj, 
mem_type_id, type_class, char_buf + i * typeSize); + ENVPTR->DeleteLocalRef(ENVONLY, jobj); + } + break; + } + default: + H5_UNIMPLEMENTED(ENVONLY, "translate_wbuf: invalid class type"); + break; + } /* switch(type_class) */ + +done: + + return; +} + #ifdef __cplusplus } #endif diff --git a/java/src/jni/h5util.h b/java/src/jni/h5util.h index fa0a061b4ee..d8ecef901cc 100644 --- a/java/src/jni/h5util.h +++ b/java/src/jni/h5util.h @@ -51,6 +51,11 @@ extern int h5str_dump_simple_mem(JNIEnv *env, FILE *stream, hid_t attr, int b extern htri_t H5Tdetect_variable_str(hid_t tid); +extern void translate_rbuf(JNIEnv *env, jobjectArray ret_buf, jlong mem_type_id, H5T_class_t type_class, + jsize count, void *raw_buf); +extern void translate_wbuf(JNIEnv *env, jobjectArray ret_buf, jlong mem_type_id, H5T_class_t type_class, + jsize count, void *raw_buf); + /* * Symbols used to format the output of h5str_sprintf and * to interpret the input to h5str_convert. diff --git a/java/test/CMakeLists.txt b/java/test/CMakeLists.txt index 3e18d35aef0..48ea0f5d733 100644 --- a/java/test/CMakeLists.txt +++ b/java/test/CMakeLists.txt @@ -120,22 +120,38 @@ else () endif () get_property (target_name TARGET ${HDF5_JAVA_JNI_LIB_TARGET} PROPERTY OUTPUT_NAME) -set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$:${CMAKE_DEBUG_POSTFIX}>;") +set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$,$>:${CMAKE_DEBUG_POSTFIX}>;") set (CMAKE_JAVA_CLASSPATH ".") foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH}) set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}") endforeach () if (HDF5_TEST_JAVA AND HDF5_TEST_SERIAL) + add_test ( + NAME JUnit-clear-objects + COMMAND ${CMAKE_COMMAND} -E remove + test.h5 + testF2.h5 + testPf00000.h5 + testPf00001.h5 + WORKING_DIRECTORY ${HDF5_BINARY_DIR}/java/test + ) + set_tests_properties (JUnit-clear-objects PROPERTIES FIXTURES_SETUP clear_JUnit) + + add_test ( + NAME JUnit-clean-objects + 
COMMAND ${CMAKE_COMMAND} -E remove + test.h5 + testF2.h5 + testPf00000.h5 + testPf00001.h5 + WORKING_DIRECTORY ${HDF5_BINARY_DIR}/java/test + ) + set_tests_properties (JUnit-clean-objects PROPERTIES FIXTURES_CLEANUP clear_JUnit) + foreach (test_file ${HDF5_JAVA_TEST_SOURCES}) set (TEST_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${HDF5_JAVA_TEST_LIB_TARGET}_${test_file}_JAR_FILE}") - add_test ( - NAME JUnit-${test_file}-clearall-objects - COMMAND ${CMAKE_COMMAND} -E remove - JUnit-${test_file}.out - JUnit-${test_file}.out.err - ) add_test ( NAME JUnit-${test_file} COMMAND "${CMAKE_COMMAND}" @@ -156,7 +172,8 @@ if (HDF5_TEST_JAVA AND HDF5_TEST_SERIAL) ) set_tests_properties (JUnit-${test_file} PROPERTIES ENVIRONMENT "HDF5_PLUGIN_PATH=${CMAKE_BINARY_DIR}/testdir2" - DEPENDS "JUnit-${test_file}-clearall-objects" + FIXTURES_REQUIRED clear_JUnit + WORKING_DIRECTORY ${HDF5_BINARY_DIR}/java/test ) endforeach () @@ -169,13 +186,6 @@ if (HDF5_TEST_JAVA AND HDF5_TEST_SERIAL) macro (DO_VOL_TEST voltest volname volinfo volclasspath) #message(STATUS "${voltest}-${volname} with ${volinfo}") - add_test ( - NAME JUnit-VOL-${volname}-${voltest}-clearall-objects - COMMAND ${CMAKE_COMMAND} -E remove - JUnit-VOL-${volname}-${voltest}.out - JUnit-VOL-${volname}-${voltest}.out.err - WORKING_DIRECTORY ${HDF5_BINARY_DIR}/java/test/${volname} - ) add_test (NAME JUnit-VOL-${volname}-${voltest} COMMAND "${CMAKE_COMMAND}" -D "TEST_TESTER=${CMAKE_Java_RUNTIME};${CMAKE_Java_RUNTIME_FLAGS}" @@ -196,7 +206,20 @@ if (HDF5_TEST_JAVA AND HDF5_TEST_SERIAL) ) set_tests_properties (JUnit-VOL-${volname}-${voltest} PROPERTIES ENVIRONMENT "HDF5_PLUGIN_PATH=${CMAKE_BINARY_DIR}/testdir2" - DEPENDS "JUnit-VOL-${volname}-${voltest}-clearall-objects" + WORKING_DIRECTORY ${HDF5_BINARY_DIR}/java/test/${volname} + ) + add_test ( + NAME JUnit-VOL-${volname}-${voltest}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove + test.h5 + testF2.h5 + testPf00000.h5 + testPf00001.h5 + 
WORKING_DIRECTORY ${HDF5_BINARY_DIR}/java/test/${volname} + ) + set_tests_properties (JUnit-VOL-${volname}-${voltest}-clean-objects PROPERTIES + ENVIRONMENT "HDF5_PLUGIN_PATH=${CMAKE_BINARY_DIR}/testdir2" + DEPENDS "JUnit-VOL-${volname}-${voltest}" WORKING_DIRECTORY ${HDF5_BINARY_DIR}/java/test/${volname} ) endmacro () @@ -208,6 +231,8 @@ if (HDF5_TEST_JAVA AND HDF5_TEST_SERIAL) vol_pass_through2 ) + # native VOL = 0 + # pass-through VOL = 1 set (vol_native native) set (vol_pass_through1 "pass_through under_vol=0\;under_info={}") set (vol_pass_through2 "pass_through under_vol=505\;under_info={under_vol=0\;under_info={}}") diff --git a/java/test/Makefile.am b/java/test/Makefile.am index bb6be7f61d1..6a407dbb6d5 100644 --- a/java/test/Makefile.am +++ b/java/test/Makefile.am @@ -27,7 +27,7 @@ classes: pkgpath = test hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar -CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/junit.jar:$(top_srcdir)/java/lib/hamcrest-core.jar:$(top_srcdir)/java/lib/slf4j-api-1.7.33.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-1.7.33.jar:$$CLASSPATH +CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/junit.jar:$(top_srcdir)/java/lib/hamcrest-core.jar:$(top_srcdir)/java/lib/slf4j-api-2.0.6.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-2.0.6.jar:$$CLASSPATH jarfile = jar$(PACKAGE_TARNAME)test.jar diff --git a/java/test/TestH5A.java b/java/test/TestH5A.java index 4437ec7012a..90fe24cda71 100644 --- a/java/test/TestH5A.java +++ b/java/test/TestH5A.java @@ -46,6 +46,7 @@ public class TestH5A { private static final int DIM_Y = 6; long H5fid = HDF5Constants.H5I_INVALID_HID; long H5dsid = HDF5Constants.H5I_INVALID_HID; + long H5atid = HDF5Constants.H5I_INVALID_HID; long H5did = HDF5Constants.H5I_INVALID_HID; long[] H5dims = {DIM_X, DIM_Y}; long type_id = HDF5Constants.H5I_INVALID_HID; @@ -136,8 +137,12 @@ public void deleteH5file() throws 
HDF5LibraryException } catch (Exception ex) { } - - _deleteFile(H5_FILE); + if (H5atid > 0) + try { + H5.H5Tclose(H5atid); + } + catch (Exception ex) { + } if (type_id > 0) try { @@ -163,6 +168,8 @@ public void deleteH5file() throws HDF5LibraryException } catch (Exception ex) { } + + _deleteFile(H5_FILE); System.out.println(); } @@ -1072,7 +1079,7 @@ public void testH5Awrite_readVL() HDF5Constants.H5P_DEFAULT); assertTrue("testH5Awrite_readVL: ", attr_id >= 0); - H5.H5Awrite_VLStrings(attr_id, atype_id, str_data); + H5.H5AwriteVL(attr_id, atype_id, str_data); H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL); @@ -1084,7 +1091,7 @@ public void testH5Awrite_readVL() strs[j] = ""; } try { - H5.H5Aread_VLStrings(attr_id, atype_id, strs); + H5.H5AreadVL(attr_id, atype_id, strs); } catch (Exception ex) { ex.printStackTrace(); @@ -1501,9 +1508,8 @@ public void testH5AVLwr() H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL); - for (int j = 0; j < dims.length; j++) { + for (int j = 0; j < dims.length; j++) lsize *= dims[j]; - } // Read Integer data ArrayList[] vl_readbuf = new ArrayList[4]; @@ -1576,4 +1582,422 @@ public void testH5AVLwr() } } } + + @Test + public void testH5AVLwrVL() + { + String attr_int_name = "VLIntdata"; + long attr_int_id = HDF5Constants.H5I_INVALID_HID; + long atype_int_id = HDF5Constants.H5I_INVALID_HID; + long base_atype_int_id = HDF5Constants.H5I_INVALID_HID; + long aspace_id = HDF5Constants.H5I_INVALID_HID; + long[] dims = {4}; + long lsize = 1; + + ArrayList[] base_vl_int_data = new ArrayList[4]; + ArrayList[] vl_int_data = new ArrayList[4]; + try { + // Write Integer data + vl_int_data[0] = new ArrayList(Arrays.asList(1)); + vl_int_data[1] = new ArrayList(Arrays.asList(2, 3)); + vl_int_data[2] = new ArrayList(Arrays.asList(4, 5, 6)); + vl_int_data[3] = new ArrayList(Arrays.asList(7, 8, 9, 10)); + Class dataClass = vl_int_data.getClass(); + assertTrue("testH5AVLwrVL.getClass: " + dataClass, dataClass.isArray()); + + // Write VL data + 
base_vl_int_data[0] = new ArrayList>(); + base_vl_int_data[0].add(vl_int_data[0]); + base_vl_int_data[1] = new ArrayList>(); + base_vl_int_data[1].add(vl_int_data[0]); + base_vl_int_data[1].add(vl_int_data[1]); + base_vl_int_data[2] = new ArrayList>(); + base_vl_int_data[2].add(vl_int_data[0]); + base_vl_int_data[2].add(vl_int_data[1]); + base_vl_int_data[2].add(vl_int_data[2]); + base_vl_int_data[3] = new ArrayList>(); + base_vl_int_data[3].add(vl_int_data[0]); + base_vl_int_data[3].add(vl_int_data[1]); + base_vl_int_data[3].add(vl_int_data[2]); + base_vl_int_data[3].add(vl_int_data[3]); + + try { + atype_int_id = H5.H5Tvlen_create(HDF5Constants.H5T_STD_U32LE); + assertTrue("testH5AVLwr.H5Tvlen_create: ", atype_int_id >= 0); + base_atype_int_id = H5.H5Tvlen_create(atype_int_id); + assertTrue("testH5AVLwrVL.H5Tvlen_create: ", base_atype_int_id >= 0); + } + catch (Exception err) { + if (base_atype_int_id > 0) + try { + H5.H5Tclose(base_atype_int_id); + } + catch (Exception ex) { + } + if (atype_int_id > 0) + try { + H5.H5Tclose(atype_int_id); + } + catch (Exception ex) { + } + err.printStackTrace(); + fail("H5.testH5AVLwrVL: " + err); + } + + try { + aspace_id = H5.H5Screate_simple(1, dims, null); + assertTrue(aspace_id > 0); + attr_int_id = H5.H5Acreate(H5did, attr_int_name, base_atype_int_id, aspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + assertTrue("testH5AVLwrVL: ", attr_int_id >= 0); + + H5.H5AwriteVL(attr_int_id, base_atype_int_id, base_vl_int_data); + } + catch (Exception err) { + if (attr_int_id > 0) + try { + H5.H5Aclose(attr_int_id); + } + catch (Exception ex) { + } + if (atype_int_id > 0) + try { + H5.H5Tclose(atype_int_id); + } + catch (Exception ex) { + } + err.printStackTrace(); + fail("H5.testH5AVLwrVL: " + err); + } + finally { + if (aspace_id > 0) + try { + H5.H5Sclose(aspace_id); + } + catch (Exception ex) { + } + } + + H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL); + + for (int j = 0; j < dims.length; j++) + lsize *= 
dims[j]; + + // Read Integer data + ArrayList[] base_vl_readbuf = new ArrayList[4]; + for (int j = 0; j < lsize; j++) + base_vl_readbuf[j] = new ArrayList>(); + + try { + H5.H5AreadVL(attr_int_id, base_atype_int_id, base_vl_readbuf); + } + catch (Exception ex) { + ex.printStackTrace(); + } + ArrayList> vl_readbuf = (ArrayList>)base_vl_readbuf[0]; + assertTrue("vl_readbuf 0 exists", vl_readbuf != null); + ArrayList vl_readbuf_int = (ArrayList)(vl_readbuf.get(0)); + /* + * System.out.println(); System.out.println("vl_readbuf: " + vl_readbuf); + * System.out.println("vl_readbuf_int: " + vl_readbuf_int); + */ + assertTrue("testHADVLwrVL:" + vl_readbuf_int.get(0), + vl_int_data[0].get(0).equals(vl_readbuf_int.get(0))); + + vl_readbuf = (ArrayList>)base_vl_readbuf[1]; + vl_readbuf_int = (ArrayList)(vl_readbuf.get(1)); + /* + * System.out.println("vl_readbuf: " + vl_readbuf); System.out.println("vl_readbuf_int: " + + * vl_readbuf_int); + */ + assertTrue("testH5AVLwrVL:" + vl_readbuf_int.get(1), + vl_int_data[1].get(1).equals(vl_readbuf_int.get(1))); + + vl_readbuf = (ArrayList>)base_vl_readbuf[2]; + vl_readbuf_int = (ArrayList)(vl_readbuf.get(2)); + /* + * System.out.println("vl_readbuf: " + vl_readbuf); System.out.println("vl_readbuf_int: " + + * vl_readbuf_int); + */ + assertTrue("testH5AVLwrVL:" + vl_readbuf_int.get(2), + vl_int_data[2].get(2).equals(vl_readbuf_int.get(2))); + + vl_readbuf = (ArrayList>)base_vl_readbuf[3]; + vl_readbuf_int = (ArrayList)(vl_readbuf.get(3)); + /* + * System.out.println("vl_readbuf: " + vl_readbuf); System.out.println("vl_readbuf_int: " + + * vl_readbuf_int); + */ + assertTrue("testH5AVLwrVL:" + vl_readbuf_int.get(3), + vl_int_data[3].get(3).equals(vl_readbuf_int.get(3))); + } + catch (Throwable err) { + err.printStackTrace(); + fail("H5.testH5AVLwrVL: " + err); + } + finally { + if (attr_int_id > 0) + try { + H5.H5Aclose(attr_int_id); + } + catch (Exception ex) { + } + if (atype_int_id > 0) + try { + H5.H5Tclose(atype_int_id); + } + 
catch (Exception ex) { + } + if (base_atype_int_id > 0) + try { + H5.H5Tclose(base_atype_int_id); + } + catch (Exception ex) { + } + } + } + + @Test + public void testH5AArraywr() + { + String att_int_name = "ArrayIntdata"; + long att_int_id = HDF5Constants.H5I_INVALID_HID; + long atype_int_id = HDF5Constants.H5I_INVALID_HID; + long aspace_id = HDF5Constants.H5I_INVALID_HID; + long[] dims = {4}; + long lsize = 1; + + ArrayList[] arr_int_data = new ArrayList[4]; + try { + // Write Integer data + arr_int_data[0] = new ArrayList(Arrays.asList(1, 2, 3, 4)); + arr_int_data[1] = new ArrayList(Arrays.asList(2, 3, 4, 5)); + arr_int_data[2] = new ArrayList(Arrays.asList(4, 5, 6, 7)); + arr_int_data[3] = new ArrayList(Arrays.asList(7, 8, 9, 10)); + Class dataClass = arr_int_data.getClass(); + assertTrue("testH5AArraywr.getClass: " + dataClass, dataClass.isArray()); + + try { + atype_int_id = H5.H5Tarray_create(HDF5Constants.H5T_STD_U32LE, 1, dims); + assertTrue("testH5AArraywr.H5Tarray_create: ", atype_int_id >= 0); + } + catch (Exception err) { + if (atype_int_id > 0) + try { + H5.H5Tclose(atype_int_id); + } + catch (Exception ex) { + } + err.printStackTrace(); + fail("H5.testH5AArraywr: " + err); + } + + try { + aspace_id = H5.H5Screate_simple(1, dims, null); + assertTrue(aspace_id > 0); + att_int_id = H5.H5Acreate(H5did, att_int_name, atype_int_id, aspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + assertTrue("testH5AVLwr: ", att_int_id >= 0); + + H5.H5AwriteVL(att_int_id, atype_int_id, arr_int_data); + } + catch (Exception err) { + if (att_int_id > 0) + try { + H5.H5Aclose(att_int_id); + } + catch (Exception ex) { + } + if (atype_int_id > 0) + try { + H5.H5Tclose(atype_int_id); + } + catch (Exception ex) { + } + err.printStackTrace(); + fail("H5.testH5AVLwr: " + err); + } + finally { + if (aspace_id > 0) + try { + H5.H5Sclose(aspace_id); + } + catch (Exception ex) { + } + } + + H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL); + + for (int j = 0; j < 
dims.length; j++) + lsize *= dims[j]; + + // Read Integer data + ArrayList[] arr_readbuf = new ArrayList[4]; + for (int j = 0; j < lsize; j++) + arr_readbuf[j] = new ArrayList(); + + try { + H5.H5AreadVL(att_int_id, atype_int_id, arr_readbuf); + } + catch (Exception ex) { + ex.printStackTrace(); + } + assertTrue("testH5AVLwr:" + arr_readbuf[0].get(0), + arr_int_data[0].get(0).equals(arr_readbuf[0].get(0))); + assertTrue("testH5AVLwr:" + arr_readbuf[1].get(0), + arr_int_data[1].get(0).equals(arr_readbuf[1].get(0))); + assertTrue("testH5AVLwr:" + arr_readbuf[2].get(0), + arr_int_data[2].get(0).equals(arr_readbuf[2].get(0))); + assertTrue("testH5AVLwr:" + arr_readbuf[3].get(0), + arr_int_data[3].get(0).equals(arr_readbuf[3].get(0))); + } + catch (Throwable err) { + err.printStackTrace(); + fail("H5.testH5AArraywr: " + err); + } + finally { + if (att_int_id > 0) + try { + H5.H5Aclose(att_int_id); + } + catch (Exception ex) { + } + if (atype_int_id > 0) + try { + H5.H5Tclose(atype_int_id); + } + catch (Exception ex) { + } + } + } + + @Test + public void testH5AArray_string_buffer() throws Throwable + { + String att_str_name = "ArrayStringdata"; + long att_str_id = HDF5Constants.H5I_INVALID_HID; + long atype_str_id = HDF5Constants.H5I_INVALID_HID; + long aspace_id = HDF5Constants.H5I_INVALID_HID; + long[] strdims = {4}; + long[] dims = {6}; + long lsize = 1; + + String[] str_data0 = {"Parting", "is such", "sweet", "sorrow."}; + String[] str_data1 = {"Testing", "one", "two", "three."}; + String[] str_data2 = {"Dog,", "man's", "best", "friend."}; + String[] str_data3 = {"Diamonds", "are", "a", "girls!"}; + String[] str_data4 = {"S A", "T U R", "D A Y", "night"}; + String[] str_data5 = {"That's", "all", "folks", "!!!"}; + + ArrayList[] arr_str_data = new ArrayList[6]; + arr_str_data[0] = new ArrayList(Arrays.asList(str_data0)); + arr_str_data[1] = new ArrayList(Arrays.asList(str_data1)); + arr_str_data[2] = new ArrayList(Arrays.asList(str_data2)); + arr_str_data[3] = new 
ArrayList(Arrays.asList(str_data3)); + arr_str_data[4] = new ArrayList(Arrays.asList(str_data4)); + arr_str_data[5] = new ArrayList(Arrays.asList(str_data5)); + + try { + H5atid = H5.H5Tcopy(HDF5Constants.H5T_C_S1); + } + catch (Throwable err) { + err.printStackTrace(); + fail("testH5AArray_string_buffer.H5.H5Tcopy: " + err); + } + assertTrue("testH5AArray_string_buffer.H5Tcopy: ", H5atid >= 0); + try { + H5.H5Tset_size(H5atid, HDF5Constants.H5T_VARIABLE); + assertTrue("testH5AArray_string_buffer.H5Tis_variable_str", H5.H5Tis_variable_str(H5atid)); + } + catch (Throwable err) { + err.printStackTrace(); + fail("testH5DArray_string_buffer.H5Tset_size: " + err); + } + try { + atype_str_id = H5.H5Tarray_create(H5atid, 1, strdims); + assertTrue("testH5AArray_string_buffer.H5Tarray_create: ", atype_str_id >= 0); + } + catch (Exception err) { + if (atype_str_id > 0) + try { + H5.H5Tclose(atype_str_id); + } + catch (Exception ex) { + } + err.printStackTrace(); + fail("testH5AArray_string_buffer: " + err); + } + + try { + aspace_id = H5.H5Screate_simple(1, dims, null); + assertTrue(aspace_id > 0); + att_str_id = H5.H5Acreate(H5did, att_str_name, atype_str_id, aspace_id, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + assertTrue("testH5AArray_string_buffer: ", att_str_id >= 0); + + H5.H5AwriteVL(att_str_id, atype_str_id, arr_str_data); + } + catch (Exception err) { + if (att_str_id > 0) + try { + H5.H5Dclose(att_str_id); + } + catch (Exception ex) { + } + if (atype_str_id > 0) + try { + H5.H5Tclose(atype_str_id); + } + catch (Exception ex) { + } + err.printStackTrace(); + fail("testH5AArray_string_buffer: " + err); + } + finally { + if (aspace_id > 0) + try { + H5.H5Sclose(aspace_id); + } + catch (Exception ex) { + } + } + + H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL); + + for (int j = 0; j < dims.length; j++) + lsize *= dims[j]; + + ArrayList[] arr_readbuf = new ArrayList[6]; + for (int j = 0; j < lsize; j++) + arr_readbuf[j] = new ArrayList(); + + try { + 
H5.H5AreadVL(att_str_id, atype_str_id, arr_readbuf); + } + catch (Exception ex) { + ex.printStackTrace(); + } + finally { + if (att_str_id > 0) + try { + H5.H5Aclose(att_str_id); + } + catch (Exception ex) { + } + if (atype_str_id > 0) + try { + H5.H5Tclose(atype_str_id); + } + catch (Exception ex) { + } + } + assertTrue("testH5AArray_string_buffer:" + arr_readbuf[0].get(0), + arr_str_data[0].get(0).equals(arr_readbuf[0].get(0))); + assertTrue("testH5AArray_string_buffer:" + arr_readbuf[1].get(0), + arr_str_data[1].get(0).equals(arr_readbuf[1].get(0))); + assertTrue("testH5AArray_string_buffer:" + arr_readbuf[2].get(0), + arr_str_data[2].get(0).equals(arr_readbuf[2].get(0))); + assertTrue("testH5AArray_string_buffer:" + arr_readbuf[3].get(0), + arr_str_data[3].get(0).equals(arr_readbuf[3].get(0))); + } } diff --git a/java/test/TestH5D.java b/java/test/TestH5D.java index f7e57021b57..eacaabf82eb 100644 --- a/java/test/TestH5D.java +++ b/java/test/TestH5D.java @@ -31,6 +31,7 @@ import org.junit.After; import org.junit.Before; +import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestName; @@ -157,7 +158,7 @@ private final void _createDataset(long fid, long dsid, String name, long dapl) assertTrue("TestH5D._createDataset.H5Dcreate: ", H5did >= 0); } - private final void _createVLDataset(long fid, long dsid, String name, long dapl) + private final void _createVLStrDataset(String name, long dapl) { try { H5dtid = H5.H5Tcopy(HDF5Constants.H5T_C_S1); @@ -166,24 +167,24 @@ private final void _createVLDataset(long fid, long dsid, String name, long dapl) err.printStackTrace(); fail("H5.H5Tcopy: " + err); } - assertTrue("TestH5D._createVLDataset.H5Tcopy: ", H5dtid >= 0); + assertTrue("TestH5D._createVLStrDataset.H5Tcopy: ", H5dtid >= 0); try { H5.H5Tset_size(H5dtid, HDF5Constants.H5T_VARIABLE); - assertTrue("TestH5D._createVLDataset.H5Tis_variable_str", H5.H5Tis_variable_str(H5dtid)); + 
assertTrue("TestH5D._createVLStrDataset.H5Tis_variable_str", H5.H5Tis_variable_str(H5dtid)); } catch (Throwable err) { err.printStackTrace(); fail("H5.H5Tset_size: " + err); } try { - H5did = H5.H5Dcreate(fid, name, H5dtid, dsid, HDF5Constants.H5P_DEFAULT, + H5did = H5.H5Dcreate(H5fid, name, H5dtid, H5dsid, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl); } catch (Throwable err) { err.printStackTrace(); fail("H5.H5Dcreate: " + err); } - assertTrue("TestH5D._createVLDataset.H5Dcreate: ", H5did >= 0); + assertTrue("TestH5D._createVLStrDataset.H5Dcreate: ", H5did >= 0); } private final void _closeH5file() throws HDF5LibraryException @@ -934,7 +935,7 @@ public int callback(byte[] elem_buf, long elem_id, int ndim, long[] point, H5D_i buf_data[(indx * DIM_Y) + jndx] == 126); } - @Test + @Ignore public void testH5Dvlen_get_buf_size() { String[] str_data = {"Parting", "is such", "sweet", "sorrow.", "Testing", "one", "two", "three.", @@ -945,12 +946,12 @@ public void testH5Dvlen_get_buf_size() for (int idx = 0; idx < str_data.length; idx++) str_data_bytes += str_data[idx].length() + 1; // Account for terminating null - _createVLDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT); + _createVLStrDataset("dset", HDF5Constants.H5P_DEFAULT); try { if ((H5did >= 0) && (H5dtid >= 0)) - H5.H5Dwrite_VLStrings(H5did, H5dtid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, - HDF5Constants.H5P_DEFAULT, str_data); + H5.H5DwriteVL(H5did, H5dtid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, str_data); } catch (Exception e) { e.printStackTrace(); @@ -964,26 +965,122 @@ public void testH5Dvlen_get_buf_size() assertTrue("H5Dvlen_get_buf_size " + vl_size + " == " + str_data_bytes, vl_size == str_data_bytes); } - @Test - public void testH5Dvlen_read_default_buffer() throws Throwable + @Ignore + public void testH5Dvlen_string_buffer() throws Throwable { - String[] str_data = {"Parting", "is such", "sweet", "sorrow.", "Testing", "one", "two", 
"three.", - "Dog,", "man's", "best", "friend.", "Diamonds", "are", "a", "girls!", - "S A", "T U R", "D A Y", "night", "That's", "all", "folks", "!!!"}; - byte[] read_data = new byte[512]; + String dset_str_name = "VLStringdata"; + long dset_str_id = HDF5Constants.H5I_INVALID_HID; + long dtype_str_id = HDF5Constants.H5I_INVALID_HID; + long dspace_id = HDF5Constants.H5I_INVALID_HID; + long[] dims = {64}; + long lsize = 1; - _createVLDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT); + String[] str_data0 = {"Parting", "is such", "sweet", "sorrow."}; + String[] str_data1 = {"Testing", "one", "two", "three."}; + String[] str_data2 = {"Dog,", "man's", "best", "friend."}; + String[] str_data3 = {"Diamonds", "are", "a", "girls!"}; + String[] str_data4 = {"S A", "T U R", "D A Y", "night"}; + String[] str_data5 = {"That's", "all", "folks", "!!!"}; + + ArrayList[] vl_str_data = new ArrayList[6]; + vl_str_data[0] = new ArrayList(Arrays.asList(str_data0)); + vl_str_data[1] = new ArrayList(Arrays.asList(str_data1)); + vl_str_data[2] = new ArrayList(Arrays.asList(str_data2)); + vl_str_data[3] = new ArrayList(Arrays.asList(str_data3)); + vl_str_data[4] = new ArrayList(Arrays.asList(str_data4)); + vl_str_data[5] = new ArrayList(Arrays.asList(str_data5)); try { - H5.H5Dwrite_VLStrings(H5did, H5dtid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, - HDF5Constants.H5P_DEFAULT, str_data); + H5dtid = H5.H5Tcopy(HDF5Constants.H5T_C_S1); } - catch (Exception e) { - e.printStackTrace(); + catch (Throwable err) { + err.printStackTrace(); + fail("testH5Dvlen_string_buffer.H5.H5Tcopy: " + err); + } + assertTrue("testH5Dvlen_string_buffer.H5Tcopy: ", H5dtid >= 0); + try { + H5.H5Tset_size(H5dtid, HDF5Constants.H5T_VARIABLE); + assertTrue("testH5Dvlen_string_buffer.H5Tis_variable_str", H5.H5Tis_variable_str(H5dtid)); + } + catch (Throwable err) { + err.printStackTrace(); + fail("testH5Dvlen_string_buffer.H5Tset_size: " + err); + } + try { + dtype_str_id = H5.H5Tvlen_create(H5dtid); + 
assertTrue("testH5Dvlen_string_buffer.H5Tvlen_create: ", dtype_str_id >= 0); + } + catch (Exception err) { + if (dtype_str_id > 0) + try { + H5.H5Tclose(dtype_str_id); + } + catch (Exception ex) { + } + err.printStackTrace(); + fail("testH5Dvlen_string_buffer: " + err); + } + + try { + dspace_id = H5.H5Screate_simple(1, dims, null); + assertTrue(dspace_id > 0); + dset_str_id = + H5.H5Dcreate(H5fid, dset_str_name, dtype_str_id, dspace_id, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + assertTrue("testH5Dvlen_string_buffer: ", dset_str_id >= 0); + + H5.H5DwriteVL(dset_str_id, dtype_str_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, vl_str_data); + } + catch (Exception err) { + if (dset_str_id > 0) + try { + H5.H5Dclose(dset_str_id); + } + catch (Exception ex) { + } + if (dtype_str_id > 0) + try { + H5.H5Tclose(dtype_str_id); + } + catch (Exception ex) { + } + err.printStackTrace(); + fail("testH5Dvlen_string_buffer: " + err); + } + finally { + if (dspace_id > 0) + try { + H5.H5Sclose(dspace_id); + } + catch (Exception ex) { + } } - H5.H5Dread(H5did, H5dtid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, - read_data); + H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL); + + for (int j = 0; j < dims.length; j++) + lsize *= dims[j]; + + ArrayList[] vl_readbuf = new ArrayList[4]; + for (int j = 0; j < lsize; j++) + vl_readbuf[j] = new ArrayList(); + + try { + H5.H5DreadVL(dset_str_id, dtype_str_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, vl_readbuf); + } + catch (Exception ex) { + ex.printStackTrace(); + } + assertTrue("testH5Dvlen_string_buffer:" + vl_readbuf[0].get(0), + vl_str_data[0].get(0).equals(vl_readbuf[0].get(0))); + assertTrue("testH5Dvlen_string_buffer:" + vl_readbuf[1].get(0), + vl_str_data[1].get(0).equals(vl_readbuf[1].get(0))); + assertTrue("testH5Dvlen_string_buffer:" + vl_readbuf[2].get(0), + 
vl_str_data[2].get(0).equals(vl_readbuf[2].get(0))); + assertTrue("testH5Dvlen_string_buffer:" + vl_readbuf[3].get(0), + vl_str_data[3].get(0).equals(vl_readbuf[3].get(0))); } @Test @@ -994,20 +1091,21 @@ public void testH5Dvlen_write_read() "S A", "T U R", "D A Y", "night", "That's", "all", "folks", "!!!"}; String[] str_rdata = new String[DIM_X * DIM_Y]; - _createVLDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT); + _createVLStrDataset("dset", HDF5Constants.H5P_DEFAULT); try { if ((H5did >= 0) && (H5dtid >= 0)) - H5.H5Dwrite_VLStrings(H5did, H5dtid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, - HDF5Constants.H5P_DEFAULT, str_wdata); + H5.H5DwriteVL(H5did, H5dtid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, str_wdata); } catch (Exception e) { e.printStackTrace(); } + try { if ((H5did >= 0) && (H5dtid >= 0)) - H5.H5Dread_VLStrings(H5did, H5dtid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, - HDF5Constants.H5P_DEFAULT, str_rdata); + H5.H5DreadVL(H5did, H5dtid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, str_rdata); } catch (Exception e) { e.printStackTrace(); @@ -1153,51 +1251,50 @@ public void testH5DVLwr() H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL); - for (int j = 0; j < dims.length; j++) { + for (int j = 0; j < dims.length; j++) lsize *= dims[j]; - } // Read Integer data - ArrayList[] vl_readbuf = new ArrayList[4]; + ArrayList[] vl_int_readbuf = new ArrayList[4]; for (int j = 0; j < lsize; j++) - vl_readbuf[j] = new ArrayList(); + vl_int_readbuf[j] = new ArrayList(); try { H5.H5DreadVL(dset_int_id, dtype_int_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, - HDF5Constants.H5P_DEFAULT, vl_readbuf); + HDF5Constants.H5P_DEFAULT, vl_int_readbuf); } catch (Exception ex) { ex.printStackTrace(); } - assertTrue("testH5DVLwr:" + vl_readbuf[0].get(0), - vl_int_data[0].get(0).equals(vl_readbuf[0].get(0))); - assertTrue("testH5DVLwr:" + vl_readbuf[1].get(0), - 
vl_int_data[1].get(0).equals(vl_readbuf[1].get(0))); - assertTrue("testH5DVLwr:" + vl_readbuf[2].get(0), - vl_int_data[2].get(0).equals(vl_readbuf[2].get(0))); - assertTrue("testH5DVLwr:" + vl_readbuf[3].get(0), - vl_int_data[3].get(0).equals(vl_readbuf[3].get(0))); + assertTrue("testH5DVLwr:" + vl_int_readbuf[0].get(0), + vl_int_data[0].get(0).equals(vl_int_readbuf[0].get(0))); + assertTrue("testH5DVLwr:" + vl_int_readbuf[1].get(0), + vl_int_data[1].get(0).equals(vl_int_readbuf[1].get(0))); + assertTrue("testH5DVLwr:" + vl_int_readbuf[2].get(0), + vl_int_data[2].get(0).equals(vl_int_readbuf[2].get(0))); + assertTrue("testH5DVLwr:" + vl_int_readbuf[3].get(0), + vl_int_data[3].get(0).equals(vl_int_readbuf[3].get(0))); // Read Double data - vl_readbuf = new ArrayList[4]; + ArrayList[] vl_dbl_readbuf = new ArrayList[4]; for (int j = 0; j < lsize; j++) - vl_readbuf[j] = new ArrayList(); + vl_dbl_readbuf[j] = new ArrayList(); try { H5.H5DreadVL(dset_dbl_id, dtype_dbl_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, - HDF5Constants.H5P_DEFAULT, vl_readbuf); + HDF5Constants.H5P_DEFAULT, vl_dbl_readbuf); } catch (Exception ex) { ex.printStackTrace(); } - assertTrue("testH5DVLwr:" + vl_readbuf[0].get(0), - vl_dbl_data[0].get(0).equals(vl_readbuf[0].get(0))); - assertTrue("testH5DVLwr:" + vl_readbuf[1].get(0), - vl_dbl_data[1].get(0).equals(vl_readbuf[1].get(0))); - assertTrue("testH5DVLwr:" + vl_readbuf[2].get(0), - vl_dbl_data[2].get(0).equals(vl_readbuf[2].get(0))); - assertTrue("testH5DVLwr:" + vl_readbuf[3].get(0), - vl_dbl_data[3].get(0).equals(vl_readbuf[3].get(0))); + assertTrue("testH5DVLwr:" + vl_dbl_readbuf[0].get(0), + vl_dbl_data[0].get(0).equals(vl_dbl_readbuf[0].get(0))); + assertTrue("testH5DVLwr:" + vl_dbl_readbuf[1].get(0), + vl_dbl_data[1].get(0).equals(vl_dbl_readbuf[1].get(0))); + assertTrue("testH5DVLwr:" + vl_dbl_readbuf[2].get(0), + vl_dbl_data[2].get(0).equals(vl_dbl_readbuf[2].get(0))); + assertTrue("testH5DVLwr:" + vl_dbl_readbuf[3].get(0), + 
vl_dbl_data[3].get(0).equals(vl_dbl_readbuf[3].get(0))); } catch (Throwable err) { err.printStackTrace(); @@ -1230,4 +1327,624 @@ public void testH5DVLwr() } } } + + @Test + public void testH5DVLwrVL() + { + String dset_int_name = "VLIntdata"; + long dset_int_id = HDF5Constants.H5I_INVALID_HID; + long dtype_int_id = HDF5Constants.H5I_INVALID_HID; + long base_dtype_int_id = HDF5Constants.H5I_INVALID_HID; + long dspace_id = HDF5Constants.H5I_INVALID_HID; + long[] dims = {4}; + long lsize = 1; + + ArrayList[] base_vl_int_data = new ArrayList[4]; + ArrayList[] vl_int_data = new ArrayList[4]; + try { + // Write Integer data + vl_int_data[0] = new ArrayList(Arrays.asList(1)); + vl_int_data[1] = new ArrayList(Arrays.asList(2, 3)); + vl_int_data[2] = new ArrayList(Arrays.asList(4, 5, 6)); + vl_int_data[3] = new ArrayList(Arrays.asList(7, 8, 9, 10)); + Class dataClass = vl_int_data.getClass(); + assertTrue("testH5DVLwrVL.getClass: " + dataClass, dataClass.isArray()); + + // Write VL data + base_vl_int_data[0] = new ArrayList>(); + base_vl_int_data[0].add(vl_int_data[0]); + base_vl_int_data[1] = new ArrayList>(); + base_vl_int_data[1].add(vl_int_data[0]); + base_vl_int_data[1].add(vl_int_data[1]); + base_vl_int_data[2] = new ArrayList>(); + base_vl_int_data[2].add(vl_int_data[0]); + base_vl_int_data[2].add(vl_int_data[1]); + base_vl_int_data[2].add(vl_int_data[2]); + base_vl_int_data[3] = new ArrayList>(); + base_vl_int_data[3].add(vl_int_data[0]); + base_vl_int_data[3].add(vl_int_data[1]); + base_vl_int_data[3].add(vl_int_data[2]); + base_vl_int_data[3].add(vl_int_data[3]); + + try { + dtype_int_id = H5.H5Tvlen_create(HDF5Constants.H5T_STD_U32LE); + assertTrue("testH5DVLwrVL.H5Tvlen_create: ", dtype_int_id >= 0); + base_dtype_int_id = H5.H5Tvlen_create(dtype_int_id); + assertTrue("testH5DVLwrVL.H5Tvlen_create: ", base_dtype_int_id >= 0); + } + catch (Exception err) { + if (base_dtype_int_id > 0) + try { + H5.H5Tclose(base_dtype_int_id); + } + catch (Exception ex) { + } + if 
(dtype_int_id > 0) + try { + H5.H5Tclose(dtype_int_id); + } + catch (Exception ex) { + } + err.printStackTrace(); + fail("H5.testH5DVLwrVL: " + err); + } + + try { + dspace_id = H5.H5Screate_simple(1, dims, null); + assertTrue(dspace_id > 0); + dset_int_id = H5.H5Dcreate(H5fid, dset_int_name, base_dtype_int_id, dspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + assertTrue("testH5DVLwrVL: ", dset_int_id >= 0); + + H5.H5DwriteVL(dset_int_id, base_dtype_int_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, base_vl_int_data); + } + catch (Exception err) { + if (dset_int_id > 0) + try { + H5.H5Dclose(dset_int_id); + } + catch (Exception ex) { + } + if (dtype_int_id > 0) + try { + H5.H5Tclose(dtype_int_id); + } + catch (Exception ex) { + } + err.printStackTrace(); + fail("H5.testH5DVLwrVL: " + err); + } + finally { + if (dspace_id > 0) + try { + H5.H5Sclose(dspace_id); + } + catch (Exception ex) { + } + } + + H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL); + + for (int j = 0; j < dims.length; j++) + lsize *= dims[j]; + + // Read Integer data + ArrayList[] base_vl_readbuf = new ArrayList[4]; + for (int j = 0; j < lsize; j++) + base_vl_readbuf[j] = new ArrayList>(); + + try { + H5.H5DreadVL(dset_int_id, base_dtype_int_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, base_vl_readbuf); + } + catch (Exception ex) { + ex.printStackTrace(); + } + ArrayList> vl_readbuf = (ArrayList>)base_vl_readbuf[0]; + assertTrue("vl_readbuf 0 exists", vl_readbuf != null); + ArrayList vl_readbuf_int = (ArrayList)(vl_readbuf.get(0)); + /* + * System.out.println(); System.out.println("vl_readbuf: " + vl_readbuf); + * System.out.println("vl_readbuf_int: " + vl_readbuf_int); + */ + assertTrue("testH5DVLwrVL:" + vl_readbuf_int.get(0), + vl_int_data[0].get(0).equals(vl_readbuf_int.get(0))); + + vl_readbuf = (ArrayList>)base_vl_readbuf[1]; + vl_readbuf_int = (ArrayList)(vl_readbuf.get(1)); + 
/* + * System.out.println("vl_readbuf: " + vl_readbuf); System.out.println("vl_readbuf_int: " + + * vl_readbuf_int); + */ + assertTrue("testH5DVLwrVL:" + vl_readbuf_int.get(1), + vl_int_data[1].get(1).equals(vl_readbuf_int.get(1))); + + vl_readbuf = (ArrayList>)base_vl_readbuf[2]; + vl_readbuf_int = (ArrayList)(vl_readbuf.get(2)); + /* + * System.out.println("vl_readbuf: " + vl_readbuf); System.out.println("vl_readbuf_int: " + + * vl_readbuf_int); + */ + assertTrue("testH5DVLwrVL:" + vl_readbuf_int.get(2), + vl_int_data[2].get(2).equals(vl_readbuf_int.get(2))); + + vl_readbuf = (ArrayList>)base_vl_readbuf[3]; + vl_readbuf_int = (ArrayList)(vl_readbuf.get(3)); + /* + * System.out.println("vl_readbuf: " + vl_readbuf); System.out.println("vl_readbuf_int: " + + * vl_readbuf_int); + */ + assertTrue("testH5DVLwrVL:" + vl_readbuf_int.get(3), + vl_int_data[3].get(3).equals(vl_readbuf_int.get(3))); + } + catch (Throwable err) { + err.printStackTrace(); + fail("H5.testH5DVLwrVL: " + err); + } + finally { + if (dset_int_id > 0) + try { + H5.H5Dclose(dset_int_id); + } + catch (Exception ex) { + } + if (dtype_int_id > 0) + try { + H5.H5Tclose(dtype_int_id); + } + catch (Exception ex) { + } + if (base_dtype_int_id > 0) + try { + H5.H5Tclose(base_dtype_int_id); + } + catch (Exception ex) { + } + } + } + + @Test + public void testH5DArraywr() + { + String dset_int_name = "ArrayIntdata"; + long dset_int_id = HDF5Constants.H5I_INVALID_HID; + long dtype_int_id = HDF5Constants.H5I_INVALID_HID; + long dspace_id = HDF5Constants.H5I_INVALID_HID; + long[] dims = {4}; + long lsize = 1; + + ArrayList[] arr_int_data = new ArrayList[4]; + try { + // Write Integer data + arr_int_data[0] = new ArrayList(Arrays.asList(1, 2, 3, 4)); + arr_int_data[1] = new ArrayList(Arrays.asList(2, 3, 4, 5)); + arr_int_data[2] = new ArrayList(Arrays.asList(4, 5, 6, 7)); + arr_int_data[3] = new ArrayList(Arrays.asList(7, 8, 9, 10)); + Class dataClass = arr_int_data.getClass(); + 
assertTrue("testH5DArraywr.getClass: " + dataClass, dataClass.isArray()); + + try { + dtype_int_id = H5.H5Tarray_create(HDF5Constants.H5T_STD_U32LE, 1, dims); + assertTrue("testH5DArraywr.H5Tarray_create: ", dtype_int_id >= 0); + } + catch (Exception err) { + if (dtype_int_id > 0) + try { + H5.H5Tclose(dtype_int_id); + } + catch (Exception ex) { + } + err.printStackTrace(); + fail("H5.testH5DArraywr: " + err); + } + + try { + dspace_id = H5.H5Screate_simple(1, dims, null); + assertTrue(dspace_id > 0); + dset_int_id = + H5.H5Dcreate(H5fid, dset_int_name, dtype_int_id, dspace_id, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + assertTrue("testH5DVLwr: ", dset_int_id >= 0); + + H5.H5DwriteVL(dset_int_id, dtype_int_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, arr_int_data); + } + catch (Exception err) { + if (dset_int_id > 0) + try { + H5.H5Dclose(dset_int_id); + } + catch (Exception ex) { + } + if (dtype_int_id > 0) + try { + H5.H5Tclose(dtype_int_id); + } + catch (Exception ex) { + } + err.printStackTrace(); + fail("H5.testH5DVLwr: " + err); + } + finally { + if (dspace_id > 0) + try { + H5.H5Sclose(dspace_id); + } + catch (Exception ex) { + } + } + + H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL); + + for (int j = 0; j < dims.length; j++) + lsize *= dims[j]; + + // Read Integer data + ArrayList[] arr_readbuf = new ArrayList[4]; + for (int j = 0; j < lsize; j++) + arr_readbuf[j] = new ArrayList(); + + try { + H5.H5DreadVL(dset_int_id, dtype_int_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, arr_readbuf); + } + catch (Exception ex) { + ex.printStackTrace(); + } + assertTrue("testH5DVLwr:" + arr_readbuf[0].get(0), + arr_int_data[0].get(0).equals(arr_readbuf[0].get(0))); + assertTrue("testH5DVLwr:" + arr_readbuf[1].get(0), + arr_int_data[1].get(0).equals(arr_readbuf[1].get(0))); + assertTrue("testH5DVLwr:" + arr_readbuf[2].get(0), + 
arr_int_data[2].get(0).equals(arr_readbuf[2].get(0))); + assertTrue("testH5DVLwr:" + arr_readbuf[3].get(0), + arr_int_data[3].get(0).equals(arr_readbuf[3].get(0))); + } + catch (Throwable err) { + err.printStackTrace(); + fail("H5.testH5DArraywr: " + err); + } + finally { + if (dset_int_id > 0) + try { + H5.H5Dclose(dset_int_id); + } + catch (Exception ex) { + } + if (dtype_int_id > 0) + try { + H5.H5Tclose(dtype_int_id); + } + catch (Exception ex) { + } + } + } + + @Test + public void testH5DArray_string_buffer() throws Throwable + { + String dset_str_name = "ArrayStringdata"; + long dset_str_id = HDF5Constants.H5I_INVALID_HID; + long dtype_str_id = HDF5Constants.H5I_INVALID_HID; + long dspace_id = HDF5Constants.H5I_INVALID_HID; + long[] strdims = {4}; + long[] dims = {6}; + long lsize = 1; + + String[] str_data0 = {"Parting", "is such", "sweet", "sorrow."}; + String[] str_data1 = {"Testing", "one", "two", "three."}; + String[] str_data2 = {"Dog,", "man's", "best", "friend."}; + String[] str_data3 = {"Diamonds", "are", "a", "girls!"}; + String[] str_data4 = {"S A", "T U R", "D A Y", "night"}; + String[] str_data5 = {"That's", "all", "folks", "!!!"}; + + ArrayList[] arr_str_data = new ArrayList[6]; + arr_str_data[0] = new ArrayList(Arrays.asList(str_data0)); + arr_str_data[1] = new ArrayList(Arrays.asList(str_data1)); + arr_str_data[2] = new ArrayList(Arrays.asList(str_data2)); + arr_str_data[3] = new ArrayList(Arrays.asList(str_data3)); + arr_str_data[4] = new ArrayList(Arrays.asList(str_data4)); + arr_str_data[5] = new ArrayList(Arrays.asList(str_data5)); + + try { + H5dtid = H5.H5Tcopy(HDF5Constants.H5T_C_S1); + } + catch (Throwable err) { + err.printStackTrace(); + fail("testH5DArray_string_buffer.H5.H5Tcopy: " + err); + } + assertTrue("testH5DArray_string_buffer.H5Tcopy: ", H5dtid >= 0); + try { + H5.H5Tset_size(H5dtid, HDF5Constants.H5T_VARIABLE); + assertTrue("testH5DArray_string_buffer.H5Tis_variable_str", H5.H5Tis_variable_str(H5dtid)); + } + catch 
(Throwable err) { + err.printStackTrace(); + fail("testH5DArray_string_buffer.H5Tset_size: " + err); + } + try { + dtype_str_id = H5.H5Tarray_create(H5dtid, 1, strdims); + assertTrue("testH5DArray_string_buffer.H5Tarray_create: ", dtype_str_id >= 0); + } + catch (Exception err) { + if (dtype_str_id > 0) + try { + H5.H5Tclose(dtype_str_id); + } + catch (Exception ex) { + } + err.printStackTrace(); + fail("testH5DArray_string_buffer: " + err); + } + + try { + dspace_id = H5.H5Screate_simple(1, dims, null); + assertTrue(dspace_id > 0); + dset_str_id = + H5.H5Dcreate(H5fid, dset_str_name, dtype_str_id, dspace_id, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + assertTrue("testH5DArray_string_buffer: ", dset_str_id >= 0); + + H5.H5DwriteVL(dset_str_id, dtype_str_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, arr_str_data); + } + catch (Exception err) { + if (dset_str_id > 0) + try { + H5.H5Dclose(dset_str_id); + } + catch (Exception ex) { + } + if (dtype_str_id > 0) + try { + H5.H5Tclose(dtype_str_id); + } + catch (Exception ex) { + } + err.printStackTrace(); + fail("testH5DArray_string_buffer: " + err); + } + finally { + if (dspace_id > 0) + try { + H5.H5Sclose(dspace_id); + } + catch (Exception ex) { + } + } + + H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL); + + for (int j = 0; j < dims.length; j++) + lsize *= dims[j]; + + ArrayList[] arr_readbuf = new ArrayList[6]; + for (int j = 0; j < lsize; j++) + arr_readbuf[j] = new ArrayList(); + + try { + H5.H5DreadVL(dset_str_id, dtype_str_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, arr_readbuf); + } + catch (Exception ex) { + ex.printStackTrace(); + } + finally { + if (dset_str_id > 0) + try { + H5.H5Dclose(dset_str_id); + } + catch (Exception ex) { + } + if (dtype_str_id > 0) + try { + H5.H5Tclose(dtype_str_id); + } + catch (Exception ex) { + } + } + assertTrue("testH5DArray_string_buffer:" + arr_readbuf[0].get(0), + 
arr_str_data[0].get(0).equals(arr_readbuf[0].get(0))); + assertTrue("testH5DArray_string_buffer:" + arr_readbuf[1].get(0), + arr_str_data[1].get(0).equals(arr_readbuf[1].get(0))); + assertTrue("testH5DArray_string_buffer:" + arr_readbuf[2].get(0), + arr_str_data[2].get(0).equals(arr_readbuf[2].get(0))); + assertTrue("testH5DArray_string_buffer:" + arr_readbuf[3].get(0), + arr_str_data[3].get(0).equals(arr_readbuf[3].get(0))); + } + + @Test + public void testH5DArrayenum_rw() + { + String dset_enum_name = "ArrayEnumdata"; + long dset_enum_id = HDF5Constants.H5I_INVALID_HID; + long dtype_enum_id = HDF5Constants.H5I_INVALID_HID; + long dtype_arr_enum_id = HDF5Constants.H5I_INVALID_HID; + long dspace_id = HDF5Constants.H5I_INVALID_HID; + long[] dims = {4}; + long lsize = 1; + String enum_type = "Enum_type"; + byte[] enum_val = new byte[1]; + String enum_name = null; + + // Create a enumerate datatype + try { + dtype_enum_id = H5.H5Tcreate(HDF5Constants.H5T_ENUM, (long)1); + assertTrue("testH5DArrayenum_wr.H5Tarray_create: ", dtype_enum_id >= 0); + } + catch (Throwable err) { + if (dtype_enum_id > 0) + try { + H5.H5Tclose(dtype_enum_id); + } + catch (Exception ex) { + } + err.printStackTrace(); + fail("testH5DArrayenum_rw:H5Tcreate " + err); + } + + try { + enum_val[0] = 10; + H5.H5Tenum_insert(dtype_enum_id, "RED", enum_val); + enum_val[0] = 11; + H5.H5Tenum_insert(dtype_enum_id, "GREEN", enum_val); + enum_val[0] = 12; + H5.H5Tenum_insert(dtype_enum_id, "BLUE", enum_val); + enum_val[0] = 13; + H5.H5Tenum_insert(dtype_enum_id, "ORANGE", enum_val); + enum_val[0] = 14; + H5.H5Tenum_insert(dtype_enum_id, "YELLOW", enum_val); + + // Query member number and member index by member name, for enumeration type. 
+ assertTrue("Can't get member number", H5.H5Tget_nmembers(dtype_enum_id) == 5); + assertTrue("Can't get correct index number", + H5.H5Tget_member_index(dtype_enum_id, "ORANGE") == 3); + + // Commit enumeration datatype and close it */ + H5.H5Tcommit(H5fid, enum_type, dtype_enum_id, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + + H5.H5Tclose(dtype_enum_id); + + // Open the dataytpe for query + dtype_enum_id = H5.H5Topen(H5fid, enum_type, HDF5Constants.H5P_DEFAULT); + assertTrue("testH5DArrayenum_rw:H5Tcreate", dtype_enum_id >= 0); + + // Query member number and member index by member name, for enumeration type + assertTrue("Can't get member number", H5.H5Tget_nmembers(dtype_enum_id) == 5); + assertTrue("Can't get correct index number", + H5.H5Tget_member_index(dtype_enum_id, "ORANGE") == 3); + + // Query member value by member name, for enumeration type + H5.H5Tenum_valueof(dtype_enum_id, "ORANGE", enum_val); + assertTrue("Incorrect value for enum member", enum_val[0] == 13); + + // Query member value by member index, for enumeration type + H5.H5Tget_member_value(dtype_enum_id, 2, enum_val); + assertTrue("Incorrect value for enum member", enum_val[0] == 12); + + // Query member name by member value, for enumeration type + enum_val[0] = 14; + enum_name = H5.H5Tenum_nameof(dtype_enum_id, enum_val, 16); + assertTrue("Incorrect name for enum member", enum_name.compareTo("YELLOW") == 0); + + ArrayList[] arr_enum_data = new ArrayList[4]; + try { + // Write Integer data + arr_enum_data[0] = new ArrayList(Arrays.asList(10, 11, 12, 13)); + arr_enum_data[1] = new ArrayList(Arrays.asList(11, 12, 13, 14)); + arr_enum_data[2] = new ArrayList(Arrays.asList(12, 13, 14, 10)); + arr_enum_data[3] = new ArrayList(Arrays.asList(13, 14, 10, 11)); + Class dataClass = arr_enum_data.getClass(); + assertTrue("testH5DArrayenum_wr.getClass: " + dataClass, dataClass.isArray()); + + try { + dtype_arr_enum_id = 
H5.H5Tarray_create(HDF5Constants.H5T_STD_U32LE, 1, dims); + assertTrue("testH5DArrayenum_wr.H5Tarray_create: ", dtype_arr_enum_id >= 0); + } + catch (Exception err) { + if (dtype_arr_enum_id > 0) + try { + H5.H5Tclose(dtype_arr_enum_id); + } + catch (Exception ex) { + } + err.printStackTrace(); + fail("H5.testH5DArrayenum_wr: " + err); + } + + dspace_id = H5.H5Screate_simple(1, dims, null); + assertTrue(dspace_id > 0); + dset_enum_id = H5.H5Dcreate(H5fid, dset_enum_name, dtype_arr_enum_id, dspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + assertTrue("testH5DVLwr: ", dset_enum_id >= 0); + + H5.H5DwriteVL(dset_enum_id, dtype_arr_enum_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, arr_enum_data); + } + catch (Throwable err) { + if (dset_enum_id > 0) + try { + H5.H5Dclose(dset_enum_id); + } + catch (Exception ex) { + } + if (dtype_enum_id > 0) + try { + H5.H5Tclose(dtype_enum_id); + } + catch (Exception ex) { + } + if (dtype_arr_enum_id > 0) + try { + H5.H5Tclose(dtype_arr_enum_id); + } + catch (Exception ex) { + } + err.printStackTrace(); + fail("testH5DArrayenum_rw:query " + err); + } + finally { + if (dspace_id > 0) + try { + H5.H5Sclose(dspace_id); + } + catch (Exception ex) { + } + } + + H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL); + + for (int j = 0; j < dims.length; j++) + lsize *= dims[j]; + + // Read Integer data + ArrayList[] arr_readbuf = new ArrayList[4]; + for (int j = 0; j < lsize; j++) + arr_readbuf[j] = new ArrayList(); + + try { + H5.H5DreadVL(dset_enum_id, dtype_arr_enum_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, arr_readbuf); + } + catch (Exception ex) { + ex.printStackTrace(); + } + assertTrue("testH5DVLArrayenum_wr:" + arr_readbuf[0].get(0), + arr_enum_data[0].get(0).equals(arr_readbuf[0].get(0))); + assertTrue("testH5DVLArrayenum_wr:" + arr_readbuf[1].get(0), + arr_enum_data[1].get(0).equals(arr_readbuf[1].get(0))); + 
assertTrue("testH5DVLArrayenum_wr:" + arr_readbuf[2].get(0), + arr_enum_data[2].get(0).equals(arr_readbuf[2].get(0))); + assertTrue("testH5DVLArrayenum_wr:" + arr_readbuf[3].get(0), + arr_enum_data[3].get(0).equals(arr_readbuf[3].get(0))); + } + catch (Throwable err) { + err.printStackTrace(); + fail("H5.testH5DArrayenum_wr: " + err); + } + finally { + if (dset_enum_id > 0) + try { + H5.H5Dclose(dset_enum_id); + } + catch (Exception ex) { + } + if (dtype_enum_id > 0) + try { + H5.H5Tclose(dtype_enum_id); + } + catch (Exception ex) { + } + if (dtype_arr_enum_id > 0) + try { + H5.H5Tclose(dtype_arr_enum_id); + } + catch (Exception ex) { + } + } + } } diff --git a/java/test/TestH5R.java b/java/test/TestH5R.java index 221a71ef4d8..1dcf992ce11 100644 --- a/java/test/TestH5R.java +++ b/java/test/TestH5R.java @@ -600,7 +600,7 @@ public void testH5RVLattr_ref() HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); assertTrue("testH5RVLattr_ref: ", attr_obj_id >= 0); - H5.H5AwriteVL(attr_obj_id, atype_obj_id, vl_obj_data); + H5.H5Awrite(attr_obj_id, atype_obj_id, vl_obj_data); } catch (Exception err) { if (attr_obj_id > 0) @@ -657,7 +657,7 @@ public void testH5RVLattr_ref() HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); assertTrue("testH5RVLattr_ref: ", attr_reg_id >= 0); - H5.H5AwriteVL(attr_reg_id, atype_reg_id, vl_reg_data); + H5.H5Awrite(attr_reg_id, atype_reg_id, vl_reg_data); } catch (Exception err) { if (attr_reg_id > 0) @@ -696,7 +696,7 @@ public void testH5RVLattr_ref() vl_readbuf[j] = new ArrayList(); try { - H5.H5AreadVL(attr_obj_id, atype_obj_id, vl_readbuf); + H5.H5Aread(attr_obj_id, atype_obj_id, vl_readbuf); } catch (Exception ex) { ex.printStackTrace(); @@ -716,7 +716,7 @@ public void testH5RVLattr_ref() vl_readbuf[j] = new ArrayList(); try { - H5.H5AreadVL(attr_reg_id, atype_reg_id, vl_readbuf); + H5.H5Aread(attr_reg_id, atype_reg_id, vl_readbuf); } catch (Exception ex) { ex.printStackTrace(); @@ -831,8 +831,8 @@ public void testH5RVLdset_ref() 
HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); assertTrue("testH5RVLdset_ref: ", dset_obj_id >= 0); - H5.H5DwriteVL(dset_obj_id, dtype_obj_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, - HDF5Constants.H5P_DEFAULT, vl_obj_data); + H5.H5Dwrite(dset_obj_id, dtype_obj_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, vl_obj_data); } catch (Exception err) { if (dset_obj_id > 0) @@ -890,8 +890,8 @@ public void testH5RVLdset_ref() HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); assertTrue("testH5RVLdset_ref: ", dset_reg_id >= 0); - H5.H5DwriteVL(dset_reg_id, dtype_reg_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, - HDF5Constants.H5P_DEFAULT, vl_reg_data); + H5.H5Dwrite(dset_reg_id, dtype_reg_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, vl_reg_data); } catch (Exception err) { if (dset_reg_id > 0) @@ -930,8 +930,8 @@ public void testH5RVLdset_ref() vl_readbuf[j] = new ArrayList(); try { - H5.H5DreadVL(dset_obj_id, dtype_obj_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, - HDF5Constants.H5P_DEFAULT, vl_readbuf); + H5.H5Dread(dset_obj_id, dtype_obj_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, vl_readbuf); } catch (Exception ex) { ex.printStackTrace(); @@ -951,8 +951,8 @@ public void testH5RVLdset_ref() vl_readbuf[j] = new ArrayList(); try { - H5.H5DreadVL(dset_reg_id, dtype_reg_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, - HDF5Constants.H5P_DEFAULT, vl_readbuf); + H5.H5Dread(dset_reg_id, dtype_reg_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, vl_readbuf); } catch (Exception ex) { ex.printStackTrace(); diff --git a/java/test/junit.sh.in b/java/test/junit.sh.in index 4e5152b0e75..0690081386a 100644 --- a/java/test/junit.sh.in +++ b/java/test/junit.sh.in @@ -67,8 +67,8 @@ $top_builddir/java/src/jni/.libs/libhdf5_java.* LIST_JAR_TESTFILES=" $HDFLIB_HOME/hamcrest-core.jar $HDFLIB_HOME/junit.jar 
-$HDFLIB_HOME/slf4j-api-1.7.33.jar -$HDFLIB_HOME/ext/slf4j-simple-1.7.33.jar +$HDFLIB_HOME/slf4j-api-2.0.6.jar +$HDFLIB_HOME/ext/slf4j-simple-2.0.6.jar " LIST_JAR_FILES=" $top_builddir/java/src/$JARFILE @@ -304,7 +304,7 @@ JAVAEXEFLAGS=@H5_JAVAFLAGS@ COPY_LIBFILES_TO_BLDLIBDIR COPY_DATAFILES_TO_BLDDIR -CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/junit.jar:"$BLDLIBDIR"/hamcrest-core.jar:"$BLDLIBDIR"/slf4j-api-1.7.33.jar:"$BLDLIBDIR"/slf4j-simple-1.7.33.jar:"$TESTJARFILE"" +CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/junit.jar:"$BLDLIBDIR"/hamcrest-core.jar:"$BLDLIBDIR"/slf4j-api-2.0.6.jar:"$BLDLIBDIR"/slf4j-simple-2.0.6.jar:"$TESTJARFILE"" TEST=/usr/bin/test if [ ! -x /usr/bin/test ] diff --git a/java/test/testfiles/JUnit-TestH5A.txt b/java/test/testfiles/JUnit-TestH5A.txt index efdd4a2743f..f31c86c6f4a 100644 --- a/java/test/testfiles/JUnit-TestH5A.txt +++ b/java/test/testfiles/JUnit-TestH5A.txt @@ -11,6 +11,7 @@ JUnit version 4.11 .testH5Aget_info_by_name .testH5Aget_create_plist .testH5Adelete_by_name +.testH5AVLwrVL .testH5Aopen_by_name .testH5Aget_info .testH5Aget_name @@ -22,14 +23,16 @@ JUnit version 4.11 .testH5Adelete_by_idx_order .testH5Arename_by_name .testH5Acreate2_invalidobject +.testH5AArraywr .testH5Acreate2 .testH5Aiterate_by_name .testH5Adelete_by_idx_null .testH5Adelete_by_idx_invalidobject .testH5Awrite_readVL +.testH5AArray_string_buffer .testH5Aget_info1 Time: XXXX -OK (29 tests) +OK (32 tests) diff --git a/java/test/testfiles/JUnit-TestH5D.txt b/java/test/testfiles/JUnit-TestH5D.txt index 288e6d09190..4c306213450 100644 --- a/java/test/testfiles/JUnit-TestH5D.txt +++ b/java/test/testfiles/JUnit-TestH5D.txt @@ -1,5 +1,8 @@ JUnit version 4.11 +.testH5DArrayenum_rw +.testH5DVLwrVL .testH5Dget_storage_size +.testH5DArraywr .testH5Diterate_write .testH5Dcreate .testH5Dget_offset @@ -12,15 +15,14 @@ JUnit version 4.11 .testH5Dget_storage_size_empty .testH5Diterate .testH5Dget_access_plist -.testH5Dvlen_get_buf_size .testH5Dget_space_closed 
+.testH5DArray_string_buffer .testH5Dget_space_status .testH5Dvlen_write_read .testH5Dget_space .testH5Dget_type_closed -.testH5Dvlen_read_default_buffer Time: XXXX -OK (20 tests) +OK (22 tests) diff --git a/release_docs/INSTALL_CMake.txt b/release_docs/INSTALL_CMake.txt index 6d6660f6f93..0d867d453f4 100644 --- a/release_docs/INSTALL_CMake.txt +++ b/release_docs/INSTALL_CMake.txt @@ -374,7 +374,7 @@ IV. Further considerations the settings for the developers' environment. Then the only options needed on the command line are those options that are different. Example using HDF default cache file: - cmake -C ../config/cmake/cacheinit.cmake -G "Visual Studio 12 2013" \ + cmake -C ../config/cmake/cacheinit.cmake -G "Visual Studio 16 2019" \ -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF -DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=OFF \ -DCMAKE_BUILD_TYPE:STRING=Release .. @@ -775,6 +775,8 @@ BUILD_SHARED_LIBS "Build Shared Libraries" ON BUILD_STATIC_LIBS "Build Static Libraries" ON BUILD_STATIC_EXECS "Build Static Executables" OFF BUILD_TESTING "Build HDF5 Unit Testing" ON +if (WINDOWS) + DISABLE_PDB_FILES "Do not install PDB files" OFF ---------------- HDF5 Build Options --------------------- HDF5_BUILD_CPP_LIB "Build HDF5 C++ Library" OFF @@ -783,7 +785,7 @@ HDF5_BUILD_FORTRAN "Build FORTRAN support" OFF HDF5_BUILD_JAVA "Build JAVA support" OFF HDF5_BUILD_HL_LIB "Build HIGH Level HDF5 Library" ON HDF5_BUILD_TOOLS "Build HDF5 Tools" ON -HDF5_BUILD_HL_TOOLS "Build HIGH Level HDF5 Tools" ON +HDF5_BUILD_HL_GIF_TOOLS "Build HIGH Level HDF5 GIF Tools" OFF ---------------- HDF5 Folder Build Options --------------------- Defaults relative to $ diff --git a/release_docs/README_HDF5_CMake b/release_docs/README_HDF5_CMake index 6d95fd5c395..d4fa9dbb02c 100644 --- a/release_docs/README_HDF5_CMake +++ b/release_docs/README_HDF5_CMake @@ -10,7 +10,7 @@ This tar file contains LIBAEC.tar.gz source for building SZIP replacement ZLib.tar.gz source for building ZLIB hdf5_plugins.tar.gz source for building 
compression plugins - HDF5Examples-2.0.1-Source.tar.gz source for building examples + HDF5Examples-2.0.3-Source.tar.gz source for building examples For more information about building HDF5 with CMake, see USING_HDF5_CMake.txt in hdf5-1.12.3/release_docs, or diff --git a/release_docs/USING_HDF5_VS.txt b/release_docs/USING_HDF5_VS.txt index ae9be6903a9..22f2f542316 100644 --- a/release_docs/USING_HDF5_VS.txt +++ b/release_docs/USING_HDF5_VS.txt @@ -38,7 +38,10 @@ Using Visual Studio 2010 and above with HDF5 Libraries built with Visual Studio library, and then optionally the HDF5 High Level, Fortran or C++ libraries. For example, to compile a C++ application, enter: - szip.lib zlib.lib hdf5.lib hdf5_cpp.lib + hdf5.lib hdf5_cpp.lib + + For static linking: + libszaec.lib libaec.lib libzlib.lib libhdf5.lib libhdf5_cpp.lib ========================================================================== diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 2f30e97d708..193174bfbf6 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -1018,7 +1018,7 @@ endif () #### make the H5detect program set (lib_prog_deps) add_executable (H5detect ${HDF5_SRC_DIR}/H5detect.c) -target_include_directories (H5detect PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (H5detect PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_definitions(H5detect PUBLIC ${HDF_EXTRA_C_FLAGS} ${HDF_EXTRA_FLAGS}) TARGET_C_PROPERTIES (H5detect STATIC) target_link_libraries (H5detect @@ -1112,7 +1112,7 @@ endif () # make the H5make_libsettings program add_executable (H5make_libsettings ${HDF5_SRC_DIR}/H5make_libsettings.c) -target_include_directories (H5make_libsettings PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (H5make_libsettings PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") 
target_compile_definitions(H5make_libsettings PUBLIC ${HDF_EXTRA_C_FLAGS} ${HDF_EXTRA_FLAGS}) TARGET_C_PROPERTIES (H5make_libsettings STATIC) target_link_libraries (H5make_libsettings @@ -1157,10 +1157,6 @@ if (BUILD_SHARED_LIBS) ) endif () -## all_packages="AC,B,B2,D,F,FA,FL,FS,HL,I,O,S,ST,T,Z" -#all_packages="AC,B2,CX,D,F,HL,I,O,S,ST,T,Z" -option (HDF5_ENABLE_DEBUG_APIS "Turn on extra debug output in all packages" OFF) - #----------------------------------------------------------------------------- # Add H5Tinit source to build - generated by H5detect/CMake at configure time #----------------------------------------------------------------------------- @@ -1173,8 +1169,8 @@ if (BUILD_STATIC_LIBS) add_library (${HDF5_LIB_TARGET} STATIC ${common_SRCS} ${gen_SRCS} ${H5_PUBLIC_HEADERS} ${H5_PRIVATE_HEADERS} ${H5_GENERATED_HEADERS} ${H5_MODULE_HEADERS}) target_include_directories (${HDF5_LIB_TARGET} - PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" - INTERFACE "$/include>;$" + PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" + INTERFACE "$/include>;$" ) target_compile_options(${HDF5_LIB_TARGET} PRIVATE "${HDF5_CMAKE_C_FLAGS}") target_compile_definitions(${HDF5_LIB_TARGET} @@ -1182,8 +1178,9 @@ if (BUILD_STATIC_LIBS) ${HDF_EXTRA_C_FLAGS} ${HDF_EXTRA_FLAGS} PRIVATE - "$<$:H5_DEBUG_API>" # Enable tracing of the API - "$<$:H5Z_DEBUG;H5T_DEBUG;H5S_DEBUG;H5O_DEBUG;H5I_DEBUG;H5HL_DEBUG;H5F_DEBUG;H5D_DEBUG;H5B2_DEBUG;H5AC_DEBUG>" + "$<$:H5_DEBUG_API>" # Enable tracing of the API + "$<$:${HDF5_DEBUG_APIS}>" + "$<$:${HDF5_DEVELOPER_DEFS}>" ) TARGET_C_PROPERTIES (${HDF5_LIB_TARGET} STATIC) target_link_libraries (${HDF5_LIB_TARGET} @@ -1212,9 +1209,9 @@ if (BUILD_SHARED_LIBS) add_library (${HDF5_LIBSH_TARGET} SHARED ${common_SRCS} ${shared_gen_SRCS} ${H5_PUBLIC_HEADERS} ${H5_PRIVATE_HEADERS} ${H5_GENERATED_HEADERS} ${H5_MODULE_HEADERS}) target_include_directories (${HDF5_LIBSH_TARGET} - PRIVATE 
"${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" + PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" PUBLIC "$<$:${HDFS_INCLUDE_DIR}>" - INTERFACE "$/include>;$" + INTERFACE "$/include>;$" ) target_compile_options(${HDF5_LIBSH_TARGET} PRIVATE "${HDF5_CMAKE_C_FLAGS}") target_compile_definitions(${HDF5_LIBSH_TARGET} @@ -1225,7 +1222,8 @@ if (BUILD_SHARED_LIBS) PRIVATE "$<$:H5_HAVE_THREADSAFE>" "$<$:H5_DEBUG_API>" # Enable tracing of the API - "$<$:H5Z_DEBUG;H5T_DEBUG;H5S_DEBUG;H5O_DEBUG;H5I_DEBUG;H5HL_DEBUG;H5F_DEBUG;H5D_DEBUG;H5B2_DEBUG;H5AC_DEBUG>" + "$<$:${HDF5_DEBUG_APIS}>" + "$<$:${HDF5_DEVELOPER_DEFS}>" ) TARGET_C_PROPERTIES (${HDF5_LIBSH_TARGET} SHARED) target_link_libraries (${HDF5_LIBSH_TARGET} @@ -1374,17 +1372,7 @@ endif () # Option to build documentation #----------------------------------------------------------------------------- if (DOXYGEN_FOUND) -# This cmake function requires that the non-default doxyfile settings are provided with set (DOXYGEN_xxx) commands -# In addition the doxyfile aliases @INCLUDE option is not supported and would need to be provided in a set (DOXYGEN_ALIASES) command. 
-# doxygen_add_docs (hdf5lib_doc -## ${common_SRCS} ${shared_gen_SRCS} ${H5_PUBLIC_HEADERS} ${H5_PRIVATE_HEADERS} ${H5_GENERATED_HEADERS} ${HDF5_DOXYGEN_DIR}/dox -# ${DOXYGEN_INPUT_DIRECTORY} -# ALL -# WORKING_DIRECTORY ${HDF5_SRC_DIR} -# COMMENT "Generating HDF5 library Source Documentation" -# ) - -# This custom target and doxygen/configure work together + # This custom target and doxygen/configure work together # Replace variables inside @@ with the current values add_custom_target (hdf5lib_doc ALL COMMAND ${DOXYGEN_EXECUTABLE} ${HDF5_BINARY_DIR}/Doxyfile diff --git a/src/H5private.h b/src/H5private.h index 747c77d771d..89bc272d36b 100644 --- a/src/H5private.h +++ b/src/H5private.h @@ -1521,6 +1521,9 @@ H5_DLL H5_ATTR_CONST int Nflock(int fd, int operation); #ifndef HDstrncpy #define HDstrncpy(X, Y, Z) strncpy(X, Y, Z) #endif +#ifndef HDstrndup +#define HDstrndup(S, N) strndup(S, N) +#endif #ifndef HDstrpbrk #define HDstrpbrk(X, Y) strpbrk(X, Y) #endif diff --git a/src/H5system.c b/src/H5system.c index 94f883b8c74..17404a2377b 100644 --- a/src/H5system.c +++ b/src/H5system.c @@ -1059,6 +1059,49 @@ H5_expand_windows_env_vars(char **env_var) FUNC_LEAVE_NOAPI(ret_value) } /* end H5_expand_windows_env_vars() */ + +/*------------------------------------------------------------------------- + * Function: H5_strndup + * + * Purpose: Similar to strndup() for use on Windows. Allocates a new + * string and copies at most `n` bytes from the original + * string into the new string. If the original string is + * longer than `n`, only `n` bytes are copied from the + * original string. In either case, the string being returned + * is guaranteed to be terminated with a null byte. + * + * The returned pointer is allocated by H5MM_malloc in this + * routine and must be freed by the caller with H5MM_free or + * H5MM_xfree. 
+ * + * Return: Pointer to copied string on success + * NULL on failure + * + *------------------------------------------------------------------------- + */ +char * +H5_strndup(const char *s, size_t n) +{ + size_t len; + char *ret_value = NULL; + + FUNC_ENTER_NOAPI_NOINIT + + if (!s) + HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, NULL, "string cannot be NULL"); + + for (len = 0; len < n && s[len] != '\0'; len++) + ; + + if (NULL == (ret_value = H5MM_malloc(len + 1))) + HGOTO_ERROR(H5E_RESOURCE, H5E_CANTALLOC, NULL, "can't allocate buffer for string"); + + H5MM_memcpy(ret_value, s, len); + ret_value[len] = '\0'; + +done: + FUNC_LEAVE_NOAPI(ret_value) +} #endif /* H5_HAVE_WIN32_API */ /* Global variables */ diff --git a/src/H5win32defs.h b/src/H5win32defs.h index 38a350be09e..d7839fc767e 100644 --- a/src/H5win32defs.h +++ b/src/H5win32defs.h @@ -76,6 +76,7 @@ struct timezone { #define HDsleep(S) Sleep(S * 1000) #define HDstat(S, B) _stati64(S, B) #define HDstrcasecmp(A, B) _stricmp(A, B) +#define HDstrndup(S, N) H5_strndup(S, N) #define HDstrdup(S) _strdup(S) #define HDstrtok_r(X, Y, Z) strtok_s(X, Y, Z) #define HDtzset() _tzset() @@ -100,6 +101,7 @@ H5_DLL wchar_t *H5_get_utf16_str(const char *s); H5_DLL int Wopen_utf8(const char *path, int oflag, ...); H5_DLL int Wremove_utf8(const char *path); H5_DLL int H5_get_win32_times(H5_timevals_t *tvs); +H5_DLL char *H5_strndup(const char *s, size_t n); #ifdef __cplusplus } #endif /* __cplusplus */ diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt index 3102f9a72d1..16d8bedb497 100644 --- a/test/CMakeLists.txt +++ b/test/CMakeLists.txt @@ -30,12 +30,14 @@ set (TEST_LIB_HEADERS if (BUILD_STATIC_LIBS) add_library (${HDF5_TEST_LIB_TARGET} STATIC ${TEST_LIB_SOURCES} ${TEST_LIB_HEADERS}) target_include_directories (${HDF5_TEST_LIB_TARGET} - PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};${HDF5_TEST_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" + PRIVATE 
"${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};${HDF5_TEST_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" INTERFACE "$/include>" ) target_compile_options(${HDF5_TEST_LIB_TARGET} PRIVATE "${HDF5_CMAKE_C_FLAGS}") target_compile_definitions(${HDF5_TEST_LIB_TARGET} - PRIVATE "H5_TEST_EXPRESS_LEVEL_DEFAULT=${H5_TEST_EXPRESS_LEVEL_DEFAULT}" + PRIVATE + "H5_TEST_EXPRESS_LEVEL_DEFAULT=${H5_TEST_EXPRESS_LEVEL_DEFAULT}" + "$<$:${HDF5_DEVELOPER_DEFS}>" ) TARGET_C_PROPERTIES (${HDF5_TEST_LIB_TARGET} STATIC) target_link_libraries (${HDF5_TEST_LIB_TARGET} @@ -52,18 +54,21 @@ endif () if (BUILD_SHARED_LIBS) add_library (${HDF5_TEST_LIBSH_TARGET} SHARED ${TEST_LIB_SOURCES} ${TEST_LIB_HEADERS}) target_include_directories (${HDF5_TEST_LIBSH_TARGET} - PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};${HDF5_TEST_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" + PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};${HDF5_TEST_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" INTERFACE "$/include>" ) target_compile_options(${HDF5_TEST_LIBSH_TARGET} PRIVATE "${HDF5_CMAKE_C_FLAGS}") target_compile_definitions(${HDF5_TEST_LIBSH_TARGET} - PUBLIC "H5_BUILT_AS_DYNAMIC_LIB" - PRIVATE "H5_TEST_EXPRESS_LEVEL_DEFAULT=${H5_TEST_EXPRESS_LEVEL_DEFAULT}" + PUBLIC + "H5_BUILT_AS_DYNAMIC_LIB" + PRIVATE + "H5_TEST_EXPRESS_LEVEL_DEFAULT=${H5_TEST_EXPRESS_LEVEL_DEFAULT}" + "$<$:${HDF5_DEVELOPER_DEFS}>" ) TARGET_C_PROPERTIES (${HDF5_TEST_LIBSH_TARGET} SHARED) target_link_libraries (${HDF5_TEST_LIBSH_TARGET} PUBLIC ${LINK_LIBS} ${HDF5_LIBSH_TARGET} - PRIVATE $<$,$>:ws2_32.lib> + PRIVATE "$<$,$>:ws2_32.lib>" ) if (MINGW) target_link_libraries (${HDF5_TEST_LIBSH_TARGET} PRIVATE "wsock32.lib") @@ -109,7 +114,7 @@ if (BUILD_SHARED_LIBS) set (HDF5_TEST_PLUGIN_TARGET ${HDF5_TEST_PLUGIN_CORENAME}) add_library (${HDF5_TEST_PLUGIN_TARGET} SHARED ${HDF5_TEST_SOURCE_DIR}/${plugin_name}.c) - target_include_directories (${HDF5_TEST_PLUGIN_TARGET} PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + 
target_include_directories (${HDF5_TEST_PLUGIN_TARGET} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (${HDF5_TEST_PLUGIN_TARGET} SHARED) target_link_libraries (${HDF5_TEST_PLUGIN_TARGET} PUBLIC ${HDF5_TEST_LIBSH_TARGET}) H5_SET_LIB_OPTIONS (${HDF5_TEST_PLUGIN_TARGET} ${HDF5_TEST_PLUGIN_NAME} SHARED "LIB") @@ -141,7 +146,7 @@ if (BUILD_SHARED_LIBS) set (HDF5_TEST_PLUGIN_TARGET ${HDF5_TEST_PLUGIN_CORENAME}) add_library (${HDF5_TEST_PLUGIN_TARGET} SHARED ${HDF5_TEST_SOURCE_DIR}/${plugin_name}.c) - target_include_directories (${HDF5_TEST_PLUGIN_TARGET} PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (${HDF5_TEST_PLUGIN_TARGET} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (${HDF5_TEST_PLUGIN_TARGET} SHARED) target_link_libraries (${HDF5_TEST_PLUGIN_TARGET} PUBLIC ${HDF5_TEST_LIBSH_TARGET}) H5_SET_LIB_OPTIONS (${HDF5_TEST_PLUGIN_TARGET} ${HDF5_TEST_PLUGIN_NAME} SHARED "LIB") @@ -185,7 +190,7 @@ if (BUILD_SHARED_LIBS) set (HDF5_VOL_PLUGIN_LIB_TARGET ${HDF5_VOL_PLUGIN_LIB_CORENAME}) add_library (${HDF5_VOL_PLUGIN_LIB_TARGET} SHARED ${HDF5_TEST_SOURCE_DIR}/${vol_lib}.c) - target_include_directories (${HDF5_VOL_PLUGIN_LIB_TARGET} PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (${HDF5_VOL_PLUGIN_LIB_TARGET} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (${HDF5_VOL_PLUGIN_LIB_TARGET} SHARED) target_link_libraries (${HDF5_VOL_PLUGIN_LIB_TARGET} PUBLIC ${HDF5_TEST_LIBSH_TARGET}) H5_SET_LIB_OPTIONS (${HDF5_VOL_PLUGIN_LIB_TARGET} ${HDF5_VOL_PLUGIN_LIB_NAME} SHARED "LIB") @@ -344,8 +349,12 @@ endif () macro (ADD_H5_EXE file) add_executable (${file} ${HDF5_TEST_SOURCE_DIR}/${file}.c) - target_include_directories (${file} PRIVATE 
"${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};${HDF5_TEST_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (${file} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};${HDF5_TEST_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(${file} PRIVATE "${HDF5_CMAKE_C_FLAGS}") + target_compile_definitions(${file} + PRIVATE + "$<$:${HDF5_DEVELOPER_DEFS}>" + ) if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (${file} STATIC) target_link_libraries (${file} PRIVATE ${HDF5_TEST_LIB_TARGET}) @@ -386,7 +395,11 @@ endforeach () #-- Adding test for chunk_info add_executable (chunk_info ${HDF5_TEST_SOURCE_DIR}/chunk_info.c) target_compile_options(chunk_info PRIVATE "${HDF5_CMAKE_C_FLAGS}") -target_include_directories (chunk_info PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};${HDF5_TEST_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_compile_definitions(chunk_info + PRIVATE + "$<$:${HDF5_DEVELOPER_DEFS}>" +) +target_include_directories (chunk_info PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};${HDF5_TEST_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (chunk_info STATIC) target_link_libraries (chunk_info PRIVATE ${HDF5_TEST_LIB_TARGET} ${LINK_COMP_LIBS}) @@ -406,7 +419,11 @@ endif () #-- Adding test for direct_chunk add_executable (direct_chunk ${HDF5_TEST_SOURCE_DIR}/direct_chunk.c) target_compile_options(direct_chunk PRIVATE "${HDF5_CMAKE_C_FLAGS}") -target_include_directories (direct_chunk PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};${HDF5_TEST_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_compile_definitions(direct_chunk + PRIVATE + "$<$:${HDF5_DEVELOPER_DEFS}>" +) +target_include_directories (direct_chunk PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};${HDF5_TEST_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (direct_chunk STATIC) target_link_libraries (direct_chunk PRIVATE ${HDF5_TEST_LIB_TARGET} ${LINK_COMP_LIBS}) @@ -427,7 +444,11 @@ 
endif () #-- Adding test for testhdf5 add_executable (testhdf5 ${testhdf5_SOURCES}) target_compile_options(testhdf5 PRIVATE "${HDF5_CMAKE_C_FLAGS}") -target_include_directories (testhdf5 PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_compile_definitions(testhdf5 + PRIVATE + "$<$:${HDF5_DEVELOPER_DEFS}>" +) +target_include_directories (testhdf5 PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (testhdf5 STATIC) target_link_libraries (testhdf5 PRIVATE ${HDF5_TEST_LIB_TARGET}) @@ -447,7 +468,11 @@ endif () #-- Adding test for cache_image add_executable (cache_image ${cache_image_SOURCES}) target_compile_options(cache_image PRIVATE "${HDF5_CMAKE_C_FLAGS}") -target_include_directories (cache_image PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_compile_definitions(cache_image + PRIVATE + "$<$:${HDF5_DEVELOPER_DEFS}>" +) +target_include_directories (cache_image PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (cache_image STATIC) target_link_libraries (cache_image PRIVATE ${HDF5_TEST_LIB_TARGET}) @@ -467,7 +492,11 @@ endif () #-- Adding test for ttsafe add_executable (ttsafe ${ttsafe_SOURCES}) target_compile_options(ttsafe PRIVATE "${HDF5_CMAKE_C_FLAGS}") -target_include_directories (ttsafe PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_compile_definitions(ttsafe + PRIVATE + "$<$:${HDF5_DEVELOPER_DEFS}>" +) +target_include_directories (ttsafe PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (ttsafe STATIC) target_link_libraries (ttsafe PRIVATE ${HDF5_TEST_LIB_TARGET}) @@ -478,7 +507,7 @@ if (NOT BUILD_SHARED_LIBS) endif () else () TARGET_C_PROPERTIES (ttsafe SHARED) - target_link_libraries (ttsafe PRIVATE 
${HDF5_TEST_LIBSH_TARGET} $<$:Threads::Threads>) + target_link_libraries (ttsafe PRIVATE ${HDF5_TEST_LIBSH_TARGET} "$<$:Threads::Threads>") endif () set_target_properties (ttsafe PROPERTIES FOLDER test) @@ -493,7 +522,11 @@ endif () #-- Adding test for thread_id add_executable (thread_id ${HDF5_TEST_SOURCE_DIR}/thread_id.c) target_compile_options(thread_id PRIVATE "${HDF5_CMAKE_C_FLAGS}") -target_include_directories (thread_id PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_compile_definitions(thread_id + PRIVATE + "$<$:${HDF5_DEVELOPER_DEFS}>" +) +target_include_directories (thread_id PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (thread_id STATIC) target_link_libraries (thread_id PRIVATE ${HDF5_TEST_LIB_TARGET}) @@ -504,7 +537,7 @@ if (NOT BUILD_SHARED_LIBS) endif () else () TARGET_C_PROPERTIES (thread_id SHARED) - target_link_libraries (thread_id PRIVATE ${HDF5_TEST_LIBSH_TARGET} $<$:Threads::Threads>) + target_link_libraries (thread_id PRIVATE ${HDF5_TEST_LIBSH_TARGET} "$<$:Threads::Threads>") endif () set_target_properties (thread_id PROPERTIES FOLDER test) @@ -518,7 +551,7 @@ endif () if (HDF5_BUILD_UTILS) # requires mirror server #-- Adding test for mirror_vfd add_executable (mirror_vfd ${mirror_vfd_SOURCES}) - target_include_directories (mirror_vfd PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (mirror_vfd PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (mirror_vfd STATIC) target_link_libraries (mirror_vfd PRIVATE ${HDF5_TEST_LIB_TARGET}) @@ -597,8 +630,12 @@ set (H5_VDS_SWMR_TESTS macro (ADD_H5_VDS_EXE file) add_executable (${file} ${HDF5_TEST_SOURCE_DIR}/${file}.c ${HDF5_TEST_SOURCE_DIR}/vds_swmr.h) - target_include_directories (${file} PRIVATE 
"${HDF5_SRC_DIR};${HDF5_BINARY_DIR};${HDF5_TEST_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (${file} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_BINARY_DIR};${HDF5_TEST_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(${file} PRIVATE "${HDF5_CMAKE_C_FLAGS}") + target_compile_definitions(${file} + PRIVATE + "$<$:${HDF5_DEVELOPER_DEFS}>" + ) if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (${file} STATIC) target_link_libraries (${file} PRIVATE ${HDF5_TEST_LIB_TARGET}) @@ -625,7 +662,11 @@ endforeach () # and it can't be renamed (i.e., no -shared). add_executable (accum_swmr_reader ${HDF5_TEST_SOURCE_DIR}/accum_swmr_reader.c) target_compile_options(accum_swmr_reader PRIVATE "${HDF5_CMAKE_C_FLAGS}") -target_include_directories (accum_swmr_reader PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_compile_definitions(accum_swmr_reader + PRIVATE + "$<$:${HDF5_DEVELOPER_DEFS}>" +) +target_include_directories (accum_swmr_reader PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (accum_swmr_reader STATIC) target_link_libraries (accum_swmr_reader PRIVATE ${HDF5_TEST_LIB_TARGET} ${HDF5_LIB_TARGET}) @@ -650,7 +691,7 @@ set_target_properties (accum PROPERTIES DEPENDS accum_swmr_reader) ############################################################################## if (BUILD_SHARED_LIBS) add_executable (filter_plugin ${HDF5_TEST_SOURCE_DIR}/filter_plugin.c) - target_include_directories (filter_plugin PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (filter_plugin PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (filter_plugin SHARED) target_link_libraries (filter_plugin PRIVATE ${HDF5_TEST_LIBSH_TARGET}) set_target_properties (filter_plugin PROPERTIES FOLDER test) @@ -663,7 +704,7 @@ if (BUILD_SHARED_LIBS) endif () 
add_executable (vol_plugin ${HDF5_TEST_SOURCE_DIR}/vol_plugin.c) - target_include_directories (vol_plugin PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (vol_plugin PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (vol_plugin SHARED) target_link_libraries (vol_plugin PRIVATE ${HDF5_TEST_LIBSH_TARGET}) set_target_properties (vol_plugin PROPERTIES FOLDER test) @@ -682,7 +723,11 @@ endif () set (use_append_chunk_SOURCES ${HDF5_TEST_SOURCE_DIR}/use_append_chunk.c ${HDF5_TEST_SOURCE_DIR}/use_common.c ${HDF5_TEST_SOURCE_DIR}/use.h) add_executable (use_append_chunk ${use_append_chunk_SOURCES}) target_compile_options(use_append_chunk PRIVATE "${HDF5_CMAKE_C_FLAGS}") -target_include_directories (use_append_chunk PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_compile_definitions(use_append_chunk + PRIVATE + "$<$:${HDF5_DEVELOPER_DEFS}>" +) +target_include_directories (use_append_chunk PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (use_append_chunk STATIC) target_link_libraries (use_append_chunk PRIVATE ${HDF5_TEST_LIB_TARGET}) @@ -703,7 +748,11 @@ if (HDF5_BUILD_UTILS) # requires mirror server set (use_append_chunk_mirror_SOURCES ${HDF5_TEST_SOURCE_DIR}/use_append_chunk_mirror.c ${HDF5_TEST_SOURCE_DIR}/use_common.c ${HDF5_TEST_SOURCE_DIR}/use.h) add_executable (use_append_chunk_mirror ${use_append_chunk_mirror_SOURCES}) target_compile_options(use_append_chunk_mirror PRIVATE "${HDF5_CMAKE_C_FLAGS}") - target_include_directories (use_append_chunk_mirror PRIVATE "${HDF5_SRC_DIR};${HDF5_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_compile_definitions(use_append_chunk_mirror + PRIVATE + "$<$:${HDF5_DEVELOPER_DEFS}>" + ) + target_include_directories (use_append_chunk_mirror PRIVATE 
"${HDF5_SRC_INCLUDE_DIRS};${HDF5_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (use_append_chunk_mirror STATIC) target_link_libraries (use_append_chunk_mirror PRIVATE ${HDF5_TEST_LIB_TARGET}) @@ -724,7 +773,11 @@ endif () set (use_append_mchunks_SOURCES ${HDF5_TEST_SOURCE_DIR}/use_append_mchunks.c ${HDF5_TEST_SOURCE_DIR}/use_common.c ${HDF5_TEST_SOURCE_DIR}/use.h) add_executable (use_append_mchunks ${use_append_mchunks_SOURCES}) target_compile_options(use_append_mchunks PRIVATE "${HDF5_CMAKE_C_FLAGS}") -target_include_directories (use_append_mchunks PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_compile_definitions(use_append_mchunks + PRIVATE + "$<$:${HDF5_DEVELOPER_DEFS}>" +) +target_include_directories (use_append_mchunks PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (use_append_mchunks STATIC) target_link_libraries (use_append_mchunks PRIVATE ${HDF5_TEST_LIB_TARGET}) @@ -744,7 +797,11 @@ endif () set (use_disable_mdc_flushes_SOURCES ${HDF5_TEST_SOURCE_DIR}/use_disable_mdc_flushes.c) add_executable (use_disable_mdc_flushes ${use_disable_mdc_flushes_SOURCES}) target_compile_options(use_disable_mdc_flushes PRIVATE "${HDF5_CMAKE_C_FLAGS}") -target_include_directories (use_disable_mdc_flushes PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_compile_definitions(use_disable_mdc_flushes + PRIVATE + "$<$:${HDF5_DEVELOPER_DEFS}>" +) +target_include_directories (use_disable_mdc_flushes PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (use_disable_mdc_flushes STATIC) target_link_libraries (use_disable_mdc_flushes PRIVATE ${HDF5_TEST_LIB_TARGET}) diff --git a/test/CMakeTests.cmake b/test/CMakeTests.cmake index 5f4f89af986..08f5e3be985 100644 --- a/test/CMakeTests.cmake +++ 
b/test/CMakeTests.cmake @@ -114,7 +114,7 @@ foreach (plistfile ${HDF5_REFERENCE_PLIST_FILES}) endforeach () # -------------------------------------------------------------------- -#-- Copy all the HDF5 files from the test directory into the source directory +#-- Copy all the HDF5 files from the source directory into the test directory # -------------------------------------------------------------------- set (HDF5_REFERENCE_TEST_FILES aggr.h5 @@ -178,24 +178,24 @@ endforeach () add_custom_target(HDF5_TEST_LIB_files ALL COMMENT "Copying files needed by HDF5_TEST_LIB tests" DEPENDS ${HDF5_TEST_LIB_files_list}) set (testhdf5_CLEANFILES - coord.h5 - dtypes10.h5 - sys_file1 - tattr.h5 - tfile1.h5 - tfile2.h5 - tfile3.h5 - tfile4.h5 - tfile5.h5 - tfile6.h5 - tfile7.h5 - tfilespace.h5 - th5o_file - th5s1.h5 - tselect.h5 - tsohm.h5 - tsohm_dst.h5 - tsohm_src.h5 + coord.h5 + dtypes10.h5 + sys_file1 + tattr.h5 + tfile1.h5 + tfile2.h5 + tfile3.h5 + tfile4.h5 + tfile5.h5 + tfile6.h5 + tfile7.h5 + tfilespace.h5 + th5o_file + th5s1.h5 + tselect.h5 + tsohm.h5 + tsohm_dst.h5 + tsohm_src.h5 ) # Remove any output file left over from previous test run @@ -205,6 +205,12 @@ add_test ( WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST ) set_tests_properties (H5TEST-testhdf5-clear-objects PROPERTIES FIXTURES_SETUP clear_testhdf5) +add_test ( + NAME H5TEST-testhdf5-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove ${testhdf5_CLEANFILES} + WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST +) +set_tests_properties (H5TEST-testhdf5-clean-objects PROPERTIES FIXTURES_CLEANUP clear_testhdf5) add_test (NAME H5TEST-testhdf5-base COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ -x file -x select) set_tests_properties (H5TEST-testhdf5-base PROPERTIES @@ -232,179 +238,94 @@ set_tests_properties (H5TEST-testhdf5-select PROPERTIES ############################################################################## set (test_CLEANFILES - accum.h5 - cmpd_dset.h5 - compact_dataset.h5 - dataset.h5 - dset_offset.h5 - 
max_compact_dataset.h5 - simple.h5 - set_local.h5 - random_chunks.h5 - huge_chunks.h5 - chunk_cache.h5 - big_chunk.h5 - chunk_fast.h5 - chunk_expand.h5 - chunk_fixed.h5 - copy_dcpl_newfile.h5 - partial_chunks.h5 - layout_extend.h5 - zero_chunk.h5 - chunk_single.h5 - swmr_non_latest.h5 - earray_hdr_fd.h5 - farray_hdr_fd.h5 - bt2_hdr_fd.h5 - storage_size.h5 - dls_01_strings.h5 - power2up.h5 - version_bounds.h5 - alloc_0sized.h5 - extend.h5 - istore.h5 - extlinks*.h5 - frspace.h5 - links*.h5 - sys_file1 - tfile*.h5 - th5s*.h5 - lheap.h5 - fheap.h5 - ohdr.h5 - ohdr_min_a.h5 - ohdr_min_b.h5 + cmpd_dtransform.h5 + direct_chunk.h5 + dt_arith1.h5 + dt_arith2.h5 + dtransform.h5 + dtypes3.h5 + dtypes4.h5 min_dset_ohdr_testfile.h5 - stab.h5 - extern_*.h5 - extern_*.raw - gheap*.h5 - dt_arith1 - dt_arith2 - links.h5 - links*.h5 - extlinks*.h5 - tmp - tmp_links - tmp2_links - tmp_links_env - tmp_vds/* - tmp_vds_env/* - big.data - big*.h5 - stdio.h5 - sec2.h5 - dtypes*.h5 - dt_arith*.h5 - tattr.h5 - tselect.h5 - mtime.h5 - unlink.h5 - unicode.h5 - coord.h5 - fillval_*.h5 - fillval.raw - mount_*.h5 - testmeta.h5 - ttime.h5 - trefer*.h5 - trefer_*.h5 - tvltypes.h5 - tvlstr.h5 - tvlstr2.h5 - twriteorder.dat - enum1.h5 - titerate.h5 - ttsafe.h5 - tarray1.h5 - tgenprop.h5 - tmisc*.h5 - set_extent*.h5 - ext*.bin - getname.h5 - getname*.h5 + ohdr_min_a.h5 sec2_file.h5 - direct_file.h5 - family_file000*.h5 - new_family_v16_000*.h5 - multi_file-*.h5 - core_file - filter_plugin.h5 - new_move_*.h5 - ntypes.h5 - dangle.h5 - error_test.h5 - err_compat.h5 - dtransform.h5 - test_filters.h5 - get_file_name.h5 - tstint*.h5 - unlink_chunked.h5 - btree2.h5 - btree2_tmp.h5 - objcopy_src.h5 - objcopy_dst.h5 + single_latest.h5 + source_file.h5 + splitter_rw_file.h5 + splitter_tmp.h5 + splitter_wo_file.h5 + stdio_file.h5 + swmr0.h5 + tfile_is_accessible_non_hdf5.h5 + tfile_is_accessible.h5 + tfile1.h5.h5 + tfile8.h5 + tfile8.h5.h5 + tmisc15.h5.h5 + tstint1.h5 + tstint2.h5 + tverbounds_dtype.h5 + 
virtual_file1.h5 objcopy_ext.dat - app_ref.h5 - farray.h5 - farray_tmp.h5 - earray.h5 - earray_tmp.h5 - efc*.h5 log_vfd_out.log - log_ros3_out.log - log_s3comms_out.log - new_multi_file_v16-r.h5 - new_multi_file_v16-s.h5 - split_get_file_image_test-m.h5 - split_get_file_image_test-r.h5 - file_image_core_test.h5.copy - unregister_filter_1.h5 - unregister_filter_2.h5 - vds_virt.h5 - vds_dapl.h5 - vds_src_*.h5 - swmr_data.h5 - use_use_append_chunk.h5 - use_append_mchunks.h5 - use_disable_mdc_flushes.h5 - flushrefresh.h5 - flushrefresh_VERIFICATION_START - flushrefresh_VERIFICATION_CHECKPOINT1 - flushrefresh_VERIFICATION_CHECKPOINT2 - flushrefresh_VERIFICATION_DONE - filenotclosed.h5 - del_many_dense_attrs.h5 - atomic_data - accum_swmr_big.h5 - ohdr_swmr.h5 - test_swmr*.h5 - cache_logging.h5 - cache_logging.out - vds_swmr.h5 - vds_swmr_src_*.h5 - swmr*.h5 - swmr_writer.out - swmr_writer.log.* - swmr_reader.out.* - swmr_reader.log.* - tbogus.h5.copy - cache_image_test.h5 - direct_chunk.h5 - native_vol_test.h5 - splitter*.h5 splitter.log - mirror_rw/* - mirror_wo/* + tbogus.h5.copy + tmp_vds/vds_src_2.h5 +) + +set (EXTLINKS_CLEANFILES + extlinks16A-000000.h5 + extlinks16A-000001.h5 + extlinks16B-b.h5 + extlinks16B-g.h5 + extlinks16B-l.h5 + extlinks16B-r.h5 + extlinks16B-s.h5 + extlinks19B-000000.h5 + extlinks19B-000001.h5 + extlinks19B-000002.h5 + extlinks19B-000003.h5 + extlinks19B-000004.h5 + extlinks19B-000005.h5 + extlinks19B-000006.h5 + extlinks19B-000007.h5 + extlinks19B-000008.h5 + extlinks19B-000009.h5 + extlinks19B-000010.h5 + extlinks19B-000011.h5 + extlinks19B-000012.h5 + extlinks19B-000013.h5 + extlinks19B-000014.h5 + extlinks19B-000015.h5 + extlinks19B-000016.h5 + extlinks19B-000017.h5 + extlinks19B-000018.h5 + extlinks19B-000019.h5 + extlinks19B-000020.h5 + extlinks19B-000021.h5 + extlinks19B-000022.h5 + extlinks19B-000023.h5 + extlinks19B-000024.h5 + extlinks19B-000025.h5 + extlinks19B-000026.h5 + extlinks19B-000027.h5 + extlinks19B-000028.h5 ) # Remove 
any output file left over from previous test run add_test ( NAME H5TEST-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove ${test_CLEANFILES} + COMMAND ${CMAKE_COMMAND} -E remove ${test_CLEANFILES} ${EXTLINKS_CLEANFILES} + COMMAND_EXPAND_LISTS WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST ) set_tests_properties (H5TEST-clear-objects PROPERTIES FIXTURES_SETUP clear_H5TEST) +add_test ( + NAME H5TEST-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove ${test_CLEANFILES} ${EXTLINKS_CLEANFILES} + COMMAND_EXPAND_LISTS + WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST +) +set_tests_properties (H5TEST-clean-objects PROPERTIES FIXTURES_CLEANUP clear_H5TEST) set (H5TEST_SEPARATE_TESTS testhdf5 @@ -420,7 +341,7 @@ foreach (h5_test ${H5_TESTS}) if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5TEST-${h5_test} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) set_tests_properties (H5TEST-${h5_test} PROPERTIES - FIXTURES_REQUIRED clear_objects + FIXTURES_REQUIRED clear_H5TEST ENVIRONMENT "srcdir=${HDF5_TEST_BINARY_DIR}/H5TEST" WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST ) @@ -461,10 +382,18 @@ set_tests_properties (H5TEST-objcopy PROPERTIES TIMEOUT ${CTEST_VERY_LONG_TIMEOU if (NOT CYGWIN) add_test ( NAME H5TEST-cache-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove cache_test.h5 + COMMAND ${CMAKE_COMMAND} -E remove + cache_test.h5 WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST ) set_tests_properties (H5TEST-cache-clear-objects PROPERTIES FIXTURES_SETUP clear_cache) + add_test ( + NAME H5TEST-cache-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove + cache_test.h5 + WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST + ) + set_tests_properties (H5TEST-cache-clean-objects PROPERTIES FIXTURES_CLEANUP clear_cache) if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5TEST-cache COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) else () @@ -496,6 +425,12 @@ if (TEST_CACHE_IMAGE) WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST ) set_tests_properties (H5TEST-cache_image-clear-objects 
PROPERTIES FIXTURES_SETUP clear_cache_image) + add_test ( + NAME H5TEST-cache_image-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove cache_image_test.h5 + WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST + ) + set_tests_properties (H5TEST-cache_image-clean-objects PROPERTIES FIXTURES_CLEANUP clear_cache_image) add_test (NAME H5TEST_cache_image COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) set_tests_properties (H5TEST-cache_image PROPERTIES FIXTURES_REQUIRED clear_cache_image @@ -505,21 +440,28 @@ if (TEST_CACHE_IMAGE) endif () #-- Adding test for external_env +set (H5TEST_EXTERNAL_CLEANFILES + extern_1r.raw + extern_2r.raw + extern_3r.raw + extern_4r.raw + extern_1w.raw + extern_2w.raw + extern_3w.raw + extern_4w.raw +) add_test ( NAME H5TEST-external_env-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove - extern_1r.raw - extern_2r.raw - extern_3r.raw - extern_4r.raw - extern_1w.raw - extern_2w.raw - extern_3w.raw - extern_4w.raw - WORKING_DIRECTORY - ${HDF5_TEST_BINARY_DIR}/H5TEST + COMMAND ${CMAKE_COMMAND} -E remove ${H5TEST_EXTERNAL_CLEANFILES} + WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST ) set_tests_properties (H5TEST-external_env-clear-objects PROPERTIES FIXTURES_SETUP clear_external_env) +add_test ( + NAME H5TEST-external_env-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove ${H5TEST_EXTERNAL_CLEANFILES} + WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST +) +set_tests_properties (H5TEST-external_env-clean-objects PROPERTIES FIXTURES_CLEANUP clear_external_env) if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5TEST-external_env COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) else () @@ -545,15 +487,13 @@ set_tests_properties (H5TEST-external_env PROPERTIES #-- Adding test for vds_env add_test ( - NAME H5TEST-vds_env-clear-objects + NAME H5TEST-vds_env-clean-objects COMMAND ${CMAKE_COMMAND} -E remove - vds_virt_0.h5 - vds_virt_3.h5 - vds_src_2.h5 + tmp_vds_env/vds_env_src_2.h5 WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST ) -set_tests_properties 
(H5TEST-vds_env-clear-objects PROPERTIES FIXTURES_SETUP clear_vds_env) +set_tests_properties (H5TEST-vds_env-clean-objects PROPERTIES FIXTURES_CLEANUP clear_vds_env) if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5TEST-vds_env COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) else () @@ -578,20 +518,26 @@ set_tests_properties (H5TEST-vds_env PROPERTIES ) #-- Adding test for flush1/2 +set (FLUSH_CLEANFILES + flush.h5 + flush-swmr.h5 + noflush.h5 + noflush-swmr.h5 + flush_extend.h5 + flush_extend-swmr.h5 + noflush_extend.h5 + noflush_extend-swmr.h5 +) add_test (NAME H5TEST-flush-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove - flush.h5 - flush-swmr.h5 - noflush.h5 - noflush-swmr.h5 - flush_extend.h5 - flush_extend-swmr.h5 - noflush_extend.h5 - noflush_extend-swmr.h5 - WORKING_DIRECTORY - ${HDF5_TEST_BINARY_DIR}/H5TEST + COMMAND ${CMAKE_COMMAND} -E remove ${FLUSH_CLEANFILES} + WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST ) set_tests_properties (H5TEST-flush-clear-objects PROPERTIES FIXTURES_SETUP clear_flush) +add_test (NAME H5TEST-flush-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove ${FLUSH_CLEANFILES} + WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST +) +set_tests_properties (H5TEST-flush-clean-objects PROPERTIES FIXTURES_CLEANUP clear_flush) if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5TEST-flush1 COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) else () @@ -686,6 +632,12 @@ add_test ( WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST ) set_tests_properties (H5TEST-filenotclosed-clear-objects PROPERTIES FIXTURES_SETUP clear_filenotclosed) +add_test ( + NAME H5TEST-filenotclosed-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove filenotclosed.h5 + WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST +) +set_tests_properties (H5TEST-filenotclosed-clean-objects PROPERTIES FIXTURES_CLEANUP clear_filenotclosed) add_test (NAME H5TEST-filenotclosed COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) set_tests_properties (H5TEST-filenotclosed PROPERTIES FIXTURES_REQUIRED 
clear_filenotclosed @@ -700,6 +652,12 @@ add_test ( WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST ) set_tests_properties (H5TEST-del_many_dense_attrs-clear-objects PROPERTIES FIXTURES_SETUP clear_del_many_dense_attrs) +add_test ( + NAME H5TEST-del_many_dense_attrs-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove del_many_dense_attrs.h5 + WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST +) +set_tests_properties (H5TEST-del_many_dense_attrs-clean-objects PROPERTIES FIXTURES_CLEANUP clear_del_many_dense_attrs) add_test (NAME H5TEST-del_many_dense_attrs COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) set_tests_properties (H5TEST-del_many_dense_attrs PROPERTIES FIXTURES_REQUIRED clear_del_many_dense_attrs @@ -779,6 +737,14 @@ else () ENVIRONMENT "srcdir=${HDF5_TEST_BINARY_DIR}/H5TEST;HDF5_PLUGIN_PRELOAD=::" WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST ) + add_test ( + NAME H5TEST-error_test-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove error_test.txt + ) + set_tests_properties (H5TEST-error_test-clean-objects PROPERTIES + DEPENDS H5TEST-error_test + WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST + ) endif () #-- Adding test for links_env @@ -791,6 +757,15 @@ add_test (NAME H5TEST-links_env-clear-objects ${HDF5_TEST_BINARY_DIR}/H5TEST ) set_tests_properties (H5TEST-links_env-clear-objects PROPERTIES FIXTURES_SETUP clear_links_env) +add_test (NAME H5TEST-links_env-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove + extlinks_env0.h5 + extlinks_env1.h5 + tmp_links_env/extlinks_env1.h5 + WORKING_DIRECTORY + ${HDF5_TEST_BINARY_DIR}/H5TEST +) +set_tests_properties (H5TEST-links_env-clean-objects PROPERTIES FIXTURES_CLEANUP clear_links_env) if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5TEST-links_env COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) else () @@ -844,16 +819,6 @@ if (BUILD_SHARED_LIBS) ENVIRONMENT "HDF5_PLUGIN_PATH=${CMAKE_BINARY_DIR}/filter_plugin_dir1${CMAKE_SEP}${CMAKE_BINARY_DIR}/filter_plugin_dir2;srcdir=${HDF5_TEST_BINARY_DIR}" WORKING_DIRECTORY 
${HDF5_TEST_BINARY_DIR} ) - -############################################################################## -# HDFFV-9655 relative plugin test disabled -# -# add_test (NAME H5PLUGIN-pluginRelative COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) -# set_tests_properties (H5PLUGIN-pluginRelative PROPERTIES -# ENVIRONMENT "HDF5_PLUGIN_PATH=@/${BIN_REL_PATH}testdir1${CMAKE_SEP}@/${BIN_REL_PATH}testdir2;srcdir=${HDF5_TEST_BINARY_DIR}" -# WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR} -# ) -############################################################################## endif () option (TEST_SHELL_SCRIPTS "Enable shell script tests" ON) @@ -876,10 +841,20 @@ if (ENABLE_EXTENDED_TESTS) if (H5_PERL_FOUND) add_test ( NAME H5TEST-testflushrefresh-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove flushrefresh.h5 + COMMAND ${CMAKE_COMMAND} -E remove + flushrefresh.h5 + flushrefresh_test/flushrefresh.h5 WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST/flushrefresh_test ) set_tests_properties (H5TEST-testflushrefresh-clear-objects PROPERTIES FIXTURES_SETUP clear_testflushrefresh) + add_test ( + NAME H5TEST-testflushrefresh-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove + flushrefresh.h5 + flushrefresh_test/flushrefresh.h5 + WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST/flushrefresh_test + ) + set_tests_properties (H5TEST-testflushrefresh-clean-objects PROPERTIES FIXTURES_CLEANUP clear_testflushrefresh) add_test (NAME H5TEST-testflushrefresh COMMAND "${CMAKE_COMMAND}" -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" -D "TEST_PROGRAM=$" @@ -952,7 +927,7 @@ endif () if (HDF5_BUILD_GENERATORS AND BUILD_STATIC_LIBS) macro (ADD_H5_GENERATOR genfile) add_executable (${genfile} ${HDF5_TEST_SOURCE_DIR}/${genfile}.c) - target_include_directories (${genfile} PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (${genfile} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (${genfile} 
STATIC) target_link_libraries (${genfile} PRIVATE ${HDF5_TEST_LIB_TARGET} ${HDF5_LIB_TARGET}) set_target_properties (${genfile} PROPERTIES FOLDER generator/test) diff --git a/test/CMakeVFDTests.cmake b/test/CMakeVFDTests.cmake index 87683fae08b..32877a41b94 100644 --- a/test/CMakeVFDTests.cmake +++ b/test/CMakeVFDTests.cmake @@ -138,21 +138,23 @@ add_custom_target(HDF5_VFDTEST_LIB_files ALL COMMENT "Copying files needed by HD endmacro () macro (DO_VFD_TEST vfdtest vfdname resultcode) - add_test (NAME VFD-${vfdname}-${vfdtest} - COMMAND "${CMAKE_COMMAND}" - -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" - -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=" - -D "TEST_VFD:STRING=${vfdname}" - -D "TEST_EXPECT=${resultcode}" - -D "TEST_OUTPUT=${vfdname}-${vfdtest}.out" - -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/${vfdname}" - -P "${HDF_RESOURCES_DIR}/vfdTest.cmake" - ) - set_tests_properties (VFD-${vfdname}-${vfdtest} PROPERTIES - ENVIRONMENT "srcdir=${HDF5_TEST_BINARY_DIR}/${vfdname}" - WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/${vfdname} - ) + if (NOT "${vfdtest}" IN_LIST H5_VFD_${vfdname}_SKIP_TESTS) + add_test (NAME VFD-${vfdname}-${vfdtest} + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS:STRING=" + -D "TEST_VFD:STRING=${vfdname}" + -D "TEST_EXPECT=${resultcode}" + -D "TEST_OUTPUT=${vfdname}-${vfdtest}.out" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/${vfdname}" + -P "${HDF_RESOURCES_DIR}/vfdTest.cmake" + ) + set_tests_properties (VFD-${vfdname}-${vfdtest} PROPERTIES + ENVIRONMENT "srcdir=${HDF5_TEST_BINARY_DIR}/${vfdname}" + WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/${vfdname} + ) + endif () endmacro () macro (ADD_VFD_TEST vfdname resultcode) @@ -165,10 +167,18 @@ add_custom_target(HDF5_VFDTEST_LIB_files ALL COMMENT "Copying files needed by HD endif () endif () endforeach () - set_tests_properties (VFD-${vfdname}-flush2 PROPERTIES DEPENDS VFD-${vfdname}-flush1) - set_tests_properties 
(VFD-${vfdname}-flush1 PROPERTIES TIMEOUT 10) - set_tests_properties (VFD-${vfdname}-flush2 PROPERTIES TIMEOUT 10) - set_tests_properties (VFD-${vfdname}-istore PROPERTIES TIMEOUT ${CTEST_VERY_LONG_TIMEOUT}) + if (NOT "flush2" IN_LIST H5_VFD_${vfdname}_SKIP_TESTS) + if (NOT "flush1" IN_LIST H5_VFD_${vfdname}_SKIP_TESTS) + set_tests_properties (VFD-${vfdname}-flush2 PROPERTIES DEPENDS VFD-${vfdname}-flush1) + endif () + set_tests_properties (VFD-${vfdname}-flush2 PROPERTIES TIMEOUT 10) + endif () + if (NOT "flush1" IN_LIST H5_VFD_${vfdname}_SKIP_TESTS) + set_tests_properties (VFD-${vfdname}-flush1 PROPERTIES TIMEOUT 10) + endif () + if (NOT "istore" IN_LIST H5_VFD_${vfdname}_SKIP_TESTS) + set_tests_properties (VFD-${vfdname}-istore PROPERTIES TIMEOUT ${CTEST_VERY_LONG_TIMEOUT}) + endif () if (NOT CYGWIN) set_tests_properties (VFD-${vfdname}-cache PROPERTIES TIMEOUT ${CTEST_VERY_LONG_TIMEOUT}) endif () diff --git a/testpar/CMakeLists.txt b/testpar/CMakeLists.txt index eb5b8cd5852..0683338c043 100644 --- a/testpar/CMakeLists.txt +++ b/testpar/CMakeLists.txt @@ -23,8 +23,12 @@ set (testphdf5_SOURCES #-- Adding test for testhdf5 add_executable (testphdf5 ${testphdf5_SOURCES}) target_compile_options(testphdf5 PRIVATE "${HDF5_CMAKE_C_FLAGS}") +target_compile_definitions(testphdf5 + PRIVATE + "$<$:${HDF5_DEVELOPER_DEFS}>" +) target_include_directories (testphdf5 - PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" + PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" ) if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (testphdf5 STATIC) @@ -49,8 +53,12 @@ endif () macro (ADD_H5P_EXE file) add_executable (${file} ${HDF5_TEST_PAR_SOURCE_DIR}/${file}.c) target_compile_options(${file} PRIVATE "${HDF5_CMAKE_C_FLAGS}") + target_compile_definitions(${file} + PRIVATE + "$<$:${HDF5_DEVELOPER_DEFS}>" + ) target_include_directories (${file} - PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" + PRIVATE 
"${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" ) if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (${file} STATIC) diff --git a/testpar/CMakeTests.cmake b/testpar/CMakeTests.cmake index ee69881f8d6..1a3f409de6a 100644 --- a/testpar/CMakeTests.cmake +++ b/testpar/CMakeTests.cmake @@ -22,6 +22,12 @@ add_test ( WORKING_DIRECTORY ${HDF5_TEST_PAR_BINARY_DIR} ) set_tests_properties (MPI_TEST-clear-testphdf5-objects PROPERTIES FIXTURES_SETUP par_clear_testphdf5) +add_test ( + NAME MPI_TEST-clean-testphdf5-objects + COMMAND ${CMAKE_COMMAND} -E remove ParaTest.h5 + WORKING_DIRECTORY ${HDF5_TEST_PAR_BINARY_DIR} +) +set_tests_properties (MPI_TEST-clean-testphdf5-objects PROPERTIES FIXTURES_CLEANUP par_clear_testphdf5) set (SKIP_tests cchunk1 @@ -101,6 +107,12 @@ add_test ( WORKING_DIRECTORY ${HDF5_TEST_PAR_BINARY_DIR} ) set_tests_properties (MPI_TEST-clear-objects PROPERTIES FIXTURES_SETUP par_clear_objects) +add_test ( + NAME MPI_TEST-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove ${test_par_CLEANFILES} + WORKING_DIRECTORY ${HDF5_TEST_PAR_BINARY_DIR} +) +set_tests_properties (MPI_TEST-clean-objects PROPERTIES FIXTURES_CLEANUP par_clear_objects) foreach (h5_testp ${H5P_TESTS}) add_test (NAME MPI_TEST_${h5_testp} COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} ${MPIEXEC_MAX_NUMPROCS} ${MPIEXEC_PREFLAGS} $ ${MPIEXEC_POSTFLAGS}) diff --git a/testpar/CMakeVFDTests.cmake b/testpar/CMakeVFDTests.cmake index d6a402599ec..40624d59c6e 100644 --- a/testpar/CMakeVFDTests.cmake +++ b/testpar/CMakeVFDTests.cmake @@ -25,26 +25,34 @@ set (H5P_VFD_TESTS macro (ADD_VFD_TEST vfdname resultcode) if (NOT HDF5_ENABLE_USING_MEMCHECKER) foreach (h5_test ${H5P_VFD_TESTS}) - add_test ( - NAME MPI_TEST_VFD-${vfdname}-${h5_test} - COMMAND "${CMAKE_COMMAND}" - -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" - -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=" - -D "TEST_VFD:STRING=${vfdname}" - -D "TEST_EXPECT=${resultcode}" - -D 
"TEST_OUTPUT=${vfdname}-${h5_test}.out" - -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/${vfdname}" - -P "${HDF_RESOURCES_DIR}/vfdTest.cmake" - ) - set_tests_properties (MPI_TEST_VFD-${vfdname}-${h5_test} PROPERTIES - ENVIRONMENT "srcdir=${HDF5_TEST_PAR_BINARY_DIR}/${vfdname}" - WORKING_DIRECTORY ${HDF5_TEST_PAR_BINARY_DIR}/${vfdname} - ) + if (NOT "${h5_test}" IN_LIST H5P_VFD_${vfdname}_TESTS_SKIP) + add_test ( + NAME MPI_TEST_VFD-${vfdname}-${h5_test} + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS:STRING=" + -D "TEST_VFD:STRING=${vfdname}" + -D "TEST_EXPECT=${resultcode}" + -D "TEST_OUTPUT=${vfdname}-${h5_test}.out" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/${vfdname}" + -P "${HDF_RESOURCES_DIR}/vfdTest.cmake" + ) + set_tests_properties (MPI_TEST_VFD-${vfdname}-${h5_test} PROPERTIES + ENVIRONMENT "srcdir=${HDF5_TEST_PAR_BINARY_DIR}/${vfdname}" + WORKING_DIRECTORY ${HDF5_TEST_PAR_BINARY_DIR}/${vfdname} + ) + endif () endforeach () - set_tests_properties (MPI_TEST_VFD-${vfdname}-t_pflush1 PROPERTIES WILL_FAIL "true") - #set_property (TEST MPI_TEST_t_pflush1 PROPERTY PASS_REGULAR_EXPRESSION "PASSED") - set_tests_properties (MPI_TEST_VFD-${vfdname}-t_pflush2 PROPERTIES DEPENDS MPI_TEST_VFD-${vfdname}-t_pflush1) + if (NOT "t_pflush1" IN_LIST H5P_VFD_${vfdname}_TESTS_SKIP) + set_tests_properties (MPI_TEST_VFD-${vfdname}-t_pflush1 PROPERTIES WILL_FAIL "true") + #set_property (TEST MPI_TEST_t_pflush1 PROPERTY PASS_REGULAR_EXPRESSION "PASSED") + endif () + if (NOT "t_pflush2" IN_LIST H5P_VFD_${vfdname}_TESTS_SKIP) + if (NOT "t_pflush1" IN_LIST H5P_VFD_${vfdname}_TESTS_SKIP) + set_tests_properties (MPI_TEST_VFD-${vfdname}-t_pflush2 PROPERTIES DEPENDS MPI_TEST_VFD-${vfdname}-t_pflush1) + endif () + endif () endif () endmacro () diff --git a/tools/lib/CMakeLists.txt b/tools/lib/CMakeLists.txt index f0e40fe21df..bf9d087af97 100644 --- a/tools/lib/CMakeLists.txt +++ b/tools/lib/CMakeLists.txt @@ -37,7 +37,7 @@ set 
(H5_TOOLS_LIB_HDRS if (BUILD_STATIC_LIBS) add_library (${HDF5_TOOLS_LIB_TARGET} STATIC ${H5_TOOLS_LIB_SOURCES} ${H5_TOOLS_LIB_HDRS}) target_include_directories (${HDF5_TOOLS_LIB_TARGET} - PRIVATE "${HDF5_TOOLS_LIB_SOURCE_DIR};${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" + PRIVATE "${HDF5_TOOLS_LIB_SOURCE_DIR};${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" INTERFACE "$/include>" ) target_compile_options(${HDF5_TOOLS_LIB_TARGET} PRIVATE "${HDF5_CMAKE_C_FLAGS}") @@ -57,7 +57,7 @@ endif () if (BUILD_SHARED_LIBS) add_library (${HDF5_TOOLS_LIBSH_TARGET} SHARED ${H5_TOOLS_LIB_SOURCES} ${H5_TOOLS_LIB_HDRS}) target_include_directories (${HDF5_TOOLS_LIBSH_TARGET} - PRIVATE "${HDF5_TOOLS_LIB_SOURCE_DIR};${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" + PRIVATE "${HDF5_TOOLS_LIB_SOURCE_DIR};${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" INTERFACE "$/include>" ) target_compile_options(${HDF5_TOOLS_LIBSH_TARGET} PRIVATE "${HDF5_CMAKE_C_FLAGS}") diff --git a/tools/libtest/CMakeLists.txt b/tools/libtest/CMakeLists.txt index 33df35b965b..a7d2eb3521b 100644 --- a/tools/libtest/CMakeLists.txt +++ b/tools/libtest/CMakeLists.txt @@ -6,7 +6,7 @@ project (HDF5_TOOLS_LIBTEST C) #----------------------------------------------------------------------------- add_executable (h5tools_test_utils ${HDF5_TOOLS_LIBTEST_SOURCE_DIR}/h5tools_test_utils.c) target_compile_options(h5tools_test_utils PRIVATE "${HDF5_CMAKE_C_FLAGS}") -target_include_directories(h5tools_test_utils PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories(h5tools_test_utils PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (BUILD_STATIC_LIBS) TARGET_C_PROPERTIES (h5tools_test_utils STATIC) target_link_libraries (h5tools_test_utils PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET} 
${HDF5_TEST_LIB_TARGET}) diff --git a/tools/libtest/CMakeTests.cmake b/tools/libtest/CMakeTests.cmake index 2d8e877f021..79810c413fa 100644 --- a/tools/libtest/CMakeTests.cmake +++ b/tools/libtest/CMakeTests.cmake @@ -24,21 +24,10 @@ ############################################################################## macro (ADD_H5_TEST resultfile resultcode) - add_test ( - NAME H5LIBTEST-${resultfile}-clear-objects - COMMAND ${CMAKE_COMMAND} - -E remove - ${resultfile}.out - ${resultfile}.out.err - ) - if (NOT "${last_test}" STREQUAL "") - set_tests_properties (H5LIBTEST-${resultfile}-clear-objects PROPERTIES DEPENDS ${last_test}) - endif () add_test (NAME H5LIBTEST-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN}) if (NOT "${resultcode}" STREQUAL "0") set_tests_properties (H5LIBTEST-${resultfile} PROPERTIES WILL_FAIL "true") endif () - set_tests_properties (H5LIBTEST-${resultfile} PROPERTIES DEPENDS H5LIBTEST-${resultfile}-clear-objects) endmacro () ############################################################################## diff --git a/tools/src/h5copy/CMakeLists.txt b/tools/src/h5copy/CMakeLists.txt index 9bf98cceb8c..686dbad2750 100644 --- a/tools/src/h5copy/CMakeLists.txt +++ b/tools/src/h5copy/CMakeLists.txt @@ -6,7 +6,7 @@ project (HDF5_TOOLS_SRC_H5COPY C) # -------------------------------------------------------------------- if (BUILD_STATIC_LIBS) add_executable (h5copy ${HDF5_TOOLS_SRC_H5COPY_SOURCE_DIR}/h5copy.c) - target_include_directories (h5copy PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5copy PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(h5copy PRIVATE "${HDF5_CMAKE_C_FLAGS}") TARGET_C_PROPERTIES (h5copy STATIC) target_link_libraries (h5copy PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) @@ -18,7 +18,7 @@ endif () if (BUILD_SHARED_LIBS) add_executable 
(h5copy-shared ${HDF5_TOOLS_SRC_H5COPY_SOURCE_DIR}/h5copy.c) - target_include_directories (h5copy-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5copy-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(h5copy-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}") TARGET_C_PROPERTIES (h5copy-shared SHARED) target_link_libraries (h5copy-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET}) diff --git a/tools/src/h5diff/CMakeLists.txt b/tools/src/h5diff/CMakeLists.txt index 718e55220a9..cd28c41905f 100644 --- a/tools/src/h5diff/CMakeLists.txt +++ b/tools/src/h5diff/CMakeLists.txt @@ -10,7 +10,7 @@ if (BUILD_STATIC_LIBS) ${HDF5_TOOLS_SRC_H5DIFF_SOURCE_DIR}/h5diff_main.c ${HDF5_TOOLS_SRC_H5DIFF_SOURCE_DIR}/h5diff_common.h ) - target_include_directories (h5diff PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5diff PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(h5diff PRIVATE "${HDF5_CMAKE_C_FLAGS}") #target_compile_definitions (h5diff PRIVATE H5_TOOLS_DEBUG) TARGET_C_PROPERTIES (h5diff STATIC) @@ -26,7 +26,7 @@ if (BUILD_SHARED_LIBS) ${HDF5_TOOLS_SRC_H5DIFF_SOURCE_DIR}/h5diff_main.c ${HDF5_TOOLS_SRC_H5DIFF_SOURCE_DIR}/h5diff_common.h ) - target_include_directories (h5diff-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5diff-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(h5diff-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}") #target_compile_definitions (h5diff-shared PRIVATE H5_TOOLS_DEBUG) TARGET_C_PROPERTIES (h5diff-shared SHARED) @@ -54,7 +54,7 @@ if 
(H5_HAVE_PARALLEL) ${HDF5_TOOLS_SRC_H5DIFF_SOURCE_DIR}/h5diff_common.c ${HDF5_TOOLS_SRC_H5DIFF_SOURCE_DIR}/ph5diff_main.c ) - target_include_directories (ph5diff PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (ph5diff PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(ph5diff PRIVATE "${HDF5_CMAKE_C_FLAGS}") TARGET_C_PROPERTIES (ph5diff STATIC) target_link_libraries (ph5diff PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:MPI::MPI_C>") @@ -67,7 +67,7 @@ if (H5_HAVE_PARALLEL) ${HDF5_TOOLS_SRC_H5DIFF_SOURCE_DIR}/h5diff_common.c ${HDF5_TOOLS_SRC_H5DIFF_SOURCE_DIR}/ph5diff_main.c ) - target_include_directories (ph5diff-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (ph5diff-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(ph5diff-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}") TARGET_C_PROPERTIES (ph5diff-shared SHARED) target_link_libraries (ph5diff-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:MPI::MPI_C>") diff --git a/tools/src/h5dump/CMakeLists.txt b/tools/src/h5dump/CMakeLists.txt index ad39e772ad1..b0131459a18 100644 --- a/tools/src/h5dump/CMakeLists.txt +++ b/tools/src/h5dump/CMakeLists.txt @@ -15,7 +15,7 @@ if (BUILD_STATIC_LIBS) ${HDF5_TOOLS_SRC_H5DUMP_SOURCE_DIR}/h5dump_ddl.h ${HDF5_TOOLS_SRC_H5DUMP_SOURCE_DIR}/h5dump_xml.h ) - target_include_directories (h5dump PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5dump PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(h5dump PRIVATE "${HDF5_CMAKE_C_FLAGS}") TARGET_C_PROPERTIES (h5dump STATIC) 
target_link_libraries (h5dump PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) @@ -36,7 +36,7 @@ if (BUILD_SHARED_LIBS) ${HDF5_TOOLS_SRC_H5DUMP_SOURCE_DIR}/h5dump_ddl.h ${HDF5_TOOLS_SRC_H5DUMP_SOURCE_DIR}/h5dump_xml.h ) - target_include_directories (h5dump-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5dump-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(h5dump-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}") TARGET_C_PROPERTIES (h5dump-shared SHARED) target_link_libraries (h5dump-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET}) diff --git a/tools/src/h5format_convert/CMakeLists.txt b/tools/src/h5format_convert/CMakeLists.txt index 59b1ff57239..d6d9027f53c 100644 --- a/tools/src/h5format_convert/CMakeLists.txt +++ b/tools/src/h5format_convert/CMakeLists.txt @@ -6,7 +6,7 @@ project (HDF5_TOOLS_SRC_H5FC C) # -------------------------------------------------------------------- if (BUILD_STATIC_LIBS) add_executable (h5format_convert ${HDF5_TOOLS_SRC_H5FC_SOURCE_DIR}/h5format_convert.c) - target_include_directories (h5format_convert PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5format_convert PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(h5format_convert PRIVATE "${HDF5_CMAKE_C_FLAGS}") TARGET_C_PROPERTIES (h5format_convert STATIC) target_link_libraries (h5format_convert PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) @@ -17,7 +17,7 @@ if (BUILD_STATIC_LIBS) endif () if (BUILD_SHARED_LIBS) add_executable (h5format_convert-shared ${HDF5_TOOLS_SRC_H5FC_SOURCE_DIR}/h5format_convert.c) - target_include_directories (h5format_convert-shared PRIVATE 
"${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5format_convert-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(h5format_convert-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}") TARGET_C_PROPERTIES (h5format_convert-shared SHARED) target_link_libraries (h5format_convert-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET}) diff --git a/tools/src/h5import/CMakeLists.txt b/tools/src/h5import/CMakeLists.txt index bc96122e088..c8ee6b841c6 100644 --- a/tools/src/h5import/CMakeLists.txt +++ b/tools/src/h5import/CMakeLists.txt @@ -6,7 +6,7 @@ project (HDF5_TOOLS_SRC_H5IMPORT C) # -------------------------------------------------------------------- if (BUILD_STATIC_LIBS) add_executable (h5import ${HDF5_TOOLS_SRC_H5IMPORT_SOURCE_DIR}/h5import.c ${HDF5_TOOLS_SRC_H5IMPORT_SOURCE_DIR}/h5import.h) - target_include_directories (h5import PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5import PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (h5import STATIC) target_link_libraries (h5import PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) #set_target_properties (h5import PROPERTIES COMPILE_DEFINITIONS H5DEBUGIMPORT) @@ -18,7 +18,7 @@ endif () if (BUILD_SHARED_LIBS) add_executable (h5import-shared ${HDF5_TOOLS_SRC_H5IMPORT_SOURCE_DIR}/h5import.c ${HDF5_TOOLS_SRC_H5IMPORT_SOURCE_DIR}/h5import.h) - target_include_directories (h5import-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5import-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (h5import-shared SHARED) target_link_libraries 
(h5import-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET}) #set_target_properties (h5import-shared PROPERTIES COMPILE_DEFINITIONS H5DEBUGIMPORT) diff --git a/tools/src/h5jam/CMakeLists.txt b/tools/src/h5jam/CMakeLists.txt index 0755be289e4..aabdd00589e 100644 --- a/tools/src/h5jam/CMakeLists.txt +++ b/tools/src/h5jam/CMakeLists.txt @@ -6,14 +6,14 @@ project (HDF5_TOOLS_SRC_H5JAM C) # -------------------------------------------------------------------- if (BUILD_STATIC_LIBS) add_executable (h5jam ${HDF5_TOOLS_SRC_H5JAM_SOURCE_DIR}/h5jam.c) - target_include_directories (h5jam PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5jam PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (h5jam STATIC) target_link_libraries (h5jam PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) set_target_properties (h5jam PROPERTIES FOLDER tools) set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};h5jam") add_executable (h5unjam ${HDF5_TOOLS_SRC_H5JAM_SOURCE_DIR}/h5unjam.c) - target_include_directories (h5unjam PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5unjam PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (h5unjam STATIC) target_link_libraries (h5unjam PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) set_target_properties (h5unjam PROPERTIES FOLDER tools) @@ -27,14 +27,14 @@ endif () if (BUILD_SHARED_LIBS) add_executable (h5jam-shared ${HDF5_TOOLS_SRC_H5JAM_SOURCE_DIR}/h5jam.c) - target_include_directories (h5jam-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5jam-shared PRIVATE 
"${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (h5jam-shared SHARED) target_link_libraries (h5jam-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET}) set_target_properties (h5jam-shared PROPERTIES FOLDER tools) set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};h5jam-shared") add_executable (h5unjam-shared ${HDF5_TOOLS_SRC_H5JAM_SOURCE_DIR}/h5unjam.c) - target_include_directories (h5unjam-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5unjam-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (h5unjam-shared SHARED) target_link_libraries (h5unjam-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET}) set_target_properties (h5unjam-shared PROPERTIES FOLDER tools) diff --git a/tools/src/h5ls/CMakeLists.txt b/tools/src/h5ls/CMakeLists.txt index 5d19d159101..055d545b155 100644 --- a/tools/src/h5ls/CMakeLists.txt +++ b/tools/src/h5ls/CMakeLists.txt @@ -6,7 +6,7 @@ project (HDF5_TOOLS_SRC_H5LS C) #----------------------------------------------------------------------------- if (BUILD_STATIC_LIBS) add_executable (h5ls ${HDF5_TOOLS_SRC_H5LS_SOURCE_DIR}/h5ls.c) - target_include_directories (h5ls PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5ls PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(h5ls PRIVATE "${HDF5_CMAKE_C_FLAGS}") #target_compile_definitions(h5ls PRIVATE H5_TOOLS_DEBUG) TARGET_C_PROPERTIES (h5ls STATIC) @@ -19,7 +19,7 @@ endif () if (BUILD_SHARED_LIBS) add_executable (h5ls-shared ${HDF5_TOOLS_SRC_H5LS_SOURCE_DIR}/h5ls.c) - target_include_directories (h5ls-shared PRIVATE 
"${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5ls-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(h5ls-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}") #target_compile_definitions(h5ls-shared PRIVATE H5_TOOLS_DEBUG) TARGET_C_PROPERTIES (h5ls-shared SHARED) diff --git a/tools/src/h5perf/CMakeLists.txt b/tools/src/h5perf/CMakeLists.txt index a0dc72922b7..9fd9366e260 100644 --- a/tools/src/h5perf/CMakeLists.txt +++ b/tools/src/h5perf/CMakeLists.txt @@ -2,15 +2,14 @@ cmake_minimum_required (VERSION 3.12) project (HDF5_TOOLS_SRC_H5PERF C) # -------------------------------------------------------------------- -# Add the executables +# h5perf_serial # -------------------------------------------------------------------- -#-- Adding test for h5perf_serial set (h5perf_serial_SOURCES ${HDF5_TOOLS_SRC_H5PERF_SOURCE_DIR}/sio_perf.c ${HDF5_TOOLS_SRC_H5PERF_SOURCE_DIR}/sio_engine.c ) add_executable (h5perf_serial ${h5perf_serial_SOURCES}) -target_include_directories (h5perf_serial PRIVATE "${HDF5_TEST_SRC_DIR};${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (h5perf_serial PRIVATE "${HDF5_TEST_SRC_DIR};${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (BUILD_STATIC_LIBS) TARGET_C_PROPERTIES (h5perf_serial STATIC) target_link_libraries (h5perf_serial PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) @@ -23,48 +22,20 @@ set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};h5perf_serial set (H5_DEP_EXECUTABLES h5perf_serial) -#----------------------------------------------------------------------------- -# Add Target to clang-format -#----------------------------------------------------------------------------- if (HDF5_ENABLE_FORMATTERS) clang_format (HDF5_TOOLS_SRC_H5PERF_h5perf_serial_FORMAT h5perf_serial) endif () +# 
-------------------------------------------------------------------- +# h5perf +# -------------------------------------------------------------------- if (H5_HAVE_PARALLEL) - if (UNIX) - #-- Adding test for perf - only on unix systems - set (perf_SOURCES - ${HDF5_TOOLS_SRC_H5PERF_SOURCE_DIR}/perf.c - ) - add_executable (perf ${perf_SOURCES}) - target_include_directories (perf PRIVATE "${HDF5_TEST_SRC_DIR};${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") - if (BUILD_STATIC_LIBS) - TARGET_C_PROPERTIES (perf STATIC) - target_link_libraries (perf PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:${MPI_C_LIBRARIES}>") - else () - TARGET_C_PROPERTIES (perf SHARED) - target_link_libraries (perf PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:${MPI_C_LIBRARIES}>") - endif () - set_target_properties (perf PROPERTIES FOLDER perform) - set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};perf") - - set (H5_DEP_EXECUTABLES perf) - - #----------------------------------------------------------------------------- - # Add Target to clang-format - #----------------------------------------------------------------------------- - if (HDF5_ENABLE_FORMATTERS) - clang_format (HDF5_TOOLS_SRC_H5PERF_perf_FORMAT perf) - endif () - endif () - - #-- Adding test for h5perf set (h5perf_SOURCES ${HDF5_TOOLS_SRC_H5PERF_SOURCE_DIR}/pio_perf.c ${HDF5_TOOLS_SRC_H5PERF_SOURCE_DIR}/pio_engine.c ) add_executable (h5perf ${h5perf_SOURCES}) - target_include_directories (h5perf PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5perf PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (BUILD_STATIC_LIBS) TARGET_C_PROPERTIES (h5perf STATIC) target_link_libraries (h5perf PRIVATE ${LINK_LIBS} ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:MPI::MPI_C>") @@ -77,9 +48,6 @@ if (H5_HAVE_PARALLEL) set (H5_DEP_EXECUTABLES h5perf) - 
#----------------------------------------------------------------------------- - # Add Target to clang-format - #----------------------------------------------------------------------------- if (HDF5_ENABLE_FORMATTERS) clang_format (HDF5_TOOLS_SRC_H5PERF_h5perf_FORMAT h5perf) endif () diff --git a/tools/src/h5repack/CMakeLists.txt b/tools/src/h5repack/CMakeLists.txt index 0a4bddc65dc..261c05a27d2 100644 --- a/tools/src/h5repack/CMakeLists.txt +++ b/tools/src/h5repack/CMakeLists.txt @@ -17,7 +17,7 @@ set (REPACK_COMMON_SOURCES if (BUILD_STATIC_LIBS) add_executable (h5repack ${REPACK_COMMON_SOURCES} ${HDF5_TOOLS_SRC_H5REPACK_SOURCE_DIR}/h5repack_main.c) - target_include_directories (h5repack PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5repack PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(h5repack PRIVATE "${HDF5_CMAKE_C_FLAGS}") TARGET_C_PROPERTIES (h5repack STATIC) target_link_libraries (h5repack PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) @@ -29,7 +29,7 @@ endif () if (BUILD_SHARED_LIBS) add_executable (h5repack-shared ${REPACK_COMMON_SOURCES} ${HDF5_TOOLS_SRC_H5REPACK_SOURCE_DIR}/h5repack_main.c) - target_include_directories (h5repack-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5repack-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(h5repack-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}") TARGET_C_PROPERTIES (h5repack-shared SHARED) target_link_libraries (h5repack-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET}) diff --git a/tools/src/h5stat/CMakeLists.txt b/tools/src/h5stat/CMakeLists.txt index 5ee091c48ba..661b7f85d9f 100644 --- a/tools/src/h5stat/CMakeLists.txt +++ b/tools/src/h5stat/CMakeLists.txt 
@@ -6,7 +6,7 @@ project (HDF5_TOOLS_SRC_H5STAT C) # -------------------------------------------------------------------- if (BUILD_STATIC_LIBS) add_executable (h5stat ${HDF5_TOOLS_SRC_H5STAT_SOURCE_DIR}/h5stat.c) - target_include_directories (h5stat PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5stat PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(h5stat PRIVATE "${HDF5_CMAKE_C_FLAGS}") TARGET_C_PROPERTIES (h5stat STATIC) target_link_libraries (h5stat PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) @@ -18,7 +18,7 @@ endif () if (BUILD_SHARED_LIBS) add_executable (h5stat-shared ${HDF5_TOOLS_SRC_H5STAT_SOURCE_DIR}/h5stat.c) - target_include_directories (h5stat-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5stat-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(h5stat-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}") TARGET_C_PROPERTIES (h5stat-shared SHARED) target_link_libraries (h5stat-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET}) diff --git a/tools/src/misc/CMakeLists.txt b/tools/src/misc/CMakeLists.txt index 751fe092438..b9e7f28351c 100644 --- a/tools/src/misc/CMakeLists.txt +++ b/tools/src/misc/CMakeLists.txt @@ -7,7 +7,7 @@ project (HDF5_TOOLS_SRC_MISC C) #-- Misc Executables if (BUILD_STATIC_LIBS) add_executable (h5debug ${HDF5_TOOLS_SRC_MISC_SOURCE_DIR}/h5debug.c) - target_include_directories (h5debug PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5debug PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(h5debug PRIVATE "${HDF5_CMAKE_C_FLAGS}") 
TARGET_C_PROPERTIES (h5debug STATIC) target_link_libraries (h5debug PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) @@ -15,7 +15,7 @@ if (BUILD_STATIC_LIBS) set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};h5debug") add_executable (h5repart ${HDF5_TOOLS_SRC_MISC_SOURCE_DIR}/h5repart.c) - target_include_directories (h5repart PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5repart PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(h5repart PRIVATE "${HDF5_CMAKE_C_FLAGS}") TARGET_C_PROPERTIES (h5repart STATIC) target_link_libraries (h5repart PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) @@ -23,7 +23,7 @@ if (BUILD_STATIC_LIBS) set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};h5repart") add_executable (h5mkgrp ${HDF5_TOOLS_SRC_MISC_SOURCE_DIR}/h5mkgrp.c) - target_include_directories (h5mkgrp PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5mkgrp PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(h5mkgrp PRIVATE "${HDF5_CMAKE_C_FLAGS}") TARGET_C_PROPERTIES (h5mkgrp STATIC) target_link_libraries (h5mkgrp PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) @@ -31,7 +31,7 @@ if (BUILD_STATIC_LIBS) set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};h5mkgrp") add_executable (h5clear ${HDF5_TOOLS_SRC_MISC_SOURCE_DIR}/h5clear.c) - target_include_directories (h5clear PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5clear PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(h5clear PRIVATE "${HDF5_CMAKE_C_FLAGS}") TARGET_C_PROPERTIES 
(h5clear STATIC) target_link_libraries (h5clear PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) @@ -47,7 +47,7 @@ if (BUILD_STATIC_LIBS) endif () if (BUILD_SHARED_LIBS) add_executable (h5debug-shared ${HDF5_TOOLS_SRC_MISC_SOURCE_DIR}/h5debug.c) - target_include_directories (h5debug-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5debug-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (h5debug-shared SHARED) target_compile_options(h5debug-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}") target_link_libraries (h5debug-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET}) @@ -55,7 +55,7 @@ if (BUILD_SHARED_LIBS) set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};h5debug-shared") add_executable (h5repart-shared ${HDF5_TOOLS_SRC_MISC_SOURCE_DIR}/h5repart.c) - target_include_directories (h5repart-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5repart-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(h5repart-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}") TARGET_C_PROPERTIES (h5repart-shared SHARED) target_link_libraries (h5repart-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET}) @@ -63,7 +63,7 @@ if (BUILD_SHARED_LIBS) set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};h5repart-shared") add_executable (h5mkgrp-shared ${HDF5_TOOLS_SRC_MISC_SOURCE_DIR}/h5mkgrp.c) - target_include_directories (h5mkgrp-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5mkgrp-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") 
target_compile_options(h5mkgrp-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}") TARGET_C_PROPERTIES (h5mkgrp-shared SHARED) target_link_libraries (h5mkgrp-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET}) @@ -71,7 +71,7 @@ if (BUILD_SHARED_LIBS) set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};h5mkgrp-shared") add_executable (h5clear-shared ${HDF5_TOOLS_SRC_MISC_SOURCE_DIR}/h5clear.c) - target_include_directories (h5clear-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5clear-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(h5clear-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}") TARGET_C_PROPERTIES (h5clear-shared SHARED) target_link_libraries (h5clear-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET}) diff --git a/tools/test/h5copy/CMakeLists.txt b/tools/test/h5copy/CMakeLists.txt index fb88473785c..c2b375e3a99 100644 --- a/tools/test/h5copy/CMakeLists.txt +++ b/tools/test/h5copy/CMakeLists.txt @@ -6,7 +6,7 @@ project (HDF5_TOOLS_TEST_H5COPY C) # -------------------------------------------------------------------- if (HDF5_BUILD_GENERATORS AND BUILD_STATIC_LIBS) add_executable (h5copygentest ${HDF5_TOOLS_TEST_H5COPY_SOURCE_DIR}/h5copygentest.c) - target_include_directories (h5copygentest PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5copygentest PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (h5copygentest STATIC) target_link_libraries (h5copygentest PRIVATE ${HDF5_LIB_TARGET}) set_target_properties (h5copygentest PROPERTIES FOLDER generator/tools) @@ -30,7 +30,7 @@ if (BUILD_SHARED_LIBS) set (H5COPY_TOOL_PLUGIN_LIB_TARGET ${H5COPY_TOOL_PLUGIN_LIB_CORENAME}) add_library (${H5COPY_TOOL_PLUGIN_LIB_TARGET} SHARED dynlib_copy.c) - 
target_include_directories (${H5COPY_TOOL_PLUGIN_LIB_TARGET} PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (${H5COPY_TOOL_PLUGIN_LIB_TARGET} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (${H5COPY_TOOL_PLUGIN_LIB_TARGET} SHARED) target_link_libraries (${H5COPY_TOOL_PLUGIN_LIB_TARGET} PRIVATE ${HDF5_LIBSH_TARGET}) H5_SET_LIB_OPTIONS (${H5COPY_TOOL_PLUGIN_LIB_TARGET} ${H5COPY_TOOL_PLUGIN_LIB_NAME} SHARED "LIB") diff --git a/tools/test/h5copy/CMakeTests.cmake b/tools/test/h5copy/CMakeTests.cmake index 9310ec0252c..b47000a3193 100644 --- a/tools/test/h5copy/CMakeTests.cmake +++ b/tools/test/h5copy/CMakeTests.cmake @@ -57,25 +57,17 @@ # Perform h5copy according to passing parameters # macro (ADD_H5_F_TEST testname resultcode infile fparam vparam sparam srcname dparam dstname) - if (NOT HDF5_ENABLE_USING_MEMCHECKER) - # Remove any output file left over from previous test run - add_test ( - NAME H5COPY_F-${testname}-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}.out.h5 - ) - endif () + # Remove any output file left over from previous test run + add_test ( + NAME H5COPY_F-${testname}-clear-objects + COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}.out.h5 + ) add_test ( NAME H5COPY_F-${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ -f ${fparam} -i ./testfiles/${infile} -o ./testfiles/${testname}.out.h5 ${vparam} ${sparam} ${srcname} ${dparam} ${dstname} ${ARGN} ) - if (HDF5_ENABLE_USING_MEMCHECKER) - if (last_test) - set_tests_properties (H5COPY_F-${testname} PROPERTIES DEPENDS ${last_test}) - endif () - else () - set_tests_properties (H5COPY_F-${testname} PROPERTIES DEPENDS H5COPY_F-${testname}-clear-objects) - endif () + set_tests_properties (H5COPY_F-${testname} PROPERTIES DEPENDS H5COPY_F-${testname}-clear-objects) # resultcode=2 will cause the test to skip the diff test if (NOT "${resultcode}" STREQUAL "2") 
@@ -88,28 +80,29 @@ set_tests_properties (H5COPY_F-${testname}-DIFF PROPERTIES WILL_FAIL "true") endif () endif () + add_test ( + NAME H5COPY_F-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}.out.h5 + ) + if (NOT "${resultcode}" STREQUAL "2") + set_tests_properties (H5COPY_F-${testname}-clean-objects PROPERTIES DEPENDS H5COPY_F-${testname}-DIFF) + else () + set_tests_properties (H5COPY_F-${testname}-clean-objects PROPERTIES DEPENDS H5COPY_F-${testname}) + endif () endmacro () macro (ADD_H5_TEST testname resultcode infile vparam sparam srcname dparam dstname) - if (NOT HDF5_ENABLE_USING_MEMCHECKER) - # Remove any output file left over from previous test run - add_test ( - NAME H5COPY-${testname}-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}.out.h5 - ) - endif () + # Remove any output file left over from previous test run + add_test ( + NAME H5COPY-${testname}-clear-objects + COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}.out.h5 + ) add_test ( NAME H5COPY-${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ -i ./testfiles/${infile} -o ./testfiles/${testname}.out.h5 ${vparam} ${sparam} ${srcname} ${dparam} ${dstname} ${ARGN} ) - if (HDF5_ENABLE_USING_MEMCHECKER) - if (last_test) - set_tests_properties (H5COPY-${testname} PROPERTIES DEPENDS ${last_test}) - endif () - else () - set_tests_properties (H5COPY-${testname} PROPERTIES DEPENDS H5COPY-${testname}-clear-objects) - endif () + set_tests_properties (H5COPY-${testname} PROPERTIES DEPENDS H5COPY-${testname}-clear-objects) # resultcode=2 will cause the test to skip the diff test if (NOT "${resultcode}" STREQUAL "2") @@ -122,6 +115,15 @@ set_tests_properties (H5COPY-${testname}-DIFF PROPERTIES WILL_FAIL "true") endif () endif () + add_test ( + NAME H5COPY-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}.out.h5 + ) + if (NOT "${resultcode}" STREQUAL "2") + set_tests_properties 
(H5COPY-${testname}-clean-objects PROPERTIES DEPENDS H5COPY-${testname}-DIFF) + else () + set_tests_properties (H5COPY-${testname}-clean-objects PROPERTIES DEPENDS H5COPY-${testname}) + endif () endmacro () macro (ADD_SKIP_H5_TEST testname skipresultfile) @@ -135,25 +137,17 @@ endmacro () macro (ADD_H5_TEST2 testname resultcode infile psparam pdparam vparam sparam srcname dparam dstname) - if (NOT HDF5_ENABLE_USING_MEMCHECKER) - # Remove any output file left over from previous test run - add_test ( - NAME H5COPY-${testname}-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}.out.h5 - ) - endif () + # Remove any output file left over from previous test run + add_test ( + NAME H5COPY-${testname}-clear-objects + COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}.out.h5 + ) add_test ( NAME H5COPY-${testname}-prefill COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ -i ./testfiles/${infile} -o ./testfiles/${testname}.out.h5 -v -s ${psparam} -d ${pdparam} ) - if (HDF5_ENABLE_USING_MEMCHECKER) - if (last_test) - set_tests_properties (H5COPY-${testname}-prefill PROPERTIES DEPENDS ${last_test}) - endif () - else () - set_tests_properties (H5COPY-${testname}-prefill PROPERTIES DEPENDS H5COPY-${testname}-clear-objects) - endif () + set_tests_properties (H5COPY-${testname}-prefill PROPERTIES DEPENDS H5COPY-${testname}-clear-objects) add_test ( NAME H5COPY-${testname} @@ -171,28 +165,29 @@ set_tests_properties (H5COPY-${testname}-DIFF PROPERTIES WILL_FAIL "true") endif () endif () + add_test ( + NAME H5COPY-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}.out.h5 + ) + if (NOT "${resultcode}" STREQUAL "2") + set_tests_properties (H5COPY-${testname}-clean-objects PROPERTIES DEPENDS H5COPY-${testname}-DIFF) + else () + set_tests_properties (H5COPY-${testname}-clean-objects PROPERTIES DEPENDS H5COPY-${testname}) + endif () endmacro () macro (ADD_H5_TEST_SAME testname resultcode pfile psparam pdparam vparam sparam srcname 
dparam dstname) - if (NOT HDF5_ENABLE_USING_MEMCHECKER) - # Remove any output file left over from previous test run - add_test ( - NAME H5COPY_SAME-${testname}-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}.out.h5 - ) - endif () + # Remove any output file left over from previous test run + add_test ( + NAME H5COPY_SAME-${testname}-clear-objects + COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}.out.h5 + ) add_test ( NAME H5COPY_SAME-${testname}-prefill COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ -i ./testfiles/${pfile} -o ./testfiles/${testname}.out.h5 -v -s ${psparam} -d ${pdparam} ) - if (HDF5_ENABLE_USING_MEMCHECKER) - if (last_test) - set_tests_properties (H5COPY_SAME-${testname}-prefill PROPERTIES DEPENDS ${last_test}) - endif () - else (HDF5_ENABLE_USING_MEMCHECKER) - set_tests_properties (H5COPY_SAME-${testname}-prefill PROPERTIES DEPENDS H5COPY_SAME-${testname}-clear-objects) - endif () + set_tests_properties (H5COPY_SAME-${testname}-prefill PROPERTIES DEPENDS H5COPY_SAME-${testname}-clear-objects) add_test ( NAME H5COPY_SAME-${testname} @@ -210,6 +205,15 @@ set_tests_properties (H5COPY_SAME-${testname}-DIFF PROPERTIES WILL_FAIL "true") endif () endif () + add_test ( + NAME H5COPY_SAME-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}.out.h5 + ) + if (NOT "${resultcode}" STREQUAL "2") + set_tests_properties (H5COPY_SAME-${testname}-clean-objects PROPERTIES DEPENDS H5COPY_SAME-${testname}-DIFF) + else () + set_tests_properties (H5COPY_SAME-${testname}-clean-objects PROPERTIES DEPENDS H5COPY_SAME-${testname}) + endif () endmacro () # @@ -217,21 +221,19 @@ # files instead of checking with h5ls. 
# macro (ADD_H5_CMP_TEST testname resultcode infile vparam sparam srcname dparam dstname) + # Remove any output file left over from previous test run + add_test ( + NAME H5COPY-CMP-${testname}-clear-objects + COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}.out.h5 + ) # If using memchecker add tests without using scripts if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5COPY-CMP-${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ -i ./testfiles/${infile} -o ./testfiles/${testname}.out.h5 ${vparam} ${sparam} ${srcname} ${dparam} ${dstname} ${ARGN}) if ("${resultcode}" STREQUAL "1") set_tests_properties (H5COPY-CMP-${testname} PROPERTIES WILL_FAIL "true") endif () - if (last_test) - set_tests_properties (H5COPY-CMP-${testname} PROPERTIES DEPENDS ${last_test}) - endif () + set_tests_properties (H5COPY-CMP-${testname} PROPERTIES DEPENDS H5COPY-CMP-${testname}-clear-objects) else () - # Remove any output file left over from previous test run - add_test ( - NAME H5COPY-CMP-${testname}-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}.out.h5 - ) add_test ( NAME H5COPY-CMP-${testname} COMMAND "${CMAKE_COMMAND}" @@ -248,6 +250,11 @@ ) set_tests_properties (H5COPY-CMP-${testname} PROPERTIES DEPENDS H5COPY-CMP-${testname}-clear-objects) endif () + add_test ( + NAME H5COPY-CMP-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}.out.h5 + ) + set_tests_properties (H5COPY-CMP-${testname}-clean-objects PROPERTIES DEPENDS H5COPY-CMP-${testname}) endmacro () macro (ADD_H5_UD_TEST testname resultcode infile sparam srcname dparam dstname cmpfile) @@ -310,6 +317,11 @@ -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5COPY_UD-${testname}-DIFF PROPERTIES DEPENDS H5COPY_UD-${testname}) + add_test ( + NAME H5COPY_UD-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove testfiles/${testname}.out.h5 + ) + set_tests_properties (H5COPY_UD-${testname}-clean-objects PROPERTIES DEPENDS 
H5COPY_UD-${testname}-DIFF) endif () endmacro () @@ -317,7 +329,7 @@ if (NOT HDF5_ENABLE_USING_MEMCHECKER) # Remove any output file left over from previous test run add_test ( - NAME H5COPY_UD_ERR-${testname}-clearall-objects + NAME H5COPY_UD_ERR-${testname}-clear-objects COMMAND ${CMAKE_COMMAND} -E remove testfiles/${testname}_ERR.out.h5 ) if ("${resultcode}" STREQUAL "2") @@ -357,7 +369,7 @@ -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () - set_tests_properties (H5COPY_UD_ERR-${testname} PROPERTIES DEPENDS H5COPY_UD_ERR-${testname}-clearall-objects) + set_tests_properties (H5COPY_UD_ERR-${testname} PROPERTIES DEPENDS H5COPY_UD_ERR-${testname}-clear-objects) add_test ( NAME H5COPY_UD_ERR-${testname}-DIFF COMMAND "${CMAKE_COMMAND}" @@ -375,6 +387,11 @@ -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5COPY_UD_ERR-${testname}-DIFF PROPERTIES DEPENDS H5COPY_UD_ERR-${testname}) + add_test ( + NAME H5COPY_UD_ERR-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove testfiles/${testname}_ERR.out.h5 + ) + set_tests_properties (H5COPY_UD_ERR-${testname}-clean-objects PROPERTIES DEPENDS H5COPY_UD_ERR-${testname}-DIFF) endif () endmacro () @@ -418,51 +435,6 @@ set (HDF_EXT_SRC_FILE h5copy_extlinks_src) set (HDF_EXT_TRG_FILE h5copy_extlinks_trg) - if (HDF5_ENABLE_USING_MEMCHECKER) - # Remove any output file left over from previous test run - add_test ( - NAME H5COPY-clearall-objects - COMMAND ${CMAKE_COMMAND} -E remove - simple.out.h5 - chunk.out.h5 - compact.out.h5 - compound.out.h5 - compressed.out.h5 - named_vl.out.h5 - nested_vl.out.h5 - simple_top.out.h5 - dsrename.out.h5 - grp_empty.out.h5 - grp_dsets.out.h5 - grp_nested.out.h5 - simple_group.out.h5 - grp_rename.out.h5 - grp_dsets_rename.out.h5 - A_B1_simple.out.h5 - A_B2_simple2.out.h5 - C_D_simple.out.h5 - E_F_grp_dsets.out.h5 - G_H_grp_nested.out.h5 - region_ref.out.h5 - ext_link.out.h5 - ext_link_f.out.h5 - ext_dangle_noobj.out.h5 - ext_dangle_noobj_f.out.h5 - ext_dangle_nofile.out.h5 - 
ext_dangle_nofile_f.out.h5 - ext_link_group.out.h5 - ext_link_group_f.out.h5 - samefile1.out.h5 - samefile2.out.h5 - h5copy_misc1.out.h5 - ) - set_tests_properties (H5COPY-clearall-objects PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles") - if (last_test) - set_tests_properties (H5COPY-clearall-objects PROPERTIES DEPENDS ${last_test}) - endif () - set (last_test "H5COPY-clearall-objects") - endif () - # See which filters are usable (and skip tests for filters we # don't have). Do this by searching H5pubconf.h to see which # filters are defined. diff --git a/tools/test/h5diff/CMakeLists.txt b/tools/test/h5diff/CMakeLists.txt index e51680660bb..dbddad68380 100644 --- a/tools/test/h5diff/CMakeLists.txt +++ b/tools/test/h5diff/CMakeLists.txt @@ -6,7 +6,7 @@ project (HDF5_TOOLS_TEST_H5DIFF C) # -------------------------------------------------------------------- if (HDF5_BUILD_GENERATORS AND BUILD_STATIC_LIBS) add_executable (h5diffgentest ${HDF5_TOOLS_TEST_H5DIFF_SOURCE_DIR}/h5diffgentest.c) - target_include_directories (h5diffgentest PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5diffgentest PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (h5diffgentest STATIC) target_link_libraries (h5diffgentest PRIVATE ${HDF5_LIB_TARGET}) set_target_properties (h5diffgentest PROPERTIES FOLDER generator/tools) @@ -30,7 +30,7 @@ if (BUILD_SHARED_LIBS) set (H5DIFF_TOOL_PLUGIN_LIB_TARGET ${H5DIFF_TOOL_PLUGIN_LIB_CORENAME}) add_library (${H5DIFF_TOOL_PLUGIN_LIB_TARGET} SHARED dynlib_diff.c) - target_include_directories (${H5DIFF_TOOL_PLUGIN_LIB_TARGET} PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (${H5DIFF_TOOL_PLUGIN_LIB_TARGET} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (${H5DIFF_TOOL_PLUGIN_LIB_TARGET} SHARED) 
target_link_libraries (${H5DIFF_TOOL_PLUGIN_LIB_TARGET} PRIVATE ${HDF5_LIBSH_TARGET}) H5_SET_LIB_OPTIONS (${H5DIFF_TOOL_PLUGIN_LIB_TARGET} ${H5DIFF_TOOL_PLUGIN_LIB_NAME} SHARED "LIB") diff --git a/tools/test/h5diff/CMakeTests.cmake b/tools/test/h5diff/CMakeTests.cmake index d2f0617b922..7e437af09ca 100644 --- a/tools/test/h5diff/CMakeTests.cmake +++ b/tools/test/h5diff/CMakeTests.cmake @@ -387,13 +387,9 @@ # If using memchecker add tests without using scripts if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5DIFF-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN}) - set_tests_properties (H5DIFF-${resultfile} PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles") if (${resultcode}) set_tests_properties (H5DIFF-${resultfile} PROPERTIES WILL_FAIL "true") endif () - if (last_test) - set_tests_properties (H5DIFF-${resultfile} PROPERTIES DEPENDS ${last_test}) - endif () else () add_test ( NAME H5DIFF-${resultfile} @@ -408,10 +404,10 @@ -D "TEST_APPEND=EXIT CODE:" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) - if (last_test) - set_tests_properties (H5DIFF-${resultfile} PROPERTIES DEPENDS ${last_test}) - endif () endif () + set_tests_properties (H5DIFF-${resultfile} PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles" + ) endmacro () macro (ADD_PH5_TEST resultfile resultcode) @@ -422,9 +418,6 @@ if (${resultcode}) set_tests_properties (MPI_TEST_H5DIFF-${resultfile} PROPERTIES WILL_FAIL "true") endif () - if (last_test) - set_tests_properties (MPI_TEST_H5DIFF-${resultfile} PROPERTIES DEPENDS ${last_test}) - endif () else () add_test ( NAME MPI_TEST_H5DIFF-${resultfile} @@ -441,11 +434,10 @@ -D "TEST_SORT_COMPARE=TRUE" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) - if (last_test) - set_tests_properties (MPI_TEST_H5DIFF-${resultfile} PROPERTIES DEPENDS ${last_test}) - endif () - set (last_test "MPI_TEST_H5DIFF-${resultfile}") endif () + set_tests_properties (MPI_TEST_H5DIFF-${resultfile} PROPERTIES + WORKING_DIRECTORY 
"${PROJECT_BINARY_DIR}/PAR/testfiles" + ) endmacro () macro (ADD_H5_UD_TEST testname resultcode resultfile) @@ -485,9 +477,6 @@ -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () - if (last_test) - set_tests_properties (H5DIFF_UD-${testname} PROPERTIES DEPENDS ${last_test}) - endif () endif () endmacro () @@ -562,394 +551,6 @@ set (FILEV4 4_vds.h5) set (FILEV5 5_vds.h5) - if (HDF5_ENABLE_USING_MEMCHECKER) - # Remove any output file left over from previous test run - add_test ( - NAME H5DIFF-clearall-objects - COMMAND ${CMAKE_COMMAND} - -E remove - h5diff_10.out - h5diff_10.out.err - h5diff_100.out - h5diff_100.out.err - h5diff_101.out - h5diff_101.out.err - h5diff_102.out - h5diff_102.out.err - h5diff_103.out - h5diff_103.out.err - h5diff_104.out - h5diff_104.out.err - h5diff_11.out - h5diff_11.out.err - h5diff_12.out - h5diff_12.out.err - h5diff_13.out - h5diff_13.out.err - h5diff_14.out - h5diff_14.out.err - h5diff_15.out - h5diff_15.out.err - h5diff_16_1.out - h5diff_16_1.out.err - h5diff_16_2.out - h5diff_16_2.out.err - h5diff_16_3.out - h5diff_16_3.out.err - h5diff_17.out - h5diff_17.out.err - h5diff_171.out - h5diff_171.out.err - h5diff_172.out - h5diff_172.out.err - h5diff_18_1.out - h5diff_18_1.out.err - h5diff_18.out - h5diff_18.out.err - h5diff_20.out - h5diff_20.out.err - h5diff_200.out - h5diff_200.out.err - h5diff_201.out - h5diff_201.out.err - h5diff_202.out - h5diff_202.out.err - h5diff_203.out - h5diff_203.out.err - h5diff_204.out - h5diff_204.out.err - h5diff_205.out - h5diff_205.out.err - h5diff_206.out - h5diff_206.out.err - h5diff_207.out - h5diff_207.out.err - h5diff_208.out - h5diff_208.out.err - h5diff_220.out - h5diff_220.out.err - h5diff_221.out - h5diff_221.out.err - h5diff_222.out - h5diff_222.out.err - h5diff_223.out - h5diff_223.out.err - h5diff_224.out - h5diff_224.out.err - h5diff_21.out - h5diff_21.out.err - h5diff_22.out - h5diff_22.out.err - h5diff_23.out - h5diff_23.out.err - h5diff_24.out - h5diff_24.out.err - h5diff_25.out - 
h5diff_25.out.err - h5diff_26.out - h5diff_26.out.err - h5diff_27.out - h5diff_27.out.err - h5diff_28.out - h5diff_28.out.err - h5diff_300.out - h5diff_300.out.err - h5diff_400.out - h5diff_400.out.err - h5diff_401.out - h5diff_401.out.err - h5diff_402.out - h5diff_402.out.err - h5diff_403.out - h5diff_403.out.err - h5diff_404.out - h5diff_404.out.err - h5diff_405.out - h5diff_405.out.err - h5diff_406.out - h5diff_406.out.err - h5diff_407.out - h5diff_407.out.err - h5diff_408.out - h5diff_408.out.err - h5diff_409.out - h5diff_409.out.err - h5diff_410.out - h5diff_410.out.err - h5diff_411.out - h5diff_411.out.err - h5diff_412.out - h5diff_412.out.err - h5diff_413.out - h5diff_413.out.err - h5diff_414.out - h5diff_414.out.err - h5diff_415.out - h5diff_415.out.err - h5diff_416.out - h5diff_416.out.err - h5diff_417.out - h5diff_417.out.err - h5diff_418.out - h5diff_418.out.err - h5diff_419.out - h5diff_419.out.err - h5diff_420.out - h5diff_420.out.err - h5diff_421.out - h5diff_421.out.err - h5diff_422.out - h5diff_422.out.err - h5diff_423.out - h5diff_423.out.err - h5diff_424.out - h5diff_424.out.err - h5diff_425.out - h5diff_425.out.err - h5diff_450.out - h5diff_450.out.err - h5diff_451.out - h5diff_451.out.err - h5diff_452.out - h5diff_452.out.err - h5diff_453.out - h5diff_453.out.err - h5diff_454.out - h5diff_454.out.err - h5diff_455.out - h5diff_455.out.err - h5diff_456.out - h5diff_456.out.err - h5diff_457.out - h5diff_457.out.err - h5diff_458.out - h5diff_458.out.err - h5diff_459.out - h5diff_459.out.err - h5diff_465.out - h5diff_465.out.err - h5diff_466.out - h5diff_466.out.err - h5diff_467.out - h5diff_467.out.err - h5diff_468.out - h5diff_468.out.err - h5diff_469.out - h5diff_469.out.err - h5diff_471.out - h5diff_471.out.err - h5diff_472.out - h5diff_472.out.err - h5diff_473.out - h5diff_473.out.err - h5diff_474.out - h5diff_474.out.err - h5diff_475.out - h5diff_475.out.err - h5diff_480.out - h5diff_480.out.err - h5diff_481.out - h5diff_481.out.err - 
h5diff_482.out - h5diff_482.out.err - h5diff_483.out - h5diff_483.out.err - h5diff_484.out - h5diff_484.out.err - h5diff_50.out - h5diff_50.out.err - h5diff_51.out - h5diff_51.out.err - h5diff_52.out - h5diff_52.out.err - h5diff_53.out - h5diff_53.out.err - h5diff_54.out - h5diff_54.out.err - h5diff_55.out - h5diff_55.out.err - h5diff_56.out - h5diff_56.out.err - h5diff_57.out - h5diff_57.out.err - h5diff_58.out - h5diff_58.out.err - h5diff_59.out - h5diff_59.out.err - h5diff_500.out - h5diff_500.out.err - h5diff_501.out - h5diff_501.out.err - h5diff_502.out - h5diff_502.out.err - h5diff_503.out - h5diff_503.out.err - h5diff_504.out - h5diff_504.out.err - h5diff_505.out - h5diff_505.out.err - h5diff_506.out - h5diff_506.out.err - h5diff_507.out - h5diff_507.out.err - h5diff_508.out - h5diff_508.out.err - h5diff_509.out - h5diff_509.out.err - h5diff_510.out - h5diff_510.out.err - h5diff_511.out - h5diff_511.out.err - h5diff_512.out - h5diff_512.out.err - h5diff_513.out - h5diff_513.out.err - h5diff_514.out - h5diff_514.out.err - h5diff_515.out - h5diff_515.out.err - h5diff_516.out - h5diff_516.out.err - h5diff_517.out - h5diff_517.out.err - h5diff_518.out - h5diff_518.out.err - h5diff_530.out - h5diff_530.out.err - h5diff_540.out - h5diff_540.out.err - h5diff_60.out - h5diff_60.out.err - h5diff_61.out - h5diff_61.out.err - h5diff_62.out - h5diff_62.out.err - h5diff_63.out - h5diff_63.out.err - h5diff_600.out - h5diff_600.out.err - h5diff_601.out - h5diff_601.out.err - h5diff_603.out - h5diff_603.out.err - h5diff_604.out - h5diff_604.out.err - h5diff_605.out - h5diff_605.out.err - h5diff_606.out - h5diff_606.out.err - h5diff_607.out - h5diff_607.out.err - h5diff_608.out - h5diff_608.out.err - h5diff_609.out - h5diff_609.out.err - h5diff_610.out - h5diff_610.out.err - h5diff_612.out - h5diff_612.out.err - h5diff_613.out - h5diff_613.out.err - h5diff_614.out - h5diff_614.out.err - h5diff_615.out - h5diff_615.out.err - h5diff_616.out - h5diff_616.out.err - 
h5diff_617.out - h5diff_617.out.err - h5diff_618.out - h5diff_618.out.err - h5diff_619.out - h5diff_619.out.err - h5diff_621.out - h5diff_621.out.err - h5diff_622.out - h5diff_622.out.err - h5diff_623.out - h5diff_623.out.err - h5diff_624.out - h5diff_624.out.err - h5diff_625.out - h5diff_625.out.err - h5diff_626.out - h5diff_626.out.err - h5diff_627.out - h5diff_627.out.err - h5diff_628.out - h5diff_628.out.err - h5diff_629.out - h5diff_629.out.err - h5diff_640.out - h5diff_640.out.err - h5diff_641.out - h5diff_641.out.err - h5diff_642.out - h5diff_642.out.err - h5diff_643.out - h5diff_643.out.err - h5diff_644.out - h5diff_644.out.err - h5diff_645.out - h5diff_645.out.err - h5diff_646.out - h5diff_646.out.err - h5diff_70.out - h5diff_70.out.err - h5diff_700.out - h5diff_700.out.err - h5diff_701.out - h5diff_701.out.err - h5diff_702.out - h5diff_702.out.err - h5diff_703.out - h5diff_703.out.err - h5diff_704.out - h5diff_704.out.err - h5diff_705.out - h5diff_705.out.err - h5diff_706.out - h5diff_706.out.err - h5diff_707.out - h5diff_707.out.err - h5diff_708.out - h5diff_708.out.err - h5diff_709.out - h5diff_709.out.err - h5diff_710.out - h5diff_710.out.err - h5diff_80.out - h5diff_80.out.err - h5diff_800.out - h5diff_800.out.err - h5diff_801.out - h5diff_801.out.err - h5diff_830.out - h5diff_830.out.err - h5diff_8625.out - h5diff_8625.out.err - h5diff_8639.out - h5diff_8639.out.err - h5diff_90.out - h5diff_90.out.err - h5diff_v1.out - h5diff_v1.out.err - h5diff_v2.out - h5diff_v2.out.err - h5diff_v3.out - h5diff_v3.out.err - h5diff_vlstr.out - h5diff_vlstr.out.err - h5diff_eps.out - h5diff_eps.out.err - ) - set_tests_properties (H5DIFF-clearall-objects PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles") - if (last_test) - set_tests_properties (H5DIFF-clearall-objects PROPERTIES DEPENDS ${last_test}) - endif () - set (last_test "H5DIFF-clearall-objects") - endif () - # ############################################################################ # # Common 
usage # ############################################################################ diff --git a/tools/test/h5dump/CMakeLists.txt b/tools/test/h5dump/CMakeLists.txt index 19f3ca840fa..6a5e57badc7 100644 --- a/tools/test/h5dump/CMakeLists.txt +++ b/tools/test/h5dump/CMakeLists.txt @@ -10,7 +10,7 @@ if (BUILD_SHARED_LIBS) set (H5DUMP_TOOL_PLUGIN_LIB_TARGET ${H5DUMP_TOOL_PLUGIN_LIB_CORENAME}) add_library (${H5DUMP_TOOL_PLUGIN_LIB_TARGET} SHARED dynlib_dump.c) - target_include_directories (${H5DUMP_TOOL_PLUGIN_LIB_TARGET} PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (${H5DUMP_TOOL_PLUGIN_LIB_TARGET} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (${H5DUMP_TOOL_PLUGIN_LIB_TARGET} SHARED) target_link_libraries (${H5DUMP_TOOL_PLUGIN_LIB_TARGET} PRIVATE ${HDF5_LIBSH_TARGET}) H5_SET_LIB_OPTIONS (${H5DUMP_TOOL_PLUGIN_LIB_TARGET} ${H5DUMP_TOOL_PLUGIN_LIB_NAME} SHARED "LIB") @@ -42,7 +42,7 @@ endif () # -------------------------------------------------------------------- if (HDF5_BUILD_GENERATORS AND BUILD_STATIC_LIBS) add_executable (h5dumpgentest ${HDF5_TOOLS_TEST_H5DUMP_SOURCE_DIR}/h5dumpgentest.c) - target_include_directories (h5dumpgentest PRIVATE "${HDF5_SRC_DIR};${HDF5_TEST_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5dumpgentest PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_TEST_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (h5dumpgentest STATIC) target_link_libraries (h5dumpgentest PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) set_target_properties (h5dumpgentest PROPERTIES FOLDER generator/tools) diff --git a/tools/test/h5dump/CMakeTests.cmake b/tools/test/h5dump/CMakeTests.cmake index 2cba7b7f73e..0ae7bbdb3ab 100644 --- a/tools/test/h5dump/CMakeTests.cmake +++ b/tools/test/h5dump/CMakeTests.cmake @@ -434,11 +434,6 @@ # If using memchecker add tests without using 
scripts if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5DUMP-${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN}) - set_tests_properties (H5DUMP-${testname} PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std") - if (last_test) - set_tests_properties (H5DUMP-${testname} PROPERTIES DEPENDS ${last_test}) - endif () - set (last_test "H5DUMP-${testname}") else () add_test ( NAME H5DUMP-${testname} @@ -452,10 +447,10 @@ -D "TEST_REFERENCE=h5dump-${testname}.txt" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) - if (last_test) - set_tests_properties (H5DUMP-${testname} PROPERTIES DEPENDS ${last_test}) - endif () endif () + set_tests_properties (H5DUMP-${testname} PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) endmacro () macro (ADD_SKIP_H5_TEST skipresultfile skipresultcode testtype) @@ -476,19 +471,13 @@ # If using memchecker add tests without using scripts if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5DUMP-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN}) - set_tests_properties (H5DUMP-${resultfile} PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std") if (${resultcode}) set_tests_properties (H5DUMP-${resultfile} PROPERTIES WILL_FAIL "true") endif () - if (last_test) - set_tests_properties (H5DUMP-${resultfile} PROPERTIES DEPENDS ${last_test}) - endif () - else () - add_test ( - NAME H5DUMP-${resultfile}-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove ${resultfile}.bin + set_tests_properties (H5DUMP-${resultfile} PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" ) - set_tests_properties (H5DUMP-${resultfile}-clear-objects PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std") + else () add_test ( NAME H5DUMP-${resultfile} COMMAND "${CMAKE_COMMAND}" @@ -501,27 +490,32 @@ -D "TEST_REFERENCE=${resultfile}.ddl" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) - set_tests_properties (H5DUMP-${resultfile} PROPERTIES DEPENDS "H5DUMP-${resultfile}-clear-objects") 
endif () + set_tests_properties (H5DUMP-${resultfile} PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) endmacro () macro (ADD_H5_TEST_N resultfile resultcode) + add_test ( + NAME H5DUMP-N-${resultfile}-clear-objects + COMMAND ${CMAKE_COMMAND} -E remove + ${resultfile}-N.bin + ) + set_tests_properties (H5DUMP-N-${resultfile}-clear-objects PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) # If using memchecker add tests without using scripts if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5DUMP-N-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN}) - set_tests_properties (H5DUMP-N-${resultfile} PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std") if (${resultcode}) set_tests_properties (H5DUMP-N-${resultfile} PROPERTIES WILL_FAIL "true") endif () - if (last_test) - set_tests_properties (H5DUMP-N-${resultfile} PROPERTIES DEPENDS ${last_test}) - endif () - else () - add_test ( - NAME H5DUMP-N-${resultfile}-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove ${resultfile}-N.bin + set_tests_properties (H5DUMP-N-${resultfile} PROPERTIES + DEPENDS H5DUMP-N-${resultfile}-clear-objects + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" ) - set_tests_properties (H5DUMP-N-${resultfile}-clear-objects PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std") + else () add_test ( NAME H5DUMP-N-${resultfile} COMMAND "${CMAKE_COMMAND}" @@ -534,27 +528,41 @@ -D "TEST_REFERENCE=${resultfile}-N.ddl" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) - set_tests_properties (H5DUMP-N-${resultfile} PROPERTIES DEPENDS "H5DUMP-N-${resultfile}-clear-objects") endif () + set_tests_properties (H5DUMP-N-${resultfile} PROPERTIES + DEPENDS H5DUMP-N-${resultfile}-clear-objects + ) + add_test ( + NAME H5DUMP-N-${resultfile}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove + ${resultfile}-N.bin + ) + set_tests_properties (H5DUMP-N-${resultfile}-clean-objects PROPERTIES + DEPENDS H5DUMP-N-${resultfile} + 
WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) endmacro () macro (ADD_H5_TEST_EXPORT resultfile targetfile resultcode) + add_test ( + NAME H5DUMP-${resultfile}-clear-objects + COMMAND ${CMAKE_COMMAND} -E remove + ${resultfile}.txt + ) + set_tests_properties (H5DUMP-${resultfile}-clear-objects PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) # If using memchecker add tests without using scripts if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5DUMP-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN} ${resultfile}.txt ${targetfile}) - set_tests_properties (H5DUMP-${resultfile} PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std") if (${resultcode}) set_tests_properties (H5DUMP-${resultfile} PROPERTIES WILL_FAIL "true") endif () - if (last_test) - set_tests_properties (H5DUMP-${resultfile} PROPERTIES DEPENDS ${last_test}) - endif () - else () - add_test ( - NAME H5DUMP-${resultfile}-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove ${resultfile}.txt + set_tests_properties (H5DUMP-${resultfile} PROPERTIES + DEPENDS H5DUMP-${resultfile}-clear-objects + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" ) - set_tests_properties (H5DUMP-${resultfile}-clear-objects PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std") + else () add_test ( NAME H5DUMP-${resultfile} COMMAND "${CMAKE_COMMAND}" @@ -567,35 +575,58 @@ -D "TEST_REFERENCE=${resultfile}.ddl" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) - set_tests_properties (H5DUMP-${resultfile} PROPERTIES DEPENDS "H5DUMP-${resultfile}-clear-objects") + set_tests_properties (H5DUMP-${resultfile} PROPERTIES + DEPENDS H5DUMP-${resultfile}-clear-objects + ) add_test ( NAME H5DUMP-${resultfile}-output-cmp - COMMAND ${CMAKE_COMMAND} -E compare_files ${CMAKE_IGNORE_EOL} ${resultfile}.txt ${resultfile}.exp + COMMAND ${CMAKE_COMMAND} -E compare_files --ignore-eol ${resultfile}.txt ${resultfile}.exp + ) + set_tests_properties (H5DUMP-${resultfile}-output-cmp 
PROPERTIES + DEPENDS H5DUMP-${resultfile} + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" ) - set_tests_properties (H5DUMP-${resultfile}-output-cmp PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std") set_tests_properties (H5DUMP-${resultfile}-output-cmp PROPERTIES DEPENDS H5DUMP-${resultfile}) endif () + add_test ( + NAME H5DUMP-${resultfile}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove + ${resultfile}.txt + ) + if (HDF5_ENABLE_USING_MEMCHECKER) + set_tests_properties (H5DUMP-${resultfile}-clean-objects PROPERTIES + DEPENDS H5DUMP-${resultfile} + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) + else () + set_tests_properties (H5DUMP-${resultfile}-clean-objects PROPERTIES + DEPENDS H5DUMP-${resultfile}-output-cmp + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) + endif () endmacro () macro (ADD_H5_TEST_EXPORT_DDL resultfile targetfile resultcode ddlfile) + add_test ( + NAME H5DUMP-${resultfile}-clear-objects + COMMAND ${CMAKE_COMMAND} -E remove + ${ddlfile}.txt + ${resultfile}.txt + ) + set_tests_properties (H5DUMP-${resultfile}-clear-objects PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) # If using memchecker add tests without using scripts if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5DUMP-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ --ddl=${ddlfile}.txt ${ARGN} ${resultfile}.txt ${targetfile}) - set_tests_properties (H5DUMP-${resultfile} PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std") if (${resultcode}) set_tests_properties (H5DUMP-${resultfile} PROPERTIES WILL_FAIL "true") endif () - if (last_test) - set_tests_properties (H5DUMP-${resultfile} PROPERTIES DEPENDS ${last_test}) - endif () - else () - add_test ( - NAME H5DUMP-${resultfile}-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove - ${ddlfile}.txt - ${resultfile}.txt + set_tests_properties (H5DUMP-${resultfile} PROPERTIES + DEPENDS H5DUMP-${resultfile}-clear-objects + WORKING_DIRECTORY 
"${PROJECT_BINARY_DIR}/testfiles/std" ) - set_tests_properties (H5DUMP-${resultfile}-clear-objects PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std") + else () add_test ( NAME H5DUMP-${resultfile} COMMAND "${CMAKE_COMMAND}" @@ -608,19 +639,43 @@ -D "TEST_REFERENCE=${resultfile}.ddl" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) - set_tests_properties (H5DUMP-${resultfile} PROPERTIES DEPENDS "H5DUMP-${resultfile}-clear-objects") + set_tests_properties (H5DUMP-${resultfile} PROPERTIES + DEPENDS H5DUMP-${resultfile}-clear-objects + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) add_test ( NAME H5DUMP-${resultfile}-output-cmp - COMMAND ${CMAKE_COMMAND} -E compare_files ${CMAKE_IGNORE_EOL} ${resultfile}.txt ${resultfile}.exp + COMMAND ${CMAKE_COMMAND} -E compare_files --ignore-eol ${resultfile}.txt ${resultfile}.exp + ) + set_tests_properties (H5DUMP-${resultfile}-output-cmp PROPERTIES + DEPENDS H5DUMP-${resultfile} + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" ) - set_tests_properties (H5DUMP-${resultfile}-output-cmp PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std") - set_tests_properties (H5DUMP-${resultfile}-output-cmp PROPERTIES DEPENDS H5DUMP-${resultfile}) add_test ( NAME H5DUMP-${resultfile}-output-cmp-ddl - COMMAND ${CMAKE_COMMAND} -E compare_files ${CMAKE_IGNORE_EOL} ${ddlfile}.txt ${ddlfile}.exp + COMMAND ${CMAKE_COMMAND} -E compare_files --ignore-eol ${ddlfile}.txt ${ddlfile}.exp + ) + set_tests_properties (H5DUMP-${resultfile}-output-cmp-ddl PROPERTIES + DEPENDS H5DUMP-${resultfile}-output-cmp + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) + endif () + add_test ( + NAME H5DUMP-${resultfile}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove + ${ddlfile}.txt + ${resultfile}.txt + ) + if (HDF5_ENABLE_USING_MEMCHECKER) + set_tests_properties (H5DUMP-${resultfile}-clean-objects PROPERTIES + DEPENDS H5DUMP-${resultfile} + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) + else () + 
set_tests_properties (H5DUMP-${resultfile}-clean-objects PROPERTIES + DEPENDS H5DUMP-${resultfile}-output-cmp-ddl + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" ) - set_tests_properties (H5DUMP-${resultfile}-output-cmp-ddl PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std") - set_tests_properties (H5DUMP-${resultfile}-output-cmp-ddl PROPERTIES DEPENDS H5DUMP-${resultfile}-output-cmp) endif () endmacro () @@ -628,21 +683,37 @@ if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( NAME H5DUMP-output-${resultfile}-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove ${resultfile}.txt + COMMAND ${CMAKE_COMMAND} -E remove + ${resultfile}.txt + ) + set_tests_properties (H5DUMP-output-${resultfile}-clear-objects PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" ) - set_tests_properties (H5DUMP-output-${resultfile}-clear-objects PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std") add_test ( NAME H5DUMP-output-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN} ${resultfile}.txt ${targetfile} ) - set_tests_properties (H5DUMP-output-${resultfile} PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std") - set_tests_properties (H5DUMP-output-${resultfile} PROPERTIES DEPENDS H5DUMP-output-${resultfile}-clear-objects) + set_tests_properties (H5DUMP-output-${resultfile} PROPERTIES + DEPENDS H5DUMP-output-${resultfile}-clear-objects + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) add_test ( NAME H5DUMP-output-cmp-${resultfile} - COMMAND ${CMAKE_COMMAND} -E compare_files ${CMAKE_IGNORE_EOL} ${resultfile}.txt ${resultfile}.exp + COMMAND ${CMAKE_COMMAND} -E compare_files --ignore-eol ${resultfile}.txt ${resultfile}.exp + ) + set_tests_properties (H5DUMP-output-cmp-${resultfile} PROPERTIES + DEPENDS H5DUMP-output-${resultfile} + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) + add_test ( + NAME H5DUMP-output-${resultfile}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove + 
${resultfile}.txt + ) + set_tests_properties (H5DUMP-output-${resultfile}-clean-objects PROPERTIES + DEPENDS H5DUMP-output-cmp-${resultfile} + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" ) - set_tests_properties (H5DUMP-output-cmp-${resultfile} PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std") - set_tests_properties (H5DUMP-output-cmp-${resultfile} PROPERTIES DEPENDS H5DUMP-output-${resultfile}) endif () endmacro () @@ -661,6 +732,9 @@ -D "TEST_MASK_ERROR=true" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) + set_tests_properties (H5DUMP-${resultfile} PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) endif () endmacro () @@ -678,6 +752,9 @@ -D "TEST_REFERENCE=${result_check}" -P "${HDF_RESOURCES_DIR}/grepTest.cmake" ) + set_tests_properties (H5DUMP-${resultfile} PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) endif () endmacro () @@ -696,6 +773,9 @@ -D "TEST_ERRREF=${result_errcheck}" -P "${HDF_RESOURCES_DIR}/grepTest.cmake" ) + set_tests_properties (H5DUMP-${resultfile} PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) endif () endmacro () @@ -716,6 +796,46 @@ -D "TEST_ENV_VALUE:STRING=${envval}" -P "${HDF_RESOURCES_DIR}/grepTest.cmake" ) + set_tests_properties (H5DUMP-${resultfile} PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) + endif () + endmacro () + + macro (ADD_H5_BIN_EXPORT conffile resultcode testfile) + if (NOT HDF5_ENABLE_USING_MEMCHECKER) + add_test ( + NAME H5DUMP-BIN_EXPORT-${conffile}-clear-objects + COMMAND ${CMAKE_COMMAND} -E remove + ${conffile}.bin + ) + set_tests_properties (H5DUMP-BIN_EXPORT-${conffile}-clear-objects PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) + add_test ( + NAME H5DUMP-BIN_EXPORT-${conffile} + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS:STRING=${ARGN};-o;${conffile}.bin;${testfile}" + -D 
"TEST_FOLDER=${PROJECT_BINARY_DIR}/testfiles/std" + -D "TEST_OUTPUT=${conffile}.out" + -D "TEST_EXPECT=${resultcode}" + -D "TEST_REFERENCE=${conffile}.ddl" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" + ) + set_tests_properties (H5DUMP-BIN_EXPORT-${conffile} PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) + add_test ( + NAME H5DUMP-BIN_EXPORT-${conffile}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove + ${conffile}.bin + ) + set_tests_properties (H5DUMP-BIN_EXPORT-${conffile}-clean-objects PROPERTIES + DEPENDS H5DUMP-BIN_EXPORT-${conffile} + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) endif () endmacro () @@ -728,7 +848,9 @@ ${resultfile}.bin ${resultfile}.h5 ) - set_tests_properties (H5DUMP-IMPORT-${resultfile}-clear-objects PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std") + set_tests_properties (H5DUMP-IMPORT-${resultfile}-clear-objects PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) add_test ( NAME H5DUMP-IMPORT-${resultfile} COMMAND "${CMAKE_COMMAND}" @@ -741,20 +863,37 @@ -D "TEST_REFERENCE=${conffile}.ddl" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) - set_tests_properties (H5DUMP-IMPORT-${resultfile} PROPERTIES DEPENDS "H5DUMP-IMPORT-${resultfile}-clear-objects") + set_tests_properties (H5DUMP-IMPORT-${resultfile} PROPERTIES + DEPENDS H5DUMP-IMPORT-${resultfile}-clear-objects + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) add_test (NAME H5DUMP-IMPORT-h5import-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${resultfile}.bin -c ${conffile}.out -o ${resultfile}.h5) - set_tests_properties (H5DUMP-IMPORT-h5import-${resultfile} PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std") - set_tests_properties (H5DUMP-IMPORT-h5import-${resultfile} PROPERTIES DEPENDS H5DUMP-IMPORT-${resultfile}) + set_tests_properties (H5DUMP-IMPORT-h5import-${resultfile} PROPERTIES + DEPENDS H5DUMP-IMPORT-${resultfile} + WORKING_DIRECTORY 
"${PROJECT_BINARY_DIR}/testfiles/std" + ) add_test (NAME H5DUMP-IMPORT-h5diff-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${testfile} ${resultfile}.h5 /integer /integer) - set_tests_properties (H5DUMP-IMPORT-h5diff-${resultfile} PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std") - set_tests_properties (H5DUMP-IMPORT-h5diff-${resultfile} PROPERTIES DEPENDS H5DUMP-IMPORT-h5import-${resultfile}) + set_tests_properties (H5DUMP-IMPORT-h5diff-${resultfile} PROPERTIES + DEPENDS H5DUMP-IMPORT-h5import-${resultfile} + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) + add_test ( + NAME H5DUMP-IMPORT-${resultfile}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove + ${resultfile}.bin + ${resultfile}.h5 + ) + set_tests_properties (H5DUMP-IMPORT-${resultfile}-clean-objects PROPERTIES + DEPENDS H5DUMP-IMPORT-h5diff-${resultfile} + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) endif () endmacro () macro (ADD_H5_UD_TEST testname resultcode resultfile) if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( - NAME H5DUMP_UD-${testname} + NAME H5DUMP_UD-${testname}-${resultfile} COMMAND "${CMAKE_COMMAND}" -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" -D "TEST_PROGRAM=$" @@ -768,6 +907,9 @@ -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) + set_tests_properties (H5DUMP_UD-${testname}-${resultfile} PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" + ) endif () endmacro () @@ -1103,7 +1245,7 @@ ADD_H5_TEST (tvms 0 --enable-error-stack tvms.h5) # test for binary output - ADD_H5_TEST (tbin1LE 0 --enable-error-stack -d integer -o tbin1LE.bin -b LE tbinary.h5) + ADD_H5_BIN_EXPORT (tbin1LE 0 tbinary.h5 --enable-error-stack -d integer -b LE) # test for string binary output ADD_H5_EXPORT_TEST (tstr2bin2 tstr2.h5 0 --enable-error-stack -d /g2/dset2 -b -o) @@ -1113,14 +1255,14 @@ # ADD_H5_TEST_IMPORT (tbin1 out1D tbinary.h5 0 --enable-error-stack -d integer -b) if (NOT 
HDF5_ENABLE_USING_MEMCHECKER) - ADD_H5_TEST (tbin2 0 --enable-error-stack -b BE -d float -o tbin2.bin tbinary.h5) + ADD_H5_BIN_EXPORT (tbin2 0 tbinary.h5 --enable-error-stack -b BE -d float) endif () # the NATIVE test can be validated with h5import/h5diff # ADD_H5_TEST_IMPORT (tbin3 out3D tbinary.h5 0 --enable-error-stack -d integer -b NATIVE) if (NOT HDF5_ENABLE_USING_MEMCHECKER) - ADD_H5_TEST (tbin4 0 --enable-error-stack -d double -b FILE -o tbin4.bin tbinary.h5) + ADD_H5_BIN_EXPORT (tbin4 0 tbinary.h5 --enable-error-stack -d double -b FILE) endif () # test for dataset region references diff --git a/tools/test/h5dump/CMakeTestsPBITS.cmake b/tools/test/h5dump/CMakeTestsPBITS.cmake index 3c188c41328..caca4b7acbc 100644 --- a/tools/test/h5dump/CMakeTestsPBITS.cmake +++ b/tools/test/h5dump/CMakeTestsPBITS.cmake @@ -123,13 +123,9 @@ # If using memchecker add tests without using scripts if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5DUMP-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN}) - set_tests_properties (H5DUMP-${resultfile} PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/pbits") if (${resultcode}) set_tests_properties (H5DUMP-${resultfile} PROPERTIES WILL_FAIL "true") endif () - if (last_pbits_test) - set_tests_properties (H5DUMP-${resultfile} PROPERTIES DEPENDS ${last_pbits_test}) - endif () else () add_test ( NAME H5DUMP-${resultfile} @@ -144,6 +140,9 @@ -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () + set_tests_properties (H5DUMP-${resultfile} PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/pbits" + ) endmacro () ############################################################################## diff --git a/tools/test/h5dump/CMakeTestsVDS.cmake b/tools/test/h5dump/CMakeTestsVDS.cmake index 31624e608aa..42f08c9a67b 100644 --- a/tools/test/h5dump/CMakeTestsVDS.cmake +++ b/tools/test/h5dump/CMakeTestsVDS.cmake @@ -119,13 +119,9 @@ # If using memchecker add tests without using scripts if 
(HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5DUMP-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN}) - set_tests_properties (H5DUMP-${resultfile} PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/vds") if (${resultcode}) set_tests_properties (H5DUMP-${resultfile} PROPERTIES WILL_FAIL "true") endif () - if (last_vds_test) - set_tests_properties (H5DUMP-${resultfile} PROPERTIES DEPENDS ${last_VDS_test}) - endif () else () add_test ( NAME H5DUMP-${resultfile} @@ -140,6 +136,9 @@ -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () + set_tests_properties (H5DUMP-${resultfile} PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/vds" + ) endmacro () macro (ADD_H5_VDS_PREFIX_TEST resultfile resultcode) diff --git a/tools/test/h5dump/CMakeTestsXML.cmake b/tools/test/h5dump/CMakeTestsXML.cmake index a8083fbcbfc..833a616443e 100644 --- a/tools/test/h5dump/CMakeTestsXML.cmake +++ b/tools/test/h5dump/CMakeTestsXML.cmake @@ -177,13 +177,9 @@ macro (ADD_XML_H5_TEST resultfile resultcode) if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5DUMP_XML-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ --xml ${ARGN}) - set_tests_properties (H5DUMP_XML-${resultfile} PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/xml") if (${resultcode}) set_tests_properties (H5DUMP_XML-${resultfile} PROPERTIES WILL_FAIL "true") endif () - if (last_xml_test) - set_tests_properties (H5DUMP_XML-${resultfile} PROPERTIES DEPENDS ${last_xml_test}) - endif () else () add_test ( NAME H5DUMP_XML-${resultfile} @@ -198,6 +194,9 @@ -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () + set_tests_properties (H5DUMP_XML-${resultfile} PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/xml" + ) endmacro () ############################################################################## diff --git a/tools/test/h5dump/h5dump_plugin.sh.in b/tools/test/h5dump/h5dump_plugin.sh.in index a552f60fb78..d9b77ee1017 100644 --- 
a/tools/test/h5dump/h5dump_plugin.sh.in +++ b/tools/test/h5dump/h5dump_plugin.sh.in @@ -3,7 +3,7 @@ # Copyright by The HDF Group. # All rights reserved. # -# This file is part of HDF5. The full HDF5 copyright notice, including +# This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code # distribution tree, or in https://www.hdfgroup.org/licenses. @@ -88,7 +88,7 @@ if [ $? != 0 ]; then fi # setup plugin path -ENVCMD="env HDF5_PLUGIN_PATH=../${PLUGIN_LIBDIR}" +ENVCMD="env HDF5_PLUGIN_PATH=../${PLUGIN_LIBDIR}:${HDF5_PLUGIN_PATH}" # # copy test files and expected output files from source dirs to test dir diff --git a/tools/test/h5format_convert/CMakeLists.txt b/tools/test/h5format_convert/CMakeLists.txt index a77335a468a..4d61bf70d24 100644 --- a/tools/test/h5format_convert/CMakeLists.txt +++ b/tools/test/h5format_convert/CMakeLists.txt @@ -5,7 +5,7 @@ project (HDF5_TOOLS_TEST_H5FC C) # Add the h5format_convert test executables # -------------------------------------------------------------------- add_executable (h5fc_chk_idx ${HDF5_TOOLS_TEST_H5FC_SOURCE_DIR}/h5fc_chk_idx.c) -target_include_directories (h5fc_chk_idx PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (h5fc_chk_idx PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (BUILD_STATIC_LIBS) TARGET_C_PROPERTIES (h5fc_chk_idx STATIC) target_link_libraries (h5fc_chk_idx PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) @@ -24,7 +24,7 @@ endif () if (HDF5_BUILD_GENERATORS AND BUILD_STATIC_LIBS) add_executable (h5fc_gentest ${HDF5_TOOLS_TEST_H5FC_SOURCE_DIR}/h5fc_gentest.c) - target_include_directories (h5fc_gentest PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5fc_gentest PRIVATE 
"${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (h5fc_gentest STATIC) target_link_libraries (h5fc_gentest PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) set_target_properties (h5fc_gentest PROPERTIES FOLDER generator/tools) diff --git a/tools/test/h5format_convert/CMakeTests.cmake b/tools/test/h5format_convert/CMakeTests.cmake index 5b8c51fca64..73cb2ee6f59 100644 --- a/tools/test/h5format_convert/CMakeTests.cmake +++ b/tools/test/h5format_convert/CMakeTests.cmake @@ -101,49 +101,58 @@ # If using memchecker add tests without using scripts if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( - NAME H5FC-${testname}-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/outtmp.h5 + NAME H5FC-${testname}-${testfile}-clear-objects + COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}-tmp.h5 ) - if (last_test) - set_tests_properties (H5FC-${testname}-clear-objects PROPERTIES DEPENDS ${last_test}) - endif () if (${testfile}) add_test ( NAME H5FC-${testname}-${testfile}-tmpfile - COMMAND ${CMAKE_COMMAND} -E copy_if_different ${HDF5_TOOLS_TEST_H5FC_SOURCE_DIR}/testfiles/${testfile} ./testfiles/outtmp.h5 + COMMAND ${CMAKE_COMMAND} -E copy_if_different ${HDF5_TOOLS_TEST_H5FC_SOURCE_DIR}/testfiles/${testfile} ./testfiles/${testname}-tmp.h5 + ) + set_tests_properties (H5FC-${testname}-${testfile}-tmpfile PROPERTIES + DEPENDS H5FC-${testname}-${testfile}-clear-objects ) - set_tests_properties (H5FC-${testname}-${testfile}-tmpfile PROPERTIES DEPENDS "H5FC-${testname}-clear-objects") add_test ( NAME H5FC-${testname}-${testfile} COMMAND "${CMAKE_COMMAND}" -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" -D "TEST_PROGRAM=$" - -D "TEST_ARGS=${ARGN};outtmp.h5" + -D "TEST_ARGS=${ARGN};${testname}-tmp.h5" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/testfiles" - -D "TEST_OUTPUT=${testname}.out" + -D "TEST_OUTPUT=${testname}-${testfile}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}" -D 
"TEST_ERRREF=${resultfile}.err" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) - set_tests_properties (H5FC-${testname}-${testfile} PROPERTIES DEPENDS "H5FC-${testname}-${testfile}-tmpfile") + set_tests_properties (H5FC-${testname}-${testfile} PROPERTIES + DEPENDS H5FC-${testname}-${testfile}-tmpfile + ) set (last_test "H5FC-${testname}-${testfile}") else () add_test ( - NAME H5FC-${testname}-NA + NAME H5FC-${testname}-${testfile}-NA COMMAND "${CMAKE_COMMAND}" -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" -D "TEST_PROGRAM=$" -D "TEST_ARGS=${ARGN}" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/testfiles" - -D "TEST_OUTPUT=${testname}.out" + -D "TEST_OUTPUT=${testname}-${testfile}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) - set_tests_properties (H5FC-${testname}-NA PROPERTIES DEPENDS "H5FC-${testname}-clear-objects") - set (last_test "H5FC-${testname}-NA") + set_tests_properties (H5FC-${testname}-${testfile}-NA PROPERTIES + DEPENDS H5FC-${testname}-${testfile}-tmpfile + ) endif () + add_test ( + NAME H5FC-${testname}-${testfile}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}-tmp.h5 + ) + set_tests_properties (H5FC-${testname}-${testfile}-clean-objects PROPERTIES + DEPENDS H5FC-${testname}-${testfile}-NA + ) endif () endmacro () @@ -151,31 +160,38 @@ # If using memchecker add tests without using scripts if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( - NAME H5FC-${testname}-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/outtmp.h5 + NAME H5FC-${testname}-${testfile}-clear-objects + COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}-tmp.h5 ) - if (last_test) - set_tests_properties (H5FC-${testname}-clear-objects PROPERTIES DEPENDS ${last_test}) - endif () add_test ( NAME H5FC-${testname}-${testfile}-tmpfile - COMMAND ${CMAKE_COMMAND} -E copy_if_different ${HDF5_TOOLS_TEST_H5FC_SOURCE_DIR}/testfiles/${testfile} ./testfiles/outtmp.h5 + COMMAND 
${CMAKE_COMMAND} -E copy_if_different ${HDF5_TOOLS_TEST_H5FC_SOURCE_DIR}/testfiles/${testfile} ./testfiles/${testname}-tmp.h5 + ) + set_tests_properties (H5FC-${testname}-${testfile}-tmpfile PROPERTIES + DEPENDS H5FC-${testname}-${testfile}-clear-objects ) - set_tests_properties (H5FC-${testname}-${testfile}-tmpfile PROPERTIES DEPENDS "H5FC-${testname}-clear-objects") add_test ( NAME H5FC-${testname}-${testfile} COMMAND "${CMAKE_COMMAND}" -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" -D "TEST_PROGRAM=$" -D "TEST_ARGS=${ARGN};outtmp.h5" + -D "TEST_ARGS=${ARGN};${testname}-tmp.h5" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/testfiles" - -D "TEST_OUTPUT=${testname}.out" + -D "TEST_OUTPUT=${testname}-${testfile}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) - set_tests_properties (H5FC-${testname}-${testfile} PROPERTIES DEPENDS "H5FC-${testname}-${testfile}-tmpfile") - set (last_test "H5FC-${testname}-${testfile}") + set_tests_properties (H5FC-${testname}-${testfile} PROPERTIES + DEPENDS H5FC-${testname}-${testfile}-tmpfile + ) + add_test ( + NAME H5FC-${testname}-${testfile}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}-tmp.h5 + ) + set_tests_properties (H5FC-${testname}-${testfile}-clean-objects PROPERTIES + DEPENDS H5FC-${testname}-${testfile} + ) endif () endmacro () @@ -183,32 +199,39 @@ # If using memchecker add tests without using scripts if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( - NAME H5FC-${testname}-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/outtmp.h5 + NAME H5FC-${testname}-${testfile}-clear-objects + COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}-tmp.h5 ) - if (last_test) - set_tests_properties (H5FC-${testname}-clear-objects PROPERTIES DEPENDS ${last_test}) - endif () add_test ( NAME H5FC-${testname}-${testfile}-tmpfile - COMMAND ${CMAKE_COMMAND} -E copy_if_different ${HDF5_TOOLS_TEST_H5FC_SOURCE_DIR}/testfiles/${testfile} 
./testfiles/outtmp.h5 + COMMAND ${CMAKE_COMMAND} -E copy_if_different ${HDF5_TOOLS_TEST_H5FC_SOURCE_DIR}/testfiles/${testfile} ./testfiles/${testname}-tmp.h5 + ) + set_tests_properties (H5FC-${testname}-${testfile}-tmpfile PROPERTIES + DEPENDS H5FC-${testname}-${testfile}-clear-objects ) - set_tests_properties (H5FC-${testname}-${testfile}-tmpfile PROPERTIES DEPENDS "H5FC-${testname}-clear-objects") add_test ( NAME H5FC-${testname}-${testfile} COMMAND "${CMAKE_COMMAND}" -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" -D "TEST_PROGRAM=$" - -D "TEST_ARGS=${ARGN};outtmp.h5" + -D "TEST_ARGS=${ARGN};${testname}-tmp.h5" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/testfiles" - -D "TEST_OUTPUT=${testname}.out" + -D "TEST_OUTPUT=${testname}-${testfile}.out" -D "TEST_EXPECT=${resultcode}" -D "TEST_REFERENCE=${resultfile}" -D "TEST_ERRREF=${result_errcheck}" -P "${HDF_RESOURCES_DIR}/grepTest.cmake" ) - set_tests_properties (H5FC-${testname}-${testfile} PROPERTIES DEPENDS "H5FC-${testname}-${testfile}-tmpfile") - set (last_test "H5FC-${testname}-${testfile}") + set_tests_properties (H5FC-${testname}-${testfile} PROPERTIES + DEPENDS H5FC-${testname}-${testfile}-tmpfile + ) + add_test ( + NAME H5FC-${testname}-${testfile}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}-tmp.h5 + ) + set_tests_properties (H5FC-${testname}-${testfile}-clean-objects PROPERTIES + DEPENDS H5FC-${testname}-${testfile} + ) endif () endmacro () @@ -217,30 +240,44 @@ if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( NAME H5FC-${testname}-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/tmp.h5 + COMMAND ${CMAKE_COMMAND} -E remove + ./testfiles/${testname}-tmp.h5 ) - if (last_test) - set_tests_properties (H5FC-${testname}-clear-objects PROPERTIES DEPENDS ${last_test}) - endif () + set_tests_properties (H5FC-${testname}-clear-objects PROPERTIES + FIXTURES_SETUP clear_H5FC-${testname} + ) + add_test ( + NAME H5FC_CHECK_IDX-${testname}-clean-objects + COMMAND 
${CMAKE_COMMAND} -E remove + ./testfiles/${testname}-tmp.h5 + ) + set_tests_properties (H5FC_CHECK_IDX-${testname}-clean-objects PROPERTIES + FIXTURES_CLEANUP clear_H5FC-${testname} + ) + add_test ( NAME H5FC-${testname}-tmpfile - COMMAND ${CMAKE_COMMAND} -E copy_if_different ${HDF5_TOOLS_TEST_H5FC_SOURCE_DIR}/testfiles/${testfile} testfiles/tmp.h5 + COMMAND ${CMAKE_COMMAND} -E copy_if_different ${HDF5_TOOLS_TEST_H5FC_SOURCE_DIR}/testfiles/${testfile} ./testfiles/${testname}-tmp.h5 + ) + set_tests_properties (H5FC-${testname}-tmpfile PROPERTIES + FIXTURES_REQUIRED clear_H5FC-${testname} ) - set_tests_properties (H5FC-${testname}-tmpfile PROPERTIES DEPENDS "H5FC-${testname}-clear-objects") add_test ( NAME H5FC-${testname} COMMAND "${CMAKE_COMMAND}" -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" -D "TEST_PROGRAM=$" - -D "TEST_ARGS=${ARGN};./testfiles/tmp.h5" + -D "TEST_ARGS=${ARGN};./testfiles/${testname}-tmp.h5" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" -D "TEST_OUTPUT=testfiles/${testname}.out" -D "TEST_SKIP_COMPARE=TRUE" -D "TEST_EXPECT=${resultcode}" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) - set_tests_properties (H5FC-${testname} PROPERTIES DEPENDS "H5FC-${testname}-tmpfile") - set (last_test "H5FC-${testname}") + set_tests_properties (H5FC-${testname} PROPERTIES + DEPENDS "H5FC-${testname}-tmpfile" + FIXTURES_REQUIRED clear_H5FC-${testname} + ) endif () endmacro () @@ -248,47 +285,61 @@ # If using memchecker add tests without using scripts if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( - NAME H5FC_CHECK_IDX-${testname} - COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ./testfiles/tmp.h5 ${ARGN} + NAME H5FC_CHECK_IDX-${dependtest}-${testname} + COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ./testfiles/${dependtest}-tmp.h5 ${ARGN} ) - set_tests_properties (H5FC_CHECK_IDX-${testname} PROPERTIES DEPENDS "H5FC-${dependtest}") - endif () + set_tests_properties (H5FC_CHECK_IDX-${dependtest}-${testname} PROPERTIES + DEPENDS "H5FC-${dependtest}" + FIXTURES_REQUIRED 
clear_H5FC-${dependtest} + ) + endif () endmacro () macro (ADD_H5_TEST_CHECK_IDX testname resultcode testfile) # If using memchecker add tests without using scripts if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( - NAME H5FC-${testname}-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/chktmp.h5 + NAME H5FC_TEST_CHECK_IDX-${testname}-clear-objects + COMMAND ${CMAKE_COMMAND} -E remove + ./testfiles/${testname}-tmp.h5 ) - if (last_test) - set_tests_properties (H5FC-${testname}-clear-objects PROPERTIES DEPENDS ${last_test}) - endif () add_test ( - NAME H5FC-${testname}-tmpfile - COMMAND ${CMAKE_COMMAND} -E copy_if_different ${HDF5_TOOLS_TEST_H5FC_SOURCE_DIR}/testfiles/${testfile} testfiles/chktmp.h5 + NAME H5FC_TEST_CHECK_IDX-${testname}-tmpfile + COMMAND ${CMAKE_COMMAND} -E copy_if_different ${HDF5_TOOLS_TEST_H5FC_SOURCE_DIR}/testfiles/${testfile} ./testfiles/${testname}-tmp.h5 + ) + set_tests_properties (H5FC_TEST_CHECK_IDX-${testname}-tmpfile PROPERTIES + DEPENDS "H5FC_TEST_CHECK_IDX-${testname}-clear-objects" ) - set_tests_properties (H5FC-${testname}-tmpfile PROPERTIES DEPENDS "H5FC-${testname}-clear-objects") add_test ( - NAME H5FC-${testname} + NAME H5FC_TEST_CHECK_IDX-${testname} COMMAND "${CMAKE_COMMAND}" -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" -D "TEST_PROGRAM=$" - -D "TEST_ARGS=-d;${ARGN};./testfiles/chktmp.h5" + -D "TEST_ARGS=-d;${ARGN};./testfiles/${testname}-tmp.h5" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" -D "TEST_OUTPUT=testfiles/${testname}.out" -D "TEST_SKIP_COMPARE=TRUE" -D "TEST_EXPECT=${resultcode}" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) - set_tests_properties (H5FC-${testname} PROPERTIES DEPENDS "H5FC-${testname}-tmpfile") + set_tests_properties (H5FC_TEST_CHECK_IDX-${testname} PROPERTIES + DEPENDS "H5FC_TEST_CHECK_IDX-${testname}-tmpfile" + ) + add_test ( + NAME H5FC_TEST_CHECK_IDX-${testname}-check + COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ./testfiles/${testname}-tmp.h5 ${ARGN} + ) + set_tests_properties 
(H5FC_TEST_CHECK_IDX-${testname}-check PROPERTIES + DEPENDS "H5FC_TEST_CHECK_IDX-${testname}" + ) add_test ( - NAME H5FC_CHECK_IDX-${testname} - COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ./testfiles/chktmp.h5 ${ARGN} + NAME H5FC_TEST_CHECK_IDX-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove + ./testfiles/${testname}-tmp.h5 + ) + set_tests_properties (H5FC_TEST_CHECK_IDX-${testname}-clean-objects PROPERTIES + DEPENDS H5FC_TEST_CHECK_IDX-${testname}-check ) - set_tests_properties (H5FC_CHECK_IDX-${testname} PROPERTIES DEPENDS "H5FC-${testname}") - set (last_test "H5FC_CHECK_IDX-${testname}") endif () endmacro () @@ -296,44 +347,55 @@ # If using memchecker skip tests if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( - NAME H5FC-${testname}-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/dmptmp.h5 + NAME H5FC_H5DUMP_CHECK-${testname}-clear-objects + COMMAND ${CMAKE_COMMAND} -E remove + ./testfiles/${testname}-tmp.h5 ) - if (last_test) - set_tests_properties (H5FC-${testname}-clear-objects PROPERTIES DEPENDS ${last_test}) - endif () add_test ( - NAME H5FC-${testname}-tmpfile - COMMAND ${CMAKE_COMMAND} -E copy_if_different ${HDF5_TOOLS_TEST_H5FC_SOURCE_DIR}/testfiles/${testname}.h5 testfiles/dmptmp.h5 + NAME H5FC_H5DUMP_CHECK-${testname}-tmpfile + COMMAND ${CMAKE_COMMAND} -E copy_if_different ${HDF5_TOOLS_TEST_H5FC_SOURCE_DIR}/testfiles/${testname}.h5 ./testfiles/${testname}-tmp.h5 + ) + set_tests_properties (H5FC_H5DUMP_CHECK-${testname}-tmpfile PROPERTIES + DEPENDS "H5FC_H5DUMP_CHECK-${testname}-clear-objects" ) - set_tests_properties (H5FC-${testname}-tmpfile PROPERTIES DEPENDS "H5FC-${testname}-clear-objects") add_test ( - NAME H5FC-${testname} + NAME H5FC_H5DUMP_CHECK-${testname} COMMAND "${CMAKE_COMMAND}" -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" -D "TEST_PROGRAM=$" - -D "TEST_ARGS=${ARGN};./testfiles/dmptmp.h5" + -D "TEST_ARGS=${ARGN};./testfiles/${testname}-tmp.h5" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" -D 
"TEST_OUTPUT=testfiles/${testname}.out" -D "TEST_SKIP_COMPARE=TRUE" -D "TEST_EXPECT=0" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) - set_tests_properties (H5FC-${testname} PROPERTIES DEPENDS "H5FC-${testname}-tmpfile") + set_tests_properties (H5FC_H5DUMP_CHECK-${testname} PROPERTIES + DEPENDS "H5FC_H5DUMP_CHECK-${testname}-tmpfile" + ) add_test ( - NAME H5FC_CHECK_DUMP-${testname} + NAME H5FC_H5DUMP_CHECK-${testname}-dump COMMAND "${CMAKE_COMMAND}" -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=-BH;./testfiles/dmptmp.h5" + -D "TEST_ARGS:STRING=-BH;./testfiles/${testname}-tmp.h5" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" -D "TEST_OUTPUT=testfiles/${testname}_chk.out" -D "TEST_EXPECT=0" -D "TEST_REFERENCE=testfiles/${testname}.ddl" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) - set_tests_properties (H5FC_CHECK_DUMP-${testname} PROPERTIES DEPENDS "H5FC-${testname}") - set (last_test "H5FC_CHECK_DUMP-${testname}") + set_tests_properties (H5FC_H5DUMP_CHECK-${testname}-dump PROPERTIES + DEPENDS "H5FC_H5DUMP_CHECK-${testname}" + ) + add_test ( + NAME H5FC_H5DUMP_CHECK-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove + ./testfiles/${testname}-tmp.h5 + ) + set_tests_properties (H5FC_H5DUMP_CHECK-${testname}-clean-objects PROPERTIES + DEPENDS H5FC_H5DUMP_CHECK-${testname}-dump + ) endif () endmacro () @@ -343,22 +405,6 @@ ############################################################################## ############################################################################## - if (HDF5_ENABLE_USING_MEMCHECKER) - # Remove any output file left over from previous test run - add_test ( - NAME H5FC-clearall-objects - COMMAND ${CMAKE_COMMAND} -E remove - outtmp.h5 - tmp.h5 - chktmp.h5 - dmptmp.h5 - ) - if (last_test) - set_tests_properties (H5FC-clearall-objects PROPERTIES DEPENDS ${last_test}) - endif () - set (last_test "H5FC-clearall-objects") - endif () - # h5format_convert --help # h5format_convert (no options) # 
h5format_convert nonexist.h5 (no options, file does not exist) diff --git a/tools/test/h5format_convert/testfiles/h5fc_ext1_f.ddl b/tools/test/h5format_convert/testfiles/h5fc_ext1_f.ddl index db00a99dd67..f78891384db 100644 --- a/tools/test/h5format_convert/testfiles/h5fc_ext1_f.ddl +++ b/tools/test/h5format_convert/testfiles/h5fc_ext1_f.ddl @@ -1,4 +1,4 @@ -HDF5 "./testfiles/dmptmp.h5" { +HDF5 "./testfiles/h5fc_ext1_f-tmp.h5" { SUPER_BLOCK { SUPERBLOCK_VERSION 2 FREELIST_VERSION 0 diff --git a/tools/test/h5format_convert/testfiles/h5fc_ext1_i.ddl b/tools/test/h5format_convert/testfiles/h5fc_ext1_i.ddl index 4be6d90b030..65640314077 100644 --- a/tools/test/h5format_convert/testfiles/h5fc_ext1_i.ddl +++ b/tools/test/h5format_convert/testfiles/h5fc_ext1_i.ddl @@ -1,4 +1,4 @@ -HDF5 "./testfiles/dmptmp.h5" { +HDF5 "./testfiles/h5fc_ext1_i-tmp.h5" { SUPER_BLOCK { SUPERBLOCK_VERSION 2 FREELIST_VERSION 0 diff --git a/tools/test/h5format_convert/testfiles/h5fc_ext1_s.ddl b/tools/test/h5format_convert/testfiles/h5fc_ext1_s.ddl index db00a99dd67..746de2b1d7d 100644 --- a/tools/test/h5format_convert/testfiles/h5fc_ext1_s.ddl +++ b/tools/test/h5format_convert/testfiles/h5fc_ext1_s.ddl @@ -1,4 +1,4 @@ -HDF5 "./testfiles/dmptmp.h5" { +HDF5 "./testfiles/h5fc_ext1_s-tmp.h5" { SUPER_BLOCK { SUPERBLOCK_VERSION 2 FREELIST_VERSION 0 diff --git a/tools/test/h5format_convert/testfiles/h5fc_ext2_if.ddl b/tools/test/h5format_convert/testfiles/h5fc_ext2_if.ddl index 4be6d90b030..57781ecdada 100644 --- a/tools/test/h5format_convert/testfiles/h5fc_ext2_if.ddl +++ b/tools/test/h5format_convert/testfiles/h5fc_ext2_if.ddl @@ -1,4 +1,4 @@ -HDF5 "./testfiles/dmptmp.h5" { +HDF5 "./testfiles/h5fc_ext2_if-tmp.h5" { SUPER_BLOCK { SUPERBLOCK_VERSION 2 FREELIST_VERSION 0 diff --git a/tools/test/h5format_convert/testfiles/h5fc_ext2_is.ddl b/tools/test/h5format_convert/testfiles/h5fc_ext2_is.ddl index 4be6d90b030..8fd061d5c78 100644 --- a/tools/test/h5format_convert/testfiles/h5fc_ext2_is.ddl +++ 
b/tools/test/h5format_convert/testfiles/h5fc_ext2_is.ddl @@ -1,4 +1,4 @@ -HDF5 "./testfiles/dmptmp.h5" { +HDF5 "./testfiles/h5fc_ext2_is-tmp.h5" { SUPER_BLOCK { SUPERBLOCK_VERSION 2 FREELIST_VERSION 0 diff --git a/tools/test/h5format_convert/testfiles/h5fc_ext2_sf.ddl b/tools/test/h5format_convert/testfiles/h5fc_ext2_sf.ddl index db00a99dd67..435ed464384 100644 --- a/tools/test/h5format_convert/testfiles/h5fc_ext2_sf.ddl +++ b/tools/test/h5format_convert/testfiles/h5fc_ext2_sf.ddl @@ -1,4 +1,4 @@ -HDF5 "./testfiles/dmptmp.h5" { +HDF5 "./testfiles/h5fc_ext2_sf-tmp.h5" { SUPER_BLOCK { SUPERBLOCK_VERSION 2 FREELIST_VERSION 0 diff --git a/tools/test/h5format_convert/testfiles/h5fc_ext3_isf.ddl b/tools/test/h5format_convert/testfiles/h5fc_ext3_isf.ddl index 4be6d90b030..57a78d3398e 100644 --- a/tools/test/h5format_convert/testfiles/h5fc_ext3_isf.ddl +++ b/tools/test/h5format_convert/testfiles/h5fc_ext3_isf.ddl @@ -1,4 +1,4 @@ -HDF5 "./testfiles/dmptmp.h5" { +HDF5 "./testfiles/h5fc_ext3_isf-tmp.h5" { SUPER_BLOCK { SUPERBLOCK_VERSION 2 FREELIST_VERSION 0 diff --git a/tools/test/h5format_convert/testfiles/h5fc_v_all.ddl b/tools/test/h5format_convert/testfiles/h5fc_v_all.ddl index a1af831bf73..c5e55c7883d 100644 --- a/tools/test/h5format_convert/testfiles/h5fc_v_all.ddl +++ b/tools/test/h5format_convert/testfiles/h5fc_v_all.ddl @@ -1,5 +1,5 @@ Process command line options -Open the file outtmp.h5 +Open the file h5fc_v_all-tmp.h5 Processing all datasets in the file... Going to process dataset:/DSET_CONTIGUOUS... Open the dataset diff --git a/tools/test/h5format_convert/testfiles/h5fc_v_bt1.ddl b/tools/test/h5format_convert/testfiles/h5fc_v_bt1.ddl index 31de12ade26..23f775dfb6f 100644 --- a/tools/test/h5format_convert/testfiles/h5fc_v_bt1.ddl +++ b/tools/test/h5format_convert/testfiles/h5fc_v_bt1.ddl @@ -1,5 +1,5 @@ Process command line options -Open the file outtmp.h5 +Open the file h5fc_v_bt1-tmp.h5 Going to process dataset: /GROUP/DSET_BT2... 
Open the dataset Retrieve the dataset's layout diff --git a/tools/test/h5format_convert/testfiles/h5fc_v_err.ddl b/tools/test/h5format_convert/testfiles/h5fc_v_err.ddl index 0b7d0ac18a5..066f2d50d12 100644 --- a/tools/test/h5format_convert/testfiles/h5fc_v_err.ddl +++ b/tools/test/h5format_convert/testfiles/h5fc_v_err.ddl @@ -1,5 +1,5 @@ Process command line options -Open the file outtmp.h5 +Open the file h5fc_v_err-tmp.h5 Processing all datasets in the file... Going to process dataset:/DSET_ERR... Open the dataset diff --git a/tools/test/h5format_convert/testfiles/h5fc_v_n_1d.ddl b/tools/test/h5format_convert/testfiles/h5fc_v_n_1d.ddl index fcdadd80aa8..1c6d7fbcadf 100644 --- a/tools/test/h5format_convert/testfiles/h5fc_v_n_1d.ddl +++ b/tools/test/h5format_convert/testfiles/h5fc_v_n_1d.ddl @@ -1,6 +1,6 @@ Process command line options It is noop... -Open the file outtmp.h5 +Open the file h5fc_v_n_1d-tmp.h5 Going to process dataset: /DSET_EA... Open the dataset Retrieve the dataset's layout diff --git a/tools/test/h5format_convert/testfiles/h5fc_v_n_all.ddl b/tools/test/h5format_convert/testfiles/h5fc_v_n_all.ddl index 074ce6f2b54..ad00d92040d 100644 --- a/tools/test/h5format_convert/testfiles/h5fc_v_n_all.ddl +++ b/tools/test/h5format_convert/testfiles/h5fc_v_n_all.ddl @@ -1,6 +1,6 @@ Process command line options It is noop... -Open the file outtmp.h5 +Open the file h5fc_v_n_all-tmp.h5 Processing all datasets in the file... Going to process dataset:/DSET_CONTIGUOUS... Open the dataset diff --git a/tools/test/h5format_convert/testfiles/h5fc_v_ndata_bt1.ddl b/tools/test/h5format_convert/testfiles/h5fc_v_ndata_bt1.ddl index c75699a745e..e79b11a2208 100644 --- a/tools/test/h5format_convert/testfiles/h5fc_v_ndata_bt1.ddl +++ b/tools/test/h5format_convert/testfiles/h5fc_v_ndata_bt1.ddl @@ -1,6 +1,6 @@ Process command line options It is noop... -Open the file outtmp.h5 +Open the file h5fc_v_ndata_bt1-tmp.h5 Going to process dataset: /DSET_NDATA_BT2... 
Open the dataset Retrieve the dataset's layout diff --git a/tools/test/h5format_convert/testfiles/h5fc_v_non_chunked.ddl b/tools/test/h5format_convert/testfiles/h5fc_v_non_chunked.ddl index 59453897aa4..50575c0f3b0 100644 --- a/tools/test/h5format_convert/testfiles/h5fc_v_non_chunked.ddl +++ b/tools/test/h5format_convert/testfiles/h5fc_v_non_chunked.ddl @@ -1,5 +1,5 @@ Process command line options -Open the file outtmp.h5 +Open the file h5fc_v_non_chunked-tmp.h5 Going to process dataset: /DSET_CONTIGUOUS... Open the dataset Retrieve the dataset's layout diff --git a/tools/test/h5format_convert/testfiles/old_h5fc_ext1_f.ddl b/tools/test/h5format_convert/testfiles/old_h5fc_ext1_f.ddl index db00a99dd67..45fa3fbd438 100644 --- a/tools/test/h5format_convert/testfiles/old_h5fc_ext1_f.ddl +++ b/tools/test/h5format_convert/testfiles/old_h5fc_ext1_f.ddl @@ -1,4 +1,4 @@ -HDF5 "./testfiles/dmptmp.h5" { +HDF5 "./testfiles/old_h5fc_ext1_f-tmp.h5" { SUPER_BLOCK { SUPERBLOCK_VERSION 2 FREELIST_VERSION 0 diff --git a/tools/test/h5format_convert/testfiles/old_h5fc_ext1_i.ddl b/tools/test/h5format_convert/testfiles/old_h5fc_ext1_i.ddl index d9cc0b7d00e..67a71164ac8 100644 --- a/tools/test/h5format_convert/testfiles/old_h5fc_ext1_i.ddl +++ b/tools/test/h5format_convert/testfiles/old_h5fc_ext1_i.ddl @@ -1,4 +1,4 @@ -HDF5 "./testfiles/dmptmp.h5" { +HDF5 "./testfiles/old_h5fc_ext1_i-tmp.h5" { SUPER_BLOCK { SUPERBLOCK_VERSION 1 FREELIST_VERSION 0 diff --git a/tools/test/h5format_convert/testfiles/old_h5fc_ext1_s.ddl b/tools/test/h5format_convert/testfiles/old_h5fc_ext1_s.ddl index db00a99dd67..7f67d9f72a9 100644 --- a/tools/test/h5format_convert/testfiles/old_h5fc_ext1_s.ddl +++ b/tools/test/h5format_convert/testfiles/old_h5fc_ext1_s.ddl @@ -1,4 +1,4 @@ -HDF5 "./testfiles/dmptmp.h5" { +HDF5 "./testfiles/old_h5fc_ext1_s-tmp.h5" { SUPER_BLOCK { SUPERBLOCK_VERSION 2 FREELIST_VERSION 0 diff --git a/tools/test/h5format_convert/testfiles/old_h5fc_ext2_if.ddl 
b/tools/test/h5format_convert/testfiles/old_h5fc_ext2_if.ddl index 4be6d90b030..350d3ba4eb3 100644 --- a/tools/test/h5format_convert/testfiles/old_h5fc_ext2_if.ddl +++ b/tools/test/h5format_convert/testfiles/old_h5fc_ext2_if.ddl @@ -1,4 +1,4 @@ -HDF5 "./testfiles/dmptmp.h5" { +HDF5 "./testfiles/old_h5fc_ext2_if-tmp.h5" { SUPER_BLOCK { SUPERBLOCK_VERSION 2 FREELIST_VERSION 0 diff --git a/tools/test/h5format_convert/testfiles/old_h5fc_ext2_is.ddl b/tools/test/h5format_convert/testfiles/old_h5fc_ext2_is.ddl index 4be6d90b030..6b2b2c366fa 100644 --- a/tools/test/h5format_convert/testfiles/old_h5fc_ext2_is.ddl +++ b/tools/test/h5format_convert/testfiles/old_h5fc_ext2_is.ddl @@ -1,4 +1,4 @@ -HDF5 "./testfiles/dmptmp.h5" { +HDF5 "./testfiles/old_h5fc_ext2_is-tmp.h5" { SUPER_BLOCK { SUPERBLOCK_VERSION 2 FREELIST_VERSION 0 diff --git a/tools/test/h5format_convert/testfiles/old_h5fc_ext2_sf.ddl b/tools/test/h5format_convert/testfiles/old_h5fc_ext2_sf.ddl index db00a99dd67..4a038e381d9 100644 --- a/tools/test/h5format_convert/testfiles/old_h5fc_ext2_sf.ddl +++ b/tools/test/h5format_convert/testfiles/old_h5fc_ext2_sf.ddl @@ -1,4 +1,4 @@ -HDF5 "./testfiles/dmptmp.h5" { +HDF5 "./testfiles/old_h5fc_ext2_sf-tmp.h5" { SUPER_BLOCK { SUPERBLOCK_VERSION 2 FREELIST_VERSION 0 diff --git a/tools/test/h5format_convert/testfiles/old_h5fc_ext3_isf.ddl b/tools/test/h5format_convert/testfiles/old_h5fc_ext3_isf.ddl index 4be6d90b030..602627f4614 100644 --- a/tools/test/h5format_convert/testfiles/old_h5fc_ext3_isf.ddl +++ b/tools/test/h5format_convert/testfiles/old_h5fc_ext3_isf.ddl @@ -1,4 +1,4 @@ -HDF5 "./testfiles/dmptmp.h5" { +HDF5 "./testfiles/old_h5fc_ext3_isf-tmp.h5" { SUPER_BLOCK { SUPERBLOCK_VERSION 2 FREELIST_VERSION 0 diff --git a/tools/test/h5format_convert/testh5fc.sh.in b/tools/test/h5format_convert/testh5fc.sh.in index d5fef406443..cd2b8014e0d 100644 --- a/tools/test/h5format_convert/testh5fc.sh.in +++ b/tools/test/h5format_convert/testh5fc.sh.in @@ -66,10 +66,10 @@ 
TESTDIR=./testfiles test -d $TESTDIR || mkdir $TESTDIR # Copy the testfile to a temporary file for testing as h5format_convert is changing the file in place -TMPOUTFILE=outtmp.h5 +TMPOUTFILE=tmp.h5 TMPFILE=tmp.h5 -TMPCHKFILE=chktmp.h5 -TMPDMPFILE=dmptmp.h5 +TMPCHKFILE=tmp.h5 +TMPDMPFILE=tmp.h5 ###################################################################### # test files @@ -182,10 +182,10 @@ CLEAN_TESTFILES_AND_TESTDIR() if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then $RM $TESTDIR else - $RM $TESTDIR/$TMPFILE + $RM $TESTDIR/$TMPFILE $RM $TESTDIR/$TMPOUTFILE $RM $TESTDIR/$TMPCHKFILE - $RM $TESTDIR/$TMPDMPFILE + $RM $TESTDIR/*-$TMPDMPFILE fi } @@ -224,13 +224,14 @@ TOOLTEST_OUT() { actual_err="$TESTDIR/`basename $1 .ddl`.out.err" actual_sav=${actual}-sav actual_err_sav=${actual_err}-sav + testfile="`basename $1 .ddl`-tmp.h5" # Prepare the test file - $RM $TESTDIR/$TMPOUTFILE + $RM $TESTDIR/$testfile TFILE=$2 if [ ! -z "$2" ] && [ -e $TESTDIR/$2 ] ; then - $CP $TESTDIR/$2 $TESTDIR/$TMPOUTFILE - TFILE=$TMPOUTFILE + $CP $TESTDIR/$2 $TESTDIR/$testfile + TFILE=$testfile fi # Run test. @@ -261,13 +262,14 @@ TOOLTEST_MASK_OUT() { actual_err="$TESTDIR/`basename $1 .ddl`.out.err" actual_sav=${actual}-sav actual_err_sav=${actual_err}-sav + testfile="`basename $1 .ddl`-tmp.h5" # Prepare the test file - $RM $TESTDIR/$TMPOUTFILE + $RM $TESTDIR/$testfile TFILE=$2 if [ ! -z "$2" ] && [ -e $TESTDIR/$2 ] ; then - $CP $TESTDIR/$2 $TESTDIR/$TMPOUTFILE - TFILE=$TMPOUTFILE + $CP $TESTDIR/$2 $TESTDIR/$testfile + TFILE=$testfile fi # Run test. @@ -321,13 +323,14 @@ TOOLTEST_ERR() { actual_err="$TESTDIR/`basename $1 .ddl`.out.err" actual_sav=${actual}-sav actual_err_sav=${actual_err}-sav + testfile="`basename $1 .ddl`-tmp.h5" # Prepare the test file - $RM $TESTDIR/$TMPOUTFILE + $RM $TESTDIR/$testfile TFILE=$2 if [ ! -z "$2" ] && [ -e $TESTDIR/$2 ] ; then - $CP $TESTDIR/$2 $TESTDIR/$TMPOUTFILE - TFILE=$TMPOUTFILE + $CP $TESTDIR/$2 $TESTDIR/$testfile + TFILE=$testfile fi # Run test. 
@@ -360,9 +363,10 @@ TOOLTEST_ERR() { # -n TOOLTEST() { TESTING $FORMCONV $3 $4 $5 $1 - $RM $TESTDIR/$2 - $CP $TESTDIR/$1 $TESTDIR/$2 - $RUNSERIAL $FORMCONV_BIN $3 $4 $5 $TESTDIR/$2 + testfile="`basename $1 .h5`-tmp.h5" + $RM $TESTDIR/$testfile + $CP $TESTDIR/$1 $TESTDIR/$testfile + $RUNSERIAL $FORMCONV_BIN $3 $4 $5 $TESTDIR/$testfile exitcode=$? if [ $exitcode -ne 0 ]; then echo "*FAILED*" @@ -382,7 +386,7 @@ CHECKING() { # $1 dataset name IDX_CHECK() { CHECKING $1 - $RUNSERIAL $CHK_IDX_BIN $TESTDIR/$TMPCHKFILE $1 + $RUNSERIAL $CHK_IDX_BIN $TESTDIR/$2 $1 ret=$? if [ $ret -eq 0 ]; then echo " PASSED" @@ -414,7 +418,8 @@ H5DUMP_CHECK() { expect="$TESTDIR/$2" actual="$TESTDIR/`basename $2 .ddl`.out" actual_err="$TESTDIR/`basename $2 .ddl`.err" - $RUNSERIAL $H5DUMP_BIN -BH $TESTDIR/$TMPDMPFILE > $actual 2>$actual_err + testfile="`basename $2 .ddl`-tmp.h5" + $RUNSERIAL $H5DUMP_BIN -BH $TESTDIR/$testfile > $actual 2>$actual_err cat $actual_err >> $actual # Compare output @@ -497,45 +502,45 @@ TOOLTEST_MASK_OUT h5fc_v_err.ddl h5fc_err_level.h5 -v # h5format_convert -d /GROUP/DSET_FA h5fc_ext_none.h5 # h5format_convert -d /DSET_NONE h5fc_ext_none.h5 # h5format_convert -d /GROUP/DSET_NDATA_NONE h5fc_ext_none.h5 -TOOLTEST h5fc_ext_none.h5 $TMPCHKFILE -d /DSET_EA -IDX_CHECK /DSET_EA +TOOLTEST h5fc_ext_none.h5 h5fc_ext_none-tmp.h5 -d /DSET_EA +IDX_CHECK /DSET_EA h5fc_ext_none-tmp.h5 # -TOOLTEST h5fc_ext_none.h5 $TMPCHKFILE -d /GROUP/DSET_NDATA_EA -IDX_CHECK /GROUP/DSET_NDATA_EA +TOOLTEST h5fc_ext_none.h5 h5fc_ext_none-tmp.h5 -d /GROUP/DSET_NDATA_EA +IDX_CHECK /GROUP/DSET_NDATA_EA h5fc_ext_none-tmp.h5 # -TOOLTEST h5fc_ext_none.h5 $TMPCHKFILE -d /GROUP/DSET_BT2 -IDX_CHECK /GROUP/DSET_BT2 +TOOLTEST h5fc_ext_none.h5 h5fc_ext_none-tmp.h5 -d /GROUP/DSET_BT2 +IDX_CHECK /GROUP/DSET_BT2 h5fc_ext_none-tmp.h5 # -TOOLTEST h5fc_ext_none.h5 $TMPCHKFILE -d /DSET_NDATA_BT2 -IDX_CHECK /DSET_NDATA_BT2 +TOOLTEST h5fc_ext_none.h5 h5fc_ext_none-tmp.h5 -d /DSET_NDATA_BT2 +IDX_CHECK /DSET_NDATA_BT2 
h5fc_ext_none-tmp.h5 # -TOOLTEST h5fc_ext_none.h5 $TMPCHKFILE -d /DSET_FA -IDX_CHECK /DSET_FA +TOOLTEST h5fc_ext_none.h5 h5fc_ext_none-tmp.h5 -d /DSET_FA +IDX_CHECK /DSET_FA h5fc_ext_none-tmp.h5 # -TOOLTEST h5fc_ext_none.h5 $TMPCHKFILE -d /GROUP/DSET_NDATA_FA -IDX_CHECK /GROUP/DSET_NDATA_FA +TOOLTEST h5fc_ext_none.h5 h5fc_ext_none-tmp.h5 -d /GROUP/DSET_NDATA_FA +IDX_CHECK /GROUP/DSET_NDATA_FA h5fc_ext_none-tmp.h5 # -TOOLTEST h5fc_ext_none.h5 $TMPCHKFILE -d /DSET_NONE -IDX_CHECK /DSET_NONE +TOOLTEST h5fc_ext_none.h5 h5fc_ext_none-tmp.h5 -d /DSET_NONE +IDX_CHECK /DSET_NONE h5fc_ext_none-tmp.h5 # -TOOLTEST h5fc_ext_none.h5 $TMPCHKFILE -d /GROUP/DSET_NDATA_NONE -IDX_CHECK /GROUP/DSET_NDATA_NONE +TOOLTEST h5fc_ext_none.h5 h5fc_ext_none-tmp.h5 -d /GROUP/DSET_NDATA_NONE +IDX_CHECK /GROUP/DSET_NDATA_NONE h5fc_ext_none-tmp.h5 # # # # No output from tests: just check exit code # h5format_convert -d /DSET_NDATA_BT2 old_h5fc_ext_none.h5 (v1-btree dataset) # h5format_convert -d /DSET_CONTIGUOUS h5fc_non_v3.h5 (non-chunked dataset) -TOOLTEST old_h5fc_ext_none.h5 $TMPFILE -d /DSET_NDATA_BT2 -TOOLTEST h5fc_non_v3.h5 $TMPFILE -d /DSET_CONTIGUOUS +TOOLTEST old_h5fc_ext_none.h5 old_h5fc_ext_none-tmp.h5 -d /DSET_NDATA_BT2 +TOOLTEST h5fc_non_v3.h5 h5fc_non_v3-tmp.h5 -d /DSET_CONTIGUOUS # # # # No output from tests: just check exit code # h5format_convert -d /GROUP/DSET_BT2 -n h5fc_non_v3.h5 (noop, one dataset) # h5format_convert -n h5fc_non_v3.h5 (noop, all datasets) -TOOLTEST h5fc_non_v3.h5 $TMPFILE -d /GROUP/DSET_BT2 -n -TOOLTEST h5fc_non_v3.h5 $TMPFILE -n +TOOLTEST h5fc_non_v3.h5 h5fc_non_v3-tmp.h5 -d /GROUP/DSET_BT2 -n +TOOLTEST h5fc_non_v3.h5 h5fc_non_v3-tmp.h5 -n # # # @@ -543,11 +548,11 @@ TOOLTEST h5fc_non_v3.h5 $TMPFILE -n # h5format_convert h5fc_non_v3.h5 # 1) convert all datasets # 2) verify indexing types -TOOLTEST h5fc_non_v3.h5 $TMPCHKFILE -IDX_CHECK /DSET_NDATA_EA -IDX_CHECK /DSET_NDATA_BT2 -IDX_CHECK /GROUP/DSET_BT2 -IDX_CHECK /GROUP/DSET_EA +TOOLTEST h5fc_non_v3.h5 
h5fc_non_v3-tmp.h5 +IDX_CHECK /DSET_NDATA_EA h5fc_non_v3-tmp.h5 +IDX_CHECK /DSET_NDATA_BT2 h5fc_non_v3-tmp.h5 +IDX_CHECK /GROUP/DSET_BT2 h5fc_non_v3-tmp.h5 +IDX_CHECK /GROUP/DSET_EA h5fc_non_v3-tmp.h5 # # # @@ -555,47 +560,47 @@ IDX_CHECK /GROUP/DSET_EA # h5format_convert h5fc_edge_v3.h5 # 1) convert the chunked dataset (filter, no-filter-edge-chunk) # 2) verify the indexing type -TOOLTEST h5fc_edge_v3.h5 $TMPCHKFILE -IDX_CHECK /DSET_EDGE +TOOLTEST h5fc_edge_v3.h5 h5fc_edge_v3-tmp.h5 +IDX_CHECK /DSET_EDGE h5fc_edge_v3-tmp.h5 # # # The following test files have messages in the superblock extension. # Verify h5dump output for correctness after conversion -TOOLTEST h5fc_ext1_i.h5 $TMPDMPFILE +TOOLTEST h5fc_ext1_i.h5 h5fc_ext1_i-tmp.h5 H5DUMP_CHECK h5fc_ext1_i.h5 h5fc_ext1_i.ddl -TOOLTEST h5fc_ext1_s.h5 $TMPDMPFILE +TOOLTEST h5fc_ext1_s.h5 h5fc_ext1_s-tmp.h5 H5DUMP_CHECK h5fc_ext1_s.h5 h5fc_ext1_s.ddl -TOOLTEST h5fc_ext1_f.h5 $TMPDMPFILE +TOOLTEST h5fc_ext1_f.h5 h5fc_ext1_f-tmp.h5 H5DUMP_CHECK h5fc_ext1_f.h5 h5fc_ext1_f.ddl # -TOOLTEST h5fc_ext2_if.h5 $TMPDMPFILE +TOOLTEST h5fc_ext2_if.h5 h5fc_ext2_if-tmp.h5 H5DUMP_CHECK h5fc_ext2_if.h5 h5fc_ext2_if.ddl -TOOLTEST h5fc_ext2_is.h5 $TMPDMPFILE +TOOLTEST h5fc_ext2_is.h5 h5fc_ext2_is-tmp.h5 H5DUMP_CHECK h5fc_ext2_is.h5 h5fc_ext2_is.ddl -TOOLTEST h5fc_ext2_sf.h5 $TMPDMPFILE +TOOLTEST h5fc_ext2_sf.h5 h5fc_ext2_sf-tmp.h5 H5DUMP_CHECK h5fc_ext2_sf.h5 h5fc_ext2_sf.ddl # -TOOLTEST h5fc_ext3_isf.h5 $TMPDMPFILE +TOOLTEST h5fc_ext3_isf.h5 h5fc_ext3_isf-tmp.h5 H5DUMP_CHECK h5fc_ext3_isf.h5 h5fc_ext3_isf.ddl # # # -TOOLTEST old_h5fc_ext1_i.h5 $TMPDMPFILE +TOOLTEST old_h5fc_ext1_i.h5 old_h5fc_ext1_i-tmp.h5 H5DUMP_CHECK old_h5fc_ext1_i.h5 old_h5fc_ext1_i.ddl -TOOLTEST old_h5fc_ext1_s.h5 $TMPDMPFILE +TOOLTEST old_h5fc_ext1_s.h5 old_h5fc_ext1_s-tmp.h5 H5DUMP_CHECK old_h5fc_ext1_s.h5 old_h5fc_ext1_s.ddl -TOOLTEST old_h5fc_ext1_f.h5 $TMPDMPFILE +TOOLTEST old_h5fc_ext1_f.h5 old_h5fc_ext1_f-tmp.h5 H5DUMP_CHECK old_h5fc_ext1_f.h5 
old_h5fc_ext1_f.ddl # -TOOLTEST old_h5fc_ext2_if.h5 $TMPDMPFILE +TOOLTEST old_h5fc_ext2_if.h5 old_h5fc_ext2_if-tmp.h5 H5DUMP_CHECK old_h5fc_ext2_if.h5 old_h5fc_ext2_if.ddl -TOOLTEST old_h5fc_ext2_is.h5 $TMPDMPFILE +TOOLTEST old_h5fc_ext2_is.h5 old_h5fc_ext2_is-tmp.h5 H5DUMP_CHECK old_h5fc_ext2_is.h5 old_h5fc_ext2_is.ddl -TOOLTEST old_h5fc_ext2_sf.h5 $TMPDMPFILE +TOOLTEST old_h5fc_ext2_sf.h5 old_h5fc_ext2_sf-tmp.h5 H5DUMP_CHECK old_h5fc_ext2_sf.h5 old_h5fc_ext2_sf.ddl # -TOOLTEST old_h5fc_ext3_isf.h5 $TMPDMPFILE +TOOLTEST old_h5fc_ext3_isf.h5 old_h5fc_ext3_isf-tmp.h5 H5DUMP_CHECK old_h5fc_ext3_isf.h5 old_h5fc_ext3_isf.ddl # # Clean up temporary files/directories diff --git a/tools/test/h5import/CMakeLists.txt b/tools/test/h5import/CMakeLists.txt index 567d9f6d6f6..710ba47d429 100644 --- a/tools/test/h5import/CMakeLists.txt +++ b/tools/test/h5import/CMakeLists.txt @@ -5,7 +5,7 @@ project (HDF5_TOOLS_TEST_H5IMPORT C) # Add the h5import executables # -------------------------------------------------------------------- add_executable (h5importtest ${HDF5_TOOLS_TEST_H5IMPORT_SOURCE_DIR}/h5importtest.c) -target_include_directories (h5importtest PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (h5importtest PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (BUILD_STATIC_LIBS) TARGET_C_PROPERTIES (h5importtest STATIC) target_link_libraries (h5importtest PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) diff --git a/tools/test/h5import/CMakeTests.cmake b/tools/test/h5import/CMakeTests.cmake index 81ebc4aeefb..750611698f4 100644 --- a/tools/test/h5import/CMakeTests.cmake +++ b/tools/test/h5import/CMakeTests.cmake @@ -97,26 +97,23 @@ ############################################################################## macro (ADD_H5_TEST testname importfile conffile testfile) - # If using memchecker skip macro based tests - if (HDF5_ENABLE_USING_MEMCHECKER) - add_test (NAME 
H5IMPORT-${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${importfile} -c ${conffile} -o ${testfile}) - set_tests_properties (H5IMPORT-${testname} PROPERTIES - FIXTURES_REQUIRED set_h5importtest - ) - else () - add_test ( - NAME H5IMPORT-${testname}-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove ${testfile} - ) - set_tests_properties (H5IMPORT-${testname}-clear-objects PROPERTIES - FIXTURES_REQUIRED set_h5importtest - ) - - add_test (NAME H5IMPORT-${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${importfile} -c ${conffile} -o ${testfile}) - set_tests_properties (H5IMPORT-${testname} PROPERTIES - DEPENDS H5IMPORT-${testname}-clear-objects - ) + add_test ( + NAME H5IMPORT-${testname}-clear-objects + COMMAND ${CMAKE_COMMAND} -E remove + ${testfile} + ${testfile}.new + ) + set_tests_properties (H5IMPORT-${testname}-clear-objects PROPERTIES + FIXTURES_REQUIRED set_h5importtest + ) + add_test (NAME H5IMPORT-${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${importfile} -c ${conffile} -o ${testfile}) + set_tests_properties (H5IMPORT-${testname} PROPERTIES + DEPENDS H5IMPORT-${testname}-clear-objects + FIXTURES_REQUIRED set_h5importtest + ) + # If using memchecker skip macro based tests + if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( NAME H5IMPORT-${testname}-H5DMP COMMAND "${CMAKE_COMMAND}" @@ -125,6 +122,7 @@ -D "TEST_ARGS:STRING=${testfile}" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" -D "TEST_OUTPUT=${testfile}.new" + -D "TEST_SAVE=1" -D "TEST_EXPECT=0" -D "TEST_FILTER=(^(HDF5)[^\n]*)" -D "TEST_SKIP_COMPARE=TRUE" @@ -132,6 +130,7 @@ ) set_tests_properties (H5IMPORT-${testname}-H5DMP PROPERTIES DEPENDS H5IMPORT-${testname} + FIXTURES_REQUIRED set_h5importtest ) add_test ( NAME H5IMPORT-${testname}-H5DMP_CMP @@ -148,8 +147,20 @@ ) set_tests_properties (H5IMPORT-${testname}-H5DMP_CMP PROPERTIES DEPENDS H5IMPORT-${testname}-H5DMP + FIXTURES_REQUIRED set_h5importtest ) endif () + + add_test ( + NAME H5IMPORT-${testname}-clean-objects + COMMAND 
${CMAKE_COMMAND} -E remove + ${testfile} + ${testfile}.new + ) + set_tests_properties (H5IMPORT-${testname}-clean-objects PROPERTIES + DEPENDS H5IMPORT-${testname}-H5DMP_CMP + FIXTURES_REQUIRED set_h5importtest + ) endmacro () macro (ADD_H5_DUMPTEST testname datasetname testfile) @@ -160,9 +171,11 @@ COMMAND ${CMAKE_COMMAND} -E remove d${testfile} d${testfile}.bin + d${testfile}.dmp ) set_tests_properties (H5IMPORT-DUMP-${testname}-clear-objects PROPERTIES DEPENDS H5IMPORT-${testname}-H5DMP_CMP + FIXTURES_REQUIRED set_h5importtest ) if ("${ARGN}" STREQUAL "BINARY") @@ -174,6 +187,7 @@ -D "TEST_ARGS:STRING=-p;-d;${datasetname};-o;d${testfile}.bin;-b;NATIVE;testfiles/${testfile}" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" -D "TEST_OUTPUT=d${testfile}.dmp" + -D "TEST_SAVE=1" -D "TEST_EXPECT=0" -D "TEST_SKIP_COMPARE=TRUE" -P "${HDF_RESOURCES_DIR}/runTest.cmake" @@ -187,6 +201,7 @@ -D "TEST_ARGS:STRING=-p;-d;${datasetname};-o;d${testfile}.bin;-y;--width=1;testfiles/${testfile}" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" -D "TEST_OUTPUT=d${testfile}.dmp" + -D "TEST_SAVE=1" -D "TEST_EXPECT=0" -D "TEST_SKIP_COMPARE=TRUE" -P "${HDF_RESOURCES_DIR}/runTest.cmake" @@ -194,6 +209,7 @@ endif () set_tests_properties (H5IMPORT-DUMP-${testname}-H5DMP PROPERTIES DEPENDS "H5IMPORT-DUMP-${testname}-clear-objects" + FIXTURES_REQUIRED set_h5importtest ) add_test ( @@ -210,6 +226,7 @@ ) set_tests_properties (H5IMPORT-DUMP-${testname} PROPERTIES DEPENDS "H5IMPORT-DUMP-${testname}-H5DMP" + FIXTURES_REQUIRED set_h5importtest ) add_test ( @@ -222,13 +239,26 @@ -D "TEST_OUTPUT=d${testfile}.dff" -D "TEST_EXPECT=0" -D "TEST_FILTER=(^(Warning)[^\n]*)" - -D "TEST_REFERENCE=testfiles/d${testfile}.txt" + -D "TEST_REFERENCE=testfiles/${testfile}.txt" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5IMPORT-DUMP-${testname}-H5DFF PROPERTIES DEPENDS "H5IMPORT-DUMP-${testname}" + FIXTURES_REQUIRED set_h5importtest ) endif () + + add_test ( + NAME H5IMPORT-DUMP-${testname}-clean-objects + 
COMMAND ${CMAKE_COMMAND} -E remove + d${testfile} + d${testfile}.bin + d${testfile}.dmp + ) + set_tests_properties (H5IMPORT-DUMP-${testname}-clean-objects PROPERTIES + DEPENDS H5IMPORT-DUMP-${testname}-H5DFF + FIXTURES_REQUIRED set_h5importtest + ) endmacro () macro (ADD_H5_DUMPSUBTEST testname testfile datasetname) @@ -237,8 +267,9 @@ add_test ( NAME H5IMPORT_SUB-DUMP-${testname}-clear-objects COMMAND ${CMAKE_COMMAND} -E remove - d-${testname}.h5 - ${testname}.bin + ds${testname}.h5 + ds${testname}.bin + ds${testname}.dmp ) set_tests_properties (H5IMPORT_SUB-DUMP-${testname}-clear-objects PROPERTIES FIXTURES_REQUIRED set_h5importtest @@ -249,15 +280,17 @@ COMMAND "${CMAKE_COMMAND}" -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=-p;-d;${datasetname};${ARGN};-o;${testname}.bin;-b;NATIVE;testfiles/${testfile}" + -D "TEST_ARGS:STRING=-p;-d;${datasetname};${ARGN};-o;ds${testname}.bin;-b;NATIVE;testfiles/${testfile}" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_OUTPUT=${testname}.dmp" + -D "TEST_OUTPUT=ds${testname}.dmp" + -D "TEST_SAVE=1" -D "TEST_EXPECT=0" -D "TEST_SKIP_COMPARE=TRUE" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5IMPORT_SUB-DUMP-${testname}-H5DMP PROPERTIES DEPENDS "H5IMPORT_SUB-DUMP-${testname}-clear-objects" + FIXTURES_REQUIRED set_h5importtest ) add_test ( @@ -265,32 +298,46 @@ COMMAND "${CMAKE_COMMAND}" -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=${testname}.bin;-c;${testname}.dmp;-o;d-${testname}.h5" + -D "TEST_ARGS:STRING=ds${testname}.bin;-c;ds${testname}.dmp;-o;ds${testname}.h5" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_OUTPUT=${testname}.imp" + -D "TEST_OUTPUT=ds${testname}.imp" -D "TEST_EXPECT=0" -D "TEST_SKIP_COMPARE=TRUE" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5IMPORT_SUB-DUMP-${testname}-H5IMP PROPERTIES DEPENDS "H5IMPORT_SUB-DUMP-${testname}-H5DMP" + FIXTURES_REQUIRED 
set_h5importtest ) add_test ( NAME H5IMPORT_SUB-DUMP-${testname}-CMP COMMAND "${CMAKE_COMMAND}" -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=-p;d-${testname}.h5" + -D "TEST_ARGS:STRING=-p;ds${testname}.h5" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}" - -D "TEST_OUTPUT=d-${testname}.dmp" + -D "TEST_OUTPUT=ds${testname}.dmp" -D "TEST_EXPECT=0" -D "TEST_REFERENCE=testfiles/${testname}.ddl" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5IMPORT_SUB-DUMP-${testname}-CMP PROPERTIES - DEPENDS "H5IMPORT_SUB-DUMP-${testname}-H5IMP" + DEPENDS "H5IMPORT_SUB-DUMP-${testname}-H5IMP" + FIXTURES_REQUIRED set_h5importtest ) endif () + + add_test ( + NAME H5IMPORT_SUB-DUMP-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove + ds${testname}.h5 + ds${testname}.bin + ds${testname}.dmp + ) + set_tests_properties (H5IMPORT_SUB-DUMP-${testname}-clean-objects PROPERTIES + DEPENDS "H5IMPORT_SUB-DUMP-${testname}-CMP" + FIXTURES_REQUIRED set_h5importtest + ) endmacro () macro (ADD_H5_SKIP_DUMPTEST testname datasetname testfile) @@ -368,23 +415,55 @@ set (last_test "H5IMPORT-clear-objects") endif () - add_test ( - NAME H5IMPORT-h5importtest-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove + set (H5IMPORTTEST_CLEANFILES binfp64.bin binfp64.conf + binfp64.h5 + binfp64.h5.new binin8.bin binin8.conf + binin8.h5 + binin8.h5.new binin8w.bin binin8w.conf + binin8w.h5 + binin8w.h5.new binin16.bin binin16.conf + binin16.h5 + binin16.h5.new binin32.bin binin32.conf + binin32.h5 + binin32.h5.new binuin16.bin binuin16.conf + binuin16.h5 + binuin16.h5.new binuin32.bin binuin32.conf + binuin32.h5 + binuin32.h5.new + txtfp32.h5 + txtfp32.h5.new + txtfp64.h5 + txtfp64.h5.new + txtin8.h5 + txtin8.h5.new + txtin8w.h5 + txtin8w.h5.new + txtin16.h5 + txtin16.h5.new + txtin32.h5 + txtin32.h5.new + txtuin16.h5 + txtuin16.h5.new + txtuin32.h5 + txtuin32.h5.new + ) + add_test ( + NAME H5IMPORT-h5importtest-clear-objects + COMMAND 
${CMAKE_COMMAND} -E remove ${H5IMPORTTEST_CLEANFILES} ) add_test (NAME H5IMPORT-h5importtest COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) @@ -392,6 +471,13 @@ FIXTURES_SETUP set_h5importtest DEPENDS H5IMPORT-h5importtest-clear-objects ) + add_test ( + NAME H5IMPORT-h5importtest-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove ${H5IMPORTTEST_CLEANFILES} + ) + set_tests_properties (H5IMPORT-h5importtest-clean-objects PROPERTIES + FIXTURES_CLEANUP set_h5importtest + ) # ----- TESTING "ASCII I32 rank 3 - Output BE " ; ADD_H5_TEST (ASCII_I32 testfiles/txtin32.txt testfiles/txtin32.conf txtin32.h5) diff --git a/tools/test/h5import/h5importtestutil.sh.in b/tools/test/h5import/h5importtestutil.sh.in index 59ae34cd684..ba91ee55c0e 100644 --- a/tools/test/h5import/h5importtestutil.sh.in +++ b/tools/test/h5import/h5importtestutil.sh.in @@ -267,9 +267,9 @@ fi TOOLTEST5() { err=0 -$RUNSERIAL $DUMPER_BIN -p -d $3 $4 -o d-$1.bin -b NATIVE tmp_testfiles/$2 > d-$1.dmp -$RUNSERIAL $H5IMPORT_BIN d-$1.bin -c d-$1.dmp -o d-$1.h5 > d-$1.imp -$RUNSERIAL $DUMPER_BIN -p d-$1.h5 > log2 +$RUNSERIAL $DUMPER_BIN -p -d $3 $4 -o ds$1.bin -b NATIVE tmp_testfiles/$2 > ds$1.dmp +$RUNSERIAL $H5IMPORT_BIN ds$1.bin -c ds$1.dmp -o ds$1.h5 > ds$1.imp +$RUNSERIAL $DUMPER_BIN -p ds$1.h5 > log2 $CP -f $SRC_H5IMPORT_TESTFILES/$1.ddl log1 cmp -s log1 log2 || err=1 diff --git a/tools/test/h5import/testfiles/tall_fp32.ddl b/tools/test/h5import/testfiles/tall_fp32.ddl index 2a0dc1b7ed8..53f24bfceed 100644 --- a/tools/test/h5import/testfiles/tall_fp32.ddl +++ b/tools/test/h5import/testfiles/tall_fp32.ddl @@ -1,4 +1,4 @@ -HDF5 "d-tall_fp32.h5" { +HDF5 "dstall_fp32.h5" { GROUP "/" { GROUP "g2" { DATASET "dset2.2" { diff --git a/tools/test/h5import/testfiles/tall_i32.ddl b/tools/test/h5import/testfiles/tall_i32.ddl index d9280bcf7bc..b05dd533c62 100644 --- a/tools/test/h5import/testfiles/tall_i32.ddl +++ b/tools/test/h5import/testfiles/tall_i32.ddl @@ -1,4 +1,4 @@ -HDF5 "d-tall_i32.h5" { +HDF5 "dstall_i32.h5" { 
GROUP "/" { GROUP "g1" { GROUP "g1.1" { diff --git a/tools/test/h5import/testfiles/tintsattrs_u32.ddl b/tools/test/h5import/testfiles/tintsattrs_u32.ddl index cf8889c1a81..a53217a8e26 100644 --- a/tools/test/h5import/testfiles/tintsattrs_u32.ddl +++ b/tools/test/h5import/testfiles/tintsattrs_u32.ddl @@ -1,4 +1,4 @@ -HDF5 "d-tintsattrs_u32.h5" { +HDF5 "dstintsattrs_u32.h5" { GROUP "/" { DATASET "DU32BITS" { DATATYPE H5T_STD_U32LE diff --git a/tools/test/h5jam/CMakeLists.txt b/tools/test/h5jam/CMakeLists.txt index 97e6b76fc51..a7feb20e854 100644 --- a/tools/test/h5jam/CMakeLists.txt +++ b/tools/test/h5jam/CMakeLists.txt @@ -6,7 +6,7 @@ project (HDF5_TOOLS_TEST_H5JAM C) # -------------------------------------------------------------------- if (HDF5_BUILD_GENERATORS AND BUILD_STATIC_LIBS) add_executable (h5jamgentest ${HDF5_TOOLS_TEST_H5JAM_SOURCE_DIR}/h5jamgentest.c) - target_include_directories (h5jamgentest PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5jamgentest PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (h5jamgentest STATIC) target_link_libraries (h5jamgentest PRIVATE ${HDF5_LIB_TARGET}) set_target_properties (h5jamgentest PROPERTIES FOLDER generator/tools) @@ -22,7 +22,7 @@ if (HDF5_BUILD_GENERATORS AND BUILD_STATIC_LIBS) endif () add_executable (getub ${HDF5_TOOLS_TEST_H5JAM_SOURCE_DIR}/getub.c) -target_include_directories (getub PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (getub PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (BUILD_STATIC_LIBS) TARGET_C_PROPERTIES (getub STATIC) target_link_libraries (getub PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) @@ -33,7 +33,7 @@ endif () set_target_properties (getub PROPERTIES FOLDER tools) 
add_executable (tellub ${HDF5_TOOLS_TEST_H5JAM_SOURCE_DIR}/tellub.c) -target_include_directories (tellub PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (tellub PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (BUILD_STATIC_LIBS) TARGET_C_PROPERTIES (tellub STATIC) target_link_libraries (tellub PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) diff --git a/tools/test/h5jam/testh5jam.sh.in b/tools/test/h5jam/testh5jam.sh.in index b2464edd027..805ce11e06e 100644 --- a/tools/test/h5jam/testh5jam.sh.in +++ b/tools/test/h5jam/testh5jam.sh.in @@ -182,7 +182,7 @@ CLEANUP() { # SETUP file tocopy # Clone a standard input file in the test directory -# Modification: +# # Was using "cp" command which means file $2 will inherit the permission # setting of file $1. If $1 is read-only, so will $2. That will cause # failure when the test attempts to write it later on. 
Changed to use diff --git a/tools/test/h5ls/CMakeLists.txt b/tools/test/h5ls/CMakeLists.txt index ba6e4688399..b6c105759c7 100644 --- a/tools/test/h5ls/CMakeLists.txt +++ b/tools/test/h5ls/CMakeLists.txt @@ -10,7 +10,7 @@ if (BUILD_SHARED_LIBS) set (H5LS_TOOL_PLUGIN_LIB_TARGET ${H5LS_TOOL_PLUGIN_LIB_CORENAME}) add_library (${H5LS_TOOL_PLUGIN_LIB_TARGET} SHARED dynlib_ls.c) - target_include_directories (${H5LS_TOOL_PLUGIN_LIB_TARGET} PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (${H5LS_TOOL_PLUGIN_LIB_TARGET} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (${H5LS_TOOL_PLUGIN_LIB_TARGET} SHARED) target_link_libraries (${H5LS_TOOL_PLUGIN_LIB_TARGET} PRIVATE ${HDF5_LIBSH_TARGET}) H5_SET_LIB_OPTIONS (${H5LS_TOOL_PLUGIN_LIB_TARGET} ${H5LS_TOOL_PLUGIN_LIB_NAME} SHARED "LIB") diff --git a/tools/test/h5ls/CMakeTests.cmake b/tools/test/h5ls/CMakeTests.cmake index 34f8e6fd210..15f18b5e7a8 100644 --- a/tools/test/h5ls/CMakeTests.cmake +++ b/tools/test/h5ls/CMakeTests.cmake @@ -138,7 +138,9 @@ # If using memchecker add tests without using scripts if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5LS-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN}) - set_tests_properties (H5LS-${resultfile} PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles") + set_tests_properties (H5LS-${resultfile} PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles" + ) if ("${resultcode}" STREQUAL "1") set_tests_properties (H5LS-${resultfile} PROPERTIES WILL_FAIL "true") endif () @@ -157,6 +159,9 @@ -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () + set_tests_properties (H5LS-${resultfile} PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles" + ) endmacro () macro (ADD_H5_ERR_TEST resultfile resultcode) @@ -182,12 +187,15 @@ -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () + set_tests_properties (H5LS-${resultfile} PROPERTIES + 
WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles" + ) endmacro () macro (ADD_H5_UD_TEST testname resultcode resultfile) if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( - NAME H5LS_UD-${testname} + NAME H5LS_UD-${testname}-${resultfile} COMMAND "${CMAKE_COMMAND}" -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" -D "TEST_PROGRAM=$" @@ -201,6 +209,9 @@ -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) + set_tests_properties (H5LS_UD-${testname}-${resultfile} PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles" + ) endif () endmacro () diff --git a/tools/test/h5ls/h5ls_plugin.sh.in b/tools/test/h5ls/h5ls_plugin.sh.in index 0077b07a06f..02e0cf7c499 100644 --- a/tools/test/h5ls/h5ls_plugin.sh.in +++ b/tools/test/h5ls/h5ls_plugin.sh.in @@ -3,7 +3,7 @@ # Copyright by The HDF Group. # All rights reserved. # -# This file is part of HDF5. The full HDF5 copyright notice, including +# This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code # distribution tree, or in https://www.hdfgroup.org/licenses. @@ -88,7 +88,7 @@ if [ $? 
!= 0 ]; then fi # setup plugin path -ENVCMD="env HDF5_PLUGIN_PATH=../${PLUGIN_LIBDIR}" +ENVCMD="env HDF5_PLUGIN_PATH=../${PLUGIN_LIBDIR}:${HDF5_PLUGIN_PATH}" # # copy test files and expected output files from source dirs to test dir diff --git a/tools/test/h5repack/CMakeLists.txt b/tools/test/h5repack/CMakeLists.txt index 2f79763b344..2fd2827e0a6 100644 --- a/tools/test/h5repack/CMakeLists.txt +++ b/tools/test/h5repack/CMakeLists.txt @@ -6,7 +6,7 @@ project (HDF5_TOOLS_TEST_H5REPACK C) # -------------------------------------------------------------------- add_executable (testh5repack_detect_szip ${HDF5_TOOLS_TEST_H5REPACK_SOURCE_DIR}/testh5repack_detect_szip.c) target_include_directories (testh5repack_detect_szip - PRIVATE "${HDF5_TOOLS_SRC_H5REPACK_SOURCE_DIR};${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" + PRIVATE "${HDF5_TOOLS_SRC_H5REPACK_SOURCE_DIR};${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" ) if (BUILD_STATIC_LIBS) TARGET_C_PROPERTIES (testh5repack_detect_szip STATIC) @@ -29,7 +29,7 @@ set (REPACK_COMMON_SOURCES ) add_executable (h5repacktest ${REPACK_COMMON_SOURCES} ${HDF5_TOOLS_TEST_H5REPACK_SOURCE_DIR}/h5repacktst.c) target_include_directories (h5repacktest - PRIVATE "${HDF5_TOOLS_SRC_H5REPACK_SOURCE_DIR};${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_TEST_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" + PRIVATE "${HDF5_TOOLS_SRC_H5REPACK_SOURCE_DIR};${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_INCLUDE_DIRS};${HDF5_TEST_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>" ) if (BUILD_STATIC_LIBS) TARGET_C_PROPERTIES (h5repacktest STATIC) @@ -52,13 +52,13 @@ if (BUILD_SHARED_LIBS) set (H5REPACK_TOOL_PLUGIN_LIB_VTARGET ${H5REPACK_TOOL_PLUGIN_LIB_VCORENAME}) add_library (${H5REPACK_TOOL_PLUGIN_LIB_TARGET} SHARED dynlib_rpk.c) - target_include_directories (${H5REPACK_TOOL_PLUGIN_LIB_TARGET} PRIVATE 
"${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (${H5REPACK_TOOL_PLUGIN_LIB_TARGET} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (${H5REPACK_TOOL_PLUGIN_LIB_TARGET} SHARED) target_link_libraries (${H5REPACK_TOOL_PLUGIN_LIB_TARGET} PRIVATE ${HDF5_LIBSH_TARGET}) H5_SET_LIB_OPTIONS (${H5REPACK_TOOL_PLUGIN_LIB_TARGET} ${H5REPACK_TOOL_PLUGIN_LIB_NAME} SHARED "LIB") add_library (${H5REPACK_TOOL_PLUGIN_LIB_VTARGET} SHARED dynlib_vrpk.c) - target_include_directories (${H5REPACK_TOOL_PLUGIN_LIB_VTARGET} PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (${H5REPACK_TOOL_PLUGIN_LIB_VTARGET} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (${H5REPACK_TOOL_PLUGIN_LIB_VTARGET} SHARED) target_link_libraries (${H5REPACK_TOOL_PLUGIN_LIB_VTARGET} PRIVATE ${HDF5_LIBSH_TARGET}) H5_SET_LIB_OPTIONS (${H5REPACK_TOOL_PLUGIN_LIB_VTARGET} ${H5REPACK_TOOL_PLUGIN_LIB_VNAME} SHARED "LIB") @@ -99,7 +99,7 @@ endif () # -------------------------------------------------------------------- if (HDF5_BUILD_GENERATORS AND BUILD_STATIC_LIBS) add_executable (h5repackgentest ${HDF5_TOOLS_TEST_H5REPACK_SOURCE_DIR}/h5repackgentest.c) - target_include_directories (h5repackgentest PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5repackgentest PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (h5repackgentest STATIC) target_link_libraries (h5repackgentest PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) set_target_properties (h5repackgentest PROPERTIES FOLDER generator/tools) diff --git a/tools/test/h5repack/CMakeTests.cmake b/tools/test/h5repack/CMakeTests.cmake index 936ee3e08b7..9ced1428ba4 100644 --- a/tools/test/h5repack/CMakeTests.cmake +++ 
b/tools/test/h5repack/CMakeTests.cmake @@ -222,9 +222,6 @@ # If using memchecker add tests without using scripts if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5REPACK-h5repack-${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN}) - set_tests_properties (H5REPACK-h5repack-${testname} PROPERTIES - WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles" - ) else () add_test ( NAME H5REPACK-h5repack-${testname} @@ -240,7 +237,7 @@ ) endif () set_tests_properties (H5REPACK-h5repack-${testname} PROPERTIES - FIXTURES_REQUIRED clear_h5repack + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles" ) endmacro () @@ -257,9 +254,6 @@ NAME H5REPACK_OLD-${testname}-clear-objects COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${testfile} ) - set_tests_properties (H5REPACK_OLD-${testname}-clear-objects PROPERTIES - FIXTURES_REQUIRED clear_h5repack - ) add_test ( NAME H5REPACK_OLD-${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN} -i ${PROJECT_BINARY_DIR}/testfiles/${testfile} -o ${PROJECT_BINARY_DIR}/testfiles/out-${testname}.${testfile} @@ -274,6 +268,13 @@ set_tests_properties (H5REPACK_OLD-${testname}_DFF PROPERTIES DEPENDS H5REPACK_OLD-${testname} ) + add_test ( + NAME H5REPACK_OLD-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${testfile} + ) + set_tests_properties (H5REPACK_OLD-${testname}-clean-objects PROPERTIES + DEPENDS H5REPACK_OLD-${testname}_DFF + ) endif () endif () endmacro () @@ -292,9 +293,6 @@ NAME H5REPACK-${testname}-clear-objects COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${testfile} ) - set_tests_properties (H5REPACK-${testname}-clear-objects PROPERTIES - FIXTURES_REQUIRED clear_h5repack - ) add_test ( NAME H5REPACK-${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ --enable-error-stack ${ARGN} ${PROJECT_BINARY_DIR}/testfiles/${testfile} ${PROJECT_BINARY_DIR}/testfiles/out-${testname}.${testfile} @@ -309,6 +307,13 @@ set_tests_properties (H5REPACK-${testname}_DFF 
PROPERTIES DEPENDS H5REPACK-${testname} ) + add_test ( + NAME H5REPACK-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${testfile} + ) + set_tests_properties (H5REPACK-${testname}-clean-objects PROPERTIES + DEPENDS H5REPACK-${testname}_DFF + ) endif () endmacro () @@ -322,20 +327,20 @@ set_property(TEST H5REPACK_CMP-${testname} PROPERTY DISABLED) endif () else () + add_test ( + NAME H5REPACK_CMP-${testname}-clear-objects + COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${resultfile} + ) # If using memchecker add tests without using scripts if (HDF5_ENABLE_USING_MEMCHECKER) add_test ( NAME H5REPACK_CMP-${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN} ${PROJECT_BINARY_DIR}/testfiles/${resultfile} ${PROJECT_BINARY_DIR}/testfiles/out-${testname}.${resultfile} ) - else () - add_test ( - NAME H5REPACK_CMP-${testname}-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${resultfile} - ) - set_tests_properties (H5REPACK_CMP-${testname}-clear-objects PROPERTIES - FIXTURES_REQUIRED clear_h5repack + set_tests_properties (H5REPACK_CMP-${testname} PROPERTIES + DEPENDS H5REPACK_CMP-${testname}-clear-objects ) + else () add_test ( NAME H5REPACK_CMP-${testname} COMMAND "${CMAKE_COMMAND}" @@ -353,6 +358,13 @@ DEPENDS H5REPACK_CMP-${testname}-clear-objects ) endif () + add_test ( + NAME H5REPACK_CMP-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${resultfile} + ) + set_tests_properties (H5REPACK_CMP-${testname}-clean-objects PROPERTIES + DEPENDS H5REPACK_CMP-${testname} + ) endif () endmacro () @@ -366,23 +378,17 @@ set_property(TEST H5REPACK_MASK-${testname} PROPERTY DISABLED) endif () else () + add_test ( + NAME H5REPACK_MASK-${testname}-clear-objects + COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${resultfile} + ) # If using memchecker add tests without using scripts if (HDF5_ENABLE_USING_MEMCHECKER) add_test ( NAME 
H5REPACK_MASK-${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN} ${PROJECT_BINARY_DIR}/testfiles/${resultfile} ${PROJECT_BINARY_DIR}/testfiles/out-${testname}.${resultfile} ) - set_tests_properties (H5REPACK_MASK-${testname} PROPERTIES - FIXTURES_REQUIRED clear_h5repack - ) else (HDF5_ENABLE_USING_MEMCHECKER) - add_test ( - NAME H5REPACK_MASK-${testname}-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${resultfile} - ) - set_tests_properties (H5REPACK_MASK-${testname}-clear-objects PROPERTIES DEPENDS ${last_test} - FIXTURES_REQUIRED clear_h5repack - ) add_test ( NAME H5REPACK_MASK-${testname} COMMAND "${CMAKE_COMMAND}" @@ -397,12 +403,77 @@ -D "TEST_ERRREF=${result_errcheck}" -P "${HDF_RESOURCES_DIR}/grepTest.cmake" ) - set_tests_properties (H5REPACK_MASK-${testname} PROPERTIES DEPENDS H5REPACK_MASK-${testname}-clear-objects) endif () + set_tests_properties (H5REPACK_MASK-${testname} PROPERTIES + DEPENDS H5REPACK_MASK-${testname}-clear-objects + ) + add_test ( + NAME H5REPACK_MASK-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${resultfile} + ) + set_tests_properties (H5REPACK_MASK-${testname}-clean-objects PROPERTIES + DEPENDS H5REPACK_MASK-${testname} + ) endif () endmacro () macro (ADD_H5_DMP_TEST testname testtype resultcode resultfile) + if ("${testtype}" STREQUAL "SKIP") + if (NOT HDF5_ENABLE_USING_MEMCHECKER) + add_test ( + NAME H5REPACK_DMP-${testname} + COMMAND ${CMAKE_COMMAND} -E echo "SKIP ${ARGN} ${PROJECT_BINARY_DIR}/testfiles/${resultfile} ${PROJECT_BINARY_DIR}/testfiles/out-${testname}.${resultfile}" + ) + set_property(TEST H5REPACK_DMP-${testname} PROPERTY DISABLED) + endif () + else () + add_test ( + NAME H5REPACK_DMP-${testname}-clear-objects + COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${resultfile} + ) + add_test ( + NAME H5REPACK_DMP-${testname} + COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN} ${PROJECT_BINARY_DIR}/testfiles/${resultfile} 
${PROJECT_BINARY_DIR}/testfiles/out-${testname}.${resultfile} + ) + set_tests_properties (H5REPACK_DMP-${testname} PROPERTIES + DEPENDS H5REPACK_DMP-${testname}-clear-objects + ) + if (NOT HDF5_ENABLE_USING_MEMCHECKER) + add_test ( + NAME H5REPACK_DMP-h5dump-${testname} + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS:STRING=-q;creation_order;-pH;out-${testname}.${resultfile}" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/testfiles" + -D "TEST_OUTPUT=${resultfile}-${testname}.out" + -D "TEST_EXPECT=${resultcode}" + -D "TEST_REFERENCE=${testname}.${resultfile}.ddl" + -P "${HDF_RESOURCES_DIR}/runTest.cmake" + ) + set_tests_properties (H5REPACK_DMP-h5dump-${testname} PROPERTIES + DEPENDS H5REPACK_DMP-${testname} + ) + add_test ( + NAME H5REPACK_DMP-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${resultfile} + ) + set_tests_properties (H5REPACK_DMP-${testname}-clean-objects PROPERTIES + DEPENDS H5REPACK_DMP-h5dump-${testname} + ) + else () + add_test ( + NAME H5REPACK_DMP-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${resultfile} + ) + set_tests_properties (H5REPACK_DMP-${testname}-clean-objects PROPERTIES + DEPENDS H5REPACK_DMP-${testname} + ) + endif () + endif () + endmacro () + + macro (ADD_H5_DMP_NO_OPT_TEST testname testtype resultcode resultfile) if ("${testtype}" STREQUAL "SKIP") if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( @@ -432,7 +503,7 @@ COMMAND "${CMAKE_COMMAND}" -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=-q;creation_order;-pH;out-${testname}.${resultfile}" + -D "TEST_ARGS:STRING=out-${testname}.${resultfile}" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/testfiles" -D "TEST_OUTPUT=${resultfile}-${testname}.out" -D "TEST_EXPECT=${resultcode}" @@ -460,9 +531,6 @@ NAME H5REPACK_DIFF-${testname}-clear-objects COMMAND ${CMAKE_COMMAND} -E remove 
testfiles/out-${testname}.${testfile} ) - set_tests_properties (H5REPACK_DIFF-${testname}-clear-objects PROPERTIES - FIXTURES_REQUIRED clear_h5repack - ) add_test ( NAME H5REPACK_DIFF-${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ --enable-error-stack ${ARGN} ${PROJECT_BINARY_DIR}/testfiles/${testfile} ${PROJECT_BINARY_DIR}/testfiles/out-${testname}.${testfile} @@ -485,6 +553,13 @@ set_tests_properties (H5REPACK_DIFF-${testname}_DFF PROPERTIES DEPENDS H5REPACK_DIFF-${testname} ) + add_test ( + NAME H5REPACK_DIFF-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${testfile} + ) + set_tests_properties (H5REPACK_DIFF-${testname}-clean-objects PROPERTIES + DEPENDS H5REPACK_DIFF-${testname}_DFF + ) endif () endmacro () @@ -502,9 +577,6 @@ NAME H5REPACK_STAT-${testname}-clear-objects COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${statarg}.${resultfile} ) - set_tests_properties (H5REPACK_STAT-${testname}-clear-objects PROPERTIES - FIXTURES_REQUIRED clear_h5repack - ) add_test ( NAME H5REPACK_STAT-${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN} ${PROJECT_BINARY_DIR}/testfiles/${resultfile} ${PROJECT_BINARY_DIR}/testfiles/out-${statarg}.${resultfile} @@ -526,7 +598,22 @@ -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5REPACK_STAT-h5stat-${testname} PROPERTIES - DEPENDS "H5REPACK_STAT-${testname}" + DEPENDS H5REPACK_STAT-${testname} + ) + add_test ( + NAME H5REPACK_STAT-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${statarg}.${resultfile} + ) + set_tests_properties (H5REPACK_STAT-${testname}-clean-objects PROPERTIES + DEPENDS H5REPACK_STAT-h5stat-${testname} + ) + else () + add_test ( + NAME H5REPACK_STAT-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${statarg}.${resultfile} + ) + set_tests_properties (H5REPACK_STAT-${testname}-clean-objects PROPERTIES + DEPENDS H5REPACK_STAT-${testname} ) endif () endif () @@ -547,9 +634,6 @@ 
NAME H5REPACK_VERIFY_LAYOUT-${testname}-clear-objects COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${testfile} ) - set_tests_properties (H5REPACK_VERIFY_LAYOUT-${testname}-clear-objects PROPERTIES - FIXTURES_REQUIRED clear_h5repack - ) add_test ( NAME H5REPACK_VERIFY_LAYOUT-${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN} ${PROJECT_BINARY_DIR}/testfiles/${testfile} ${PROJECT_BINARY_DIR}/testfiles/out-${testname}.${testfile} @@ -608,6 +692,13 @@ DEPENDS H5REPACK_VERIFY_LAYOUT-${testname}_DFF ) endif () + add_test ( + NAME H5REPACK_VERIFY_LAYOUT-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${testfile} + ) + set_tests_properties (H5REPACK_VERIFY_LAYOUT-${testname}-clean-objects PROPERTIES + DEPENDS H5REPACK_VERIFY_LAYOUT-${testname}_DMP + ) endif () endif () endmacro () @@ -628,9 +719,6 @@ NAME H5REPACK_VERIFY_LAYOUT_VDS-${testname}-clear-objects COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${testfile} ) - set_tests_properties (H5REPACK_VERIFY_LAYOUT_VDS-${testname}-clear-objects PROPERTIES - FIXTURES_REQUIRED clear_h5repack - ) add_test ( NAME H5REPACK_VERIFY_LAYOUT_VDS-${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN} ${PROJECT_BINARY_DIR}/testfiles/${testfile} ${PROJECT_BINARY_DIR}/testfiles/out-${testname}.${testfile} @@ -655,6 +743,13 @@ WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles" DEPENDS H5REPACK_VERIFY_LAYOUT_VDS-${testname} ) + add_test ( + NAME H5REPACK_VERIFY_LAYOUT_VDS-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${testfile} + ) + set_tests_properties (H5REPACK_VERIFY_LAYOUT_VDS-${testname}-clean-objects PROPERTIES + DEPENDS H5REPACK_VERIFY_LAYOUT_VDS-${testname}_DMP + ) endif () endif () endmacro () @@ -666,9 +761,6 @@ NAME H5REPACK_VERIFY_SUPERBLOCK-${testname}-clear-objects COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${testfile} ) - set_tests_properties 
(H5REPACK_VERIFY_SUPERBLOCK-${testname}-clear-objects PROPERTIES - FIXTURES_REQUIRED clear_h5repack - ) add_test ( NAME H5REPACK_VERIFY_SUPERBLOCK-${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ -j;${lowbound};-k;${highbound} ${PROJECT_BINARY_DIR}/testfiles/${testfile} ${PROJECT_BINARY_DIR}/testfiles/out-${testname}.${testfile} @@ -692,6 +784,13 @@ set_tests_properties (H5REPACK_VERIFY_SUPERBLOCK-${testname}_DMP PROPERTIES DEPENDS H5REPACK_VERIFY_SUPERBLOCK-${testname} ) + add_test ( + NAME H5REPACK_VERIFY_SUPERBLOCK-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${testfile} + ) + set_tests_properties (H5REPACK_VERIFY_SUPERBLOCK-${testname}-clean-objects PROPERTIES + DEPENDS H5REPACK_VERIFY_SUPERBLOCK-${testname}_DMP + ) endif () endmacro () @@ -700,9 +799,6 @@ NAME ADD_H5_VERIFY_INVALIDBOUNDS-h5repack-${testname}-clear-objects COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${testfile} ) - set_tests_properties (ADD_H5_VERIFY_INVALIDBOUNDS-h5repack-${testname}-clear-objects PROPERTIES - FIXTURES_REQUIRED clear_h5repack - ) add_test ( NAME ADD_H5_VERIFY_INVALIDBOUNDS-h5repack-${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ -j;${lowbound};-k;${highbound} ${PROJECT_BINARY_DIR}/testfiles/${testfile} ${PROJECT_BINARY_DIR}/testfiles/out-${testname}.${testfile} @@ -711,6 +807,13 @@ DEPENDS ADD_H5_VERIFY_INVALIDBOUNDS-h5repack-${testname}-clear-objects WILL_FAIL "true" ) + add_test ( + NAME ADD_H5_VERIFY_INVALIDBOUNDS-h5repack-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${testfile} + ) + set_tests_properties (ADD_H5_VERIFY_INVALIDBOUNDS-h5repack-${testname}-clean-objects PROPERTIES + DEPENDS ADD_H5_VERIFY_INVALIDBOUNDS-h5repack-${testname} + ) endmacro () macro (ADD_H5_TEST_META testname testfile) @@ -719,14 +822,7 @@ NAME H5REPACK_META-${testname}-clear-objects COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}_N.${testname}.h5 - 
testfiles/out-${testname}_N.${testname}.out - testfiles/out-${testname}_N.${testname}.out.err testfiles/out-${testname}_M.${testname}.h5 - testfiles/out-${testname}_M.${testname}.out - testfiles/out-${testname}_M.${testname}.out.err - ) - set_tests_properties (H5REPACK_META-${testname}-clear-objects PROPERTIES - FIXTURES_REQUIRED clear_h5repack ) add_test ( NAME H5REPACK_META-${testname}_N @@ -775,7 +871,7 @@ add_test (NAME H5REPACK_META-${testname} COMMAND "${CMAKE_COMMAND}" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/testfiles" - -D "TEST_ONEFILE=out-${testname}_N.${testname}.out" + -D "TEST_ONEFILE=out-${testname}_N.${testname}.h5" -D "TEST_TWOFILE=out-${testname}_M.${testname}.h5" -D "TEST_FUNCTION=LTEQ" -P "${HDF_RESOURCES_DIR}/fileCompareTest.cmake" @@ -788,6 +884,15 @@ set_tests_properties (H5REPACK_META-${testname} PROPERTIES DEPENDS H5REPACK_META-${testname}_M_DFF ) + add_test ( + NAME H5REPACK_META-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove + testfiles/out-${testname}_N.${testname}.h5 + testfiles/out-${testname}_M.${testname}.h5 + ) + set_tests_properties (H5REPACK_META-${testname}-clean-objects PROPERTIES + DEPENDS H5REPACK_META-${testname} + ) endmacro () macro (ADD_H5_UD_TEST testname resultcode resultfile) @@ -797,9 +902,6 @@ NAME H5REPACK_UD-${testname}-clear-objects COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${resultfile} ) - set_tests_properties (H5REPACK_UD-${testname}-clear-objects PROPERTIES - FIXTURES_REQUIRED clear_h5repack - ) add_test ( NAME H5REPACK_UD-${testname} COMMAND "${CMAKE_COMMAND}" @@ -835,7 +937,14 @@ -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) set_tests_properties (H5REPACK_UD-${testname}-h5dump PROPERTIES - DEPENDS "H5REPACK_UD-${testname}" + DEPENDS H5REPACK_UD-${testname} + ) + add_test ( + NAME H5REPACK_UD-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${resultfile} + ) + set_tests_properties (H5REPACK_UD-${testname}-clean-objects PROPERTIES + DEPENDS 
H5REPACK_UD-${testname}-h5dump ) endif () endmacro () @@ -859,7 +968,6 @@ ) set_tests_properties (H5REPACK_EXTERNAL-${testname}-clear-objects PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles" - FIXTURES_REQUIRED clear_h5repack ) # make sure external data file 0 is available add_test ( @@ -934,6 +1042,14 @@ DEPENDS H5REPACK_EXTERNAL-${testname}_DFF4 WILL_FAIL "true" ) + add_test ( + NAME H5REPACK_EXTERNAL-${testname}-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove h5repack_${testfile}_rp.h5 + ) + set_tests_properties (H5REPACK_EXTERNAL-${testname}-clean-objects PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles" + DEPENDS H5REPACK_EXTERNAL-${testname}_DFF_FAIL + ) endif () endif () endmacro () @@ -976,123 +1092,6 @@ set (FILEV4 4_vds.h5) set (FILEV5 5_vds.h5) - if (HDF5_ENABLE_USING_MEMCHECKER) - # Remove any output file left over from previous test run - set (LIST_TO_CLEAR - out-family.tfamily%05d.h5 - out-HDFFV-7840.h5diff_attr1.h5 - out-attr.h5repack_attr.h5 - out-native_attr.h5repack_attr.h5 - out-HDFFV-5932.h5repack_attr_refs.h5 - out-deflate_copy.h5repack_deflate.h5 - out-deflate_remove.h5repack_deflate.h5 - out-early.h5repack_early.h5 - out-fill.h5repack_fill.h5 - out-native_fill.h5repack_fill.h5 - out-gzip_verbose_filters.h5repack_filters.h5 - out-fletcher_copy.h5repack_fletcher.h5 - out-fletcher_remove.h5repack_fletcher.h5 - out-hlink.h5repack_hlink.h5 - out-chunk_18x13.h5repack_layout.h5 - out-chunk_20x10.h5repack_layout.h5 - out-chunk_compa.h5repack_layout.h5 - out-chunk_conti.h5repack_layout.h5 - out-compa.h5repack_layout.h5 - out-conti.h5repack_layout.h5 - out-deflate_file.h5repack_layout.h5 - out-deflate_limit.h5repack_layout.h5 - out-dset2_chunk_20x10.h5repack_layout.h5 - out-dset2_compa.h5repack_layout.h5 - out-dset2_conti.h5repack_layout.h5 - out-dset_compa_chunk.h5repack_layout.h5 - out-dset_compa_compa.h5repack_layout.h5 - out-dset_compa_conti.h5repack_layout.h5 - out-dset_conti_chunk.h5repack_layout.h5 - 
out-dset_conti_compa.h5repack_layout.h5 - out-dset_conti_conti.h5repack_layout.h5 - out-fletcher_all.h5repack_layout.h5 - out-fletcher_individual.h5repack_layout.h5 - out-global_filters.h5repack_layout.h5 - out-gzip_all.h5repack_layout.h5 - out-gzip_individual.h5repack_layout.h5 - out-layout.h5repack_layout.h5 - out-layout_long_switches.h5repack_layout.h5 - out-layout_short_switches.h5repack_layout.h5 - out-old_style_layout_short_switches.h5repack_layout.h5 - out-plugin_test.h5repack_layout.h5 - out-shuffle_all.h5repack_layout.h5 - out-shuffle_individual.h5repack_layout.h5 - out-upgrade_layout.h5repack_layouto.h5 - out-contig_small_compa.h5repack_layout2.h5 - out-contig_small_fixed_compa.h5repack_layout2.h5 - out-ckdim_biger.h5repack_layout3.h5 - out-ckdim_smaller.h5repack_layout3.h5 - out-chunk2chunk.h5repack_layout3.h5 - out-chunk2compa.h5repack_layout3.h5 - out-chunk2conti.h5repack_layout3.h5 - out-error1.h5repack_layout3.h5 - out-error2.h5repack_layout3.h5 - out-error3.h5repack_layout3.h5 - out-error4.h5repack_layout3.h5 - out-committed_dt.h5repack_named_dtypes.h5 - out-nbit_add.h5repack_nbit.h5 - out-nbit_copy.h5repack_nbit.h5 - out-nbit_remove.h5repack_nbit.h5 - out-add_alignment.h5repack_objs.h5 - out-add_userblock.h5repack_objs.h5 - out-objs.h5repack_objs.h5 - out-gt_mallocsize.h5repack_objs.h5 - out-bug1814.h5repack_refs.h5 - out-shuffle_copy.h5repack_shuffle.h5 - out-shuffle_remove.h5repack_shuffle.h5 - out-scale_add.h5repack_soffset.h5 - out-scale_copy.h5repack_soffset.h5 - out-scale_remove.h5repack_soffset.h5 - out-meta_short_M.meta_short.h5 - out-meta_short_N.meta_short.h5 - out-meta_long_M.meta_long.h5 - out-meta_long_N.meta_long.h5 - out-vds_compa.4_vds.h5 - out-vds_conti.4_vds.h5 - out-vds_chunk2x5x8.3_1_vds.h5 - out-vds_chunk3x6x9.2_vds.h5 - out-vds_dset_chunk20x10x5.1_vds.h5 - ) - - set (LIST_TO_CLEAR ${LIST_TO_CLEAR} ${LIST_OTHER_TEST_FILES}) - - foreach (h5_file ${LIST_HDF5_TEST_FILES}) - get_filename_component(fname "${h5_file}" NAME) - set 
(LIST_TO_CLEAR ${LIST_TO_CLEAR} - ${h5_file}.h5 - ) - endforeach () - - foreach (h5_file ${LIST_TST_TEST_FILES}) - get_filename_component(fname "${h5_file}" NAME) - set (LIST_TO_CLEAR ${LIST_TO_CLEAR} - ${h5_file}.tst.out - ${h5_file}.tst.out.err - ) - endforeach () - - foreach (h5_file ${LIST_DDL_TEST_FILES}) - get_filename_component(fname "${h5_file}" NAME) - set (LIST_TO_CLEAR ${LIST_TO_CLEAR} - ${h5_file}.ddl.out - ${h5_file}.ddl.out.err - ) - endforeach () - add_test ( - NAME H5REPACK-clearall-objects - COMMAND ${CMAKE_COMMAND} -E remove ${LIST_TO_CLEAR} - ) - set_tests_properties (H5REPACK-clearall-objects PROPERTIES - FIXTURES_SETUP clear_h5repack - WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles" - ) - endif () - ADD_HELP_TEST(help 0 -h) add_test (NAME H5REPACK-testh5repack_detect_szip COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) @@ -1108,7 +1107,7 @@ set (passRegex "no") set_tests_properties (H5REPACK-testh5repack_detect_szip PROPERTIES PASS_REGULAR_EXPRESSION "no") endif () - set_tests_properties (H5REPACK-testh5repack_detect_szip PROPERTIES DEPENDS H5REPACK-clearall-objects) + set_tests_properties (H5REPACK-testh5repack_detect_szip PROPERTIES DEPENDS H5REPACK-h5repack-${testname}) set (last_test "H5REPACK-testh5repack_detect_szip") # add_test (NAME H5REPACK-h5repacktest COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) diff --git a/tools/test/h5repack/CMakeVFDTests.cmake b/tools/test/h5repack/CMakeVFDTests.cmake index f8ac10b505d..335ac28d3f4 100644 --- a/tools/test/h5repack/CMakeVFDTests.cmake +++ b/tools/test/h5repack/CMakeVFDTests.cmake @@ -22,59 +22,73 @@ H5_CREATE_VFD_DIR() ### T H E T E S T S M A C R O S ### ############################################################################## ############################################################################## - +set (H5REPACK_CLEANFILES + bounds_latest_latest.h5 + h5repack_attr.h5 + h5repack_attr_refs.h5 + h5repack_deflate.h5 + h5repack_early.h5 + h5repack_ext.h5 + h5repack_fill.h5 + 
h5repack_filters.h5 + h5repack_fletcher.h5 + h5repack_hlink.h5 + h5repack_layout.h5 + h5repack_layouto.h5 + h5repack_layout2.h5 + h5repack_layout3.h5 + h5repack_layout.UD.h5 + h5repack_named_dtypes.h5 + h5repack_nested_8bit_enum.h5 + h5repack_nested_8bit_enum_deflated.h5 + h5repack_nbit.h5 + h5repack_objs.h5 + h5repack_refs.h5 + h5repack_shuffle.h5 + h5repack_soffset.h5 + h5repack_szip.h5 + # fsm + h5repack_aggr.h5 + h5repack_fsm_aggr_nopersist.h5 + h5repack_fsm_aggr_persist.h5 + h5repack_none.h5 + h5repack_paged_nopersist.h5 + h5repack_paged_persist.h5 +) macro (ADD_VFD_TEST vfdname resultcode) if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( NAME H5REPACK-${vfdname}-h5repacktest-clear-objects - COMMAND ${CMAKE_COMMAND} -E remove - bounds_latest_latest.h5 - h5repack_attr.h5 - h5repack_attr_refs.h5 - h5repack_deflate.h5 - h5repack_early.h5 - h5repack_ext.h5 - h5repack_fill.h5 - h5repack_filters.h5 - h5repack_fletcher.h5 - h5repack_hlink.h5 - h5repack_layout.h5 - h5repack_layouto.h5 - h5repack_layout2.h5 - h5repack_layout3.h5 - h5repack_layout.UD.h5 - h5repack_named_dtypes.h5 - h5repack_nested_8bit_enum.h5 - h5repack_nested_8bit_enum_deflated.h5 - h5repack_nbit.h5 - h5repack_objs.h5 - h5repack_refs.h5 - h5repack_shuffle.h5 - h5repack_soffset.h5 - h5repack_szip.h5 - # fsm - h5repack_aggr.h5 - h5repack_fsm_aggr_nopersist.h5 - h5repack_fsm_aggr_persist.h5 - h5repack_none.h5 - h5repack_paged_nopersist.h5 - h5repack_paged_persist.h5 + COMMAND ${CMAKE_COMMAND} -E remove ${H5REPACK_CLEANFILES} WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/${vfdname} ) - add_test ( - NAME H5REPACK_VFD-${vfdname}-h5repacktest - COMMAND "${CMAKE_COMMAND}" - -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" - -D "TEST_PROGRAM=$" - -D "TEST_ARGS:STRING=" - -D "TEST_VFD:STRING=${vfdname}" - -D "TEST_EXPECT=${resultcode}" - -D "TEST_OUTPUT=${vfdname}-h5repacktest.out" - -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/${vfdname}" - -P "${HDF_RESOURCES_DIR}/vfdTest.cmake" - ) - set_tests_properties 
(H5REPACK_VFD-${vfdname}-h5repacktest PROPERTIES DEPENDS H5REPACK_VFD-${vfdname}-h5repacktest-clear-objects) - set_tests_properties (H5REPACK_VFD-${vfdname}-h5repacktest PROPERTIES TIMEOUT ${CTEST_SHORT_TIMEOUT}) + if (NOT "h5repacktest" IN_LIST H5REPACK_VFD_${vfdname}_SKIP_TESTS) + add_test ( + NAME H5REPACK_VFD-${vfdname}-h5repacktest + COMMAND "${CMAKE_COMMAND}" + -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}" + -D "TEST_PROGRAM=$" + -D "TEST_ARGS:STRING=" + -D "TEST_VFD:STRING=${vfdname}" + -D "TEST_EXPECT=${resultcode}" + -D "TEST_OUTPUT=${vfdname}-h5repacktest.out" + -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/${vfdname}" + -P "${HDF_RESOURCES_DIR}/vfdTest.cmake" + ) + set_tests_properties (H5REPACK_VFD-${vfdname}-h5repacktest PROPERTIES + DEPENDS H5REPACK_VFD-${vfdname}-h5repacktest-clear-objects + TIMEOUT ${CTEST_SHORT_TIMEOUT} + ) + add_test ( + NAME H5REPACK_VFD-${vfdname}-h5repacktest-clean-objects + COMMAND ${CMAKE_COMMAND} -E remove + ${H5REPACK_CLEANFILES} + ) + set_tests_properties (H5REPACK_VFD-${vfdname}-h5repacktest-clean-objects PROPERTIES + DEPENDS H5REPACK_VFD-${vfdname}-h5repacktest + WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/${vfdname} + ) + endif () endif () endmacro () diff --git a/tools/test/h5repack/h5repack.sh.in b/tools/test/h5repack/h5repack.sh.in index 7776c3e1abc..f6e981a4f90 100644 --- a/tools/test/h5repack/h5repack.sh.in +++ b/tools/test/h5repack/h5repack.sh.in @@ -12,10 +12,6 @@ # # Tests for the h5repack tool # -# Modification: -# Pedro Vicente Nunes, 11/15/2006 -# Added $FILEN variables for file names -# srcdir=@srcdir@ @@ -1129,6 +1125,56 @@ TOOLTEST_DUMP() rm -f $outfile } +# This is same as TOOLTEST_DUMP() with comparing h5dump output +# without any option +# +TOOLTEST_DUMP_NO_OPT() +{ + infile=$2 + outfile=out-$1.$2 + expect="$TESTDIR/$1.$2.ddl" + actual="$TESTDIR/out-$1.$2.out" + actual_err="$TESTDIR/out-$1.$2.err" + + shift + shift + + # Run test. 
+ TESTING $H5REPACK $@ + ( + cd $TESTDIR + $RUNSERIAL $H5REPACK_BIN "$@" $infile $outfile + ) >$actual 2>$actual_err + RET=$? + if [ $RET != 0 ] ; then + echo "*FAILED*" + nerrors="`expr $nerrors + 1`" + else + echo " PASSED" + VERIFY h5dump output $@ + ( + cd $TESTDIR + $RUNSERIAL $H5DUMP_BIN $outfile + ) >$actual 2>$actual_err + cat $actual_err >> $actual + + RET=$? + + fi + + if cmp -s $expect $actual; then + echo " PASSED" + else + echo "*FAILED*" + echo " Expected result (*.ddl) differs from actual result (*.out)" + nerrors="`expr $nerrors + 1`" + test yes = "$verbose" && diff -c $expect $actual |sed 's/^/ /' + fi + + rm -f $actual $actual_err + rm -f $outfile +} + # This is similar to TOOLTEST_DUMP(). # Test h5repack with options added for paged aggregation. # h5stat is used on the repacked file and the expected output diff --git a/tools/test/h5repack/h5repack_plugin.sh.in b/tools/test/h5repack/h5repack_plugin.sh.in index 0331d85b1ac..bd7c3a1229c 100644 --- a/tools/test/h5repack/h5repack_plugin.sh.in +++ b/tools/test/h5repack/h5repack_plugin.sh.in @@ -3,7 +3,7 @@ # Copyright by The HDF Group. # All rights reserved. # -# This file is part of HDF5. The full HDF5 copyright notice, including +# This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code # distribution tree, or in https://www.hdfgroup.org/licenses. @@ -98,7 +98,7 @@ if [ $? 
!= 0 ]; then fi # setup plugin path -ENVCMD="env HDF5_PLUGIN_PATH=../${PLUGIN_LIBDIR}" +ENVCMD="env HDF5_PLUGIN_PATH=../${PLUGIN_LIBDIR}:${HDF5_PLUGIN_PATH}" COPY_TESTFILES_TO_TESTDIR() { diff --git a/tools/test/h5stat/CMakeLists.txt b/tools/test/h5stat/CMakeLists.txt index a79f61ccbb9..19dbe7841af 100644 --- a/tools/test/h5stat/CMakeLists.txt +++ b/tools/test/h5stat/CMakeLists.txt @@ -6,7 +6,7 @@ project (HDF5_TOOLS_TEST_H5STAT C) # -------------------------------------------------------------------- if (HDF5_BUILD_GENERATORS AND BUILD_STATIC_LIBS) add_executable (h5stat_gentest ${HDF5_TOOLS_TEST_H5STAT_SOURCE_DIR}/h5stat_gentest.c) - target_include_directories (h5stat_gentest PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5stat_gentest PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (h5stat_gentest STATIC) target_link_libraries (h5stat_gentest PRIVATE ${HDF5_LIB_TARGET}) set_target_properties (h5stat_gentest PROPERTIES FOLDER generator/tools) diff --git a/tools/test/h5stat/CMakeTests.cmake b/tools/test/h5stat/CMakeTests.cmake index c79c0a17bca..14bb5ea3a8c 100644 --- a/tools/test/h5stat/CMakeTests.cmake +++ b/tools/test/h5stat/CMakeTests.cmake @@ -114,6 +114,9 @@ -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () + set_tests_properties (H5STAT-${resultfile} PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}" + ) endmacro () macro (ADD_H5_ERR_TEST resultfile resultcode) @@ -138,6 +141,9 @@ -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) endif () + set_tests_properties (H5STAT-${resultfile} PROPERTIES + WORKING_DIRECTORY "${PROJECT_BINARY_DIR}" + ) endmacro () ############################################################################## @@ -146,17 +152,6 @@ ############################################################################## ############################################################################## - if (HDF5_ENABLE_USING_MEMCHECKER) - 
# Remove any output file left over from previous test run - foreach (ddl_file ${HDF5_REFERENCE_FILES}) - set (CLEAR_LIST ${CLEAR_LIST} ${ddl_file}.out ${ddl_file}.out.err) - endforeach () - add_test ( - NAME H5STAT-clearall-objects - COMMAND ${CMAKE_COMMAND} -E remove ${CLEAR_LIST} - ) - endif () - # Test for help flag ADD_H5_TEST (h5stat_help1 0 -h) ADD_H5_TEST (h5stat_help2 0 --help) diff --git a/tools/test/h5stat/testh5stat.sh.in b/tools/test/h5stat/testh5stat.sh.in index 9cbedce7276..65e40c4d820 100644 --- a/tools/test/h5stat/testh5stat.sh.in +++ b/tools/test/h5stat/testh5stat.sh.in @@ -12,10 +12,6 @@ # # Tests for the h5stat tool # -# Modifications: -# Vailin Choi; July 2013 -# Add tests for -l, -m, -a options -# srcdir=@srcdir@ diff --git a/tools/test/misc/CMakeLists.txt b/tools/test/misc/CMakeLists.txt index fe66e942c99..df8b43e1bea 100644 --- a/tools/test/misc/CMakeLists.txt +++ b/tools/test/misc/CMakeLists.txt @@ -6,7 +6,7 @@ project (HDF5_TOOLS_TEST_MISC C) # -------------------------------------------------------------------- if (HDF5_BUILD_GENERATORS AND BUILD_STATIC_LIBS) add_executable (h5repart_gentest ${HDF5_TOOLS_TEST_MISC_SOURCE_DIR}/h5repart_gentest.c) - target_include_directories (h5repart_gentest PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (h5repart_gentest PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (h5repart_gentest STATIC) target_link_libraries (h5repart_gentest PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_TEST_LIB_TARGET}) set_target_properties (h5repart_gentest PROPERTIES FOLDER generator/tools) @@ -20,7 +20,7 @@ if (HDF5_BUILD_GENERATORS AND BUILD_STATIC_LIBS) endif () add_executable (h5clear_gentest ${HDF5_TOOLS_TEST_MISC_SOURCE_DIR}/h5clear_gentest.c) - target_include_directories (h5clear_gentest PRIVATE "${HDF5_SRC_DIR};${HDF5_TEST_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + 
target_include_directories (h5clear_gentest PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_TEST_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") TARGET_C_PROPERTIES (h5clear_gentest STATIC) target_link_libraries (h5clear_gentest PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_TEST_LIB_TARGET}) set_target_properties (h5clear_gentest PROPERTIES FOLDER tools) @@ -38,7 +38,7 @@ if (HDF5_BUILD_GENERATORS AND BUILD_STATIC_LIBS) endif () add_executable (h5repart_test ${HDF5_TOOLS_TEST_MISC_SOURCE_DIR}/repart_test.c) -target_include_directories (h5repart_test PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (h5repart_test PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (BUILD_STATIC_LIBS) TARGET_C_PROPERTIES (h5repart_test STATIC) target_link_libraries (h5repart_test PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) @@ -56,7 +56,7 @@ if (HDF5_ENABLE_FORMATTERS) endif () add_executable (clear_open_chk ${HDF5_TOOLS_TEST_MISC_SOURCE_DIR}/clear_open_chk.c) -target_include_directories (clear_open_chk PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (clear_open_chk PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (BUILD_STATIC_LIBS) TARGET_C_PROPERTIES (clear_open_chk STATIC) target_link_libraries (clear_open_chk PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) diff --git a/tools/test/misc/CMakeTestsRepart.cmake b/tools/test/misc/CMakeTestsRepart.cmake index 20ae263d8d6..04c74ecc257 100644 --- a/tools/test/misc/CMakeTestsRepart.cmake +++ b/tools/test/misc/CMakeTestsRepart.cmake @@ -72,6 +72,19 @@ family_to_sec2.h5 ) set_tests_properties (H5REPART-clearall-objects PROPERTIES FIXTURES_SETUP clear_testrepart) + add_test ( + NAME H5REPART-clean-objects + COMMAND ${CMAKE_COMMAND} + -E remove + fst_family00000.h5 + scd_family00000.h5 + scd_family00001.h5 + scd_family00002.h5 + 
scd_family00003.h5 + family_to_single.h5 + family_to_sec2.h5 + ) + set_tests_properties (H5REPART-clean-objects PROPERTIES FIXTURES_CLEANUP clear_testrepart) # repartition family member size to 20,000 bytes. add_test ( @@ -116,6 +129,7 @@ ) set_tests_properties (H5REPART-h5repart_test PROPERTIES DEPENDS "H5REPART-h5repart_20K;H5REPART-h5repart_5K;H5REPART-h5repart_single;H5REPART-h5repart_sec2" + FIXTURES_REQUIRED clear_testrepart ) set (H5_DEP_EXECUTABLES ${H5_DEP_EXECUTABLES} diff --git a/tools/test/misc/testh5mkgrp.sh.in b/tools/test/misc/testh5mkgrp.sh.in index aae5c3a3851..d581fe1e12a 100644 --- a/tools/test/misc/testh5mkgrp.sh.in +++ b/tools/test/misc/testh5mkgrp.sh.in @@ -12,9 +12,6 @@ # # Tests for the h5mkgrp tool # -# Quincey Koziol -# Tuesday, February 13, 2007 -# srcdir=@srcdir@ diff --git a/tools/test/perform/CMakeLists.txt b/tools/test/perform/CMakeLists.txt index 7bf79c5b738..adc9a04d951 100644 --- a/tools/test/perform/CMakeLists.txt +++ b/tools/test/perform/CMakeLists.txt @@ -1,44 +1,14 @@ cmake_minimum_required (VERSION 3.12) project (HDF5_TOOLS_TEST_PERFORM C) -# -------------------------------------------------------------------- -# Add the executables -# -------------------------------------------------------------------- - -if (HDF5_BUILD_PERFORM_STANDALONE) - #-- Adding test for h5perf_serial_alone - io_timer.c includes - set (h5perf_serial_alone_SOURCES - ${HDF5_TOOLS_DIR}/lib/io_timer.c - ${HDF5_TOOLS_SRC_H5PERF_SOURCE_DIR}/sio_perf.c - ${HDF5_TOOLS_SRC_H5PERF_SOURCE_DIR}/sio_engine.c - ${HDF5_TOOLS_TEST_PERFORM_SOURCE_DIR}/sio_standalone.c - ) - add_executable (h5perf_serial_alone ${h5perf_serial_alone_SOURCES}) - target_include_directories (h5perf_serial_alone PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};${HDF5_TOOLS_DIR}/lib;${HDF5_TOOLS_SRC_H5PERF_SOURCE_DIR};${HDF5_TOOLS_TEST_PERFORM_SOURCE_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") - if (NOT BUILD_SHARED_LIBS) - TARGET_C_PROPERTIES (h5perf_serial_alone STATIC) - target_link_libraries 
(h5perf_serial_alone PRIVATE ${HDF5_LIB_TARGET} "$<$:${MPI_C_LIBRARIES}>") - else () - TARGET_C_PROPERTIES (h5perf_serial_alone SHARED) - target_link_libraries (h5perf_serial_alone PRIVATE ${HDF5_LIBSH_TARGET} "$<$:${MPI_C_LIBRARIES}>") - endif () - set_target_properties (h5perf_serial_alone PROPERTIES FOLDER perform) - set_property (TARGET h5perf_serial_alone APPEND PROPERTY COMPILE_DEFINITIONS STANDALONE) - - #----------------------------------------------------------------------------- - # Add Target to clang-format - #----------------------------------------------------------------------------- - if (HDF5_ENABLE_FORMATTERS) - clang_format (HDF5_TOOLS_TEST_PERFORM_h5perf_serial_alone_FORMAT h5perf_serial_alone) - endif () -endif () - -#-- Adding test for chunk +#----------------------------------------------------------------------------- +# chunk +#----------------------------------------------------------------------------- set (chunk_SOURCES ${HDF5_TOOLS_TEST_PERFORM_SOURCE_DIR}/chunk.c ) add_executable(chunk ${chunk_SOURCES}) -target_include_directories (chunk PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (chunk PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (chunk STATIC) target_link_libraries (chunk PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) @@ -48,19 +18,18 @@ else () endif () set_target_properties (chunk PROPERTIES FOLDER perform) -#----------------------------------------------------------------------------- -# Add Target to clang-format -#----------------------------------------------------------------------------- if (HDF5_ENABLE_FORMATTERS) clang_format (HDF5_TOOLS_TEST_PERFORM_chunk_FORMAT chunk) endif () -#-- Adding test for iopipe +#----------------------------------------------------------------------------- +# iopipe 
+#----------------------------------------------------------------------------- set (iopipe_SOURCES ${HDF5_TOOLS_TEST_PERFORM_SOURCE_DIR}/iopipe.c ) add_executable (iopipe ${iopipe_SOURCES}) -target_include_directories (iopipe PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (iopipe PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (iopipe STATIC) target_link_libraries (iopipe PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) @@ -70,19 +39,18 @@ else () endif () set_target_properties (iopipe PROPERTIES FOLDER perform) -#----------------------------------------------------------------------------- -# Add Target to clang-format -#----------------------------------------------------------------------------- if (HDF5_ENABLE_FORMATTERS) clang_format (HDF5_TOOLS_TEST_PERFORM_iopipe_FORMAT iopipe) endif () -#-- Adding test for chunk_cache +#----------------------------------------------------------------------------- +# chunk_cache +#----------------------------------------------------------------------------- set (chunk_cache_SOURCES ${HDF5_TOOLS_TEST_PERFORM_SOURCE_DIR}/chunk_cache.c ) add_executable (chunk_cache ${chunk_cache_SOURCES}) -target_include_directories (chunk_cache PRIVATE "${HDF5_TEST_SRC_DIR};${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (chunk_cache PRIVATE "${HDF5_TEST_SRC_DIR};${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (chunk_cache STATIC) target_link_libraries (chunk_cache PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) @@ -92,19 +60,18 @@ else () endif () set_target_properties (chunk_cache PROPERTIES FOLDER perform) -#----------------------------------------------------------------------------- -# Add Target to clang-format 
-#----------------------------------------------------------------------------- if (HDF5_ENABLE_FORMATTERS) clang_format (HDF5_TOOLS_TEST_PERFORM_chunk_cache_FORMAT chunk_cache) endif () -#-- Adding test for overhead +#----------------------------------------------------------------------------- +# overhead +#----------------------------------------------------------------------------- set (overhead_SOURCES ${HDF5_TOOLS_TEST_PERFORM_SOURCE_DIR}/overhead.c ) add_executable (overhead ${overhead_SOURCES}) -target_include_directories (overhead PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (overhead PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (overhead STATIC) target_link_libraries (overhead PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET}) @@ -114,19 +81,18 @@ else () endif () set_target_properties (overhead PROPERTIES FOLDER perform) -#----------------------------------------------------------------------------- -# Add Target to clang-format -#----------------------------------------------------------------------------- if (HDF5_ENABLE_FORMATTERS) clang_format (HDF5_TOOLS_TEST_PERFORM_overhead_FORMAT overhead) endif () -#-- Adding test for perf_meta +#----------------------------------------------------------------------------- +# perf_meta +#----------------------------------------------------------------------------- set (perf_meta_SOURCES ${HDF5_TOOLS_TEST_PERFORM_SOURCE_DIR}/perf_meta.c ) add_executable (perf_meta ${perf_meta_SOURCES}) -target_include_directories (perf_meta PRIVATE "${HDF5_TEST_SRC_DIR};${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (perf_meta PRIVATE "${HDF5_TEST_SRC_DIR};${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (perf_meta STATIC) target_link_libraries (perf_meta 
PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_TEST_LIB_TARGET} ${HDF5_LIB_TARGET}) @@ -136,19 +102,18 @@ else () endif () set_target_properties (perf_meta PROPERTIES FOLDER perform) -#----------------------------------------------------------------------------- -# Add Target to clang-format -#----------------------------------------------------------------------------- if (HDF5_ENABLE_FORMATTERS) clang_format (HDF5_TOOLS_TEST_PERFORM_perf_meta_FORMAT perf_meta) endif () -#-- Adding test for zip_perf +#----------------------------------------------------------------------------- +# zip_perf +#----------------------------------------------------------------------------- set (zip_perf_SOURCES ${HDF5_TOOLS_TEST_PERFORM_SOURCE_DIR}/zip_perf.c ) add_executable (zip_perf ${zip_perf_SOURCES}) -target_include_directories (zip_perf PRIVATE "${HDF5_TEST_SRC_DIR};${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (zip_perf PRIVATE "${HDF5_TEST_SRC_DIR};${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (zip_perf STATIC) target_link_libraries (zip_perf PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET} ${LINK_COMP_LIBS}) @@ -158,45 +123,10 @@ else () endif () set_target_properties (zip_perf PROPERTIES FOLDER perform) -#----------------------------------------------------------------------------- -# Add Target to clang-format -#----------------------------------------------------------------------------- if (HDF5_ENABLE_FORMATTERS) clang_format (HDF5_TOOLS_TEST_PERFORM_zip_perf_FORMAT zip_perf) endif () -if (H5_HAVE_PARALLEL AND HDF5_TEST_PARALLEL) - if (HDF5_BUILD_PERFORM_STANDALONE) - #-- Adding test for h5perf - set (h5perf_alone_SOURCES - ${HDF5_TOOLS_DIR}/lib/io_timer.c - ${HDF5_TOOLS_SRC_H5PERF_SOURCE_DIR}/pio_perf.c - ${HDF5_TOOLS_SRC_H5PERF_SOURCE_DIR}/pio_engine.c - ${HDF5_TOOLS_TEST_PERFORM_SOURCE_DIR}/pio_standalone.c - ) - add_executable (h5perf_alone 
${h5perf_alone_SOURCES}) - target_include_directories (h5perf_alone PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};${HDF5_TOOLS_DIR}/lib;${HDF5_TOOLS_SRC_H5PERF_SOURCE_DIR};${HDF5_TOOLS_TEST_PERFORM_SOURCE_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") - if (NOT BUILD_SHARED_LIBS) - TARGET_C_PROPERTIES (h5perf_alone STATIC) - target_link_libraries (h5perf_alone PRIVATE ${HDF5_LIB_TARGET} ${LINK_LIBS} "$<$:${MPI_C_LIBRARIES}>") - else () - TARGET_C_PROPERTIES (h5perf_alone SHARED) - target_link_libraries (h5perf_alone PRIVATE ${HDF5_LIBSH_TARGET} ${LINK_LIBS} "$<$:${MPI_C_LIBRARIES}>") - endif () - set_target_properties (h5perf_alone PROPERTIES FOLDER perform) - set_property (TARGET h5perf_alone - APPEND PROPERTY COMPILE_DEFINITIONS STANDALONE - ) - - #----------------------------------------------------------------------------- - # Add Target to clang-format - #----------------------------------------------------------------------------- - if (HDF5_ENABLE_FORMATTERS) - clang_format (HDF5_TOOLS_TEST_PERFORM_h5perf_alone_FORMAT h5perf_alone) - endif () - endif () -endif () - if (HDF5_TEST_TOOLS) include (CMakeTests.cmake) endif () diff --git a/tools/test/perform/CMakeTests.cmake b/tools/test/perform/CMakeTests.cmake index 17f4b48f4e7..acfe169fc9e 100644 --- a/tools/test/perform/CMakeTests.cmake +++ b/tools/test/perform/CMakeTests.cmake @@ -23,12 +23,10 @@ add_custom_target(zip_perf_files ALL COMMENT "Copying files needed by zip_perf t # Add Tests #----------------------------------------------------------------------------- if (HDF5_TEST_SERIAL) - # Remove any output file left over from previous test run - add_test ( - NAME PERFORM_h5perform-clearall-objects - COMMAND ${CMAKE_COMMAND} - -E remove + set (PERFORM_CLEANFILES chunk.h5 + direct_write.h5 + unix.raw iopipe.h5 iopipe.raw x-diag-rd.dat @@ -36,21 +34,21 @@ if (HDF5_TEST_SERIAL) x-rowmaj-rd.dat x-rowmaj-wr.dat x-gnuplot - h5perf_serial.txt - h5perf_serial.txt.err - chunk.txt - chunk.txt.err - iopipe.txt - iopipe.txt.err - 
overhead.txt - overhead.txt.err - perf_meta.txt - perf_meta.txt.err - zip_perf-h.txt - zip_perf-h.txt.err - zip_perf.txt - zip_perf.txt.err ) + # Remove any output file left over from previous test run + add_test ( + NAME PERFORM_h5perform-clear-objects + COMMAND ${CMAKE_COMMAND} + -E remove ${PERFORM_CLEANFILES} + ) + set_tests_properties (PERFORM_h5perform-clear-objects PROPERTIES FIXTURES_SETUP clear_perform) + + add_test ( + NAME PERFORM_h5perform-clean-objects + COMMAND ${CMAKE_COMMAND} + -E remove ${PERFORM_CLEANFILES} + ) + set_tests_properties (PERFORM_h5perform-clean-objects PROPERTIES FIXTURES_CLEANUP clear_perform) if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME PERFORM_h5perf_serial COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) @@ -69,13 +67,9 @@ if (HDF5_TEST_SERIAL) endif () set_tests_properties (PERFORM_h5perf_serial PROPERTIES TIMEOUT ${CTEST_VERY_LONG_TIMEOUT} - DEPENDS "PERFORM_h5perform-clearall-objects" + FIXTURES_REQUIRED clear_perform ) - if (HDF5_BUILD_PERFORM_STANDALONE) - add_test (NAME PERFORM_h5perf_serial_alone COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) - endif () - if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME PERFORM_chunk COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) else () @@ -92,7 +86,7 @@ if (HDF5_TEST_SERIAL) ) endif () set_tests_properties (PERFORM_chunk PROPERTIES - DEPENDS "PERFORM_h5perform-clearall-objects" + FIXTURES_REQUIRED clear_perform ) if (HDF5_ENABLE_USING_MEMCHECKER) @@ -111,7 +105,7 @@ if (HDF5_TEST_SERIAL) ) endif () set_tests_properties (PERFORM_iopipe PROPERTIES - DEPENDS "PERFORM_h5perform-clearall-objects" + FIXTURES_REQUIRED clear_perform ) if (HDF5_ENABLE_USING_MEMCHECKER) @@ -130,7 +124,7 @@ if (HDF5_TEST_SERIAL) ) endif () set_tests_properties (PERFORM_overhead PROPERTIES - DEPENDS "PERFORM_h5perform-clearall-objects" + FIXTURES_REQUIRED clear_perform ) if (HDF5_ENABLE_USING_MEMCHECKER) @@ -149,7 +143,7 @@ if (HDF5_TEST_SERIAL) ) endif () set_tests_properties (PERFORM_perf_meta PROPERTIES - DEPENDS 
"PERFORM_h5perform-clearall-objects" + FIXTURES_REQUIRED clear_perform ) if (HDF5_ENABLE_USING_MEMCHECKER) @@ -168,7 +162,7 @@ if (HDF5_TEST_SERIAL) ) endif () set_tests_properties (PERFORM_zip_perf_help PROPERTIES - DEPENDS "PERFORM_h5perform-clearall-objects" + FIXTURES_REQUIRED clear_perform ) if (HDF5_ENABLE_USING_MEMCHECKER) @@ -187,18 +181,12 @@ if (HDF5_TEST_SERIAL) ) endif () set_tests_properties (PERFORM_zip_perf PROPERTIES - DEPENDS "PERFORM_zip_perf_help;PERFORM_h5perform-clearall-objects" + DEPENDS "PERFORM_zip_perf_help" + FIXTURES_REQUIRED clear_perform ) endif () if (H5_HAVE_PARALLEL AND HDF5_TEST_PARALLEL) - if (UNIX) - add_test (NAME MPI_TEST_PERFORM_perf COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} ${MPIEXEC_MAX_NUMPROCS} ${MPIEXEC_PREFLAGS} $ ${MPIEXEC_POSTFLAGS}) - endif () - add_test (NAME MPI_TEST_PERFORM_h5perf COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} ${MPIEXEC_MAX_NUMPROCS} ${MPIEXEC_PREFLAGS} $ ${MPIEXEC_POSTFLAGS}) - if (HDF5_BUILD_PERFORM_STANDALONE) - add_test (NAME MPI_TEST_PERFORM_h5perf_alone COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} ${MPIEXEC_MAX_NUMPROCS} ${MPIEXEC_PREFLAGS} $ ${MPIEXEC_POSTFLAGS}) - endif () endif () diff --git a/utils/mirror_vfd/CMakeLists.txt b/utils/mirror_vfd/CMakeLists.txt index 4765e2ce406..ef624178fbc 100644 --- a/utils/mirror_vfd/CMakeLists.txt +++ b/utils/mirror_vfd/CMakeLists.txt @@ -12,7 +12,7 @@ set (mirror_server_SOURCES ${HDF5_UTILS_MIRRORVFD_SOURCE_DIR}/mirror_remote.h ) add_executable (mirror_server ${mirror_server_SOURCES}) -target_include_directories (mirror_server PRIVATE "${HDF5_UTILS_DIR};${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (mirror_server PRIVATE "${HDF5_UTILS_DIR};${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (mirror_server STATIC) target_link_libraries (mirror_server PRIVATE ${HDF5_LIB_TARGET}) @@ -37,7 +37,7 @@ endif () 
set (mirror_server_stop_SOURCES ${HDF5_UTILS_MIRRORVFD_SOURCE_DIR}/mirror_server_stop.c) add_executable (mirror_server_stop ${mirror_server_stop_SOURCES}) -target_include_directories (mirror_server_stop PRIVATE "${HDF5_UTILS_DIR};${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") +target_include_directories (mirror_server_stop PRIVATE "${HDF5_UTILS_DIR};${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (mirror_server_stop STATIC) target_link_libraries (mirror_server_stop PRIVATE ${HDF5_LIB_TARGET}) diff --git a/utils/test/CMakeLists.txt b/utils/test/CMakeLists.txt index 921fbd0a222..baf2ca531fe 100644 --- a/utils/test/CMakeLists.txt +++ b/utils/test/CMakeLists.txt @@ -7,7 +7,7 @@ project (HDF5_TEST C) macro (ADD_H5_EXE file) add_executable (${file} ${HDF5_TEST_SOURCE_DIR}/${file}.c) - target_include_directories (${file} PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};${HDF5_TEST_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") + target_include_directories (${file} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};${HDF5_TEST_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>") target_compile_options(${file} PRIVATE "${HDF5_CMAKE_C_FLAGS}") if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (${file} STATIC) From 2874ef08208aa007aeed6a1eefb01ea3da8e889b Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Tue, 22 Aug 2023 11:03:37 -0500 Subject: [PATCH 092/108] Update build script to match change in presets (#3409) --- .github/workflows/cmake-ctest.yml | 50 +++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/.github/workflows/cmake-ctest.yml b/.github/workflows/cmake-ctest.yml index 1d26fdbe2a3..4bfe5b5bdb2 100644 --- a/.github/workflows/cmake-ctest.yml +++ b/.github/workflows/cmake-ctest.yml @@ -65,13 +65,13 @@ jobs: - name: Publish binary (Windows) id: publish-ctest-binary run: | - mkdir "${{ runner.workspace 
}}/build" - mkdir "${{ runner.workspace }}/build/hdf5" - Copy-Item -Path ${{ runner.workspace }}/hdf5/hdfsrc/COPYING -Destination ${{ runner.workspace }}/build/hdf5/ - Copy-Item -Path ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 -Destination ${{ runner.workspace }}/build/hdf5/ - Copy-Item -Path ${{ runner.workspace }}/hdf5/hdfsrc/README.md -Destination ${{ runner.workspace }}/build/hdf5/ - Copy-Item -Path ${{ runner.workspace }}/hdf5/build/ci-StdShar-MSVC/* -Destination ${{ runner.workspace }}/build/hdf5/ -Include *.zip - cd "${{ runner.workspace }}/build" + mkdir "${{ runner.workspace }}/build112" + mkdir "${{ runner.workspace }}/build112/hdf5" + Copy-Item -Path ${{ runner.workspace }}/hdf5/hdfsrc/COPYING -Destination ${{ runner.workspace }}/build112/hdf5/ + Copy-Item -Path ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 -Destination ${{ runner.workspace }}/build112/hdf5/ + Copy-Item -Path ${{ runner.workspace }}/hdf5/hdfsrc/README.md -Destination ${{ runner.workspace }}/build112/hdf5/ + Copy-Item -Path ${{ runner.workspace }}/hdf5/build/ci-StdShar-MSVC/* -Destination ${{ runner.workspace }}/build112/hdf5/ -Include *.zip + cd "${{ runner.workspace }}/build112" 7z a -tzip ${{ steps.set-file-base.outputs.FILE_BASE }}-win_vs2022.zip hdf5 shell: pwsh @@ -86,7 +86,7 @@ jobs: uses: actions/upload-artifact@v3 with: name: zip-vs2022-binary - path: ${{ runner.workspace }}/build/${{ steps.set-file-base.outputs.FILE_BASE }}-win_vs2022.zip + path: ${{ runner.workspace }}/build112/${{ steps.set-file-base.outputs.FILE_BASE }}-win_vs2022.zip if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` build_and_test_linux: @@ -128,13 +128,13 @@ jobs: - name: Publish binary (Linux) id: publish-ctest-binary run: | - mkdir "${{ runner.workspace }}/build" - mkdir "${{ runner.workspace }}/build/hdf5" - cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING ${{ runner.workspace }}/build/hdf5 - cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 ${{ 
runner.workspace }}/build/hdf5 - cp ${{ runner.workspace }}/hdf5/hdfsrc/README.md ${{ runner.workspace }}/build/hdf5 - cp ${{ runner.workspace }}/hdf5/build/ci-StdShar-GNUC/*.tar.gz ${{ runner.workspace }}/build/hdf5 - cd "${{ runner.workspace }}/build" + mkdir "${{ runner.workspace }}/build112" + mkdir "${{ runner.workspace }}/build112/hdf5" + cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING ${{ runner.workspace }}/build112/hdf5 + cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 ${{ runner.workspace }}/build112/hdf5 + cp ${{ runner.workspace }}/hdf5/hdfsrc/README.md ${{ runner.workspace }}/build112/hdf5 + cp ${{ runner.workspace }}/hdf5/build/ci-StdShar-GNUC/*.tar.gz ${{ runner.workspace }}/build112/hdf5 + cd "${{ runner.workspace }}/build112" tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204.tar.gz hdf5 shell: bash @@ -148,7 +148,7 @@ jobs: uses: actions/upload-artifact@v3 with: name: tgz-ubuntu-2204-binary - path: ${{ runner.workspace }}/build/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204.tar.gz + path: ${{ runner.workspace }}/build112/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204.tar.gz if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` # Save doxygen files created by ctest script @@ -156,7 +156,7 @@ jobs: uses: actions/upload-artifact@v3 with: name: docs-doxygen - path: ${{ runner.workspace }}/hdf5/build/ci-StdShar-GNUC/hdf5lib_docs/html + path: ${{ runner.workspace }}/hdf5/build112/ci-StdShar-GNUC/hdf5lib_docs/html if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` build_and_test_mac: @@ -203,13 +203,13 @@ jobs: - name: Publish binary (MacOS) id: publish-ctest-binary run: | - mkdir "${{ runner.workspace }}/build" - mkdir "${{ runner.workspace }}/build/hdf5" - cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING ${{ runner.workspace }}/build/hdf5 - cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 ${{ runner.workspace }}/build/hdf5 - cp ${{ 
runner.workspace }}/hdf5/hdfsrc/README.md ${{ runner.workspace }}/build/hdf5 - cp ${{ runner.workspace }}/hdf5/build/ci-StdShar-Clang/*.tar.gz ${{ runner.workspace }}/build/hdf5 - cd "${{ runner.workspace }}/build" + mkdir "${{ runner.workspace }}/build112" + mkdir "${{ runner.workspace }}/build112/hdf5" + cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING ${{ runner.workspace }}/build112/hdf5 + cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 ${{ runner.workspace }}/build112/hdf5 + cp ${{ runner.workspace }}/hdf5/hdfsrc/README.md ${{ runner.workspace }}/build112/hdf5 + cp ${{ runner.workspace }}/hdf5/build/ci-StdShar-Clang/*.tar.gz ${{ runner.workspace }}/build112/hdf5 + cd "${{ runner.workspace }}/build112" tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-osx12.tar.gz hdf5 shell: bash @@ -223,6 +223,6 @@ jobs: uses: actions/upload-artifact@v3 with: name: tgz-osx12-binary - path: ${{ runner.workspace }}/build/${{ steps.set-file-base.outputs.FILE_BASE }}-osx12.tar.gz + path: ${{ runner.workspace }}/build112/${{ steps.set-file-base.outputs.FILE_BASE }}-osx12.tar.gz if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` From ecae25d2811f398aadfe22c6e8edc8361e5db73b Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Tue, 22 Aug 2023 19:44:48 -0500 Subject: [PATCH 093/108] Fix another build folder name (#3411) --- .github/workflows/cmake-ctest.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/cmake-ctest.yml b/.github/workflows/cmake-ctest.yml index 4bfe5b5bdb2..2bd207c9e92 100644 --- a/.github/workflows/cmake-ctest.yml +++ b/.github/workflows/cmake-ctest.yml @@ -70,7 +70,7 @@ jobs: Copy-Item -Path ${{ runner.workspace }}/hdf5/hdfsrc/COPYING -Destination ${{ runner.workspace }}/build112/hdf5/ Copy-Item -Path ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 -Destination ${{ runner.workspace }}/build112/hdf5/ Copy-Item -Path ${{ 
runner.workspace }}/hdf5/hdfsrc/README.md -Destination ${{ runner.workspace }}/build112/hdf5/ - Copy-Item -Path ${{ runner.workspace }}/hdf5/build/ci-StdShar-MSVC/* -Destination ${{ runner.workspace }}/build112/hdf5/ -Include *.zip + Copy-Item -Path ${{ runner.workspace }}/hdf5/build112/ci-StdShar-MSVC/* -Destination ${{ runner.workspace }}/build112/hdf5/ -Include *.zip cd "${{ runner.workspace }}/build112" 7z a -tzip ${{ steps.set-file-base.outputs.FILE_BASE }}-win_vs2022.zip hdf5 shell: pwsh @@ -133,7 +133,7 @@ jobs: cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING ${{ runner.workspace }}/build112/hdf5 cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 ${{ runner.workspace }}/build112/hdf5 cp ${{ runner.workspace }}/hdf5/hdfsrc/README.md ${{ runner.workspace }}/build112/hdf5 - cp ${{ runner.workspace }}/hdf5/build/ci-StdShar-GNUC/*.tar.gz ${{ runner.workspace }}/build112/hdf5 + cp ${{ runner.workspace }}/hdf5/build112/ci-StdShar-GNUC/*.tar.gz ${{ runner.workspace }}/build112/hdf5 cd "${{ runner.workspace }}/build112" tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204.tar.gz hdf5 shell: bash @@ -208,7 +208,7 @@ jobs: cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING ${{ runner.workspace }}/build112/hdf5 cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 ${{ runner.workspace }}/build112/hdf5 cp ${{ runner.workspace }}/hdf5/hdfsrc/README.md ${{ runner.workspace }}/build112/hdf5 - cp ${{ runner.workspace }}/hdf5/build/ci-StdShar-Clang/*.tar.gz ${{ runner.workspace }}/build112/hdf5 + cp ${{ runner.workspace }}/hdf5/build112/ci-StdShar-Clang/*.tar.gz ${{ runner.workspace }}/build112/hdf5 cd "${{ runner.workspace }}/build112" tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-osx12.tar.gz hdf5 shell: bash From f9511f424f7f3ecaace301b5649f4c2d23bcf140 Mon Sep 17 00:00:00 2001 From: jhendersonHDF Date: Thu, 31 Aug 2023 21:06:08 -0500 Subject: [PATCH 094/108] Fix valgrind warning about write of uninitialized bytes in ScaleOffset filter (#3390) 
(#3462) --- src/H5Zscaleoffset.c | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/H5Zscaleoffset.c b/src/H5Zscaleoffset.c index 80ff80c1a01..3609a881f5a 100644 --- a/src/H5Zscaleoffset.c +++ b/src/H5Zscaleoffset.c @@ -1296,6 +1296,9 @@ H5Z__filter_scaleoffset(unsigned flags, size_t cd_nelmts, const unsigned cd_valu } /* output; compress */ else { + size_t used_bytes; + size_t unused_bytes; + HDassert(nbytes == d_nelmts * p.size); /* before preprocess, convert to memory endianness order if needed */ @@ -1347,7 +1350,10 @@ H5Z__filter_scaleoffset(unsigned flags, size_t cd_nelmts, const unsigned cd_valu /* (Looks like an error in the original determination of how many * bytes would be needed for parameters. - QAK, 2010/08/19) */ - HDmemset(outbuf + 13, 0, (size_t)8); + used_bytes = 4 + 1 + sizeof(unsigned long long); + assert(used_bytes <= size_out); + unused_bytes = size_out - used_bytes; + HDmemset(outbuf + 13, 0, unused_bytes); /* special case: minbits equal to full precision */ if (minbits == p.size * 8) { From 59bac69939119575196ea3e053631d45a78064af Mon Sep 17 00:00:00 2001 From: jhendersonHDF Date: Thu, 31 Aug 2023 23:02:19 -0500 Subject: [PATCH 095/108] [1.12 Merge] Fix assertion failure during file close on error (#3461) --- release_docs/RELEASE.txt | 11 +++++++ src/H5Cimage.c | 5 +++ src/H5Fint.c | 10 +++--- src/H5Fsuper.c | 20 ++++++++++-- src/H5MF.c | 68 +++++++++++++++++++++------------------- testpar/t_file.c | 51 ++++++++++++++++++++++++++++++ testpar/testphdf5.c | 3 ++ testpar/testphdf5.h | 1 + 8 files changed, 128 insertions(+), 41 deletions(-) diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index d6f3a570079..358a9e00abb 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -241,6 +241,17 @@ Bug Fixes since HDF5-1.12.2 release =================================== Library ------- + - Fixed an assertion failure in Parallel HDF5 when a file can't be created + due to an invalid library version 
bounds setting + + An assertion failure could occur in H5MF_settle_raw_data_fsm when a file + can't be created with Parallel HDF5 due to specifying the use of a paged, + persistent file free space manager + (H5Pset_file_space_strategy(..., H5F_FSPACE_STRATEGY_PAGE, 1, ...)) with + an invalid library version bounds combination + (H5Pset_libver_bounds(..., H5F_LIBVER_EARLIEST, H5F_LIBVER_V18)). This + has now been fixed. + - Fixed a bug in H5Ocopy that could generate invalid HDF5 files H5Ocopy was missing a check to determine whether the new object's diff --git a/src/H5Cimage.c b/src/H5Cimage.c index a25f144c7be..446dc8801fe 100644 --- a/src/H5Cimage.c +++ b/src/H5Cimage.c @@ -1113,6 +1113,11 @@ H5C__load_cache_image(H5F_t *f) } /* end if */ done: + if (ret_value < 0) { + if (H5F_addr_defined(cache_ptr->image_addr)) + cache_ptr->image_buffer = H5MM_xfree(cache_ptr->image_buffer); + } + FUNC_LEAVE_NOAPI(ret_value) } /* H5C__load_cache_image() */ diff --git a/src/H5Fint.c b/src/H5Fint.c index 372537678d7..e897d4f6509 100644 --- a/src/H5Fint.c +++ b/src/H5Fint.c @@ -80,7 +80,7 @@ static herr_t H5F__build_name(const char *prefix, const char *file_name, char ** static char *H5F__getenv_prefix_name(char **env_prefix /*in,out*/); static H5F_t *H5F__new(H5F_shared_t *shared, unsigned flags, hid_t fcpl_id, hid_t fapl_id, H5FD_t *lf); static herr_t H5F__check_if_using_file_locks(H5P_genplist_t *fapl, hbool_t *use_file_locking); -static herr_t H5F__dest(H5F_t *f, hbool_t flush); +static herr_t H5F__dest(H5F_t *f, hbool_t flush, hbool_t free_on_failure); static herr_t H5F__build_actual_name(const H5F_t *f, const H5P_genplist_t *fapl, const char *name, char ** /*out*/ actual_name); static herr_t H5F__flush_phase1(H5F_t *f); @@ -1381,7 +1381,7 @@ H5F__new(H5F_shared_t *shared, unsigned flags, hid_t fcpl_id, hid_t fapl_id, H5F *------------------------------------------------------------------------- */ static herr_t -H5F__dest(H5F_t *f, hbool_t flush) +H5F__dest(H5F_t *f, hbool_t 
flush, hbool_t free_on_failure) { herr_t ret_value = SUCCEED; /* Return value */ @@ -1648,7 +1648,7 @@ H5F__dest(H5F_t *f, hbool_t flush) HDONE_ERROR(H5E_FILE, H5E_CANTINIT, FAIL, "problems closing file") f->shared = NULL; - if (ret_value >= 0) + if ((ret_value >= 0) || free_on_failure) f = H5FL_FREE(H5F_t, f); FUNC_LEAVE_NOAPI(ret_value) @@ -2145,7 +2145,7 @@ H5F_open(const char *name, unsigned flags, hid_t fcpl_id, hid_t fapl_id) done: if ((NULL == ret_value) && file) - if (H5F__dest(file, FALSE) < 0) + if (H5F__dest(file, FALSE, TRUE) < 0) HDONE_ERROR(H5E_FILE, H5E_CANTCLOSEFILE, NULL, "problems closing file") FUNC_LEAVE_NOAPI(ret_value) @@ -2555,7 +2555,7 @@ H5F_try_close(H5F_t *f, hbool_t *was_closed /*out*/) * shared H5F_shared_t struct. If the reference count for the H5F_shared_t * struct reaches zero then destroy it also. */ - if (H5F__dest(f, TRUE) < 0) + if (H5F__dest(f, TRUE, FALSE) < 0) HGOTO_ERROR(H5E_FILE, H5E_CANTCLOSEFILE, FAIL, "problems closing file") /* Since we closed the file, this should be set to TRUE */ diff --git a/src/H5Fsuper.c b/src/H5Fsuper.c index b56c482bdd4..020c378d862 100644 --- a/src/H5Fsuper.c +++ b/src/H5Fsuper.c @@ -1087,8 +1087,9 @@ H5F__super_init(H5F_t *f) FALSE; /* Whether the driver info block has been inserted into the metadata cache */ H5P_genplist_t *plist; /* File creation property list */ H5AC_ring_t orig_ring = H5AC_RING_INV; - hsize_t userblock_size; /* Size of userblock, in bytes */ - hsize_t superblock_size; /* Size of superblock, in bytes */ + hsize_t userblock_size; /* Size of userblock, in bytes */ + hsize_t superblock_size = 0; /* Size of superblock, in bytes */ + haddr_t superblock_addr = HADDR_UNDEF; size_t driver_size; /* Size of driver info block (bytes) */ unsigned super_vers = HDF5_SUPERBLOCK_VERSION_DEF; /* Superblock version for file */ H5O_loc_t ext_loc; /* Superblock extension object location */ @@ -1288,7 +1289,7 @@ H5F__super_init(H5F_t *f) f->shared->sblock = sblock; /* Allocate space for the 
superblock */ - if (HADDR_UNDEF == H5MF_alloc(f, H5FD_MEM_SUPER, superblock_size)) + if (HADDR_UNDEF == (superblock_addr = H5MF_alloc(f, H5FD_MEM_SUPER, superblock_size))) HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "file allocation failed for superblock") /* set the drvinfo filed to NULL -- will overwrite this later if needed */ @@ -1479,6 +1480,19 @@ H5F__super_init(H5F_t *f) /* Check if the superblock has been allocated yet */ if (sblock) { + if (non_default_fs_settings && H5F_addr_defined(superblock_addr)) { + /* + * For non-default free-space settings, the allocation of + * space in the file for the superblock may have have allocated + * memory for the free-space manager and inserted it into the + * metadata cache. Clean that up before returning or we may fail + * to close the file later due to the metadata cache's metadata + * free space manager ring (H5AC_RING_MDFSM) not being clean. + */ + if (H5MF_try_close(f) < 0) + HDONE_ERROR(H5E_FILE, H5E_CANTFREE, FAIL, "can't close file free space manager"); + } + /* Check if we've cached it already */ if (sblock_in_cache) { /* Unpin superblock in cache */ diff --git a/src/H5MF.c b/src/H5MF.c index b75afaebf86..bd9f74d593f 100644 --- a/src/H5MF.c +++ b/src/H5MF.c @@ -2654,16 +2654,14 @@ H5MF_settle_raw_data_fsm(H5F_t *f, hbool_t *fsm_settled) hbool_t fsm_opened[H5F_MEM_PAGE_NTYPES]; /* State of FSM */ hbool_t fsm_visited[H5F_MEM_PAGE_NTYPES]; /* State of FSM */ - /* Sanity check */ - HDassert(f->shared->sblock); - /* should only be called if file is opened R/W */ HDassert(H5F_INTENT(f) & H5F_ACC_RDWR); /* shouldn't be called unless we have a superblock supporting the * superblock extension. 
*/ - HDassert(f->shared->sblock->super_vers >= HDF5_SUPERBLOCK_VERSION_2); + if (f->shared->sblock) + HDassert(f->shared->sblock->super_vers >= HDF5_SUPERBLOCK_VERSION_2); /* Initialize fsm_opened and fsm_visited */ HDmemset(fsm_opened, 0, sizeof(fsm_opened)); @@ -2811,40 +2809,44 @@ H5MF_settle_raw_data_fsm(H5F_t *f, hbool_t *fsm_settled) * file space manager info message is guaranteed to exist. * Leave it in for now, but consider removing it. */ - if (H5F_addr_defined(f->shared->sblock->ext_addr)) - if (H5F__super_ext_remove_msg(f, H5O_FSINFO_ID) < 0) - HGOTO_ERROR(H5E_RESOURCE, H5E_CANTRELEASE, FAIL, - "error in removing message from superblock extension") + if (f->shared->sblock) { + if (H5F_addr_defined(f->shared->sblock->ext_addr)) + if (H5F__super_ext_remove_msg(f, H5O_FSINFO_ID) < 0) + HGOTO_ERROR(H5E_RESOURCE, H5E_CANTRELEASE, FAIL, + "error in removing message from superblock extension") + } /* As the final element in 1), shrink the EOA for the file */ if (H5MF__close_shrink_eoa(f) < 0) HGOTO_ERROR(H5E_RESOURCE, H5E_CANTSHRINK, FAIL, "can't shrink eoa") - /* 2) Ensure that space is allocated for the free space manager superblock - * extension message. Must do this now, before reallocating file space - * for free space managers, as it is possible that this allocation may - * grab the last section in a FSM -- making it unnecessary to - * re-allocate file space for it. - * - * Do this by writing a free space manager superblock extension message. - * - * Since no free space manager has file space allocated for it, this - * message must be invalid since we can't save addresses of FSMs when - * those addresses are unknown. This is OK -- we will write the correct - * values to the message at free space manager shutdown. 
- */ - for (fsm_type = H5F_MEM_PAGE_SUPER; fsm_type < H5F_MEM_PAGE_NTYPES; fsm_type++) - fsinfo.fs_addr[fsm_type - 1] = HADDR_UNDEF; - fsinfo.strategy = f->shared->fs_strategy; - fsinfo.persist = f->shared->fs_persist; - fsinfo.threshold = f->shared->fs_threshold; - fsinfo.page_size = f->shared->fs_page_size; - fsinfo.pgend_meta_thres = f->shared->pgend_meta_thres; - fsinfo.eoa_pre_fsm_fsalloc = HADDR_UNDEF; - - if (H5F__super_ext_write_msg(f, H5O_FSINFO_ID, &fsinfo, TRUE, H5O_MSG_FLAG_MARK_IF_UNKNOWN) < 0) - HGOTO_ERROR(H5E_RESOURCE, H5E_WRITEERROR, FAIL, - "error in writing fsinfo message to superblock extension") + if (f->shared->sblock) { + /* 2) Ensure that space is allocated for the free space manager superblock + * extension message. Must do this now, before reallocating file space + * for free space managers, as it is possible that this allocation may + * grab the last section in a FSM -- making it unnecessary to + * re-allocate file space for it. + * + * Do this by writing a free space manager superblock extension message. + * + * Since no free space manager has file space allocated for it, this + * message must be invalid since we can't save addresses of FSMs when + * those addresses are unknown. This is OK -- we will write the correct + * values to the message at free space manager shutdown. 
+ */ + for (fsm_type = H5F_MEM_PAGE_SUPER; fsm_type < H5F_MEM_PAGE_NTYPES; fsm_type++) + fsinfo.fs_addr[fsm_type - 1] = HADDR_UNDEF; + fsinfo.strategy = f->shared->fs_strategy; + fsinfo.persist = f->shared->fs_persist; + fsinfo.threshold = f->shared->fs_threshold; + fsinfo.page_size = f->shared->fs_page_size; + fsinfo.pgend_meta_thres = f->shared->pgend_meta_thres; + fsinfo.eoa_pre_fsm_fsalloc = HADDR_UNDEF; + + if (H5F__super_ext_write_msg(f, H5O_FSINFO_ID, &fsinfo, TRUE, H5O_MSG_FLAG_MARK_IF_UNKNOWN) < 0) + HGOTO_ERROR(H5E_RESOURCE, H5E_WRITEERROR, FAIL, + "error in writing fsinfo message to superblock extension") + } /* 3) Scan all free space managers not involved in allocating * space for free space managers. For each such free space diff --git a/testpar/t_file.c b/testpar/t_file.c index d8189b99d41..4a3665f5573 100644 --- a/testpar/t_file.c +++ b/testpar/t_file.c @@ -947,3 +947,54 @@ test_file_properties(void) VRFY((mpi_ret >= 0), "MPI_Info_free succeeded"); } /* end test_file_properties() */ + +/* + * Tests for an assertion failure during file close that used + * to occur when the library fails to create a file in parallel + * due to an invalid library version bounds setting + */ +void +test_invalid_libver_bounds_file_close_assert(void) +{ + const char *filename = NULL; + MPI_Comm comm = MPI_COMM_WORLD; + MPI_Info info = MPI_INFO_NULL; + herr_t ret; + hid_t fid = H5I_INVALID_HID; + hid_t fapl_id = H5I_INVALID_HID; + hid_t fcpl_id = H5I_INVALID_HID; + + filename = (const char *)GetTestParameters(); + + /* set up MPI parameters */ + MPI_Comm_size(MPI_COMM_WORLD, &mpi_size); + MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank); + + /* setup file access plist */ + fapl_id = H5Pcreate(H5P_FILE_ACCESS); + VRFY((fapl_id != H5I_INVALID_HID), "H5Pcreate"); + ret = H5Pset_fapl_mpio(fapl_id, comm, info); + VRFY((SUCCEED == ret), "H5Pset_fapl_mpio"); + ret = H5Pset_libver_bounds(fapl_id, H5F_LIBVER_EARLIEST, H5F_LIBVER_V18); + VRFY((SUCCEED == ret), "H5Pset_libver_bounds"); + + /* 
setup file creation plist */ + fcpl_id = H5Pcreate(H5P_FILE_CREATE); + VRFY((fcpl_id != H5I_INVALID_HID), "H5Pcreate"); + + ret = H5Pset_file_space_strategy(fcpl_id, H5F_FSPACE_STRATEGY_PAGE, TRUE, 1); + VRFY((SUCCEED == ret), "H5Pset_file_space_strategy"); + + /* create the file */ + H5E_BEGIN_TRY + { + fid = H5Fcreate(filename, H5F_ACC_TRUNC, fcpl_id, fapl_id); + } + H5E_END_TRY + VRFY((fid == H5I_INVALID_HID), "H5Fcreate"); + + ret = H5Pclose(fapl_id); + VRFY((SUCCEED == ret), "H5Pclose"); + ret = H5Pclose(fcpl_id); + VRFY((SUCCEED == ret), "H5Pclose"); +} diff --git a/testpar/testphdf5.c b/testpar/testphdf5.c index 780a22e18bc..678db05d4f3 100644 --- a/testpar/testphdf5.c +++ b/testpar/testphdf5.c @@ -357,6 +357,9 @@ main(int argc, char **argv) AddTest("props", test_file_properties, NULL, "Coll Metadata file property settings", PARATESTFILE); + AddTest("invlibverassert", test_invalid_libver_bounds_file_close_assert, NULL, + "Invalid libver bounds assertion failure", PARATESTFILE); + AddTest("idsetw", dataset_writeInd, NULL, "dataset independent write", PARATESTFILE); AddTest("idsetr", dataset_readInd, NULL, "dataset independent read", PARATESTFILE); diff --git a/testpar/testphdf5.h b/testpar/testphdf5.h index 9cdd7168d31..d4f0162ff4c 100644 --- a/testpar/testphdf5.h +++ b/testpar/testphdf5.h @@ -231,6 +231,7 @@ void test_plist_ed(void); void external_links(void); void zero_dim_dset(void); void test_file_properties(void); +void test_invalid_libver_bounds_file_close_assert(void); void multiple_dset_write(void); void multiple_group_write(void); void multiple_group_read(void); From 2181af8181491ff90c6dbbcaa00377febd96444b Mon Sep 17 00:00:00 2001 From: jhendersonHDF Date: Fri, 1 Sep 2023 00:56:58 -0500 Subject: [PATCH 096/108] [1.12 Merge] Fix serial to parallel chunked dataset file space allocation bug (#3394) (#3476) --- release_docs/RELEASE.txt | 11 ++ src/H5Dint.c | 34 +++-- testpar/t_chunk_alloc.c | 295 +++++++++++++++++++++++++++++++++++++++ 
testpar/testphdf5.c | 2 + testpar/testphdf5.h | 1 + 5 files changed, 333 insertions(+), 10 deletions(-) diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 358a9e00abb..3a8208847d5 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -241,6 +241,17 @@ Bug Fixes since HDF5-1.12.2 release =================================== Library ------- + - Fixed a file space allocation bug in the parallel library for chunked + datasets + + With the addition of support for incremental file space allocation for + chunked datasets with filters applied to them that are created/accessed + in parallel, a bug was introduced to the library's parallel file space + allocation code. This could cause file space to not be allocated correctly + for datasets without filters applied to them that are created with serial + file access and later opened with parallel file access. In turn, this could + cause parallel writes to those datasets to place incorrect data in the file. + - Fixed an assertion failure in Parallel HDF5 when a file can't be created due to an invalid library version bounds setting diff --git a/src/H5Dint.c b/src/H5Dint.c index 95be82731e6..f5455e13a96 100644 --- a/src/H5Dint.c +++ b/src/H5Dint.c @@ -1715,12 +1715,13 @@ H5D__append_flush_setup(H5D_t *dset, hid_t dapl_id) static herr_t H5D__open_oid(H5D_t *dataset, hid_t dapl_id) { - H5P_genplist_t *plist; /* Property list */ - H5O_fill_t *fill_prop; /* Pointer to dataset's fill value info */ - unsigned alloc_time_state; /* Allocation time state */ - htri_t msg_exists; /* Whether a particular type of message exists */ - hbool_t layout_init = FALSE; /* Flag to indicate that chunk information was initialized */ - herr_t ret_value = SUCCEED; /* Return value */ + H5P_genplist_t *plist; /* Property list */ + H5O_fill_t *fill_prop; /* Pointer to dataset's fill value info */ + unsigned alloc_time_state; /* Allocation time state */ + htri_t msg_exists; /* Whether a particular type of message exists */ 
+ hbool_t layout_init = FALSE; /* Flag to indicate that chunk information was initialized */ + hbool_t must_init_storage = FALSE; + herr_t ret_value = SUCCEED; /* Return value */ FUNC_ENTER_STATIC_TAG(dataset->oloc.addr) @@ -1862,17 +1863,30 @@ H5D__open_oid(H5D_t *dataset, hid_t dapl_id) * Make sure all storage is properly initialized. * This is important only for parallel I/O where the space must * be fully allocated before I/O can happen. + * + * Storage will be initialized here if either the VFD being used + * has set the H5FD_FEAT_ALLOCATE_EARLY flag to indicate that it + * wishes to force early space allocation OR a parallel VFD is + * being used and the dataset in question doesn't have any filters + * applied to it. If filters are applied to the dataset, collective + * I/O will be required when writing to the dataset, so we don't + * need to initialize storage here, as the collective I/O process + * will coordinate that. */ - if ((H5F_INTENT(dataset->oloc.file) & H5F_ACC_RDWR) && - !(*dataset->shared->layout.ops->is_space_alloc)(&dataset->shared->layout.storage) && - H5F_HAS_FEATURE(dataset->oloc.file, H5FD_FEAT_ALLOCATE_EARLY)) { + must_init_storage = (H5F_INTENT(dataset->oloc.file) & H5F_ACC_RDWR) && + !(*dataset->shared->layout.ops->is_space_alloc)(&dataset->shared->layout.storage); + must_init_storage = must_init_storage && (H5F_HAS_FEATURE(dataset->oloc.file, H5FD_FEAT_ALLOCATE_EARLY) || + (H5F_HAS_FEATURE(dataset->oloc.file, H5FD_FEAT_HAS_MPI) && + dataset->shared->dcpl_cache.pline.nused == 0)); + + if (must_init_storage) { H5D_io_info_t io_info; io_info.dset = dataset; if (H5D__alloc_storage(&io_info, H5D_ALLOC_OPEN, FALSE, NULL) < 0) HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize file storage") - } /* end if */ + } done: if (ret_value < 0) { diff --git a/testpar/t_chunk_alloc.c b/testpar/t_chunk_alloc.c index ac5b90b5a3c..c38d373b654 100644 --- a/testpar/t_chunk_alloc.c +++ b/testpar/t_chunk_alloc.c @@ -483,3 +483,298 @@ 
test_chunk_alloc(void) /* reopen dataset in parallel, read and verify the data */ verify_data(filename, CHUNK_FACTOR, all, CLOSE, &file_id, &dataset); } + +/* + * A test to verify the following: + * + * - That the library forces allocation of all space in the file + * for a chunked dataset opened with parallel file access when + * that dataset: + * + * - was created with serial file access + * - was created with the default incremental file space + * allocation time + * - has no filters applied to it + * + * In this case, the library has to ensure that all the file + * space for the dataset is allocated so that the MPI processes + * can write to chunks independently of each other and still have + * a consistent view of the file. + * + * - That the library DOES NOT force allocation of all space in + * the file for a chunked dataset opened with parallel file access + * when that dataset: + * + * - was created with serial file access + * - was created with the default incremental file space + * allocation time + * - has filters applied to it + * + * In this case, writes to the dataset are required to be collective, + * so file space can be allocated incrementally in a coordinated + * fashion. 
+ */ +void +test_chunk_alloc_incr_ser_to_par(void) +{ + H5D_space_status_t space_status; + const char *filename; + hsize_t dset_dims[1]; + hsize_t mem_dims[1]; + hsize_t start[1]; + hsize_t stride[1]; + hsize_t count[1]; + hsize_t block[1]; + hsize_t alloc_size; + size_t nchunks; + herr_t ret; + hid_t fid = H5I_INVALID_HID; + hid_t fapl_id = H5I_INVALID_HID; + hid_t dset_id = H5I_INVALID_HID; + hid_t fspace_id = H5I_INVALID_HID; + hid_t mspace_id = H5I_INVALID_HID; + hid_t dxpl_id = H5I_INVALID_HID; + int *data = NULL; + int *correct_data = NULL; + int *read_data = NULL; + + MPI_Comm_size(MPI_COMM_WORLD, &mpi_size); + MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank); + + filename = (const char *)GetTestParameters(); + if (MAINPROCESS && VERBOSE_MED) + printf("Chunked dataset incremental file space allocation serial to parallel test on file %s\n", + filename); + + nchunks = (size_t)(CHUNK_FACTOR * mpi_size); + dset_dims[0] = (hsize_t)(nchunks * CHUNK_SIZE); + + if (mpi_rank == 0) { + hsize_t chunk_dims[1] = {CHUNK_SIZE}; + hid_t space_id = H5I_INVALID_HID; + hid_t dcpl_id = H5I_INVALID_HID; + + fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); + VRFY((fid >= 0), "H5Fcreate"); + + dcpl_id = H5Pcreate(H5P_DATASET_CREATE); + VRFY((dcpl_id >= 0), "H5Pcreate"); + + ret = H5Pset_chunk(dcpl_id, 1, chunk_dims); + VRFY((ret == SUCCEED), "H5Pset_chunk"); + + ret = H5Pset_alloc_time(dcpl_id, H5D_ALLOC_TIME_INCR); + VRFY((ret == SUCCEED), "H5Pset_alloc_time"); + + space_id = H5Screate_simple(1, dset_dims, NULL); + VRFY((space_id >= 0), "H5Screate_simple"); + + /* Create a chunked dataset without a filter applied to it */ + dset_id = + H5Dcreate2(fid, "dset_no_filter", H5T_NATIVE_INT, space_id, H5P_DEFAULT, dcpl_id, H5P_DEFAULT); + VRFY((dset_id >= 0), "H5Dcreate2"); + + ret = H5Dclose(dset_id); + VRFY((ret == SUCCEED), "H5Dclose"); + + /* Create a chunked dataset with a filter applied to it */ + ret = H5Pset_shuffle(dcpl_id); + VRFY((ret == SUCCEED), 
"H5Pset_shuffle"); + + dset_id = H5Dcreate2(fid, "dset_filter", H5T_NATIVE_INT, space_id, H5P_DEFAULT, dcpl_id, H5P_DEFAULT); + VRFY((dset_id >= 0), "H5Dcreate2"); + + ret = H5Dclose(dset_id); + VRFY((ret == SUCCEED), "H5Dclose"); + ret = H5Pclose(dcpl_id); + VRFY((ret == SUCCEED), "H5Pclose"); + ret = H5Sclose(space_id); + VRFY((ret == SUCCEED), "H5Sclose"); + ret = H5Fclose(fid); + VRFY((ret == SUCCEED), "H5Fclose"); + } + + MPI_Barrier(MPI_COMM_WORLD); + + fapl_id = H5Pcreate(H5P_FILE_ACCESS); + VRFY((fapl_id >= 0), "H5Pcreate"); + + ret = H5Pset_fapl_mpio(fapl_id, MPI_COMM_WORLD, MPI_INFO_NULL); + VRFY((ret == SUCCEED), "H5Pset_fapl_mpio"); + + fid = H5Fopen(filename, H5F_ACC_RDWR, fapl_id); + VRFY((fid >= 0), "H5Fopen"); + + data = malloc((dset_dims[0] / (hsize_t)mpi_size) * sizeof(int)); + VRFY(data, "malloc"); + read_data = malloc(dset_dims[0] * sizeof(int)); + VRFY(read_data, "malloc"); + correct_data = malloc(dset_dims[0] * sizeof(int)); + VRFY(correct_data, "malloc"); + + /* + * Check the file space allocation status/size and dataset + * data before and after writing to the dataset without a + * filter + */ + dset_id = H5Dopen2(fid, "dset_no_filter", H5P_DEFAULT); + VRFY((dset_id >= 0), "H5Dopen2"); + + ret = H5Dget_space_status(dset_id, &space_status); + VRFY((ret == SUCCEED), "H5Dread"); + + VRFY((space_status == H5D_SPACE_STATUS_ALLOCATED), "file space allocation status verification succeeded"); + + alloc_size = H5Dget_storage_size(dset_id); + VRFY(((dset_dims[0] * sizeof(int)) == alloc_size), "file space allocation size verification succeeded"); + + memset(read_data, 255, dset_dims[0] * sizeof(int)); + memset(correct_data, 0, dset_dims[0] * sizeof(int)); + + ret = H5Dread(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, read_data); + VRFY((ret == SUCCEED), "H5Dread"); + + MPI_Barrier(MPI_COMM_WORLD); + + VRFY((0 == memcmp(read_data, correct_data, dset_dims[0] * sizeof(int))), "data verification succeeded"); + + fspace_id = 
H5Dget_space(dset_id); + VRFY((ret == SUCCEED), "H5Dget_space"); + + start[0] = ((hsize_t)mpi_rank * (dset_dims[0] / (hsize_t)mpi_size)); + stride[0] = 1; + count[0] = (dset_dims[0] / (hsize_t)mpi_size); + block[0] = 1; + + ret = H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, stride, count, block); + VRFY((ret == SUCCEED), "H5Sselect_hyperslab"); + + mem_dims[0] = count[0] * block[0]; + + mspace_id = H5Screate_simple(1, mem_dims, NULL); + VRFY((mspace_id >= 0), "H5Screate_simple"); + + memset(data, 255, (dset_dims[0] / (hsize_t)mpi_size) * sizeof(int)); + + ret = H5Dwrite(dset_id, H5T_NATIVE_INT, mspace_id, fspace_id, H5P_DEFAULT, data); + VRFY((ret == SUCCEED), "H5Dwrite"); + + ret = H5Sclose(mspace_id); + VRFY((ret == SUCCEED), "H5Sclose"); + + MPI_Barrier(MPI_COMM_WORLD); + + ret = H5Dget_space_status(dset_id, &space_status); + VRFY((ret == SUCCEED), "H5Dread"); + + VRFY((space_status == H5D_SPACE_STATUS_ALLOCATED), "file space allocation status verification succeeded"); + + alloc_size = H5Dget_storage_size(dset_id); + VRFY(((dset_dims[0] * sizeof(int)) == alloc_size), "file space allocation size verification succeeded"); + + memset(read_data, 0, dset_dims[0] * sizeof(int)); + memset(correct_data, 255, dset_dims[0] * sizeof(int)); + + ret = H5Dread(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, read_data); + VRFY((ret == SUCCEED), "H5Dread"); + + MPI_Barrier(MPI_COMM_WORLD); + + VRFY((0 == memcmp(read_data, correct_data, dset_dims[0] * sizeof(int))), "data verification succeeded"); + + ret = H5Sclose(fspace_id); + VRFY((ret == SUCCEED), "H5Sclose"); + ret = H5Dclose(dset_id); + VRFY((ret == SUCCEED), "H5Dclose"); + + /* + * Check the file space allocation status/size and dataset + * data before and after writing to the dataset with a + * filter + */ + dset_id = H5Dopen2(fid, "dset_filter", H5P_DEFAULT); + VRFY((dset_id >= 0), "H5Dopen2"); + + ret = H5Dget_space_status(dset_id, &space_status); + VRFY((ret == SUCCEED), "H5Dread"); + + 
VRFY((space_status == H5D_SPACE_STATUS_NOT_ALLOCATED), + "file space allocation status verification succeeded"); + + alloc_size = H5Dget_storage_size(dset_id); + VRFY((0 == alloc_size), "file space allocation size verification succeeded"); + + memset(read_data, 255, dset_dims[0] * sizeof(int)); + memset(correct_data, 0, dset_dims[0] * sizeof(int)); + + ret = H5Dread(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, read_data); + VRFY((ret == SUCCEED), "H5Dread"); + + MPI_Barrier(MPI_COMM_WORLD); + + VRFY((0 == memcmp(read_data, correct_data, dset_dims[0] * sizeof(int))), "data verification succeeded"); + + fspace_id = H5Dget_space(dset_id); + VRFY((ret == SUCCEED), "H5Dget_space"); + + start[0] = ((hsize_t)mpi_rank * (dset_dims[0] / (hsize_t)mpi_size)); + stride[0] = 1; + count[0] = (dset_dims[0] / (hsize_t)mpi_size); + block[0] = 1; + + ret = H5Sselect_hyperslab(fspace_id, H5S_SELECT_SET, start, stride, count, block); + VRFY((ret == SUCCEED), "H5Sselect_hyperslab"); + + mem_dims[0] = count[0] * block[0]; + + mspace_id = H5Screate_simple(1, mem_dims, NULL); + VRFY((mspace_id >= 0), "H5Screate_simple"); + + memset(data, 255, (dset_dims[0] / (hsize_t)mpi_size) * sizeof(int)); + + dxpl_id = H5Pcreate(H5P_DATASET_XFER); + VRFY((dxpl_id >= 0), "H5Pcreate"); + + ret = H5Pset_dxpl_mpio(dxpl_id, H5FD_MPIO_COLLECTIVE); + VRFY((ret == SUCCEED), "H5Pset_dxpl_mpio"); + + ret = H5Dwrite(dset_id, H5T_NATIVE_INT, mspace_id, fspace_id, dxpl_id, data); + VRFY((ret == SUCCEED), "H5Dwrite"); + + ret = H5Sclose(mspace_id); + VRFY((ret == SUCCEED), "H5Sclose"); + + MPI_Barrier(MPI_COMM_WORLD); + + ret = H5Dget_space_status(dset_id, &space_status); + VRFY((ret == SUCCEED), "H5Dread"); + + VRFY((space_status == H5D_SPACE_STATUS_ALLOCATED), "file space allocation status verification succeeded"); + + alloc_size = H5Dget_storage_size(dset_id); + VRFY(((dset_dims[0] * sizeof(int)) == alloc_size), "file space allocation size verification succeeded"); + + memset(read_data, 0, 
dset_dims[0] * sizeof(int)); + memset(correct_data, 255, dset_dims[0] * sizeof(int)); + + ret = H5Dread(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, read_data); + VRFY((ret == SUCCEED), "H5Dread"); + + MPI_Barrier(MPI_COMM_WORLD); + + VRFY((0 == memcmp(read_data, correct_data, dset_dims[0] * sizeof(int))), "data verification succeeded"); + + ret = H5Pclose(dxpl_id); + VRFY((ret == SUCCEED), "H5Pclose"); + ret = H5Sclose(fspace_id); + VRFY((ret == SUCCEED), "H5Sclose"); + ret = H5Dclose(dset_id); + VRFY((ret == SUCCEED), "H5Dclose"); + + free(correct_data); + free(read_data); + free(data); + + H5Pclose(fapl_id); + H5Fclose(fid); +} diff --git a/testpar/testphdf5.c b/testpar/testphdf5.c index 678db05d4f3..ceaeff3290b 100644 --- a/testpar/testphdf5.c +++ b/testpar/testphdf5.c @@ -374,6 +374,8 @@ main(int argc, char **argv) AddTest("selnone", none_selection_chunk, NULL, "chunked dataset with none-selection", PARATESTFILE); AddTest("calloc", test_chunk_alloc, NULL, "parallel extend Chunked allocation on serial file", PARATESTFILE); + AddTest("chkallocser2par", test_chunk_alloc_incr_ser_to_par, NULL, + "chunk allocation from serial to parallel file access", PARATESTFILE); AddTest("fltread", test_filter_read, NULL, "parallel read of dataset written serially with filters", PARATESTFILE); diff --git a/testpar/testphdf5.h b/testpar/testphdf5.h index d4f0162ff4c..82393e56825 100644 --- a/testpar/testphdf5.h +++ b/testpar/testphdf5.h @@ -255,6 +255,7 @@ void none_selection_chunk(void); void actual_io_mode_tests(void); void no_collective_cause_tests(void); void test_chunk_alloc(void); +void test_chunk_alloc_incr_ser_to_par(void); void test_filter_read(void); void compact_dataset(void); void null_dataset(void); From 4e2fbaab151dc879010bb94fbc283d4e88048c4c Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Fri, 1 Sep 2023 13:17:27 -0500 Subject: [PATCH 097/108] Correct dimensions in java test (#3485) --- 
java/test/TestH5Arw.java | 2 +- java/test/TestH5Drw.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/java/test/TestH5Arw.java b/java/test/TestH5Arw.java index 56ac7f4b157..205f3b0e3a4 100644 --- a/java/test/TestH5Arw.java +++ b/java/test/TestH5Arw.java @@ -278,7 +278,7 @@ public void testH5Aread_16bit_ints() @Test public void testH5Aread_32bit_ints() { - int[][] attr_data = new int[DIM_X][DIM16_Y]; + int[][] attr_data = new int[DIM_X][DIM32_Y]; try { openH5file(H5_INTS_FILE, DATASETU32); diff --git a/java/test/TestH5Drw.java b/java/test/TestH5Drw.java index b18bed7ad2f..a5caabd72be 100644 --- a/java/test/TestH5Drw.java +++ b/java/test/TestH5Drw.java @@ -226,7 +226,7 @@ public void testH5Dread_16bit_ints() @Test public void testH5Dread_32bit_ints() { - int[][] dset_data = new int[DIM_X][DIM16_Y]; + int[][] dset_data = new int[DIM_X][DIM32_Y]; try { openH5file(H5_INTS_FILE, DATASETU32); From 870f7cfe245719d4e50372ac5e3fb97516863145 Mon Sep 17 00:00:00 2001 From: bmribler <39579120+bmribler@users.noreply.github.com> Date: Sun, 3 Sep 2023 02:05:36 -0400 Subject: [PATCH 098/108] Add the 1.12 CVE matrix file (#3497) --- CVE_list-1_12.md | 74 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 74 insertions(+) create mode 100644 CVE_list-1_12.md diff --git a/CVE_list-1_12.md b/CVE_list-1_12.md new file mode 100644 index 00000000000..d5c7f4173d0 --- /dev/null +++ b/CVE_list-1_12.md @@ -0,0 +1,74 @@ +| CVE issue number | 1.12.0 | 1.12.1 | 1.12.2 | 1.12.3 | +| :------------------------------------------------------------------------- | :----- | :----- | :----- | :----- | +| [CVE-2022-26061](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-26061) | UNTESTED | UNTESTED | UNTESTED | UNTESTED | +| [CVE-2022-25972](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-25972) | UNTESTED | UNTESTED | UNTESTED | UNTESTED | +| [CVE-2022-25942](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-25942) | UNTESTED | UNTESTED | UNTESTED 
| UNTESTED | +| [CVE-2021-46244](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-46244) | ❌ | ❌ | ❌ | ✅ | +| [CVE-2021-46243](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-46243) | ❌ | ❌ | ❌ | ✅ | +| [CVE-2021-46242](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-46242) | ❌ | ✅ | ✅ | ✅ | +| [CVE-2021-45833](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-45833) | ❌ | ❌ | ❌ | ✅ | +| [CVE-2021-45832](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-45832) | UNTESTED | UNTESTED | UNTESTED | UNTESTED | +| [CVE-2021-45830](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-45830) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2021-45829](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-45829) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2021-37501](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-37501) | ❌ | ❌ | ❌ | ✅ | +| [CVE-2021-36977](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-36977) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2021-31009](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-31009) | N/A | N/A | N/A | N/A | +| [CVE-2020-10812](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-10812) | ❌ | ✅ | ✅ | ✅ | +| [CVE-2020-10811](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-10811) | ❌ | ✅ | ✅ | ✅ | +| [CVE-2020-10810](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-10810) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2020-10809](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-10809) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2019-9152](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2019-9152) | ❌ | ❌ | ❌ | ✅ | +| [CVE-2019-9151](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2019-9151) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2019-8398](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2019-8398) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2019-8397](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2019-8397) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2019-8396](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2019-8396) | ✅ | ✅ | ✅ | ✅ | +| 
[CVE-2018-17439](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-17439) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2018-17438](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-17438) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2018-17437](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-17437) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2018-17436](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-17436) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2018-17435](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-17435) | ❌ | ✅ | ✅ | ✅ | +| [CVE-2018-17434](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-17434) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2018-17433](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-17433) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2018-17432](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-17432) | ❌ | ✅ | ✅ | ✅ | +| [CVE-2018-17237](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-17237) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2018-17234](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-17234) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2018-17233](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-17233) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2018-16438](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-16438) | ✅ | ❌ | ❌ | ✅ | +| [CVE-2018-15672](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-15672) | ❌ | ✅ | ✅ | ✅ | +| [CVE-2018-15671](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-15671) | ❌ | ❌ | ❌ | ✅ | +| [CVE-2018-14460](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-14460) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2018-14035](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-14035) | ❌ | ✅ | ✅ | ✅ | +| [CVE-2018-14034](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-14034) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2018-14033](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-14033) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2018-14031](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-14031) | ✅ | ❌ | ❌ | ✅ | +| [CVE-2018-13876](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-13876) 
| ✅ | ✅ | ✅ | ✅ | +| [CVE-2018-13875](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-13875) | ✅ | ❌ | ❌ | ✅ | +| [CVE-2018-13874](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-13874) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2018-13873](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-13873) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2018-13872](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-13872) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2018-13871](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-13871) | ✅ | ❌ | ❌ | ❌ | +| [CVE-2018-13870](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-13870) | ❌ | ✅ | ✅ | ✅ | +| [CVE-2018-13869](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-13869) | ❌ | ✅ | ✅ | ✅ | +| [CVE-2018-13868](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-13868) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2018-13867](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-13867) | ❌ | ❌ | ❌ | ❌ | +| [CVE-2018-13866](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-13866) | ❌ | ✅ | ✅ | ❌ | +| [CVE-2018-11207](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-11207) | ❌ | ✅ | ✅ | ✅ | +| [CVE-2018-11206](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-11206) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2018-11205](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-11205) | ❌ | ❌ | ❌ | ✅ | +| [CVE-2018-11204](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-11204) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2018-11203](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-11203) | ❌ | ✅ | ✅ | ✅ | +| [CVE-2018-11202](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-11202) | ❌ | ❌ | ❌ | ✅ | +| [CVE-2017-17509](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2017-17509) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2017-17508](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2017-17508) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2017-17507](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2017-17507) | ❌ | ❌ | ❌ | ❌ | +| 
[CVE-2017-17506](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2017-17506) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2017-17505](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2017-17505) | ✅ | ✅ | ✅ | ✅ | +| [CVE-2016-4333](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2016-4333) | ❌ | ✅ | ✅ | ✅ | +| [CVE-2016-4332](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2016-4332) | ❌ | ✅ | ✅ | ❌ | +| [CVE-2016-4331](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2016-4331) | ❌ | ✅ | ✅ | ✅ | +| [CVE-2016-4330](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2016-4330) | ✅ | ✅ | ✅ | ✅ | + +## NOTES +1. CVE-2021-45832 has no known proof of vulnerability file. The H5E code that could produce an infinite loop has been reworked, but without a vulnerable file or test program it's difficult to tell if this issue has been fixed. The stack trace provided with the CVE only contains part of the trace, so we don't even know the entry point into the library. +2. CVE-2021-31009 is not a specific vulnerability against HDF5. +3. CVE-2022-25942, CVE-2022-25972, and CVE-2022-26061 are not tested. Those vulnerabilities involve the high-level GIF tools and can be avoided by disabling those tools at build time. From b1e65333b5c19457c53d5d6e7fd5e9add81e307d Mon Sep 17 00:00:00 2001 From: bmribler <39579120+bmribler@users.noreply.github.com> Date: Sun, 3 Sep 2023 02:39:53 -0400 Subject: [PATCH 099/108] Fix links in RELEASE.txt (#3498) * Fixed incorrect links Updated documentation link to the new documentation Corrected the Software Changes link, it was the Developer's Guide. * Used a more appropriate page --- release_docs/RELEASE.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 3a8208847d5..db2a6df4bb7 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -13,9 +13,9 @@ HDF5 source. Note that documentation in the links below will be updated at the time of each final release. 
-Links to HDF5 documentation can be found on The HDF5 web page: +Links to HDF5 documentation can be found at: - https://portal.hdfgroup.org/display/HDF5/HDF5 + https://hdfgroup.github.io/hdf5/ The official HDF5 releases can be obtained from: @@ -24,7 +24,7 @@ The official HDF5 releases can be obtained from: Changes from Release to Release and New Features in the HDF5-1.12.x release series can be found at: - https://portal.hdfgroup.org/display/HDF5/HDF5+Application+Developer%27s+Guide + https://portal.hdfgroup.org/display/HDF5/HDF5+1.12+Release If you have any questions or comments, please send them to the HDF Help Desk: From 90852b28c729e963a7ebf4b21fe216a44ce7ad2b Mon Sep 17 00:00:00 2001 From: Dana Robinson <43805+derobins@users.noreply.github.com> Date: Sun, 3 Sep 2023 11:12:53 -0700 Subject: [PATCH 100/108] Cherry-pick of CVE fixes from 1.10 (#3490) * CVE-2016-4332 * CVE-2018-11202 * CVE-2018-11205 * CVE-2018-13866 * CVE-2018-13867 * CVE-2018-13871 * CVE-2018-15671 --- release_docs/RELEASE.txt | 26 +++++++++ src/H5Dbtree.c | 3 +- src/H5Dchunk.c | 15 ++++- src/H5Fsuper_cache.c | 106 +++++++++++++++++++++++----------- src/H5Gint.c | 22 +++---- src/H5HLcache.c | 78 ++++++++++++++++--------- src/H5Omessage.c | 5 +- tools/src/h5dump/h5dump_ddl.c | 9 +-- tools/testfiles/tgroup-2.ddl | 9 +-- 9 files changed, 175 insertions(+), 98 deletions(-) diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index db2a6df4bb7..02e0c731b45 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -241,6 +241,32 @@ Bug Fixes since HDF5-1.12.2 release =================================== Library ------- + - Fixed CVE-2018-11202 + + A malformed file could result in chunk index memory leaks. Under most + conditions (i.e., when the --enable-using-memchecker option is NOT + used), this would result in a small memory leak and and infinite loop + and abort when shutting down the library. 
The infinite loop would be + due to the "free list" package not being able to clear its resources + so the library couldn't shut down. When the "using a memory checker" + option is used, the free lists are disabled so there is just a memory + leak with no abort on library shutdown. + + The chunk index resources are now correctly cleaned up when reading + misparsed files and valgrind confirms no memory leaks. + + - Fixed an assertion in a previous fix for CVE-2016-4332 + + An assert could fail when processing corrupt files that have invalid + shared message flags (as in CVE-2016-4332). + + The assert statement in question has been replaced with pointer checks + that don't raise errors. Since the function is in cleanup code, we do + our best to close and free things, even when presented with partially + initialized structs. + + Fixes CVE-2016-4332 and HDFFV-9950 (confirmed via the cve_hdf5 repo) + - Fixed a file space allocation bug in the parallel library for chunked datasets diff --git a/src/H5Dbtree.c b/src/H5Dbtree.c index c3aee44fd6c..12d151f5f86 100644 --- a/src/H5Dbtree.c +++ b/src/H5Dbtree.c @@ -661,7 +661,8 @@ H5D__btree_decode_key(const H5B_shared_t *shared, const uint8_t *raw, void *_key /* Retrieve coordinate offset */ UINT64DECODE(raw, tmp_offset); - HDassert(0 == (tmp_offset % layout->dim[u])); + if (0 != (tmp_offset % layout->dim[u])) + HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL, "bad coordinate offset"); /* Convert to a scaled offset */ key->scaled[u] = tmp_offset / layout->dim[u]; diff --git a/src/H5Dchunk.c b/src/H5Dchunk.c index a0f83714627..b5e1d5ea9bc 100644 --- a/src/H5Dchunk.c +++ b/src/H5Dchunk.c @@ -701,9 +701,12 @@ H5D__chunk_set_info_real(H5O_layout_chunk_t *layout, unsigned ndims, const hsize /* Sanity checks */ HDassert(layout); - HDassert(ndims > 0); HDassert(curr_dims); + /* Can happen when corrupt files are parsed */ + if (ndims == 0) + HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL, "number of dimensions cannot be zero") + /* Compute 
the # of chunks in dataset dimensions */ for (u = 0, layout->nchunks = 1, layout->max_nchunks = 1; u < ndims; u++) { /* Round up to the next integer # of chunks, to accommodate partial chunks */ @@ -915,6 +918,7 @@ H5D__chunk_init(H5F_t *f, const H5D_t *const dset, hid_t dapl_id) H5D_rdcc_t *rdcc = &(dset->shared->cache.chunk); /* Convenience pointer to dataset's chunk cache */ H5P_genplist_t *dapl; /* Data access property list object pointer */ H5O_storage_chunk_t *sc = &(dset->shared->layout.storage.u.chunk); + hbool_t idx_init = FALSE; herr_t ret_value = SUCCEED; /* Return value */ FUNC_ENTER_STATIC @@ -990,12 +994,21 @@ H5D__chunk_init(H5F_t *f, const H5D_t *const dset, hid_t dapl_id) /* Allocate any indexing structures */ if (sc->ops->init && (sc->ops->init)(&idx_info, dset->shared->space, dset->oloc.addr) < 0) HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't initialize indexing information") + idx_init = TRUE; /* Set the number of chunks in dataset, etc. */ if (H5D__chunk_set_info(dset) < 0) HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to set # of chunks for dataset") done: + if (FAIL == ret_value) { + if (rdcc->slot) + rdcc->slot = H5FL_SEQ_FREE(H5D_rdcc_ent_ptr_t, rdcc->slot); + + if (idx_init && sc->ops->dest && (sc->ops->dest)(&idx_info) < 0) + HDONE_ERROR(H5E_DATASET, H5E_CANTFREE, FAIL, "unable to release chunk index info"); + } + FUNC_LEAVE_NOAPI(ret_value) } /* end H5D__chunk_init() */ diff --git a/src/H5Fsuper_cache.c b/src/H5Fsuper_cache.c index df61252cac3..0c297564d6b 100644 --- a/src/H5Fsuper_cache.c +++ b/src/H5Fsuper_cache.c @@ -409,13 +409,13 @@ H5F__cache_superblock_verify_chksum(const void *_image, size_t len, void *_udata *------------------------------------------------------------------------- */ static void * -H5F__cache_superblock_deserialize(const void *_image, size_t H5_ATTR_NDEBUG_UNUSED len, void *_udata, - hbool_t H5_ATTR_UNUSED *dirty) +H5F__cache_superblock_deserialize(const void *_image, size_t len, void *_udata, 
hbool_t H5_ATTR_UNUSED *dirty) { H5F_super_t *sblock = NULL; /* File's superblock */ H5F_superblock_cache_ud_t *udata = (H5F_superblock_cache_ud_t *)_udata; /* User data */ - const uint8_t *image = _image; /* Pointer into raw data buffer */ - H5F_super_t *ret_value = NULL; /* Return value */ + const uint8_t *image = _image; /* Pointer into raw data buffer */ + const uint8_t *end = image + len - 1; /* Pointer to end of buffer */ + H5F_super_t *ret_value = NULL; FUNC_ENTER_STATIC @@ -427,11 +427,11 @@ H5F__cache_superblock_deserialize(const void *_image, size_t H5_ATTR_NDEBUG_UNUS /* Allocate space for the superblock */ if (NULL == (sblock = H5FL_CALLOC(H5F_super_t))) - HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, NULL, "memory allocation failed") + HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, NULL, "memory allocation failed"); /* Deserialize the file superblock's prefix */ if (H5F__superblock_prefix_decode(sblock, &image, udata, FALSE) < 0) - HGOTO_ERROR(H5E_FILE, H5E_CANTDECODE, NULL, "can't decode file superblock prefix") + HGOTO_ERROR(H5E_FILE, H5E_CANTDECODE, NULL, "can't decode file superblock prefix"); /* Check for older version of superblock format */ if (sblock->super_vers < HDF5_SUPERBLOCK_VERSION_2) { @@ -441,85 +441,113 @@ H5F__cache_superblock_deserialize(const void *_image, size_t H5_ATTR_NDEBUG_UNUS unsigned chunk_btree_k; /* B-tree chunk internal node 'K' value */ /* Freespace version (hard-wired) */ + if (H5_IS_BUFFER_OVERFLOW(image, 1, end)) + HGOTO_ERROR(H5E_FILE, H5E_OVERFLOW, NULL, "image pointer is out of bounds"); if (HDF5_FREESPACE_VERSION != *image++) - HGOTO_ERROR(H5E_FILE, H5E_BADVALUE, NULL, "bad free space version number") + HGOTO_ERROR(H5E_FILE, H5E_BADVALUE, NULL, "bad free space version number"); /* Root group version number (hard-wired) */ + if (H5_IS_BUFFER_OVERFLOW(image, 1, end)) + HGOTO_ERROR(H5E_FILE, H5E_OVERFLOW, NULL, "image pointer is out of bounds"); if (HDF5_OBJECTDIR_VERSION != *image++) - HGOTO_ERROR(H5E_FILE, H5E_BADVALUE, NULL, 
"bad object directory version number") + HGOTO_ERROR(H5E_FILE, H5E_BADVALUE, NULL, "bad object directory version number"); /* Skip over reserved byte */ + if (H5_IS_BUFFER_OVERFLOW(image, 1, end)) + HGOTO_ERROR(H5E_FILE, H5E_OVERFLOW, NULL, "image pointer is out of bounds"); image++; /* Shared header version number (hard-wired) */ + if (H5_IS_BUFFER_OVERFLOW(image, 1, end)) + HGOTO_ERROR(H5E_FILE, H5E_OVERFLOW, NULL, "image pointer is out of bounds"); if (HDF5_SHAREDHEADER_VERSION != *image++) - HGOTO_ERROR(H5E_FILE, H5E_BADVALUE, NULL, "bad shared-header format version number") + HGOTO_ERROR(H5E_FILE, H5E_BADVALUE, NULL, "bad shared-header format version number"); /* Skip over size of file addresses (already decoded) */ + if (H5_IS_BUFFER_OVERFLOW(image, 1, end)) + HGOTO_ERROR(H5E_FILE, H5E_OVERFLOW, NULL, "image pointer is out of bounds"); image++; udata->f->shared->sizeof_addr = sblock->sizeof_addr; /* Keep a local copy also */ /* Skip over size of file sizes (already decoded) */ + if (H5_IS_BUFFER_OVERFLOW(image, 1, end)) + HGOTO_ERROR(H5E_FILE, H5E_OVERFLOW, NULL, "image pointer is out of bounds"); image++; udata->f->shared->sizeof_size = sblock->sizeof_size; /* Keep a local copy also */ /* Skip over reserved byte */ + if (H5_IS_BUFFER_OVERFLOW(image, 1, end)) + HGOTO_ERROR(H5E_FILE, H5E_OVERFLOW, NULL, "image pointer is out of bounds"); image++; /* Various B-tree sizes */ + if (H5_IS_BUFFER_OVERFLOW(image, 2, end)) + HGOTO_ERROR(H5E_FILE, H5E_OVERFLOW, NULL, "image pointer is out of bounds"); UINT16DECODE(image, sym_leaf_k); if (sym_leaf_k == 0) - HGOTO_ERROR(H5E_FILE, H5E_BADRANGE, NULL, "bad symbol table leaf node 1/2 rank") + HGOTO_ERROR(H5E_FILE, H5E_BADRANGE, NULL, "bad symbol table leaf node 1/2 rank"); udata->sym_leaf_k = sym_leaf_k; /* Keep a local copy also */ /* Need 'get' call to set other array values */ + if (H5_IS_BUFFER_OVERFLOW(image, 2, end)) + HGOTO_ERROR(H5E_FILE, H5E_OVERFLOW, NULL, "image pointer is out of bounds"); UINT16DECODE(image, 
snode_btree_k); if (snode_btree_k == 0) - HGOTO_ERROR(H5E_FILE, H5E_BADRANGE, NULL, "bad 1/2 rank for btree internal nodes") + HGOTO_ERROR(H5E_FILE, H5E_BADRANGE, NULL, "bad 1/2 rank for btree internal nodes"); udata->btree_k[H5B_SNODE_ID] = snode_btree_k; - /* - * Delay setting the value in the property list until we've checked + /* Delay setting the value in the property list until we've checked * for the indexed storage B-tree internal 'K' value later. */ /* File status flags (not really used yet) */ + if (H5_IS_BUFFER_OVERFLOW(image, 4, end)) + HGOTO_ERROR(H5E_FILE, H5E_OVERFLOW, NULL, "image pointer is out of bounds"); UINT32DECODE(image, status_flags); - HDassert(status_flags <= 255); + if (status_flags > 255) + HGOTO_ERROR(H5E_FILE, H5E_BADVALUE, NULL, "bad superblock status flags"); sblock->status_flags = (uint8_t)status_flags; if (sblock->status_flags & ~H5F_SUPER_ALL_FLAGS) - HGOTO_ERROR(H5E_FILE, H5E_BADVALUE, NULL, "bad flag value for superblock") + HGOTO_ERROR(H5E_FILE, H5E_BADVALUE, NULL, "bad flag value for superblock"); - /* - * If the superblock version # is greater than 0, read in the indexed + /* If the superblock version # is greater than 0, read in the indexed * storage B-tree internal 'K' value */ if (sblock->super_vers > HDF5_SUPERBLOCK_VERSION_DEF) { + if (H5_IS_BUFFER_OVERFLOW(image, 2, end)) + HGOTO_ERROR(H5E_FILE, H5E_OVERFLOW, NULL, "image pointer is out of bounds"); UINT16DECODE(image, chunk_btree_k); /* Reserved bytes are present only in version 1 */ - if (sblock->super_vers == HDF5_SUPERBLOCK_VERSION_1) - image += 2; /* reserved */ - } /* end if */ + if (sblock->super_vers == HDF5_SUPERBLOCK_VERSION_1) { + /* Reserved */ + if (H5_IS_BUFFER_OVERFLOW(image, 2, end)) + HGOTO_ERROR(H5E_FILE, H5E_OVERFLOW, NULL, "image pointer is out of bounds"); + image += 2; + } + } else chunk_btree_k = HDF5_BTREE_CHUNK_IK_DEF; udata->btree_k[H5B_CHUNK_ID] = chunk_btree_k; /* Remainder of "variable-sized" portion of superblock */ + if 
(H5_IS_BUFFER_OVERFLOW(image, H5F_sizeof_addr(udata->f) * 4, end)) + HGOTO_ERROR(H5E_FILE, H5E_OVERFLOW, NULL, "image pointer is out of bounds"); H5F_addr_decode(udata->f, (const uint8_t **)&image, &sblock->base_addr /*out*/); H5F_addr_decode(udata->f, (const uint8_t **)&image, &sblock->ext_addr /*out*/); H5F_addr_decode(udata->f, (const uint8_t **)&image, &udata->stored_eof /*out*/); H5F_addr_decode(udata->f, (const uint8_t **)&image, &sblock->driver_addr /*out*/); /* Allocate space for the root group symbol table entry */ - HDassert(!sblock->root_ent); + if (sblock->root_ent) + HGOTO_ERROR(H5E_FILE, H5E_BADVALUE, NULL, "root entry should not exist yet"); if (NULL == (sblock->root_ent = (H5G_entry_t *)H5MM_calloc(sizeof(H5G_entry_t)))) HGOTO_ERROR(H5E_FILE, H5E_CANTALLOC, NULL, - "can't allocate space for root group symbol table entry") + "can't allocate space for root group symbol table entry"); - /* decode the root group symbol table entry */ + /* Decode the root group symbol table entry */ if (H5G_ent_decode(udata->f, (const uint8_t **)&image, sblock->root_ent) < 0) - HGOTO_ERROR(H5E_FILE, H5E_CANTDECODE, NULL, "can't decode root group symbol table entry") + HGOTO_ERROR(H5E_FILE, H5E_CANTDECODE, NULL, "can't decode root group symbol table entry"); /* Set the root group address to the correct value */ sblock->root_addr = sblock->root_ent->header; @@ -533,26 +561,32 @@ H5F__cache_superblock_deserialize(const void *_image, size_t H5_ATTR_NDEBUG_UNUS /* Eliminate the driver info */ sblock->driver_addr = HADDR_UNDEF; udata->drvrinfo_removed = TRUE; - } /* end if */ + } /* NOTE: Driver info block is decoded separately, later */ - - } /* end if */ + } else { uint32_t read_chksum; /* Checksum read from file */ /* Skip over size of file addresses (already decoded) */ image++; udata->f->shared->sizeof_addr = sblock->sizeof_addr; /* Keep a local copy also */ - /* Skip over size of file sizes (already decoded) */ image++; udata->f->shared->sizeof_size = 
sblock->sizeof_size; /* Keep a local copy also */ + /* Check whether the image pointer is out of bounds */ + if (H5_IS_BUFFER_OVERFLOW(image, 1, end)) + HGOTO_ERROR(H5E_FILE, H5E_OVERFLOW, NULL, "image pointer is out of bounds"); + /* File status flags (not really used yet) */ sblock->status_flags = *image++; if (sblock->status_flags & ~H5F_SUPER_ALL_FLAGS) - HGOTO_ERROR(H5E_FILE, H5E_BADVALUE, NULL, "bad flag value for superblock") + HGOTO_ERROR(H5E_FILE, H5E_BADVALUE, NULL, "bad flag value for superblock"); + + /* Check whether the image pointer will be out of bounds */ + if (H5_IS_BUFFER_OVERFLOW(image, H5F_SIZEOF_ADDR(udata->f) * 4, end)) + HGOTO_ERROR(H5E_FILE, H5E_OVERFLOW, NULL, "image pointer is out of bounds"); /* Base, superblock extension, end of file & root group object header addresses */ H5F_addr_decode(udata->f, (const uint8_t **)&image, &sblock->base_addr /*out*/); @@ -562,6 +596,10 @@ H5F__cache_superblock_deserialize(const void *_image, size_t H5_ATTR_NDEBUG_UNUS /* checksum verification already done in verify_chksum cb */ + /* Check whether the image pointer will be out of bounds */ + if (H5_IS_BUFFER_OVERFLOW(image, sizeof(uint32_t), end)) + HGOTO_ERROR(H5E_FILE, H5E_OVERFLOW, NULL, "image pointer is out of bounds"); + /* Decode checksum */ UINT32DECODE(image, read_chksum); @@ -571,19 +609,19 @@ H5F__cache_superblock_deserialize(const void *_image, size_t H5_ATTR_NDEBUG_UNUS * any attempt to load the Driver Information Block. 
*/ sblock->driver_addr = HADDR_UNDEF; - } /* end else */ + } - /* Sanity check */ - HDassert((size_t)(image - (const uint8_t *)_image) <= len); + /* Size check */ + if ((size_t)(image - (const uint8_t *)_image) > len) + HDONE_ERROR(H5E_FILE, H5E_BADVALUE, NULL, "bad decoded superblock size"); - /* Set return value */ ret_value = sblock; done: /* Release the [possibly partially initialized] superblock on error */ if (!ret_value && sblock) if (H5F__super_free(sblock) < 0) - HDONE_ERROR(H5E_FILE, H5E_CANTFREE, NULL, "unable to destroy superblock data") + HDONE_ERROR(H5E_FILE, H5E_CANTFREE, NULL, "unable to destroy superblock data"); FUNC_LEAVE_NOAPI(ret_value) } /* end H5F__cache_superblock_deserialize() */ diff --git a/src/H5Gint.c b/src/H5Gint.c index e388b326a40..eb983cdad52 100644 --- a/src/H5Gint.c +++ b/src/H5Gint.c @@ -909,15 +909,13 @@ H5G__visit_cb(const H5O_link_t *lnk, void *_udata) /* Check if we've seen the object the link references before */ if (NULL == H5SL_search(udata->visited, &obj_pos)) { H5O_type_t otype; /* Basic object type (group, dataset, etc.) 
*/ - unsigned rc; /* Reference count of object */ /* Get the object's reference count and type */ - if (H5O_get_rc_and_type(&obj_oloc, &rc, &otype) < 0) + if (H5O_get_rc_and_type(&obj_oloc, NULL, &otype) < 0) HGOTO_ERROR(H5E_SYM, H5E_CANTGET, H5_ITER_ERROR, "unable to get object info") - /* If its ref count is > 1, we add it to the list of visited objects */ - /* (because it could come up again during traversal) */ - if (rc > 1) { + /* Add it to the list of visited objects */ + { H5_obj_t *new_node; /* New object node for visited list */ /* Allocate new object "position" node */ @@ -931,7 +929,7 @@ H5G__visit_cb(const H5O_link_t *lnk, void *_udata) if (H5SL_insert(udata->visited, new_node, new_node) < 0) HGOTO_ERROR(H5E_SYM, H5E_CANTINSERT, H5_ITER_ERROR, "can't insert object node into visited list") - } /* end if */ + } /* If it's a group, we recurse into it */ if (otype == H5O_TYPE_GROUP) { @@ -1026,7 +1024,6 @@ H5G_visit(H5G_loc_t *loc, const char *group_name, H5_index_t idx_type, H5_iter_o hid_t gid = H5I_INVALID_HID; /* Group ID */ H5G_t *grp = NULL; /* Group opened */ H5G_loc_t start_loc; /* Location of starting group */ - unsigned rc; /* Reference count of object */ herr_t ret_value = FAIL; /* Return value */ /* Portably clear udata struct (before FUNC_ENTER) */ @@ -1068,13 +1065,8 @@ H5G_visit(H5G_loc_t *loc, const char *group_name, H5_index_t idx_type, H5_iter_o if ((udata.visited = H5SL_create(H5SL_TYPE_OBJ, NULL)) == NULL) HGOTO_ERROR(H5E_SYM, H5E_CANTCREATE, FAIL, "can't create skip list for visited objects") - /* Get the group's reference count */ - if (H5O_get_rc_and_type(&grp->oloc, &rc, NULL) < 0) - HGOTO_ERROR(H5E_SYM, H5E_CANTGET, FAIL, "unable to get object info") - - /* If its ref count is > 1, we add it to the list of visited objects */ - /* (because it could come up again during traversal) */ - if (rc > 1) { + /* Add it to the list of visited objects */ + { H5_obj_t *obj_pos; /* New object node for visited list */ /* Allocate new object 
"position" node */ @@ -1088,7 +1080,7 @@ H5G_visit(H5G_loc_t *loc, const char *group_name, H5_index_t idx_type, H5_iter_o /* Add to list of visited objects */ if (H5SL_insert(udata.visited, obj_pos, obj_pos) < 0) HGOTO_ERROR(H5E_SYM, H5E_CANTINSERT, FAIL, "can't insert object node into visited list") - } /* end if */ + } /* Attempt to get the link info for this group */ if ((linfo_exists = H5G__obj_get_linfo(&(grp->oloc), &linfo)) < 0) diff --git a/src/H5HLcache.c b/src/H5HLcache.c index d7c7ce865dd..dcf1249ddaa 100644 --- a/src/H5HLcache.c +++ b/src/H5HLcache.c @@ -81,7 +81,8 @@ static herr_t H5HL__cache_datablock_notify(H5C_notify_action_t action, void *_th static herr_t H5HL__cache_datablock_free_icr(void *thing); /* Header deserialization */ -static herr_t H5HL__hdr_deserialize(H5HL_t *heap, const uint8_t *image, H5HL_cache_prfx_ud_t *udata); +static herr_t H5HL__hdr_deserialize(H5HL_t *heap, const uint8_t *image, size_t len, + H5HL_cache_prfx_ud_t *udata); /* Free list de/serialization */ static herr_t H5HL__fl_deserialize(H5HL_t *heap); @@ -148,9 +149,10 @@ const H5AC_class_t H5AC_LHEAP_DBLK[1] = {{ *------------------------------------------------------------------------- */ static herr_t -H5HL__hdr_deserialize(H5HL_t *heap, const uint8_t *image, H5HL_cache_prfx_ud_t *udata) +H5HL__hdr_deserialize(H5HL_t *heap, const uint8_t *image, size_t len, H5HL_cache_prfx_ud_t *udata) { - herr_t ret_value = SUCCEED; /* Return value */ + const uint8_t *p_end = image + len - 1; /* End of image buffer */ + herr_t ret_value = SUCCEED; FUNC_ENTER_STATIC @@ -159,16 +161,22 @@ H5HL__hdr_deserialize(H5HL_t *heap, const uint8_t *image, H5HL_cache_prfx_ud_t * HDassert(image); HDassert(udata); - /* Check magic number */ + /* Magic number */ + if (H5_IS_BUFFER_OVERFLOW(image, H5_SIZEOF_MAGIC, p_end)) + HGOTO_ERROR(H5E_HEAP, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding"); if (HDmemcmp(image, H5HL_MAGIC, (size_t)H5_SIZEOF_MAGIC) != 0) - HGOTO_ERROR(H5E_HEAP, 
H5E_BADVALUE, FAIL, "bad local heap signature") + HGOTO_ERROR(H5E_HEAP, H5E_BADVALUE, FAIL, "bad local heap signature"); image += H5_SIZEOF_MAGIC; /* Version */ + if (H5_IS_BUFFER_OVERFLOW(image, 1, p_end)) + HGOTO_ERROR(H5E_HEAP, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding"); if (H5HL_VERSION != *image++) - HGOTO_ERROR(H5E_HEAP, H5E_VERSION, FAIL, "wrong version number in local heap") + HGOTO_ERROR(H5E_HEAP, H5E_VERSION, FAIL, "wrong version number in local heap"); /* Reserved */ + if (H5_IS_BUFFER_OVERFLOW(image, 3, p_end)) + HGOTO_ERROR(H5E_HEAP, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding"); image += 3; /* Store the prefix's address & length */ @@ -176,16 +184,28 @@ H5HL__hdr_deserialize(H5HL_t *heap, const uint8_t *image, H5HL_cache_prfx_ud_t * heap->prfx_size = udata->sizeof_prfx; /* Heap data size */ + if (H5_IS_BUFFER_OVERFLOW(image, udata->sizeof_size, p_end)) + HGOTO_ERROR(H5E_HEAP, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding"); H5F_DECODE_LENGTH_LEN(image, heap->dblk_size, udata->sizeof_size); /* Free list head */ + if (H5_IS_BUFFER_OVERFLOW(image, udata->sizeof_size, p_end)) + HGOTO_ERROR(H5E_HEAP, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding"); H5F_DECODE_LENGTH_LEN(image, heap->free_block, udata->sizeof_size); if (heap->free_block != H5HL_FREE_NULL && heap->free_block >= heap->dblk_size) - HGOTO_ERROR(H5E_HEAP, H5E_BADVALUE, FAIL, "bad heap free list") + HGOTO_ERROR(H5E_HEAP, H5E_BADVALUE, FAIL, "bad heap free list"); /* Heap data address */ + if (H5_IS_BUFFER_OVERFLOW(image, udata->sizeof_addr, p_end)) + HGOTO_ERROR(H5E_HEAP, H5E_OVERFLOW, FAIL, "ran off end of input buffer while decoding"); H5F_addr_decode_len(udata->sizeof_addr, &image, &(heap->dblk_addr)); + /* Check that the datablock address is valid (might not be true + * in a corrupt file) + */ + if (!H5F_addr_defined(heap->dblk_addr)) + HGOTO_ERROR(H5E_HEAP, H5E_BADVALUE, FAIL, "bad datablock address"); + done: 
FUNC_LEAVE_NOAPI(ret_value) } /* end H5HL__hdr_deserialize() */ @@ -222,11 +242,11 @@ H5HL__fl_deserialize(H5HL_t *heap) /* Sanity check */ if (free_block >= heap->dblk_size) - HGOTO_ERROR(H5E_HEAP, H5E_BADRANGE, FAIL, "bad heap free list") + HGOTO_ERROR(H5E_HEAP, H5E_BADRANGE, FAIL, "bad heap free list"); /* Allocate & initialize free list node */ if (NULL == (fl = H5FL_MALLOC(H5HL_free_t))) - HGOTO_ERROR(H5E_HEAP, H5E_CANTALLOC, FAIL, "memory allocation failed") + HGOTO_ERROR(H5E_HEAP, H5E_CANTALLOC, FAIL, "memory allocation failed"); fl->offset = (size_t)free_block; fl->prev = tail; fl->next = NULL; @@ -235,12 +255,12 @@ H5HL__fl_deserialize(H5HL_t *heap) image = heap->dblk_image + free_block; H5F_DECODE_LENGTH_LEN(image, free_block, heap->sizeof_size); if (0 == free_block) - HGOTO_ERROR(H5E_HEAP, H5E_BADVALUE, FAIL, "free block size is zero?") + HGOTO_ERROR(H5E_HEAP, H5E_BADVALUE, FAIL, "free block size is zero?"); /* Decode length of this free block */ H5F_DECODE_LENGTH_LEN(image, fl->size, heap->sizeof_size); if ((fl->offset + fl->size) > heap->dblk_size) - HGOTO_ERROR(H5E_HEAP, H5E_BADRANGE, FAIL, "bad heap free list") + HGOTO_ERROR(H5E_HEAP, H5E_BADRANGE, FAIL, "bad heap free list"); /* Append node onto list */ if (tail) @@ -344,8 +364,7 @@ H5HL__cache_prefix_get_initial_load_size(void H5_ATTR_UNUSED *_udata, size_t *im *------------------------------------------------------------------------- */ static herr_t -H5HL__cache_prefix_get_final_load_size(const void *_image, size_t H5_ATTR_NDEBUG_UNUSED image_len, - void *_udata, size_t *actual_len) +H5HL__cache_prefix_get_final_load_size(const void *_image, size_t image_len, void *_udata, size_t *actual_len) { const uint8_t *image = (const uint8_t *)_image; /* Pointer into raw data buffer */ H5HL_cache_prfx_ud_t *udata = (H5HL_cache_prfx_ud_t *)_udata; /* User data for callback */ @@ -361,8 +380,8 @@ H5HL__cache_prefix_get_final_load_size(const void *_image, size_t H5_ATTR_NDEBUG HDassert(*actual_len == 
image_len); /* Deserialize the heap's header */ - if (H5HL__hdr_deserialize(&heap, (const uint8_t *)image, udata) < 0) - HGOTO_ERROR(H5E_HEAP, H5E_CANTDECODE, FAIL, "can't decode local heap header") + if (H5HL__hdr_deserialize(&heap, (const uint8_t *)image, image_len, udata) < 0) + HGOTO_ERROR(H5E_HEAP, H5E_CANTDECODE, FAIL, "can't decode local heap header"); /* Set the final size for the cache image */ *actual_len = heap.prfx_size; @@ -394,12 +413,12 @@ H5HL__cache_prefix_get_final_load_size(const void *_image, size_t H5_ATTR_NDEBUG *------------------------------------------------------------------------- */ static void * -H5HL__cache_prefix_deserialize(const void *_image, size_t H5_ATTR_NDEBUG_UNUSED len, void *_udata, - hbool_t H5_ATTR_UNUSED *dirty) +H5HL__cache_prefix_deserialize(const void *_image, size_t len, void *_udata, hbool_t H5_ATTR_UNUSED *dirty) { H5HL_t *heap = NULL; /* Local heap */ H5HL_prfx_t *prfx = NULL; /* Heap prefix deserialized */ const uint8_t *image = (const uint8_t *)_image; /* Pointer into decoding buffer */ + const uint8_t *p_end = image + len - 1; /* End of image buffer */ H5HL_cache_prfx_ud_t *udata = (H5HL_cache_prfx_ud_t *)_udata; /* User data for callback */ void *ret_value = NULL; /* Return value */ @@ -420,8 +439,8 @@ H5HL__cache_prefix_deserialize(const void *_image, size_t H5_ATTR_NDEBUG_UNUSED HGOTO_ERROR(H5E_HEAP, H5E_CANTALLOC, NULL, "can't allocate local heap structure"); /* Deserialize the heap's header */ - if (H5HL__hdr_deserialize(heap, (const uint8_t *)image, udata) < 0) - HGOTO_ERROR(H5E_HEAP, H5E_CANTDECODE, NULL, "can't decode local heap header") + if (H5HL__hdr_deserialize(heap, (const uint8_t *)image, len, udata) < 0) + HGOTO_ERROR(H5E_HEAP, H5E_CANTDECODE, NULL, "can't decode local heap header"); /* Allocate the heap prefix */ if (NULL == (prfx = H5HL__prfx_new(heap))) @@ -436,7 +455,7 @@ H5HL__cache_prefix_deserialize(const void *_image, size_t H5_ATTR_NDEBUG_UNUSED /* Allocate space for the heap data image */ 
if (NULL == (heap->dblk_image = H5FL_BLK_MALLOC(lheap_chunk, heap->dblk_size))) - HGOTO_ERROR(H5E_HEAP, H5E_CANTALLOC, NULL, "memory allocation failed") + HGOTO_ERROR(H5E_HEAP, H5E_CANTALLOC, NULL, "memory allocation failed"); /* Set image to the start of the data block. This is necessary * because there may be a gap between the used portion of the @@ -444,11 +463,13 @@ H5HL__cache_prefix_deserialize(const void *_image, size_t H5_ATTR_NDEBUG_UNUSED image = ((const uint8_t *)_image) + heap->prfx_size; /* Copy the heap data from the speculative read buffer */ + if (H5_IS_BUFFER_OVERFLOW(image, heap->dblk_size, p_end)) + HGOTO_ERROR(H5E_HEAP, H5E_OVERFLOW, NULL, "ran off end of input buffer while decoding"); H5MM_memcpy(heap->dblk_image, image, heap->dblk_size); /* Build free list */ if (H5HL__fl_deserialize(heap) < 0) - HGOTO_ERROR(H5E_HEAP, H5E_CANTINIT, NULL, "can't initialize free list") + HGOTO_ERROR(H5E_HEAP, H5E_CANTINIT, NULL, "can't initialize free list"); } else /* Note that the heap should _NOT_ be a single @@ -538,7 +559,6 @@ H5HL__cache_prefix_serialize(const H5_ATTR_NDEBUG_UNUSED H5F_t *f, void *_image, H5HL_prfx_t *prfx = (H5HL_prfx_t *)_thing; /* Pointer to local heap prefix to query */ H5HL_t *heap; /* Pointer to the local heap */ uint8_t *image = (uint8_t *)_image; /* Pointer into image buffer */ - size_t buf_size; /* expected size of the image buffer */ FUNC_ENTER_STATIC_NOERR @@ -555,11 +575,13 @@ H5HL__cache_prefix_serialize(const H5_ATTR_NDEBUG_UNUSED H5F_t *f, void *_image, heap = prfx->heap; HDassert(heap); +#ifndef NDEBUG /* Compute the buffer size */ - buf_size = heap->prfx_size; + size_t buf_size = heap->prfx_size; /* expected size of the image buffer */ if (heap->single_cache_obj) buf_size += heap->dblk_size; HDassert(len == buf_size); +#endif /* Update the free block value from the free list */ heap->free_block = heap->freelist ? 
heap->freelist->offset : H5HL_FREE_NULL; @@ -647,7 +669,7 @@ H5HL__cache_prefix_free_icr(void *_thing) /* Destroy local heap prefix */ if (H5HL__prfx_dest(prfx) < 0) - HGOTO_ERROR(H5E_HEAP, H5E_CANTRELEASE, FAIL, "can't destroy local heap prefix") + HGOTO_ERROR(H5E_HEAP, H5E_CANTRELEASE, FAIL, "can't destroy local heap prefix"); done: FUNC_LEAVE_NOAPI(ret_value) @@ -865,7 +887,7 @@ H5HL__cache_datablock_notify(H5C_notify_action_t action, void *_thing) /* Pin the heap's prefix */ if (FAIL == H5AC_pin_protected_entry(dblk->heap->prfx)) - HGOTO_ERROR(H5E_HEAP, H5E_CANTPIN, FAIL, "unable to pin local heap prefix") + HGOTO_ERROR(H5E_HEAP, H5E_CANTPIN, FAIL, "unable to pin local heap prefix"); break; case H5AC_NOTIFY_ACTION_AFTER_FLUSH: @@ -885,11 +907,11 @@ H5HL__cache_datablock_notify(H5C_notify_action_t action, void *_thing) /* Unpin the local heap prefix */ if (FAIL == H5AC_unpin_entry(dblk->heap->prfx)) - HGOTO_ERROR(H5E_HEAP, H5E_CANTUNPIN, FAIL, "unable to unpin local heap prefix") + HGOTO_ERROR(H5E_HEAP, H5E_CANTUNPIN, FAIL, "unable to unpin local heap prefix"); break; default: - HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "unknown action from metadata cache") + HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "unknown action from metadata cache"); break; } @@ -929,7 +951,7 @@ H5HL__cache_datablock_free_icr(void *_thing) /* Destroy the data block */ if (H5HL__dblk_dest(dblk) < 0) - HGOTO_ERROR(H5E_HEAP, H5E_CANTFREE, FAIL, "unable to destroy local heap data block") + HGOTO_ERROR(H5E_HEAP, H5E_CANTFREE, FAIL, "unable to destroy local heap data block"); done: FUNC_LEAVE_NOAPI(ret_value) diff --git a/src/H5Omessage.c b/src/H5Omessage.c index 8b065865a43..8c2d4de4f95 100644 --- a/src/H5Omessage.c +++ b/src/H5Omessage.c @@ -675,12 +675,11 @@ H5O_msg_free_real(const H5O_msg_class_t *type, void *msg_native) { FUNC_ENTER_NOAPI_NOINIT_NOERR - /* check args */ - HDassert(type); + /* Don't assert on args since this could be called in cleanup code */ if (msg_native) { 
H5O__msg_reset_real(type, msg_native); - if (NULL != (type->free)) + if (type && type->free) (type->free)(msg_native); else H5MM_xfree(msg_native); diff --git a/tools/src/h5dump/h5dump_ddl.c b/tools/src/h5dump/h5dump_ddl.c index e45a68fab7f..9f96af3b0e0 100644 --- a/tools/src/h5dump/h5dump_ddl.c +++ b/tools/src/h5dump/h5dump_ddl.c @@ -853,10 +853,7 @@ dump_group(hid_t gid, const char *name) H5Oget_info3(gid, &oinfo, H5O_INFO_BASIC); - /* Must check for uniqueness of all objects if we've traversed an elink, - * otherwise only check if the reference count > 1. - */ - if (oinfo.rc > 1 || hit_elink) { + { obj_t *found_obj; /* Found object */ found_obj = search_obj(group_table, &oinfo.token); @@ -880,10 +877,6 @@ dump_group(hid_t gid, const char *name) link_iteration(gid, crt_order_flags); } } - else { - attr_iteration(gid, attr_crt_order_flags); - link_iteration(gid, crt_order_flags); - } dump_indent -= COL; ctx.indent_level--; diff --git a/tools/testfiles/tgroup-2.ddl b/tools/testfiles/tgroup-2.ddl index 2ac8ac6ef5b..5374742a73a 100644 --- a/tools/testfiles/tgroup-2.ddl +++ b/tools/testfiles/tgroup-2.ddl @@ -17,14 +17,7 @@ GROUP "/" { } } GROUP "g2" { - GROUP "g2.1" { - GROUP "g2.1.1" { - } - GROUP "g2.1.2" { - } - GROUP "g2.1.3" { - } - } + HARDLINK "/g2" } GROUP "g3" { GROUP "g3.1" { From 9873dd38dfaf54164c18fe2b0deb0610b4f6c01d Mon Sep 17 00:00:00 2001 From: bmribler <39579120+bmribler@users.noreply.github.com> Date: Thu, 21 Sep 2023 03:34:44 -0400 Subject: [PATCH 101/108] Switched from UNTESTED to UNT (#3569) * Switched from UNTESTED to UNT This makes the lines shorter to keep CVE numbers on one line instead of breaking them into three lines. 
* Realligned the columns --- CVE_list-1_12.md | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/CVE_list-1_12.md b/CVE_list-1_12.md index d5c7f4173d0..06b9b9ea922 100644 --- a/CVE_list-1_12.md +++ b/CVE_list-1_12.md @@ -1,18 +1,18 @@ | CVE issue number | 1.12.0 | 1.12.1 | 1.12.2 | 1.12.3 | | :------------------------------------------------------------------------- | :----- | :----- | :----- | :----- | -| [CVE-2022-26061](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-26061) | UNTESTED | UNTESTED | UNTESTED | UNTESTED | -| [CVE-2022-25972](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-25972) | UNTESTED | UNTESTED | UNTESTED | UNTESTED | -| [CVE-2022-25942](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-25942) | UNTESTED | UNTESTED | UNTESTED | UNTESTED | +| [CVE-2022-26061](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-26061) | UNT | UNT | UNT | UNT | +| [CVE-2022-25972](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-25972) | UNT | UNT | UNT | UNT | +| [CVE-2022-25942](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-25942) | UNT | UNT | UNT | UNT | | [CVE-2021-46244](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-46244) | ❌ | ❌ | ❌ | ✅ | | [CVE-2021-46243](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-46243) | ❌ | ❌ | ❌ | ✅ | | [CVE-2021-46242](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-46242) | ❌ | ✅ | ✅ | ✅ | | [CVE-2021-45833](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-45833) | ❌ | ❌ | ❌ | ✅ | -| [CVE-2021-45832](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-45832) | UNTESTED | UNTESTED | UNTESTED | UNTESTED | +| [CVE-2021-45832](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-45832) | UNT | UNT | UNT | UNT | | [CVE-2021-45830](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-45830) | ✅ | ✅ | ✅ | ✅ | | [CVE-2021-45829](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-45829) | ✅ | ✅ | ✅ | ✅ | | 
[CVE-2021-37501](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-37501) | ❌ | ❌ | ❌ | ✅ | | [CVE-2021-36977](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-36977) | ✅ | ✅ | ✅ | ✅ | -| [CVE-2021-31009](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-31009) | N/A | N/A | N/A | N/A | +| [CVE-2021-31009](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-31009) | N/A | N/A | N/A | N/A | | [CVE-2020-10812](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-10812) | ❌ | ✅ | ✅ | ✅ | | [CVE-2020-10811](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-10811) | ❌ | ✅ | ✅ | ✅ | | [CVE-2020-10810](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-10810) | ✅ | ✅ | ✅ | ✅ | @@ -69,6 +69,7 @@ | [CVE-2016-4330](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2016-4330) | ✅ | ✅ | ✅ | ✅ | ## NOTES +* "UNT" denotes "UNTESTED" 1. CVE-2021-45832 has no known proof of vulnerability file. The H5E code that could produce an infinite loop has been reworked, but without a vulnerable file or test program it's difficult to tell if this issue has been fixed. The stack trace provided with the CVE only contains part of the trace, so we don't even know the entry point into the library. 2. CVE-2021-31009 is not a specific vulnerability against HDF5. 3. CVE-2022-25942, CVE-2022-25972, and CVE-2022-26061 are not tested. Those vulnerabilities involve the high-level GIF tools and can be avoided by disabling those tools at build time. 
From 5bd7cf645342bdbcd9faafdb920ffef43a41e2e8 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Mon, 25 Sep 2023 12:42:36 -0500 Subject: [PATCH 102/108] Revert the removal of java class H5GroupInfo (#3548) --- java/src/Makefile.am | 1 + java/src/hdf/hdf5lib/CMakeLists.txt | 1 + java/src/hdf/hdf5lib/HDF5GroupInfo.java | 188 ++++++++++++++++++++++++ 3 files changed, 190 insertions(+) create mode 100644 java/src/hdf/hdf5lib/HDF5GroupInfo.java diff --git a/java/src/Makefile.am b/java/src/Makefile.am index 35232ffc3aa..6afd84a56fa 100644 --- a/java/src/Makefile.am +++ b/java/src/Makefile.am @@ -108,6 +108,7 @@ hdf5_java_JAVA = \ ${pkgpath}/structs/H5AC_cache_config_t.java \ ${pkgpath}/H5.java \ ${pkgpath}/HDF5Constants.java \ + ${pkgpath}/HDF5GroupInfo.java \ ${pkgpath}/HDFArray.java \ ${pkgpath}/HDFNativeData.java diff --git a/java/src/hdf/hdf5lib/CMakeLists.txt b/java/src/hdf/hdf5lib/CMakeLists.txt index 1738bcf971a..ee7cb15a53d 100644 --- a/java/src/hdf/hdf5lib/CMakeLists.txt +++ b/java/src/hdf/hdf5lib/CMakeLists.txt @@ -101,6 +101,7 @@ set (HDF5_JAVADOC_HDF_HDF5_STRUCTS_SOURCES set (HDF5_JAVA_HDF_HDF5_SOURCES HDFArray.java HDF5Constants.java + HDF5GroupInfo.java HDFNativeData.java H5.java ) diff --git a/java/src/hdf/hdf5lib/HDF5GroupInfo.java b/java/src/hdf/hdf5lib/HDF5GroupInfo.java new file mode 100644 index 00000000000..7836cf648aa --- /dev/null +++ b/java/src/hdf/hdf5lib/HDF5GroupInfo.java @@ -0,0 +1,188 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the COPYING file, which can be found at the root of the source code * + * distribution tree, or in https://www.hdfgroup.org/licenses. 
* + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +package hdf.hdf5lib; + +/** + *

    + * This class is a container for the information reported about an HDF5 Object + * from the H5Gget_obj_info() method. + *

    + * The fileno and objno fields contain four values which uniquely identify an + * object among those HDF5 files which are open: if all four values are the same + * between two objects, then the two objects are the same (provided both files + * are still open). The nlink field is the number of hard links to the object or + * zero when information is being returned about a symbolic link (symbolic links + * do not have hard links but all other objects always have at least one). The + * type field contains the type of the object, one of H5G_GROUP, H5G_DATASET, or + * H5G_LINK. The mtime field contains the modification time. If information is + * being returned about a symbolic link then linklen will be the length of the + * link value (the name of the pointed-to object with the null terminator); + * otherwise linklen will be zero. Other fields may be added to this structure + * in the future. + * + * @deprecated Not for public use. It is not used by the library. + * This class assumes that an object can contain four values which uniquely identify an + * object among those HDF5 files which are open. This is no longer valid in future + * HDF5 releases. + */ + +@Deprecated +public class HDF5GroupInfo { + long[] fileno; + long[] objno; + int nlink; + int type; + long mtime; + int linklen; + + /** + * Container for the information reported about an HDF5 Object + * from the H5Gget_obj_info() method + */ + public HDF5GroupInfo() + { + fileno = new long[2]; + objno = new long[2]; + nlink = -1; + type = -1; + mtime = 0; + linklen = 0; + } + + /** + * Sets the HDF5 group information. Used by the JHI5. 
+ * + * @param fn + * File id number + * @param on + * Object id number + * @param nl + * Number of links + * @param t + * Type of the object + * @param mt + * Modification time + * @param len + * Length of link + **/ + public void setGroupInfo(long[] fn, long[] on, int nl, int t, long mt, int len) + { + fileno = fn; + objno = on; + nlink = nl; + type = t; + mtime = mt; + linklen = len; + } + + /** Resets all the group information to defaults. */ + public void reset() + { + fileno[0] = 0; + fileno[1] = 0; + objno[0] = 0; + objno[1] = 0; + nlink = -1; + type = -1; + mtime = 0; + linklen = 0; + } + + /** + * fileno accessors + * @return the file number if successful + */ + public long[] getFileno() { return fileno; } + + /** + * accessors + * @return the object number if successful + */ + public long[] getObjno() { return objno; } + + /** + * accessors + * @return type of group if successful + */ + public int getType() { return type; } + + /** + * accessors + * @return the number of links in the group if successful + */ + public int getNlink() { return nlink; } + + /** + * accessors + * @return the modified time value if successful + */ + public long getMtime() { return mtime; } + + /** + * accessors + * @return a length of link name if successful + */ + public int getLinklen() { return linklen; } + + /** + * The fileno and objno fields contain four values which uniquely identify + * an object among those HDF5 files. + */ + @Override + public boolean equals(Object obj) + { + if (!(obj instanceof HDF5GroupInfo)) { + return false; + } + + HDF5GroupInfo target = (HDF5GroupInfo)obj; + if ((fileno[0] == target.fileno[0]) && (fileno[1] == target.fileno[1]) && + (objno[0] == target.objno[0]) && (objno[1] == target.objno[1])) { + return true; + } + else { + return false; + } + } + + /** + * Returns the object id. + * + * @return the object id + */ + public long getOID() { return objno[0]; } + + /** + * Converts this object to a String representation. 
+ * + * @return a string representation of this object + */ + @Override + public String toString() + { + String fileStr = "fileno=null"; + String objStr = "objno=null"; + + if (fileno != null) { + fileStr = "fileno[0]=" + fileno[0] + ",fileno[1]=" + fileno[1]; + } + + if (objno != null) { + objStr = "objno[0]=" + objno[0] + ",objno[1]=" + objno[1]; + } + + return getClass().getName() + "[" + fileStr + "," + objStr + ",type=" + type + ",nlink=" + nlink + + ",mtime=" + mtime + ",linklen=" + linklen + "]"; + } +} From 830bf561e1236308cdffe0c519c6e779ec0929e3 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Tue, 26 Sep 2023 08:46:15 -0500 Subject: [PATCH 103/108] Merge CMake,doxygen changes from develop,1.10 branches (#3578) * Merge CMake,doxygen changes from develop,1.10 branches * revert incorrect option in AT TS build * Use variable for ignore-eol usage. * Add last_test depends logic * Just print status on CMake below 3.14 --- .github/workflows/main.yml | 89 +- .github/workflows/netcdf.yml | 3 + CMakeFilters.cmake | 2 +- CMakeInstallation.cmake | 8 +- config/cmake/fileCompareTest.cmake | 4 +- config/cmake/hdf5-config.cmake.in | 13 +- config/cmake/{patch.xml => patch.xml.in} | 2 +- config/cmake/runTest.cmake | 4 + config/cmake/scripts/HDF5options.cmake | 6 +- config/sanitizer/README.md | 2 +- config/toolchain/build32.cmake | 2 +- config/toolchain/clang.cmake | 16 +- config/toolchain/crayle.cmake | 10 +- config/toolchain/gcc.cmake | 12 +- config/toolchain/mingw64.cmake | 2 +- config/toolchain/pgi.cmake | 12 +- doxygen/dox/ExamplesAPI.dox | 1010 ++++++++++++++++++++++ doxygen/dox/GettingStarted.dox | 4 +- doxygen/dox/IntroHDF5.dox | 2 +- doxygen/dox/IntroParExamples.dox | 569 ++++++++++++ doxygen/dox/IntroParHDF5.dox | 271 ++++++ doxygen/dox/LearnBasics1.dox | 2 +- doxygen/img/pchunk_figa.gif | Bin 0 -> 2754 bytes doxygen/img/pchunk_figb.gif | Bin 0 -> 2094 bytes doxygen/img/pchunk_figc.gif | Bin 0 -> 3194 bytes 
doxygen/img/pchunk_figd.gif | Bin 0 -> 2984 bytes doxygen/img/pcont_hy_figa.gif | Bin 0 -> 3201 bytes doxygen/img/pcont_hy_figb.gif | Bin 0 -> 2450 bytes doxygen/img/pcont_hy_figc.gif | Bin 0 -> 3694 bytes doxygen/img/pcont_hy_figd.gif | Bin 0 -> 2723 bytes doxygen/img/ppatt_figa.gif | Bin 0 -> 2359 bytes doxygen/img/ppatt_figb.gif | Bin 0 -> 2431 bytes doxygen/img/ppatt_figc.gif | Bin 0 -> 2616 bytes doxygen/img/ppatt_figd.gif | Bin 0 -> 2505 bytes doxygen/img/preg_figa.gif | Bin 0 -> 2359 bytes doxygen/img/preg_figb.gif | Bin 0 -> 2033 bytes doxygen/img/preg_figc.gif | Bin 0 -> 3242 bytes doxygen/img/preg_figd.gif | Bin 0 -> 2367 bytes examples/CMakeTests.cmake | 4 +- java/examples/datasets/CMakeLists.txt | 2 +- tools/test/h5diff/CMakeTests.cmake | 4 +- tools/test/h5dump/CMakeTests.cmake | 8 +- tools/test/misc/CMakeTestsClear.cmake | 8 + 43 files changed, 2004 insertions(+), 67 deletions(-) rename config/cmake/{patch.xml => patch.xml.in} (80%) create mode 100644 doxygen/dox/ExamplesAPI.dox create mode 100644 doxygen/dox/IntroParExamples.dox create mode 100644 doxygen/dox/IntroParHDF5.dox create mode 100644 doxygen/img/pchunk_figa.gif create mode 100644 doxygen/img/pchunk_figb.gif create mode 100644 doxygen/img/pchunk_figc.gif create mode 100644 doxygen/img/pchunk_figd.gif create mode 100644 doxygen/img/pcont_hy_figa.gif create mode 100644 doxygen/img/pcont_hy_figb.gif create mode 100644 doxygen/img/pcont_hy_figc.gif create mode 100644 doxygen/img/pcont_hy_figd.gif create mode 100644 doxygen/img/ppatt_figa.gif create mode 100644 doxygen/img/ppatt_figb.gif create mode 100644 doxygen/img/ppatt_figc.gif create mode 100644 doxygen/img/ppatt_figd.gif create mode 100644 doxygen/img/preg_figa.gif create mode 100644 doxygen/img/preg_figb.gif create mode 100644 doxygen/img/preg_figc.gif create mode 100644 doxygen/img/preg_figd.gif diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index e87b62763f6..10b3a9cf2de 100644 --- a/.github/workflows/main.yml +++ 
b/.github/workflows/main.yml @@ -336,30 +336,31 @@ jobs: - name: Dump matrix context run: echo '${{ toJSON(matrix) }}' - - name: Install CMake Dependencies (Linux) - run: sudo apt-get install ninja-build doxygen graphviz - if: matrix.os == 'ubuntu-latest' - - - name: Install Autotools Dependencies (Linux, serial) + # Only CMake need ninja-build, but we just install it unilaterally + # libssl, etc. are needed for the ros3 VFD + - name: Install Linux Dependencies run: | sudo apt update - sudo apt install automake autoconf libtool libtool-bin - sudo apt install gcc-12 g++-12 gfortran-12 + sudo apt-get install ninja-build doxygen graphviz sudo apt install libssl3 libssl-dev libcurl4 libcurl4-openssl-dev + sudo apt install gcc-12 g++-12 gfortran-12 echo "CC=gcc-12" >> $GITHUB_ENV echo "CXX=g++-12" >> $GITHUB_ENV echo "FC=gfortran-12" >> $GITHUB_ENV + if: matrix.os == 'ubuntu-latest' + + # CMake gets libaec from fetchcontent + - name: Install Autotools Dependencies (Linux) + run: | + sudo apt install automake autoconf libtool libtool-bin sudo apt install libaec0 libaec-dev - if: (matrix.generator == 'autogen') && (matrix.parallel != 'enable') + if: (matrix.generator == 'autogen') - name: Install Autotools Dependencies (Linux, parallel) run: | - sudo apt update - sudo apt install automake autoconf libtool libtool-bin sudo apt install openmpi-bin openmpi-common mpi-default-dev echo "CC=mpicc" >> $GITHUB_ENV echo "FC=mpif90" >> $GITHUB_ENV - sudo apt install libaec0 libaec-dev if: (matrix.generator == 'autogen') && (matrix.parallel == 'enable') - name: Install Dependencies (Windows) @@ -390,7 +391,19 @@ jobs: sh ./autogen.sh mkdir "${{ runner.workspace }}/build" cd "${{ runner.workspace }}/build" - ${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --${{ matrix.deprec_sym }}-deprecated-symbols --with-default-api-version=${{ matrix.default_api }} --enable-shared --${{ matrix.parallel }}-parallel --${{ matrix.cpp }}-cxx 
--${{ matrix.fortran }}-fortran --${{ matrix.java }}-java --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd --${{ matrix.ros3_vfd }}-ros3-vfd --with-szlib=${{ matrix.szip }} + ${{ matrix.flags }} $GITHUB_WORKSPACE/configure \ + --enable-build-mode=${{ matrix.build_mode.autotools }} \ + --${{ matrix.deprec_sym }}-deprecated-symbols \ + --with-default-api-version=${{ matrix.default_api }} \ + --enable-shared \ + --${{ matrix.parallel }}-parallel \ + --${{ matrix.cpp }}-cxx \ + --${{ matrix.fortran }}-fortran \ + --${{ matrix.java }}-java \ + --${{ matrix.mirror_vfd }}-mirror-vfd \ + --${{ matrix.direct_vfd }}-direct-vfd \ + --${{ matrix.ros3_vfd }}-ros3-vfd \ + --with-szlib=${{ matrix.szip }} shell: bash if: (matrix.generator == 'autogen') && ! (matrix.thread_safety.enabled) @@ -399,7 +412,15 @@ jobs: sh ./autogen.sh mkdir "${{ runner.workspace }}/build" cd "${{ runner.workspace }}/build" - ${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --enable-shared --enable-threadsafe --disable-hl --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd --${{ matrix.ros3_vfd }}-ros3-vfd --with-szlib=${{ matrix.szip }} + ${{ matrix.flags }} $GITHUB_WORKSPACE/configure \ + --enable-build-mode=${{ matrix.build_mode.autotools }} \ + --enable-shared \ + --enable-threadsafe \ + --disable-hl \ + --${{ matrix.mirror_vfd }}-mirror-vfd \ + --${{ matrix.direct_vfd }}-direct-vfd \ + --${{ matrix.ros3_vfd }}-ros3-vfd \ + --with-szlib=${{ matrix.szip }} shell: bash if: (matrix.generator == 'autogen') && (matrix.thread_safety.enabled) @@ -411,7 +432,25 @@ jobs: run: | mkdir "${{ runner.workspace }}/build" cd "${{ runner.workspace }}/build" - cmake -C $GITHUB_WORKSPACE/config/cmake/cacheinit.cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON 
-DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=${{ matrix.cpp }} -DHDF5_BUILD_FORTRAN=${{ matrix.fortran }} -DHDF5_BUILD_JAVA=${{ matrix.java }} -DHDF5_BUILD_DOC=${{ matrix.docs }} -DBUILD_SZIP_WITH_FETCHCONTENT=${{ matrix.libaecfc }} -DLIBAEC_USE_LOCALCONTENT=${{ matrix.localaec }} -DBUILD_ZLIB_WITH_FETCHCONTENT=${{ matrix.zlibfc }} -DZLIB_USE_LOCALCONTENT=${{ matrix.localzlib }} -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} -DHDF5_ENABLE_ROS3_VFD:BOOL=${{ matrix.ros3_vfd }} $GITHUB_WORKSPACE + cmake -C $GITHUB_WORKSPACE/config/cmake/cacheinit.cmake \ + ${{ matrix.generator }} \ + -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} \ + -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} \ + -DBUILD_SHARED_LIBS=ON \ + -DHDF5_ENABLE_ALL_WARNINGS=ON \ + -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} \ + -DHDF5_BUILD_CPP_LIB:BOOL=${{ matrix.cpp }} \ + -DHDF5_BUILD_FORTRAN=${{ matrix.fortran }} \ + -DHDF5_BUILD_JAVA=${{ matrix.java }} \ + -DHDF5_BUILD_DOC=${{ matrix.docs }} \ + -DBUILD_SZIP_WITH_FETCHCONTENT=${{ matrix.libaecfc }} \ + -DLIBAEC_USE_LOCALCONTENT=${{ matrix.localaec }} \ + -DBUILD_ZLIB_WITH_FETCHCONTENT=${{ matrix.zlibfc }} \ + -DZLIB_USE_LOCALCONTENT=${{ matrix.localzlib }} \ + -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} \ + -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} \ + -DHDF5_ENABLE_ROS3_VFD:BOOL=${{ matrix.ros3_vfd }} \ + $GITHUB_WORKSPACE shell: bash if: (matrix.generator != 'autogen') && ! 
(matrix.thread_safety.enabled) @@ -420,7 +459,27 @@ jobs: run: | mkdir "${{ runner.workspace }}/build" cd "${{ runner.workspace }}/build" - cmake -C $GITHUB_WORKSPACE/config/cmake/cacheinit.cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=OFF -DHDF5_BUILD_FORTRAN:BOOL=OFF -DHDF5_BUILD_JAVA:BOOL=OFF -DHDF5_BUILD_HL_LIB:BOOL=OFF -DHDF5_BUILD_DOC=OFF -DBUILD_SZIP_WITH_FETCHCONTENT=${{ matrix.libaecfc }} -DLIBAEC_USE_LOCALCONTENT=${{ matrix.localaec }} -DBUILD_ZLIB_WITH_FETCHCONTENT=${{ matrix.zlibfc }} -DZLIB_USE_LOCALCONTENT=${{ matrix.localzlib }} -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} -DHDF5_ENABLE_ROS3_VFD:BOOL=${{ matrix.ros3_vfd }} $GITHUB_WORKSPACE + cmake -C $GITHUB_WORKSPACE/config/cmake/cacheinit.cmake \ + ${{ matrix.generator }} \ + -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} \ + -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} \ + -DBUILD_SHARED_LIBS=ON \ + -DHDF5_ENABLE_ALL_WARNINGS=ON \ + -DHDF5_ENABLE_THREADSAFE:BOOL=ON \ + -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} \ + -DHDF5_BUILD_CPP_LIB:BOOL=OFF \ + -DHDF5_BUILD_FORTRAN:BOOL=OFF \ + -DHDF5_BUILD_JAVA:BOOL=OFF \ + -DHDF5_BUILD_HL_LIB:BOOL=OFF \ + -DHDF5_BUILD_DOC=OFF \ + -DBUILD_SZIP_WITH_FETCHCONTENT=${{ matrix.libaecfc }} \ + -DLIBAEC_USE_LOCALCONTENT=${{ matrix.localaec }} \ + -DBUILD_ZLIB_WITH_FETCHCONTENT=${{ matrix.zlibfc }} \ + -DZLIB_USE_LOCALCONTENT=${{ matrix.localzlib }} \ + -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} \ + -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} \ + -DHDF5_ENABLE_ROS3_VFD:BOOL=${{ matrix.ros3_vfd }} \ + $GITHUB_WORKSPACE shell: bash if: (matrix.generator != 'autogen') && (matrix.thread_safety.enabled) diff --git 
a/.github/workflows/netcdf.yml b/.github/workflows/netcdf.yml index 0ec7541ee80..5b1ebf265ca 100644 --- a/.github/workflows/netcdf.yml +++ b/.github/workflows/netcdf.yml @@ -14,6 +14,9 @@ on: - 'COPYING**' - '**.md' +permissions: + contents: read + # Using concurrency to cancel any in-progress job or run concurrency: group: ${{ github.workflow }}-${{ github.sha || github.event.pull_request.number }} diff --git a/CMakeFilters.cmake b/CMakeFilters.cmake index 200634e1cb3..3a1a0de3a78 100644 --- a/CMakeFilters.cmake +++ b/CMakeFilters.cmake @@ -9,7 +9,7 @@ # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # -option (USE_LIBAEC "Use AEC library as SZip Filter" OFF) +option (USE_LIBAEC "Use AEC library as SZip Filter" ON) option (USE_LIBAEC_STATIC "Use static AEC library " OFF) option (ZLIB_USE_EXTERNAL "Use External Library Building for ZLIB" 0) option (SZIP_USE_EXTERNAL "Use External Library Building for SZIP" 0) diff --git a/CMakeInstallation.cmake b/CMakeInstallation.cmake index 1fb77141240..3aa79816c8c 100644 --- a/CMakeInstallation.cmake +++ b/CMakeInstallation.cmake @@ -390,7 +390,13 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES) set(CPACK_WIX_PROPERTY_ARPURLINFOABOUT "${HDF5_PACKAGE_URL}") set(CPACK_WIX_PROPERTY_ARPHELPLINK "${HDF5_PACKAGE_BUGREPORT}") if (BUILD_SHARED_LIBS) - set(CPACK_WIX_PATCH_FILE "${HDF_RESOURCES_DIR}/patch.xml") + if (${HDF_CFG_NAME} MATCHES "Debug" OR ${HDF_CFG_NAME} MATCHES "Developer") + set (WIX_CMP_NAME "${HDF5_LIB_NAME}${CMAKE_DEBUG_POSTFIX}") + else () + set (WIX_CMP_NAME "${HDF5_LIB_NAME}") + endif () + configure_file (${HDF_RESOURCES_DIR}/patch.xml.in ${HDF5_BINARY_DIR}/patch.xml @ONLY) + set(CPACK_WIX_PATCH_FILE "${HDF5_BINARY_DIR}/patch.xml") endif () elseif (APPLE) list (APPEND CPACK_GENERATOR "STGZ") diff --git a/config/cmake/fileCompareTest.cmake b/config/cmake/fileCompareTest.cmake index 4a8dc099618..f4c46f60ee7 100644 --- a/config/cmake/fileCompareTest.cmake +++ 
b/config/cmake/fileCompareTest.cmake @@ -59,7 +59,7 @@ if (TEST_STRINGS STREQUAL "YES") endif () else () if (CMAKE_VERSION VERSION_LESS "3.14.0") - message (FATAL_ERROR "CANNOT get file size, file command SIZE not supported") + message (STATUS "CANNOT get file size, file command SIZE not supported") else () file (SIZE ${TEST_FOLDER}/${TEST_ONEFILE} TEST_ONE_SIZE) file (SIZE ${TEST_FOLDER}/${TEST_TWOFILE} TEST_TWO_SIZE) @@ -74,7 +74,7 @@ else () elseif (TEST_FUNCTION MATCHES "LTEQ") if (TEST_ONE_SIZE LESS_EQUAL TEST_TWO_SIZE) if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSES "Passed: The size of ${TEST_FOLDER}/${TEST_ONEFILE} was less or equal ${TEST_FOLDER}/${TEST_TWOFILE}") + message (VERBOSE "Passed: The size of ${TEST_FOLDER}/${TEST_ONEFILE} was less or equal ${TEST_FOLDER}/${TEST_TWOFILE}") endif () else () message (FATAL_ERROR "The size of ${TEST_FOLDER}/${TEST_ONEFILE} was NOT less or equal ${TEST_FOLDER}/${TEST_TWOFILE}") diff --git a/config/cmake/hdf5-config.cmake.in b/config/cmake/hdf5-config.cmake.in index 699db896476..496d2607db2 100644 --- a/config/cmake/hdf5-config.cmake.in +++ b/config/cmake/hdf5-config.cmake.in @@ -44,6 +44,7 @@ set (${HDF5_PACKAGE_NAME}_ENABLE_PLUGIN_SUPPORT @HDF5_ENABLE_PLUGIN_SUPPORT@) set (${HDF5_PACKAGE_NAME}_ENABLE_Z_LIB_SUPPORT @HDF5_ENABLE_Z_LIB_SUPPORT@) set (${HDF5_PACKAGE_NAME}_ENABLE_SZIP_SUPPORT @HDF5_ENABLE_SZIP_SUPPORT@) set (${HDF5_PACKAGE_NAME}_ENABLE_SZIP_ENCODING @HDF5_ENABLE_SZIP_ENCODING@) +set (${HDF5_PACKAGE_NAME}_ENABLE_ROS3_VFD @HDF5_ENABLE_ROS3_VFD@) set (${HDF5_PACKAGE_NAME}_BUILD_SHARED_LIBS @H5_ENABLE_SHARED_LIB@) set (${HDF5_PACKAGE_NAME}_BUILD_STATIC_LIBS @H5_ENABLE_STATIC_LIB@) set (${HDF5_PACKAGE_NAME}_PACKAGE_EXTLIBS @HDF5_PACKAGE_EXTLIBS@) @@ -51,7 +52,8 @@ set (${HDF5_PACKAGE_NAME}_EXPORT_LIBRARIES @HDF5_LIBRARIES_TO_EXPORT@) set (${HDF5_PACKAGE_NAME}_ARCHITECTURE "@CMAKE_GENERATOR_ARCHITECTURE@") set (${HDF5_PACKAGE_NAME}_TOOLSET "@CMAKE_GENERATOR_TOOLSET@") set 
(${HDF5_PACKAGE_NAME}_DEFAULT_API_VERSION "@DEFAULT_API_VERSION@") -set (${HDF5_PACKAGE_NAME}_PARALLEL_FILTERED_WRITES "@PARALLEL_FILTERED_WRITES@") +set (${HDF5_PACKAGE_NAME}_PARALLEL_FILTERED_WRITES @PARALLEL_FILTERED_WRITES@) +set (${HDF5_PACKAGE_NAME}_INSTALL_MOD_FORTRAN "@HDF5_INSTALL_MOD_FORTRAN@") #----------------------------------------------------------------------------- # Dependencies @@ -67,6 +69,11 @@ if (${HDF5_PACKAGE_NAME}_ENABLE_PARALLEL) find_package(MPI QUIET REQUIRED) endif () +if (${HDF5_PACKAGE_NAME}_ENABLE_THREADSAFE) + set(THREADS_PREFER_PTHREAD_FLAG ON) + find_package(Threads QUIET REQUIRED) +endif () + if (${HDF5_PACKAGE_NAME}_BUILD_JAVA) set (${HDF5_PACKAGE_NAME}_JAVA_INCLUDE_DIRS @PACKAGE_CURRENT_BUILD_DIR@/lib/jarhdf5-@HDF5_VERSION_STRING@.jar @@ -143,14 +150,14 @@ foreach (comp IN LISTS ${HDF5_PACKAGE_NAME}_FIND_COMPONENTS) list (REMOVE_ITEM ${HDF5_PACKAGE_NAME}_FIND_COMPONENTS ${comp}) set (${HDF5_PACKAGE_NAME}_LIB_TYPE ${${HDF5_PACKAGE_NAME}_LIB_TYPE} ${comp}) - if (${HDF5_PACKAGE_NAME}_BUILD_FORTRAN) + if (${HDF5_PACKAGE_NAME}_BUILD_FORTRAN AND ${HDF5_PACKAGE_NAME}_INSTALL_MOD_FORTRAN STREQUAL "SHARED") set (${HDF5_PACKAGE_NAME}_INCLUDE_DIR_FORTRAN "@PACKAGE_INCLUDE_INSTALL_DIR@/shared") endif () elseif (comp STREQUAL "static") list (REMOVE_ITEM ${HDF5_PACKAGE_NAME}_FIND_COMPONENTS ${comp}) set (${HDF5_PACKAGE_NAME}_LIB_TYPE ${${HDF5_PACKAGE_NAME}_LIB_TYPE} ${comp}) - if (${HDF5_PACKAGE_NAME}_BUILD_FORTRAN) + if (${HDF5_PACKAGE_NAME}_BUILD_FORTRAN AND ${HDF5_PACKAGE_NAME}_INSTALL_MOD_FORTRAN STREQUAL "STATIC") set (${HDF5_PACKAGE_NAME}_INCLUDE_DIR_FORTRAN "@PACKAGE_INCLUDE_INSTALL_DIR@/static") endif () endif () diff --git a/config/cmake/patch.xml b/config/cmake/patch.xml.in similarity index 80% rename from config/cmake/patch.xml rename to config/cmake/patch.xml.in index 1bdff3e7c2e..d6843e12697 100644 --- a/config/cmake/patch.xml +++ b/config/cmake/patch.xml.in @@ -1,5 +1,5 @@ - + + +\section sec_exapi_desc Examples Description 
+The C, FORTRAN and Java examples below point to the examples in the hdf5-examples github repository. Examples for older versions of HDF5 +are handled by setting the appropriate USE_API_xxx definition. HDF5-1.6 examples are in a "16"-named subdirectory. + +The Java examples are in the HDF5-1.10 source code, and the Java Object package examples are in the HDFView source. +Please note that you must comment out the "package" statement at the top when downloading a Java Object example individually. + +The MATLAB and Python examples were generously provided by a user and are not tested. + +Languages are C, Fortran, Java (JHI5), Java Object Package, Python (High Level), and Python (Low Level APIs). + +\subsection sec_exapi_dsets Datasets + +

    The dataspace is used to describe both the logical layout of a dataset and a subset of a dataset.
    Create a file C Fortran C++ Java Python +C Fortran C++ Java Python
    Create a dataset C Fortran C++ Java Python +C Fortran C++ Java Python
    Read and write to a dataset C Fortran C++ Java Python +C Fortran C++ Java Python
    Create an attribute C Fortran C++ Java Python +C Fortran C++ Java Python
    Create a group C Fortran C++ Java Python +C Fortran C++ Java Python
    Create groups in a file using absolute and relative paths C Fortran C++ Java Python +C Fortran C++ Java Python
    Create datasets in a group C Fortran C++ Java Python +C Fortran C++ Java Python
    Create a file and dataset and select/read a subset from the dataset C Fortran C++ Java Python +C Fortran C++ Java Python Also see examples to Write by row (and column) below.
    Create an extendible (unlimited dimension) dataset C Fortran C++ Java Python +C Fortran C++ Java Python Also see examples to Extend by row (and column) below
    Create a chunked and compressed dataset C Fortran C++ Java Python +C Fortran C++ Java Python
    Family #H5FD_FAMILYWith this driver, the HDF5 file’s address space is partitioned into pieces and sent to -separate storage files using an underlying driver of the user’s choice. This driver is for +With this driver, the HDF5 file's address space is partitioned into pieces and sent to +separate storage files using an underlying driver of the user's choice. This driver is for systems that do not support files larger than 2 gigabytes. #H5Pset_fapl_family
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    FeatureLanguagesHDF5 FileOutputDDL
    Set Space Allocation Time for Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_alloc.h5h5ex_d_alloc.tsth5ex_d_alloc.ddl
    Read / Write Dataset using Fletcher32 Checksum Filter +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_checksum.h5h5ex_d_checksum.tsth5ex_d_checksum.ddl
    Read / Write Chunked Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_chunk.h5h5ex_d_chunk.tsth5ex_d_chunk.ddl
    Read / Write Compact Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_compact.h5h5ex_d_compact.tsth5ex_d_compact.ddl
    Read / Write to External Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_extern.h5h5ex_d_extern.tsth5ex_d_extern.ddl
    Read / Write Dataset w/ Fill Value +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_fillval.h5h5ex_d_fillval.tsth5ex_d_fillval.ddl
    Read / Write GZIP Compressed Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_gzip.h5h5ex_d_gzip.tsth5ex_d_gzip.ddl
    Read / Write Data by Hyperslabs +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_hyper.h5h5ex_d_hyper.tsth5ex_d_hyper.ddl
    Read / Write Dataset with n-bit Filter +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_nbit.h5h5ex_d_nbit.tsth5ex_d_nbit.ddl
    Read / Write Integer Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_rdwrc.h5h5ex_d_rdwrc.tsth5ex_d_rdwr.ddl
    Read / Write Dataset w/ Shuffle Filter and GZIP Compression +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_shuffle.h5h5ex_d_shuffle.tsth5ex_d_shuffle.ddl
    Read / Write Dataset using Scale-Offset Filter (float) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_sofloat.h5h5ex_d_sofloat.tsth5ex_d_sofloat.ddl
    Read / Write Dataset using Scale-Offset Filter (integer) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_soint.h5h5ex_d_soint.tsth5ex_d_soint.ddl
    Read / Write Dataset using SZIP Compression +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_szip.h5h5ex_d_szip.tsth5ex_d_szip.ddl
    Read / Write Dataset using Data Transform Expression +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_transform.h5h5ex_d_transform.tsth5ex_d_transform.ddl
    Read / Write Unlimited Dimension Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_unlimadd.h5h5ex_d_unlimadd.tsth5ex_d_unlimadd.ddl
    Read / Write GZIP Compressed Unlimited Dimension Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_unlimgzip.h5h5ex_d_unlimgzip.tsth5ex_d_unlimgzip.ddl
    Read / Write / Edit Unlimited Dimension Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_unlimmod.h5h5ex_d_unlimmod.tsth5ex_d_unlimmod.ddl
    + +\subsection sec_exapi_grps Groups + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    FeatureLanguagesHDF5 FileOutputDDL
    Create "compact-or-indexed" Format Groups +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_g_compact.h5h5ex_g_.tsth5ex_g_compact1.ddlh5ex_g_compact2.ddl
    Track links in a Group by Creation Order +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_g_corder.h5h5ex_g_corder.tsth5ex_g_corder.ddl
    Create / Open / Close a Group +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_g_create.h5h5ex_g_create.tsth5ex_g_create.ddl
    Create Intermediate Groups +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_g_intermediate.h5h5ex_g_intermediate.tsth5ex_g_intermediate.ddl
    Iterate over Groups w/ H5Literate +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_g_iterate.h5h5ex_g_iterate.tsth5ex_g_iterate.ddl
    Set Conditions to Convert between Compact and Dense Groups +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_g_phase.h5h5ex_g_phase.tsth5ex_g_phase.ddl
    Recursively Traverse a File with H5Literate +C + FORTRAN +Java + JavaObj MATLAB PyHigh PyLow +h5ex_g_traverse.h5h5ex_g_traverse.tsth5ex_g_traverse.ddl
    Recursively Traverse a File with H5Ovisit / H5Lvisit +C + FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_g_visit.h5h5ex_g_visit.tsth5ex_g_visit.ddl
    + +\subsection sec_exapi_dtypes Datatypes + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    FeatureLanguagesHDF5 FileOutputDDL
    Read / Write Array (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_arrayatt.h5h5ex_t_arrayatt.tsth5ex_t_arrayatt.ddl
    Read / Write Array (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_array.h5h5ex_t_array.tsth5ex_t_array.ddl
    Read / Write Bitfield (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_bitatt.h5h5ex_t_bitatt.tsth5ex_t_bitatt.ddl
    Read / Write Bitfield (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_bit.h5h5ex_t_bit.tsth5ex_t_bit.ddl
    Read / Write Compound (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_cmpdatt.h5h5ex_t_cmpdatt.tsth5ex_t_cmpdatt.ddl
    Read / Write Compound (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_cmpd.h5h5ex_t_cmpd.tsth5ex_t_cmpd.ddl
    Commit Named Datatype and Read Back +C + FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_commit.h5h5ex_t_commit.tsth5ex_t_commit.ddl
    Convert Between Datatypes in Memory +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_t_convert.h5h5ex_t_convert.tsth5ex_t_convert.ddl
    Read / Write Complex Compound (Attribute) +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_t_cpxcmpdatt.h5h5ex_t_cpxcmpdatt.tsth5ex_t_cpxcmpdatt.ddl
    Read / Write Complex Compound (Dataset) +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_t_cpxcmpd.h5h5ex_t_cpxcmpd.tsth5ex_t_cpxcmpd.ddl
    Read / Write Enumerated (Attribute) +C +FORTRAN + Java JavaObj MATLAB PyHigh PyLow +h5ex_t_enumatt.h5h5ex_t_enumatt.tsth5ex_t_enumatt.ddl
    Read / Write Enumerated (Dataset) +C +FORTRAN + Java JavaObj MATLAB PyHigh PyLow +h5ex_t_enum.h5h5ex_t_enum.tsth5ex_t_enum.ddl
    Read / Write Floating Point (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_floatatt.h5h5ex_t_floatatt.tsth5ex_t_floatatt.ddl
    Read / Write Floating Point (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_float.h5h5ex_t_float.tsth5ex_t_float.ddl
    Read / Write Integer Datatype (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_intatt.h5h5ex_t_intatt.tsth5ex_t_intatt.ddl
    Read / Write Integer Datatype (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_int.h5h5ex_t_int.tsth5ex_t_int.ddl
    Read / Write Object References (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_objrefatt.h5h5ex_t_objrefatt.tsth5ex_t_objrefatt.ddl
    Read / Write Object References (Dataset) +C +FORTRAN +Java + JavaObj + MATLAB PyHigh PyLow +h5ex_t_objref.h5h5ex_t_objref.tsth5ex_t_objref.ddl
    Read / Write Opaque (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_opaqueatt.h5h5ex_t_opaqueatt.tsth5ex_t_opaqueatt.ddl
    Read / Write Opaque (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_opaque.h5h5ex_t_opaque.tsth5ex_t_opaque.ddl
    Read / Write Region References (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_regrefatt.h5h5ex_t_regrefatt.tsth5ex_t_regrefatt.ddl
    Read / Write Region References (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_regref.h5h5ex_t_regref.tsth5ex_t_regref.ddl
    Read / Write String (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_stringatt.h5h5ex_t_stringatt.tsth5ex_t_stringatt.ddl
    Read / Write String (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_string.h5h5ex_t_string.tsth5ex_t_string.ddl
    Read / Write Variable Length (Attribute) +C +FORTRAN + Java JavaObj MATLAB PyHigh PyLow +h5ex_t_vlenatt.h5h5ex_t_vlenatt.tsth5ex_t_vlenatt.ddl
    Read / Write Variable Length (Dataset) +C +FORTRAN + Java JavaObj MATLAB PyHigh PyLow +h5ex_t_vlen.h5h5ex_t_vlen.tsth5ex_t_vlen.ddl
    Read / Write Variable Length String (Attribute) +C +FORTRAN + Java JavaObj MATLAB PyHigh PyLow +h5ex_t_vlstringatt.h5h5ex_t_vlstringatt.tsth5ex_t_vlstringatt.ddl
    Read / Write Variable Length String (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_vlstring.h5h5ex_t_vlstring.tsth5ex_t_vlstring.ddl
    + +\subsection sec_exapi_filts Filters + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    FeatureLanguagesHDF5 FileOutputDDL
    Read / Write Dataset using Blosc Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_blosc.h5h5ex_d_blosc.tsth5ex_d_blosc.ddl
    Read / Write Dataset using Bit Shuffle Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_bshuf.h5h5ex_d_bshuf.tsth5ex_d_bshuf.ddl
    Read / Write Dataset using BZip2 Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_bzip2.h5h5ex_d_bzip2.tsth5ex_d_bzip2.ddl
    Read / Write Dataset using JPEG Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_jpeg.h5h5ex_d_jpeg.tsth5ex_d_jpeg.ddl
    Read / Write Dataset using LZ4 Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_lz4.h5h5ex_d_lz4.tsth5ex_d_lz4.ddl
    Read / Write Dataset using LZF Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_lzf.h5h5ex_d_lzf.tsth5ex_d_lzf.ddl
    Read / Write Dataset using MAFISC Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_mafisc.h5h5ex_d_mafisc.tsth5ex_d_mafisc.ddl
    Read / Write Dataset using ZFP Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_zfp.h5h5ex_d_zfp.tsth5ex_d_zfp.ddl
    Read / Write Dataset using ZStd Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_zstd.h5h5ex_d_zstd.tsth5ex_d_zstd.ddl
    + +\subsection sec_exapi_java Java General + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    FeatureLanguagesOutput
    Create/Read/Write an Attribute +Java +JavaObj +HDF5AttributeCreate.txt
    Create Datasets +Java +JavaObj +HDF5DatasetCreate.txt
    Read/Write Datasets +Java +JavaObj +HDF5DatasetRead.txt
    Create an Empty File +Java +JavaObj +HDF5FileCreate.txt
    Retrieve the File Structure +Java + JavaObj +HDF5FileStructure.txt
    Create Groups +Java +JavaObj +HDF5GroupCreate.txt
    Select a Subset of a Dataset +Java + JavaObj +HDF5SubsetSelect.txt
    Create Two Datasets Within Groups +Java +JavaObj +HDF5GroupDatasetCreate.txt
    + + +\subsection sec_exapi_par Parallel + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    FeatureLanguagesHDF5 FileOutput
    Creating and Accessing a File +C +FORTRAN + MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
    Creating and Accessing a Dataset +C +FORTRAN + MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
    Writing and Reading Contiguous Hyperslabs +C +FORTRAN + MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
    Writing and Reading Regularly Spaced Data Hyperslabs +C +FORTRAN + MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
    Writing and Reading Pattern Hyperslabs +C +FORTRAN + MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
    Writing and Reading Chunk Hyperslabs +C +FORTRAN + MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
    Using the Subfiling VFD to Write a File Striped Across Multiple Subfiles +C + FORTRAN MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
    Write to Datasets with Filters Applied +C + FORTRAN MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
    Collectively Write Datasets with Filters and Not All Ranks have Data +C + FORTRAN MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
    + + +
    +Navigate back: \ref index "Main" / \ref GettingStarted + +*/ diff --git a/doxygen/dox/GettingStarted.dox b/doxygen/dox/GettingStarted.dox index 29c503319d0..87f3566361e 100644 --- a/doxygen/dox/GettingStarted.dox +++ b/doxygen/dox/GettingStarted.dox @@ -50,10 +50,10 @@ Parallel HDF5, and the HDF5-1.10 VDS and SWMR new features: -
    Introduction to Parallel HDF5 +\ref IntroParHDF5 -A brief introduction to Parallel HDF5. If you are new to HDF5 please see the @ref LearnBasics topic first. +A brief introduction to Parallel HDF5. If you are new to HDF5 please see the @ref LearnBasics topic first. diff --git a/doxygen/dox/IntroHDF5.dox b/doxygen/dox/IntroHDF5.dox index 3ca7d00b091..2c25659b8f3 100644 --- a/doxygen/dox/IntroHDF5.dox +++ b/doxygen/dox/IntroHDF5.dox @@ -607,7 +607,7 @@ on the HDF-EOS Tools and Information Center pag \section secHDF5Examples Examples \li \ref LBExamples -\li Examples by API +\li \ref ExAPI \li Examples in the Source Code \li Other Examples diff --git a/doxygen/dox/IntroParExamples.dox b/doxygen/dox/IntroParExamples.dox new file mode 100644 index 00000000000..39291063dc7 --- /dev/null +++ b/doxygen/dox/IntroParExamples.dox @@ -0,0 +1,569 @@ +/** @page IntroParContHyperslab Writing by Contiguous Hyperslab + +Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 +
    + +This example shows how to write a contiguous buffer in memory to a contiguous hyperslab in a file. In this case, +each parallel process writes a contiguous hyperslab to the file. + +In the C example (figure a), each hyperslab in memory consists of an equal number of consecutive rows. In the FORTRAN +90 example (figure b), each hyperslab in memory consists of +an equal number of consecutive columns. This reflects the difference in the storage order for C and FORTRAN 90. + + + + + + + + +
    Figure a C ExampleFigure b Fortran Example
    +\image html pcont_hy_figa.gif + +\image html pcont_hy_figb.gif +
    + +\section secIntroParContHyperslabC Writing a Contiguous Hyperslab in C +In this example, you have a dataset of 8 (rows) x 5 (columns) and each process writes an equal number +of rows to the dataset. The dataset hyperslab is defined as follows: +\code + count [0] = dimsf [0] / number_processes + count [1] = dimsf [1] +\endcode +where, +\code + dimsf [0] is the number of rows in the dataset + dimsf [1] is the number of columns in the dataset +\endcode +The offset for the hyperslab is different for each process: +\code + offset [0] = k * count[0] + offset [1] = 0 +\endcode +where, +\code + "k" is the process id number + count [0] is the number of rows written in each hyperslab + offset [1] = 0 indicates to start at the beginning of the row +\endcode + +The number of processes that you could use would be 1, 2, 4, or 8. The number of rows that would be written by each slab is as follows: + + + + + + + + + + + + + +
    ProcessesSize of count[0](\# of rows)
    18
    24
    42
    81
    + +If using 4 processes, then process 1 would look like: + + + + +
    +\image html pcont_hy_figc.gif +
    + +The code would look like the following: +\code + 71 /* + 72 * Each process defines dataset in memory and writes it to the hyperslab + 73 * in the file. + 74 */ + 75 count[0] = dimsf[0]/mpi_size; + 76 count[1] = dimsf[1]; + 77 offset[0] = mpi_rank * count[0]; + 78 offset[1] = 0; + 79 memspace = H5Screate_simple(RANK, count, NULL); + 80 + 81 /* + 82 * Select hyperslab in the file. + 83 */ + 84 filespace = H5Dget_space(dset_id); + 85 H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL); +\endcode + +Below is the example program: + + + + +
    +hyperslab_by_row.c +
    + +If using this example with 4 processes, then, +\li Process 0 writes "10"s to the file. +\li Process 1 writes "11"s. +\li Process 2 writes "12"s. +\li Process 3 writes "13"s. + +The following is the output from h5dump for the HDF5 file created by this example using 4 processes: +\code +HDF5 "SDS_row.h5" { +GROUP "/" { + DATASET "IntArray" { + DATATYPE H5T_STD_I32BE + DATASPACE SIMPLE { ( 8, 5 ) / ( 8, 5 ) } + DATA { + 10, 10, 10, 10, 10, + 10, 10, 10, 10, 10, + 11, 11, 11, 11, 11, + 11, 11, 11, 11, 11, + 12, 12, 12, 12, 12, + 12, 12, 12, 12, 12, + 13, 13, 13, 13, 13, + 13, 13, 13, 13, 13 + } + } +} +} +\endcode + + +\section secIntroParContHyperslabFort Writing a Contiguous Hyperslab in Fortran +In this example you have a dataset of 5 (rows) x 8 (columns). Since a contiguous hyperslab in Fortran 90 +consists of consecutive columns, each process will be writing an equal number of columns to the dataset. + +You would define the size of the hyperslab to write to the dataset as follows: +\code + count(1) = dimsf(1) + count(2) = dimsf(2) / number_of_processes +\endcode + +where, +\code + dimsf(1) is the number of rows in the dataset + dimsf(2) is the number of columns +\endcode + +The offset for the hyperslab dimension would be different for each process: +\code + offset (1) = 0 + offset (2) = k * count (2) +\endcode + +where, +\code + offset (1) = 0 indicates to start at the beginning of the column + "k" is the process id number + "count(2) is the number of columns to be written by each hyperslab +\endcode + +The number of processes that could be used in this example are 1, 2, 4, or 8. The number of +columns that could be written by each slab is as follows: + + + + + + + + + + + + + +
    ProcessesSize of count (2)(\# of columns)
    18
    24
    42
    81
    + +If using 4 processes, the offset and count parameters for Process 1 would look like: + + + + +
    +\image html pcont_hy_figd.gif +
    + +The code would look like the following: +\code + 69 ! Each process defines dataset in memory and writes it to the hyperslab + 70 ! in the file. + 71 ! + 72 count(1) = dimsf(1) + 73 count(2) = dimsf(2)/mpi_size + 74 offset(1) = 0 + 75 offset(2) = mpi_rank * count(2) + 76 CALL h5screate_simple_f(rank, count, memspace, error) + 77 ! + 78 ! Select hyperslab in the file. + 79 ! + 80 CALL h5dget_space_f(dset_id, filespace, error) + 81 CALL h5sselect_hyperslab_f (filespace, H5S_SELECT_SET_F, offset, count, error) +\endcode + +Below is the F90 example program which illustrates how to write contiguous hyperslabs by column in Parallel HDF5: + + + + +
    +hyperslab_by_col.F90 +
    + +If you run this program with 4 processes and look at the output with h5dump you will notice that the output is +much like the output shown above for the C example. This is because h5dump is written in C. The data would be +displayed in columns if it were printed using Fortran 90 code. + +
    +Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 + +@page IntroParRegularSpaced Writing by Regularly Spaced Data + +Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 +
    + +In this case, each process writes data from a contiguous buffer into disconnected locations in the file, using a regular pattern. + +In C it is done by selecting a hyperslab in a file that consists of regularly spaced columns. In F90, it is done by selecting a +hyperslab in a file that consists of regularly spaced rows. + + + + + + + + +
    Figure a C ExampleFigure b Fortran Example
    +\image html preg_figa.gif + +\image html preg_figb.gif +
    + +\section secIntroParRegularSpacedC Writing Regularly Spaced Columns in C +In this example, you have two processes that write to the same dataset, each writing to +every other column in the dataset. For each process the hyperslab in the file is set up as follows: +\code + 89 count[0] = 1; + 90 count[1] = dimsm[1]; + 91 offset[0] = 0; + 92 offset[1] = mpi_rank; + 93 stride[0] = 1; + 94 stride[1] = 2; + 95 block[0] = dimsf[0]; + 96 block[1] = 1; +\endcode + +The stride is 2 for dimension 1 to indicate that every other position along this +dimension will be written to. A stride of 1 indicates that every position along a dimension will be written to. + +For two processes, the mpi_rank will be either 0 or 1. Therefore: +\li Process 0 writes to even columns (0, 2, 4...) +\li Process 1 writes to odd columns (1, 3, 5...) + +The block size allows each process to write a column of data to every other position in the dataset. + + + + + +
    +\image html preg_figc.gif +
    + +Below is an example program for writing hyperslabs by column in Parallel HDF5: + + + + +
    +hyperslab_by_col.c +
    + +The following is the output from h5dump for the HDF5 file created by this example: +\code +HDF5 "SDS_col.h5" { +GROUP "/" { + DATASET "IntArray" { + DATATYPE H5T_STD_I32BE + DATASPACE SIMPLE { ( 8, 6 ) / ( 8, 6 ) } + DATA { + 1, 2, 10, 20, 100, 200, + 1, 2, 10, 20, 100, 200, + 1, 2, 10, 20, 100, 200, + 1, 2, 10, 20, 100, 200, + 1, 2, 10, 20, 100, 200, + 1, 2, 10, 20, 100, 200, + 1, 2, 10, 20, 100, 200, + 1, 2, 10, 20, 100, 200 + } + } +} +} +\endcode + + +\section secIntroParRegularSpacedFort Writing Regularly Spaced Rows in Fortran +In this example, you have two processes that write to the same dataset, each writing to every +other row in the dataset. For each process the hyperslab in the file is set up as follows: + + +You would define the size of the hyperslab to write to the dataset as follows: +\code + 83 ! Each process defines dataset in memory and writes it to + 84 ! the hyperslab in the file. + 85 ! + 86 count(1) = dimsm(1) + 87 count(2) = 1 + 88 offset(1) = mpi_rank + 89 offset(2) = 0 + 90 stride(1) = 2 + 91 stride(2) = 1 + 92 block(1) = 1 + 93 block(2) = dimsf(2) +\endcode + +The stride is 2 for dimension 1 to indicate that every other position along this dimension will +be written to. A stride of 1 indicates that every position along a dimension will be written to. + +For two process, the mpi_rank will be either 0 or 1. Therefore: +\li Process 0 writes to even rows (0, 2, 4 ...) +\li Process 1 writes to odd rows (1, 3, 5 ...) + +The block size allows each process to write a row of data to every other position in the dataset, +rather than just a point of data. + +The following shows the data written by Process 1 to the file: + + + + +
    +\image html preg_figd.gif +
    + +Below is the example program for writing hyperslabs by row in Parallel HDF5: + + + + +
    +hyperslab_by_row.F90 +
    + +The output for h5dump on the file created by this program will look like the output as shown above for the C example. This is +because h5dump is written in C. The data would be displayed in rows if it were printed using Fortran 90 code. + +
    +Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 + +@page IntroParPattern Writing by Pattern + +Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 +
    + +This is another example of writing data into disconnected locations in a file. Each process writes data from the contiguous +buffer into regularly scattered locations in the file. + +Each process defines a hyperslab in the file as described below and writes data to it. The C and Fortran 90 examples below +result in the same data layout in the file. + + + + + + + + + +
    Figure a C ExampleFigure b Fortran Example
    +\image html ppatt_figa.gif + +\image html ppatt_figb.gif +
    + +The C and Fortran 90 examples use four processes to write the pattern shown above. Each process defines a hyperslab by: +\li Specifying a stride of 2 for each dimension, which indicates that you wish to write to every other position along a dimension. +\li Specifying a different offset for each process: + + + + + + + + + + + + + + +
    CProcess 0Process 1Process 2Process 3
    offset[0] = 0offset[0] = 1offset[0] = 0offset[0] = 1
    offset[1] = 0offset[1] = 0offset[1] = 1offset[1] = 1
    FortranProcess 0Process 1Process 2Process 3
    offset(1) = 0offset(1) = 0offset(1) = 1offset(1) = 1
    offset(2) = 0offset(2) = 1offset(2) = 0offset(2) = 1
    +\li Specifying the size of the slab to write. The count is the number of positions along a dimension to write to. If writing a 4 x 2 slab, +then the count would be: + + + + + + + + +
    CFortran
    count[0] = 4count(1) = 2
    count[1] = 2count(2) = 4
    + +For example, the offset, count, and stride parameters for Process 2 would look like: + + + + + + + + +
    Figure a C ExampleFigure b Fortran Example
    +\image html ppatt_figc.gif + +\image html ppatt_figd.gif +
    + +Below are example programs for writing hyperslabs by pattern in Parallel HDF5: + + + + + + + +
    +hyperslab_by_pattern.c +
    +hyperslab_by_pattern.F90 +
    + +The following is the output from h5dump for the HDF5 file created in this example: +\code +HDF5 "SDS_pat.h5" { +GROUP "/" { + DATASET "IntArray" { + DATATYPE H5T_STD_I32BE + DATASPACE SIMPLE { ( 8, 4 ) / ( 8, 4 ) } + DATA { + 1, 3, 1, 3, + 2, 4, 2, 4, + 1, 3, 1, 3, + 2, 4, 2, 4, + 1, 3, 1, 3, + 2, 4, 2, 4, + 1, 3, 1, 3, + 2, 4, 2, 4 + } + } +} +} +\endcode +The h5dump utility is written in C so the output is in C order. + + +
    +Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 + +@page IntroParChunk Writing by Chunk + +Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 +
    + +In this example each process writes a "chunk" of data to a dataset. The C and Fortran 90 +examples result in the same data layout in the file. + + + + + + + + + +
    Figure a C ExampleFigure b Fortran Example
    +\image html pchunk_figa.gif + +\image html pchunk_figb.gif +
    + +For this example, four processes are used, and a 4 x 2 chunk is written to the dataset by each process. + +To do this, you would: +\li Use the block parameter to specify a chunk of size 4 x 2 (or 2 x 4 for Fortran). +\li Use a different offset (start) for each process, based on the chunk size: + + + + + + + + + + + + + + +
    CProcess 0Process 1Process 2Process 3
    offset[0] = 0offset[0] = 0offset[0] = 4offset[0] = 4
    offset[1] = 0offset[1] = 2offset[1] = 0offset[1] = 2
    FortranProcess 0Process 1Process 2Process 3
    offset(1) = 0offset(1) = 2offset(1) = 0offset(1) = 2
    offset(2) = 0offset(2) = 0offset(2) = 4offset(2) = 4
    + +For example, the offset and block parameters for Process 2 would look like: + + + + + + + + +
    Figure a C ExampleFigure b Fortran Example
    +\image html pchunk_figc.gif + +\image html pchunk_figd.gif +
    + +Below are example programs for writing hyperslabs by chunk in Parallel HDF5: + + + + + + +
    +hyperslab_by_chunk.c +
    +hyperslab_by_chunk.F90 +
    + +The following is the output from h5dump for the HDF5 file created in this example: +\code +HDF5 "SDS_chnk.h5" { +GROUP "/" { + DATASET "IntArray" { + DATATYPE H5T_STD_I32BE + DATASPACE SIMPLE { ( 8, 4 ) / ( 8, 4 ) } + DATA { + 1, 1, 2, 2, + 1, 1, 2, 2, + 1, 1, 2, 2, + 1, 1, 2, 2, + 3, 3, 4, 4, + 3, 3, 4, 4, + 3, 3, 4, 4, + 3, 3, 4, 4 + } + } +} +} +\endcode +The h5dump utility is written in C so the output is in C order. + +
    +Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 + +*/ diff --git a/doxygen/dox/IntroParHDF5.dox b/doxygen/dox/IntroParHDF5.dox new file mode 100644 index 00000000000..1f04e968e44 --- /dev/null +++ b/doxygen/dox/IntroParHDF5.dox @@ -0,0 +1,271 @@ +/** @page IntroParHDF5 A Brief Introduction to Parallel HDF5 + +Navigate back: \ref index "Main" / \ref GettingStarted +
    + +If you are new to HDF5 please see the @ref LearnBasics topic first. + +\section sec_pintro_overview Overview of Parallel HDF5 (PHDF5) Design +There were several requirements that we had for Parallel HDF5 (PHDF5). These were: +\li Parallel HDF5 files had to be compatible with serial HDF5 files and sharable +between different serial and parallel platforms. +\li Parallel HDF5 had to be designed to have a single file image to all processes, +rather than having one file per process. Having one file per process can cause expensive +post processing, and the files are not usable by different processes. +\li A standard parallel I/O interface had to be portable to different platforms. + +With these requirements of HDF5 our initial target was to support MPI programming, but not +for shared memory programming. We had done some experimentation with thread-safe support +for Pthreads and for OpenMP, and decided to use these. + +Implementation requirements were to: +\li Not use Threads, since they were not commonly supported in 1998 when we were looking at this. +\li Not have a reserved process, as this might interfere with parallel algorithms. +\li Not spawn any processes, as this is not even commonly supported now. + +The following shows the Parallel HDF5 implementation layers. + + +\subsection subsec_pintro_prog Parallel Programming with HDF5 +This tutorial assumes that you are somewhat familiar with parallel programming with MPI (Message Passing Interface). + +If you are not familiar with parallel programming, here is a tutorial that may be of interest: +Tutorial on HDF5 I/O tuning at NERSC + +Some of the terms that you must understand in this tutorial are: +
      +
    • +MPI Communicator +Allows a group of processes to communicate with each other. + +Following are the MPI routines for initializing MPI and the communicator and finalizing a session with MPI: + + + + + + + + + + + + + + + + + + + + + + + + + + +
      CFortranDescription
      MPI_InitMPI_INITInitialize MPI (MPI_COMM_WORLD usually)
      MPI_Comm_sizeMPI_COMM_SIZEDefine how many processes are contained in the communicator
      MPI_Comm_rankMPI_COMM_RANKDefine the process ID number within the communicator (from 0 to n-1)
      MPI_FinalizeMPI_FINALIZEExiting MPI
      +
    • +
    • +Collective +MPI defines this to mean all processes of the communicator must participate in the right order. +
    • +
    + +Parallel HDF5 opens a parallel file with a communicator. It returns a file handle to be used for future access to the file. + +All processes are required to participate in the collective Parallel HDF5 API. Different files can be opened using different communicators. + +Examples of what you can do with the Parallel HDF5 collective API: +\li File Operation: Create, open and close a file +\li Object Creation: Create, open, and close a dataset +\li Object Structure: Extend a dataset (increase dimension sizes) +\li Dataset Operations: Write to or read from a dataset +(Array data transfer can be collective or independent.) + +Once a file is opened by the processes of a communicator: +\li All parts of the file are accessible by all processes. +\li All objects in the file are accessible by all processes. +\li Multiple processes write to the same dataset. +\li Each process writes to an individual dataset. + +Please refer to the Supported Configuration Features Summary in the release notes for the current release +of HDF5 for an up-to-date list of the platforms that we support Parallel HDF5 on. + + +\subsection subsec_pintro_create_file Creating and Accessing a File with PHDF5 +The programming model for creating and accessing a file is as follows: +
      +
    1. Set up an access template object to control the file access mechanism.
    2. +
    3. Open the file.
    4. +
    5. Close the file.
    6. +
    + +Each process of the MPI communicator creates an access template and sets it up with MPI parallel +access information. This is done with the #H5Pcreate call to obtain the file access property list +and the #H5Pset_fapl_mpio call to set up parallel I/O access. + +Following is example code for creating an access template in HDF5: +C +\code + 23 MPI_Comm comm = MPI_COMM_WORLD; + 24 MPI_Info info = MPI_INFO_NULL; + 25 + 26 /* + 27 * Initialize MPI + 28 */ + 29 MPI_Init(&argc, &argv); + 30 MPI_Comm_size(comm, &mpi_size); + 31 MPI_Comm_rank(comm, &mpi_rank); + 32 + 33 /* + 34 * Set up file access property list with parallel I/O access + 35 */ + 36 plist_id = H5Pcreate(H5P_FILE_ACCESS); 37 H5Pset_fapl_mpio(plist_id, comm, info); +\endcode + +Fortran +\code + 23 comm = MPI_COMM_WORLD + 24 info = MPI_INFO_NULL + 25 + 26 CALL MPI_INIT(mpierror) + 27 CALL MPI_COMM_SIZE(comm, mpi_size, mpierror) + 28 CALL MPI_COMM_RANK(comm, mpi_rank, mpierror) + 29 ! + 30 ! Initialize FORTRAN interface + 31 ! + 32 CALL h5open_f(error) + 33 + 34 ! + 35 ! Setup file access property list with parallel I/O access. + 36 ! + 37 CALL h5pcreate_f(H5P_FILE_ACCESS_F, plist_id, error) 38 CALL h5pset_fapl_mpio_f(plist_id, comm, info, error) +\endcode + +The following example programs create an HDF5 file using Parallel HDF5: +C: file_create.c +F90: file_create.F90 + + +\subsection subsec_pintro_create_dset Creating and Accessing a Dataset with PHDF5 +The programming model for creating and accessing a dataset is as follows: +
      +
    1. +Create or open a Parallel HDF5 file with a collective call to: +#H5Dcreate +#H5Dopen +
    2. +
    3. +Obtain a copy of the file transfer property list and set it to use collective or independent I/O. +
        +
      • +Do this by first passing a data transfer property list class type to: #H5Pcreate +
      • +
      • +Then set the data transfer mode to either use independent I/O access or to use collective I/O, with a call to: #H5Pset_dxpl_mpio + +Following are the parameters required by this call: +C +\code + herr_t H5Pset_dxpl_mpio (hid_t dxpl_id, H5FD_mpio_xfer_t xfer_mode ) + dxpl_id IN: Data transfer property list identifier + xfer_mode IN: Transfer mode: + H5FD_MPIO_INDEPENDENT - use independent I/O access + (default) + H5FD_MPIO_COLLECTIVE - use collective I/O access +\endcode + +Fortran +\code + h5pset_dxpl_mpi_f (prp_id, data_xfer_mode, hdferr) + prp_id IN: Property List Identifier (INTEGER (HID_T)) + data_xfer_mode IN: Data transfer mode (INTEGER) + H5FD_MPIO_INDEPENDENT_F (0) + H5FD_MPIO_COLLECTIVE_F (1) + hdferr IN: Error code (INTEGER) +\endcode +
      • +
      • +Access the dataset with the defined transfer property list. +All processes that have opened a dataset may do collective I/O. Each process may do an independent +and arbitrary number of data I/O access calls, using: +#H5Dwrite +#H5Dread + +If a dataset is unlimited, you can extend it with a collective call to: #H5Dextend +
      • +
      +
    4. +
    + +The following code demonstrates a collective write using Parallel HDF5: +C +\code + 95 /* + 96 * Create property list for collective dataset write. + 97 */ + 98 plist_id = H5Pcreate (H5P_DATASET_XFER); 99 H5Pset_dxpl_mpio (plist_id, H5FD_MPIO_COLLECTIVE); + 100 + 101 status = H5Dwrite (dset_id, H5T_NATIVE_INT, memspace, filespace, + 102 plist_id, data); +\endcode + +Fortran +\code + 108 ! Create property list for collective dataset write + 109 ! + 110 CALL h5pcreate_f (H5P_DATASET_XFER_F, plist_id, error) 111 CALL h5pset_dxpl_mpio_f (plist_id, H5FD_MPIO_COLLECTIVE_F, error) + 112 + 113 ! + 114 ! Write the dataset collectively. + 115 ! + 116 CALL h5dwrite_f (dset_id, H5T_NATIVE_INTEGER, data, dimsfi, error, & + 117 file_space_id = filespace, mem_space_id = memspace, xfer_prp = plist_id) +\endcode + +The following example programs create an HDF5 dataset using Parallel HDF5: +C: dataset.c +F90: dataset.F90 + + +\subsubsection subsec_pintro_hyperslabs Hyperslabs +The programming model for writing and reading hyperslabs is: +\li Each process defines the memory and file hyperslabs. +\li Each process executes a partial write/read call which is either collective or independent. + +The memory and file hyperslabs in the first step are defined with the #H5Sselect_hyperslab. + +The start (or offset), count, stride, and block parameters define the portion of the dataset +to write to. By changing the values of these parameters you can write hyperslabs with Parallel +HDF5 by contiguous hyperslab, by regularly spaced data in a column/row, by patterns, and by chunks: + + + + + + + + + + + + + + 
    +\li @subpage IntroParContHyperslab +
    +\li @subpage IntroParRegularSpaced +
    +\li @subpage IntroParPattern +
    +\li @subpage IntroParChunk +
    + + +
    +Navigate back: \ref index "Main" / \ref GettingStarted + +*/ diff --git a/doxygen/dox/LearnBasics1.dox b/doxygen/dox/LearnBasics1.dox index a9b6d0e71fd..53c8e0aab99 100644 --- a/doxygen/dox/LearnBasics1.dox +++ b/doxygen/dox/LearnBasics1.dox @@ -642,7 +642,7 @@ See the programming example for an illustration of the use of these calls. \subsection subsecLBDsetCreateContent File Contents The contents of the file dset.h5 (dsetf.h5 for FORTRAN) are shown below: - +
    Contents of dset.h5 ( dsetf.h5)Contents of dset.h5 (dsetf.h5)
    \image html imgLBDsetCreate.gif diff --git a/doxygen/img/pchunk_figa.gif b/doxygen/img/pchunk_figa.gif new file mode 100644 index 0000000000000000000000000000000000000000..90b49c0cda7097d9237a32333130e9dd5dabe9d1 GIT binary patch literal 2754 zcmV;z3O)5lNk%v~VM_t10Q3L=0000@O-*QMXncHp00960W&n(gjGCI7r>Cc~va-Fs zy~oGL(9qD?+1cUY;pyq=@bK{e|Nj6000000000000000000000000000000000000 z000000000000000EC2ui080U=000I4U?h%YX`X1Ru59bRa4gSsZQppV?|kq7z@TtQ zEEWOgw8}WX!MP~i-ru8@#Ea8r z+0fhKuqNQ)C-0~3vGQW+$$;!Z`1Uoy{@UOp#R}cDXP6G2%O#CjG(QNb73=eC;kb(! zGiuz(v7^V2AVZ2ANwTELlPLaEs$9vkrOTHvW6GRKv!>0PICJXU$+M@=pFo2O9ZIyQ z(W6L{DqYI7sne%Wqe|uZsH)YgShGf)8WU210wTVKEwD#y1hZxVfIStF?DP-@Rb~5B_3!)-s1DD-Nyv@e0p` zU^`-NIkCUiwl~+tT~Gxj0t8GDFb2Msi}4Z4rw_m0k^1-Sp--tjyR7oq+)L%I-knME z>Xb=`oeOuJ7MCNewf;p{aU!r6o`ch+*WiTly?|bO7=mXAd<}*tpA^(FM%sM{DtI7W zC$jdT53S8+-G46r#@T)Y&iD#}>XD$HAUTepp>Pfc*&rkwuD2o-mAS|pfPHOeBWCpM z7Gi4jH7Ohm&Jl=ZmMs8KmSc;pi>BDPp*T<(LUbZ0jALXCOlGZ<}Gb|@Zs>=AYX zdS80jr*30ZNn4VlL8+e!oz15tSoSfAUvVk6#b|=7$taBjV z_~vMJrbdbysXDZXW|?_e${%O64E9@(c1DPyju+yX>w83s+Uq@9@n+nyTQPy1lCr@j zYYJ&Tw0V_jFPcC*sSV6HmiX_2PV1Nqgr12;Dnk^$Z3Cj2K#Hg6GO}ext76-uZfkS#xa~t zxZ0e?Y1NtTg86ldC9W9qn5U1!u3T}pGyh12xJ=%A+ngDvtY;F-7OFF&CPPc@y54;Y z?Vs+|>~9hzr$*vvM|(FPvQ@o_b=Jjg{fLEL7lPcWY31v4GBoz9D4S+Oda%a^8)zwn zn{Mdv!WNhCsj#6|{4^362gYrQ2TM&b%1?;wXw)i88z-&&-qzq16u$`CA;Q1>v@PvAPF0{JLk2HK=8ZX+u{d1 zx`9u84 z)S7i4C#makG+Y7$K?ktaMX-Walc4UFc0m9J%6UkX;~PCVMgsmWJA*i&tWJ12)2)Dr zLYmji!pIK?2CPtLBi!_^#zEn5@Hdwv-zCT9JL@emMXJ#r50zl74O+62UE5?TO?k6? 
z&CiTID;)&oH^x`G5nQlKSkm<99ob>9c5KQc91U5jVEQqHfD9f92bsn3pm2Xe{F5RB z$Vg5&5{5OBp$MzlC4gb`H#y{9$JXUT2{lZ76*|usJ9NGxa?)F#6y^JzI7*-mEr~Ir zB8~9(Gj|j(8Czs0kN^m(0g{c3i#r+V(0IimtdUBNyd^Pl$;(nG?Hj(d?9 zM!g8kKKd@LQB?vA+tff8kQ7~AC0;XC+Rt{i)IH3*nFFr}Nm(9Zh7CQUH*1Qea8jyw z3EOFeHifsJ7F3!;En`s&H&3KWVX33>q&^YF&$#+?ssLpd7YE8#plTIO5zQ+@znX$l zhV7KBBv2-Cd)roi5VjuetLJ`;yOEaGFUvF;A$c`X6d=-0(<~Bg{xLDwZ+aE6)BVO~ zv4_K(iW9OFlx!p@J3bY<7PEqNN4igApv(2!qb9^)+cjzcbZmHkYN+Y@uMcTiDrR zcWw1!>~kE;-f>3ud*vl>I^nfW0irihmt>lbF-J>)HC2~S^_hLelc%cMOut{M@sWuY z)7+lUzs{0qf(1Nf0S}p)zfA8gr4{19^6{V@HLew5xlju3YPHO*vX6oaT@eTBzLQ0* zlAqh2_qv0`ml>FM9}B&siMOXPwro~!e2<6GZ4crdPnP-q3kA?lO1@6r=a{QSWLM;O za#-aYCsmu;LLOqRN&aje+jw9FQ<$Qs-Y}>|W#=0><}qD3K9WR@9Da5Eb%*3_hUN&2nY^D#Hz^3r(4@#aHi15weLR$jS9o&|^B z3F3g2xJFs-5qZFR+g_%5&EY&=IS(<|qtZD@3XcBHrmY+0`*pX_JK}P^(L&m*R`}9u zZgFv7T)Is`l4)6v@Nm}7b7VIRe-sdaT~r#&Ma*P_S$4RXJ|UF=8C zmdPoba54S-?iU%myvsiAT=gATfFH-&we2@C5XO z-kpwj;afIw&tpE&&yyeY<)d%q;pau> zjZX3*abD%=7l`#S2laYeJN|31dFqG1dQtqN;rRasdzU49{`Y_Dw|94Qc**v61!#c& z=5#POdSHTf4aj7@cPtf{ff~4h9N2*#_<MvVOHnIs8-Zi z7ima1=_iSZ8KGCU$an-vS=lM6MwfZYz!z#t1jlJtDSE(%S{sQFn)EkX=vg@0Wh_>B#&H9W6agU2T1hovppi-R=Di9xgskUT%Jlp02*m-tPVmA1^;oUvKXu zAYhQcuRp;5&tE=)1p68McdwnXg$x`1N|=rdK!9#0>eFIz-XG#$8#q~Vnj)DW5JU!Rjw>Y6J$x0B2xyGh|=cGaX=4NoSD-n$&tZ^Li=?rflPxv zE$%c5^P)XGNKfnI{{Qmp<{|{h*0uD%Efd(FkV1nAb z$KXN=Vlmz<5pt4>0~0>@gd?N9g$rm1*Whm_lthSBE?vP~?X^UN}c*=ew&W`aqqm0AMH{$;IRVrS);##TtCBEvFtP>0mIscMFl zeukZ$yi)ipsCNpbC#h7Z0w=XJUK_=EM?UvkdY)qHtEjerlRC!``?WwswdJ zFsyyZ>JpK~>11iHbvTUVsNd@KtHCguQX`emvgIta?KY(BwVyfIEW3MDxoT>!_T*-W zZ(h7^%&W+3t}P`ZG^vRx;%p+LJ--Mfq>)8R>An(cif5(sii>p3gcW?Gz^yj5@}u1* zOzU?I?*Z|yy7n6J$(emd0+iAT@-@EChOpEF?GEk4FBeqnp>39=C&gzoT zVM`VGpHf2H`6)ENw`JO{Eo)NSaJ#+xh$onb-JITj{UhKC_&Ip&zGndW(6(EZIp@(Q zjc?|gE3N$U7tGE*_u8XCb}Aj8J*A&aUpi{}==+L(>EDAe7A~X@nX+E5lGJ|0TBvU+ z`&;w7I^?~t1T8uoU<}Bjjp)oe3q%N$@;55~tS5~i=z;t5Qo5v@b6Pa{9FD7Yly)lT8W2edQ(z91iBYZk%U_dBBrLOLM?irg@<{e z#$ss3A*!#6NK_*P;t&DOL8MfQf42EuP50Chk&yzf?&nXQ<3zt*%4IqgV3& 
zH%cgMkSvgF4H)%E$rM`BPrB6P*vi()xApFEx?55xt(ZzvVzHXp{KOa*p}TTPutTl$ z8!&}{$1c=T4Y&MUJN_$KO%=wlU&IL0^(wbH&DATJ(L3g&pg9?5R%C%FGSM`5HO3=m z5}Opgg+;fCqjA|mmN0eS62|$+4w@5)=;Rbce`&@{{&batvT1NEI=*?DZjnr7P`@x1 zP(Te-LIwS1A!h)KKsA+QSB*hdUs|y=5^tt}0w>H=wHESyReyzQ-Te+%5; z3U|0{IHgJ>{)$QBu3)RiQ10)JIL!6YF{qezE?fR6$aS6(f(Nzk1WuZ%3dXUT-Sw^} z_^I88{WPWK6+wEVd%@}Ivb`4iCu;VDP4K=KjpYrlsZO=0N=|pYQ|vEr{|i>}LKnO$ zt!{3G6+Srrk)QLWZ^;}(Eav{x!mN$yb@w?&0_V@bzjcgT(@DeyyOX^fUJ`~Qe8v1y z7`iQ9<3rWCU+y}Xz9&WH1Vwz=8h3z>5-lNv`6*-$q?fOAlrN2wtO4p~7rOl2aeI3_ zU=WYk%H}2VdymXwC~r^08y2&f&y40Yt9i|AZnK-;4Cgq@dCqjMvz_mZ=RE6q&wTE) YpZ^RG=s*j4(1b3up%0Dd1rz`PJN;WAMgRZ+ literal 0 HcmV?d00001 diff --git a/doxygen/img/pchunk_figc.gif b/doxygen/img/pchunk_figc.gif new file mode 100644 index 0000000000000000000000000000000000000000..9975a87669d773ab1ef98818c6528af03dd583ed GIT binary patch literal 3194 zcmV-=42APYNk%v~VN(Ir0Q3L=0000@O-*QMXncHp00960W&n(gjGCI7r>Cc~va-Fs zy~oGL(9qD?+1cUY;pyq=@bK{e|Nj6000000000000000000000000000000000000 z000000000000000EC2ui08;_f000I4U?h%YX`X1Ru59bRa4gSsZQppV?|kq7z@TtQ zEEgoR;)C5TjZB#efSZ+d@|jfzfmiI$L?W_6O3ccD{wl$xBUhoXRad5%$}tbv%J zn23z9cCM(qJ+`c>i>tK1q_wcWfR3QN%sjrTlEJIdg0s}Jv5mmZ+%~Scv&mJ_)WXET zrHSU(lczbbej2+ zA`~bfVtVxgbgWCKzWomO@6xN|p$twNQ22>c@C99Nn%E{UR{kkpcZ_-oCW&FTU%YKoy>>!k|Gn~Ij& z?5ua@VJeEs{%*D_JgIs>OrMjJpyKJ4q>QOA4^l}8; zuD({pY`NU}7-@?4QQMJL1Vm7tj&+#-|a>Z4Ps)a2^9Aak^~f4@>)FUH_OSvv$SKcCx!6$q<4q z_6d(Z#3K~SFlMO#0d0BkJK^(QC&3+UFI4Hm;Oce=k-?GhiW2$Y@InZm5f;vfAXEOL z{7|U82%hDEXoM9QMWMsl2~dYCbVCo(g}|{DuWaDT8PAZ|i1@kAA=xWd$}H$5405oB zhx{WAOIE3Poy?2B3zi8*HbxbiF%)&wUJQ-)y*M^$4hIpPSN@VX_X(qo3VUM?sYsAH zekzmQGL#U_7DrYt@|IU@oGBinHq9lGl8R(x6EQW&X|2P8o$Jqk08$D~fJ1uhVI1jh zMU*sBE{o~Y_%4qY0=X{&VODskAfh`|Y#W>cUYGd7eYStB@$`y{La^o>j;avQyzU;ZD(7)&0% z=aJi71tHgE$PO;ceB;a=-M;9~QaN&)knH9q0kKY>DY2I2(*``{G{t$A6sJgms5It* z%2Q5_fNY7LHJitd0)%h{?sbj31 z&N#}>n+h%M(&ijG&0rMo~G|yrbr@uw;ZPUD2vn7VfdC zzl5wUCnUbh#pY*cO)T8%s#raWwWE#wkW4K~R?aF6uS+Z;HMKfZ#IB7Wb4Bah?rPe} 
z3iB65r5zkwJHUa`;jeE*ENU04+0go-jIZ4&Fm219%AWADY|AWO{)Nj~*ZDXr>k!#;RE;khGWiV~Oi(#Yo zb-utAXop!bSeojWbG3c2gFOb}GYB}j1THUceaT@SXV}5~O>B)jf!$jU`3v0j=Fk{S z(-T5Z4yG8tt;2d!e%rW0|A=ZR(cGs^>lLz!zdp@qXd{9O(8MIt}AQuv@Q;XEK&~ zzmKLt=tfD+v6cmL%1H{>GR5Rdr#92ryC{x$N0ofNrNR~lGpc1!QGR|JO495ns8_Y? zSj&3WFtE!x$eEz3fKQ^5zLlR%-QY79nl8c?r#gB^&WdHV{;-~^ zyXjgVs~6BS4qB;!4TUjr_1eAtYkuPm;~6*FuI%2cbCpf$ZC{#m`F3n}xjJa4GTYh# zPX+uwd~RUg8_nxZ_;v3q-v^KR#Q&D@nd2;RBCq+m3XZP78Qt;m*7?;q)3%dK9^gI? zxZ#j}xVrDma!AAaNs8v&<2L^6n!8U|g!Q$neGdNRlkc$LX7xAGJ?>#jlkv4ONBVo0 zUUZ6|cy>8|Fv)pN^{VfO%|3Rsdo#}Qu3yXO*}k*V&93mKw|UD@Pq)-V)^oP!CDAX| zy1nIYcQ7Qo$5R&jbhqC3F3?%R8y9?d3-5&}FZ=1fjC$hh-gm6vo!)wPHs2XYK>u>RD zR<}&3oxZg{0EBV?DxWN=H-3i<9c^OA?!SVIum!`hyCw$-zIM}g?iym%k@kjrT&K&hDpxkD-`7%ww4y0=X{_h2kw@B2iR4u zlq{Hmd`B@+L)U=(v3lbYO{SN8(13x7ksP6f79ZFr{3!>13PH+{WUP1CbXi$eXc6WuZkQ%^J85X!=dj%Z5S4xhU10MKpa5#dVmWJE4 z2mAL`*(ZqF_kt}5iq$rLIg(R|{?mie7d^S=iC&mEfPsh2Bp^UYIm$4926$3Jm_c4c zgP14;)z?pN_=UE(9!|)8P&kGfGKHO(T@8d?+@*nM_)WMY7mb61S|}r}c#B{th2B?A zWr%*v_!~4hg~SJkn}`I5(}M-VPfxLjodyoGHy{B5N)R-F>9>T@XmZF{jQA*w;`oR5 z6NusHSXnrV+(?C_NPe7Pe{9$f^9YQ!NE+D~84H*NOrahK*nymJgo{Xe?g%>lm}-VN zYw7rHsK%{TgX>q5HpywHXn(ord3qRB g?)a07AdoJJltROlPWhAzP#Kj{Ih9n|2O$6eJCLzSi~s-t literal 0 HcmV?d00001 diff --git a/doxygen/img/pchunk_figd.gif b/doxygen/img/pchunk_figd.gif new file mode 100644 index 0000000000000000000000000000000000000000..45da389fa9d2d4b3cc9490313e5f069ec3f2ef24 GIT binary patch literal 2984 zcmV;Z3s>|Cc~va-j= z$I#Hw+1c6Q;o<4&>G1II|Ns900000000000000000000000000000000000000000 z000000000000000EC2ui06zi5000I4AS8}tX`X1Ru59bRa4gSsZQppV?|kq7z@TtQ zEEr0fC%^8OUSUH!-o(fN}NcsqQ#3C zGiuz(v7^V2AVZ2ANwTELlPLaEs$9vkrOTHvW6GRKv!>0PICGvc03d(}o<3E`dLRJ8 zf&c*)KoIair$Yw=01R*-;84-300bmBhz1IZ2IgcJ8G{3Y{r;UtRchs|V6!6XdsM231po|) zRSGHXwd>cgW6PfHL>g_8wr_*Xt=l8--W-1e*C>2A#^S~;B2P|nIdh55 zCsuVa9Yu9+EK+;5yZQ*wpFLL$EeZhv`R-4mKkSV6P=e`Iu>$C;XaM;Upetb&As_(H z1zUwz9$99MVpW#m|92R5q1_3vOl< zUITm<+Jy26KqnLH0GFC-O@VPJ3Fr}u4ovc4Hq%1Sx$$KVlvY|!rkW;1&@P{bD(a}D 
zmTD@gEnQkgE+Q0AK&!7R6057Q9y05#j?fB$t_6%j00E%@0Bi}r!t!e}$Lba3NvdW6 zz^$;#3hl18{!ZI#w7bU28>PQ(nR7n@O^a3`@eX#>#6L zxZ$*c?z*FxTZ+Hqu6ymU1d~$mu#Ny*Fa!h$E5gC|R&**l6Nl@iz>kn?tHAqa+p4ef zS`zZZ4bK~L!XtBeQL`|xJcPO)dmM1gz0S;Qzv)im?Ib*}*jy4W2Vpa=G@n~^#u&GY zFvy{#Os~m42Q7%N4>Me~)eB>NwZdClEj5ovQ=2r-(GFd!)o24-$-%X7owmbiGaT^< z)$O;!bt0r@9^HxRdrrlGKWYWpF~e-`#zl90Gs(d^UGfO-e6y4Xmo-8Xns5peEL#Pu#V^*CE_OmOU6mYSnBYzBg2eM4<9s)` z3YIHf=Zhe~5(vDJWpIHTl#BxvvOV}A!)*wIV%4aowI%W(h+;chCIIHc_d(2w^Ar9b z+`h;|E8r~!XDq>WkaMWLEv0WtL?G1sV#5X^&xIOGq5HyS!5bctWJzpW2g?|R&20ug zji8TILgXj?=*K^_bCBn-1G?-H@`N)ip%nDUt`)Y=f~k8U>0$^&aa9I<&k^I?kV-nX=EVvAF3}OQ0)|2K8M!-6-R>n$fl1!IGF1r3w5x1HlGMhJ)>?+VqM;u;S3G zm~2Q5(<;Z69y78RdMP7fiqo2PHW_lImRPgtSfBQEn8uV?6Xl0g@JZ2YX%TQ zEgM`d5n2ll7`+4wt|CUeV6o;`zlIGjcxU8T`j*zTcm3}Y*t=QxdRDXY#c6pPd|K;1 z4!asw0#s`&w%bNEx3x7KZP|BW1V0hD=S?xt0xYuuYofr^)s~4BJYv2kSjR|+ur(&C zmkOV^#@2nXdN~|Hx6+ieA^sSHjr=eh0~N(cHo}Ts+~OHqmA5jkDwiKyVa0}ZxM3DE zkJ;s8>CV@~f-Et9w|w6?zpTH2PN<*XKv*dc8VH3xw7!7;rszeBxzU!YSa?qb=|4EL z(gCp;rU{|xO;63!pbmA*L_Lj8lUgCBK6M_tsp>!!q}78kG_0L9>ss6T*0`R{b?k=5 z@ocA|xz5HR?Ba;b^$#yWS>$NSa+V4O+Ze3`pj&WfB;85MFMPS=vsV>uEDeTwoPZ`b zo}|ldkJNG`wT%D3LeDs<$v_1(w;UMyMtR%&-uTY9zW2@Ve*631d)PIn&e+EDIFtry zJVCZ;7Up?w|?cbfOKmPFaK-z%KI7b7`Ssy0*L23(L{22TE$Hs2NatKcyD?)Y7F$Q zH^7-d(!Kk=4FVSq!He?iyCWPq3(w!s^G0|G0XyQuo_JKgE})UGoY`n8yTa$r@$eiH zBf`Bi`+pqjGoO%C>A>fODM>2^TysIy+uDpDn>c_OnN_?j3RT z>OIr=PJ+zhn|>GMyE5uZn))MJ$;_))(=p0l{MOOkh^kON>=(aA%U^Kzd%{zH=4U$z z>=`Jy>&e}7(tNP(e(*5l-SB@WLf`YxfBrUn=R4w20(x^F_?H6tu_gV-0>-u=5+X^3 z7Z&jMIr7JRx|e`05OXe)3bAw-d0}%}S0b`TYSWW}Byc5*AtD}FMifYW7PtcZ!+>F- zC!NtJM)4=^HbCQNfRWdNOF(}-_=Du|e*#ibjQ4ALCj*JcD8I2rEubBacZ7qsfNob5 z4(NCi;%=%(cdEg8pV4uj2Nt1cdH6wkqhNX|Hz8S=gF1$R4Yz?eLOFZUBGFNMCSrTA zH(1oCBtYkTBmjIN7!xDdaD!1zCpdk{KyE6?h0ON zg>g57exf@AatUg90_8U*=ocRVI5c{0mlf^@C-4V~@+WUTXn#CcfIyf7`&Wy&n2Wl& ei@ey2zW9s47>vR=jKo-s#(0d#n2d!G0suPz&!Ncx literal 0 HcmV?d00001 diff --git a/doxygen/img/pcont_hy_figa.gif b/doxygen/img/pcont_hy_figa.gif new file mode 100644 index 
0000000000000000000000000000000000000000..1417d1700944ed0976a1be846158dacfb2cb6eca GIT binary patch literal 3201 zcmV-{41V)RNk%v~VNC(H0Q3L=0000@O-*QMXncHp00960W&n(gjGCI7r>Cc~va-Fs zy~oGL(9qD?+1cUY;pyq=@bK{e|Nj6000000000000000000000000000000000000 z000000000000000EC2ui08Ih5000I4U?h%YX`X1Ru59bRa4gSsZQppV?|kq7z@TtQ zEE)RO4x_*c92$m8|UwtUU&l&@eGbG^j1>i6y4K56$18vH|o&=7K) z5RwYW>YYYn5<^xTQA%0F9qj&+{e#DEpgUV9MXECyilmT-*<4~w$dMtam@>K1yj4@k zM}Px;W^}0V6VRZ{{Mamd)1g(4SY29r3N_cgp+GPl8cUXs*s^82;`=%^Y|~geSK=I| zRFyW1YQ35O3*_k5o?89RU3da-UOk3G@gcIa8dI!|?LAJB_^@RcDgAl0T9=;37L_lH zHZj;~+{rEfaV@cUv}@L=AqI|}b4KXbY9oiDtr_Oz8nh+dHhT#B2a7+-ZPM&)7*A*u*scksLj z$r_Bl$(S0EP=-`rj!wonPk91mDNKV!s-dp38mOmkylEVC@}QZKpP9T0sH>^M8tkII4vVU>M2#qCLwlZy zWjd>>O02Yv{sJp&p{4j*tgN)Q!KF%GGTG8h$VRIzwWr34uDQc@%SMq+f?H*ru!V}ip%Gm-|)KbufB#-?y3pzYwEvh;Q8ip=Pdh^!3S?kt;7Ca`=`c{ zLi{O@p3N(8ZB|e;(naOw%PPpJ_6w=Y(u#}*yjRM*qme1MC1nmAJ1VoPG>4opsvg_i zOu*q-Ov{sdw3RFn4C{OJ(2Bxb?bUk4yEDBxk8_aJN~gf?%LgBuw8vF*>lMi-etj}s z2a(BG%WP|%F5Vfx9qz!znaNYsg)v5Ir-h6v6=IuesyL~H?=?i*8(+P!-hu|5Zec3N zQS`L_mapA+!($gN#Mw2ct$CuK4~qG(E?gQR-wlVpZtI7ZZQX_v+D3c3X(N4m-oSO< zjqT^wZZF^BA>Xm;&dIN<8V%noqgW zzoX5560$duKDKOM|8V@NlPh}i%ljYjqENpgtd3CkJ74$s#XJ?rjxV2z9?|l5K!u6! 
z3e@|7^(u(J>~V{0ppjbQ*zmqqNib{B+a4GS_#zK7P=NB&SL_@}LicU(XsAQs{K~*W z6x@Y>@bexJrxn6v@#SY*NT3bTH$>OHA%i3_$qB*t!2n{91&eFWP;|$iDXK4pLcIRq z90qZV1>$Rc;p5i~jp(!9A?R^%gd!f|NJlajab;|*(-@@)!^&CljD5SK&!)mfJ?_Da zxdY=1ujoMV5CShqG~~?`2}l4|QGAlL6B@&aJxgvZlXhz2qd2HU;(@X&q`Vv@W7fb^ zj%k(Bb72tG=*plh5s^8h;wNj#CQ)V%mrLa3m~dIbt^M(N!j#AjeOb&=f-spA+2kTC z3A`Ob6CaYW#0av<&24(~5#Q7%IL9f@a&{A(g}mmwHkG(K8c&_#G3Pg(Ay1^7Q=75~ zr)2agLU?XN8;;0kO$4gVZ03`o0R?C|2U>!FQe_*Wawj83)wO#5#iIKpr~X0tX-|Hp zbCTe6C^SS`P<)oOn*<4|IYog|lj4G<@=WPTy_rvy?vtbrRfa>k`O#+FbE6vFW-&NQ z&wJ`rpFIT-Mt8%}i~jT$Plbm?lgh)GiW8P?YiwX0LpkWFc7)s1d7p<_j7 zSs%)beRy=KQvK;rgId#iQnh+bb!$-L8qT9?bguHeD}iPj(xR4>p)DoIPP=MYk_tA7 zXEo_!RjSpMMpm(hE$CyzU|4ecb)|c~YE+XYSG`2Fv!GpSUC)VHpvDuSS5+wqWGY)F zH8i%FRqIP zxNf|=NPjuC;ugDD5a2ziRnXK5|qgUj_%5Oirj8$ShPe|?T2@(U+nVLvuS=R zl9>Z4@@de(TZ!?Ft=wRkj&;uEYcQWP>)^=}IK6n@g?N7c{8u+~n6yCNt&vTv=1CiM zEo)8ij1%o!!X7%OT@y5g%PC`AuGKtuxs(CJ+qa zKkK@&q<))?1ufG)>zUZ9J_w^zTWdJCS<>|7U@%E1k5mBC!$+*;WbFQ#ZZ zU@h5bKU>agR#u=!K&TG3``s2?x4e@D?=RN7P}yAujq^NgLJu0Jex7Nv2?(fyvm4*a zFgUz>b#Q|t&Efcl-MS$jE%Ah#ox8m^0yWO=Y#n^v44?PLKaOv<`ZFpMFFC?hu5g=B zJmMaIdB`JP@`0bcXCm)-&gHExmbXRY9?!YWZ(jbsmm8enMyEH;XO(o=wS3}3C%P17 zUhkd*edjCZxbS>#a;bxx1Q4%DyHhT9l|vouW-nt293FMAKOx~HH&@)_&2^=VJm@}e zyW8#ebeh9m=00b8-;dsKmg{}*OQ8D4tsZy0FTCy}$NJ$BPamQTUhOGwH_{6)cfiA& z?UdJ$MYIn2xnrI59G`sXRZP(ruYQkiT8!2ifA`a4p6j$%x$0j}gLhU$8)3ga+0{OF zn4`V;Gbj7*o8UM@w6jvpw^W4ej78JC+w{5Dy!GWSbWCTy<4q@i;FV7CSbcuwWbi!< z%s>9W=Y9MmPrCcxy?nk~AMUj;{nqb|fBwS*i24(vf7z#VrKfm2M|$bC)M^{KZ{K_I)4+e=Y}qG53Dwhd>yUdIE@gjt7APs9gYP6x&yV2Y7!gXnP5m zcr^EUj0bU>_kEj3gC96wMxcK55q>#Xc=HE-8W@CU=RcD7f+OgHr?*)?sDMk@gfJL| zbr*yDk#?ZhfEjpvQ&N0c7%aJUC{W`P%of!G3sGbn@)$aFci`;RlPphy*}!iig8a;+Bk+r(*$zg>IOJVVH)=w{#tdc2<~7 z%NPW<_;|CIiv@Us(Aa|ka)%|dikd=*u1JZ4n26^{2c2kl{-=i|*MIakkMyX452uR6 zh>8{@fBYy1;b?;6xQVyOi3KSKMhJ7mD1JCth)@@i-Pn*aP>=dZfC-s|QizdHM2#S5 zjbRmh+4zOjc#$CK0tcCa9SM0X$chZPlHS6O>^G6`Xp@9Ulj}5!l){rh8I(dfltfvS nMtPJ-nUqSoluX%_PWhA)P#Kj{Ih9mdl~#F`Secbj5CQ-@LxfEo literal 0 HcmV?d00001 diff --git 
a/doxygen/img/pcont_hy_figb.gif b/doxygen/img/pcont_hy_figb.gif new file mode 100644 index 0000000000000000000000000000000000000000..a3b637b0562eb520c91061d732d29cb84717c852 GIT binary patch literal 2450 zcmV;D32pXANk%v~Vden00Pz3-000000RR7H0RR90EC2ui0OkO<00091oR6u??GK}z zwAzca-n{z{hT=$;=82~2%C_zc$MQ_q_KoNI&iDQg3<`(DqVb4KDwoWr69tG$4bzQu zTFqReRIX+V=!Tyz?{(n)j=5>=8f<;P$t^Q>tnQ-mZm~YUchE)$B^Ne$VHX(phsL3} zNT{+Eh-g6>>6Yafw`nOk(#FPzG5ASgN?NL#30fk0$Xb#5NO}fH)#yrc>(`0bo0x^U z3M;#doUFXe-0b`e9W6agU2T1hovppi-R=FY113IxEJ%KSihzzDv3|}i5GEjCFi%B4 zV4qK4P8DDf;NO6NXb27z7@*(1d<^Sx!N-tayMYJ({ljCCA}9WB@#^8T2SA_5jST=8 zJelwyK!FM!L4@e#Wv&M)HEJ|jF=tC?Aw7DO_Yo$9Fem|zDcW)jy`_4X)+1SxqCkp7 zb6VW_EM!oJKKIRZa1NH{+ zrV@u*r7G2CO^!;pYVBGWY|(>6{hC#~NUg$}ZT-PT%*`%cpk1LZsJzl>YQ7WCHhkDn z(8P@DYy$k-?eX1?yXzX>YfvvrvrnI*!DOSGNR4%s*L7nl zNL+0E*yUhS^Wj#XY1TxCAbNTEh247F$rKMBgjuKnMu}BL8-mY9$UT-76W2i}z+a0^=5AX*T`CXB+b;jduw1v40Z!Q-2k(+2Tc&45%)fm%_5Pa#}a-Gq+ zjA%jqsE?kg0T)(k#Elq8VBkfSB9CyYs12TLx#%d71@ZZtgL}N!C4Ge2glIA$3b+Xa zuns8etm&9zD?C$z1nQ-yvG-_j6QNq%YlG%^{-t=WCZp(2y~(CrpPZ4^Aa}V0wq%J- zzWF4l1opbCZ_)NCoLE`jNUWL6sS53ipf-z_nMe&e6r~KY$(>o7nmDJfEKWzAqrj3o zY>jpbN)xi4vAZlX@Xo`iP|?zhRBQGoiPmh&iU(zA;QkvbxiOww+?L(7v!lYpWCD*&uwm(Hb}YN2GTbAJ5x0wJnnYF`7{&?c zYnvle$=UEOw%)o&t^xMP&e*W}`)--a$($X{^;&x~h<;Wj=SDlPJdK@>^*S`4GZtLz z!6heMpP@Q6-P_Z6=Rz^H7Q=1zi-u+Xs`X($e=8-(RnACsshXv`wwEgV`Ao}osV&*m z^eSg_WFt0sH;U_Sqqk3<<}4Y}e;aMEQV2V+YO;qSKH9U3Yc2JeRL2%M)R<;Xw^d!c za(nGm{>C|}m zcI2tZ1qoq2UIlDJy0Z-6A{4me1#e*+^O)0&7plfd3WAV>+wm&5waOu&W80`612<=x z&Ta2=-cy%b!goSCWbkDIL!bV3YPP;VwaI-avfK|#2ej(-?SZ?C*3o{~fEg~WhDG!d z4ilt9*8JvTS;X4#Hg!2{7*TWBOQQC|6GIKc(1|95Vr~M6Kd~ipRs>`uI7$>nZSmq2 zj}*ovFBytTCW4Zg?4%|yfyod=(ny~Kr3gmpJP)8UG@b;(Dpy%VRKD_*rDTLFX(>ur z<_4D^kYz19Fv?)&@|VCAj%Ok1iFnZ-;dFrygBXgYJ5y{slRr`gPGKEj&V)aDns zX@zbw^PAq(f;fA5!4szQoUWYSI%~nsah776^o%Ds=NZm?+Ebph=$bqC$xD2ilbir; z=Qj~`2z?e5ovn(M1pbJD3SI7#R%0uMFdVv2O@Qq*8|7#*Bx+BL`ezj+&8P-?g`T@O za5DPiUWJ7B8IvOLYj>b2*$^qy4`$AI<&qS=Fe9QHrOc2yZNL;YG_b$fD;^+(AI~xc 
z(nqGSMQ)5EcHo0Ah^WzU_9^NIZ}>5h@d+Z|;X~E7#kg+e(W`<8>$}+YQ$QJvhEJ?e zRKao7f3v9v2SMw#+g>5=r9XV#VCZ*P2=0j<) z5Te+t&NZurEZk$Sdo?$PwX$WoY(6r}yq*b>JmdA)PTy)pw~;oGT%{0d;d`>Q>2P6Yr6~VMMZ(^sG+ZyebX$US~@#ZUBz7iL+#?5bv(mF(MUH7#tZt(#rtT-y} z)v{&u>r(eCSTFW=pB1iZ@{p3@Vr`g>2>vaI8%#909qhPwv|NAN_+RIW_KGf@?rW-h z;0|Z^P7L0QgS!Lax;=GvFxIPuZ!5@*=~iV0j#Gz?yi@v8MoGoRSz3b_J+5{59#KZn zlm#sQR{~=hOnct4W4bJ??k)td;2rFa?TJ#5>4QC@YN>8{8$LG|*~O#av;ohyXV3yz zdC|?UpuuNdEyozCEo?A%9&F#iO1QmcF0YTN4CM7Tmd)=CwS40oV#|*Bz9kmQiTA5# zU-y|xe?E!tq+8lHTd>fpUT~|uj5F^tdS5zz^3TkCYq#Y(hU`_1dwJ{aZ-EuqNFHmp z@oVgj;W6EMY_6XJ?Cd~>P^hTRGK}-5A#%v<({N~A+3sfUe{RzaO^yHcF%`3kmj0=UVU5nL4&3by)RU4?Or?4uU_<;8r^MbgZqYPmSEff#cOnQ za~#eQ7wd4}t>{2(7QAbft*E7p?~qq{z0xLXnH8*ZzqxR!e?3wfNQiJ1%eKcf|1q_K z%)Fd)_|3oWxtu9ZXA;|)3mH$Y){WlqxU-hybg#SJ?~eDp>wWKh@4Mgs4*0+ee(;1Z Qyx|Xz_{1w-0|fv8J6{deLjV8( literal 0 HcmV?d00001 diff --git a/doxygen/img/pcont_hy_figc.gif b/doxygen/img/pcont_hy_figc.gif new file mode 100644 index 0000000000000000000000000000000000000000..91bab7d7f0b3eb2695abb60d0d2feff3ff0db1bf GIT binary patch literal 3694 zcmV-!4w3OkNk%v~VT1wI0Q3L=0000@O-*QMXncHp00960W&n(gjGCI7r>Cc~va-Fs zy~oGL(9qD?+1cUY;pyq=@bK{e|Nj6000000000000000000000000000000000000 z000000000000000EC2ui0E7Y6000I4U?h%YX`X1Ru59bRa4gSsZQppV?|kq7z@TtQ zEE-g*blXNUWDBQm_6_ZCqKgWtDfTs%;#C?MKa;*<9+}i2~;ioIVfo9Lh83 zu9-cNx-d$#sne%Wqe`7hwW`&tShH%~%C)Q4uV9Jt@~D)B(y<}QG7Zbt3(~SnKmB^U zwyoVLWBtU`t2U+IQR4#N9qhK%E5RPqsyvK1u_?lkw=8z7i`Cp%X9sGI%=yaXw3Sa9 z?jyG`=+3C2d@#tgc=$cMVdr4ls$Zo@YSF$zk*0qC2gZo&x@mgp}6F<(px%0Ml zpG&tjeO>02m$^o-ekQZ!*;co6FTxj{NMLBoQ(k`>tN8IkWV)UYQfBGmz^}fC4@7?b z*Pq!(+HA{3wI6?QZ2tBieF74wpjHPW$b)|Y0{B{e`N_6bgcAO=8+N!EmJ)v6V5eP& z0TtMuekcNz+&C&e#bJvyDlub?IFgs+j*_XzWE@ zH&}>NR%zv4S7xc@jofkR5?lZjAVL9RMnLA6cbsWvnP-N{=9_@Pxh9%p%1Ne|(S}34_(kZE+m->0=q!T?^40L=hiYcdR zDw?U8X$CrIrHwYnSXf|IimIrZl3J#zq^2rrtJ-a8rDeYURurGM#!2g%tJ1ouoVeP_ zt9!v#sa&+e{(c#1vCA&1>!rzNnr)(8K1+k1i$r?ro#);vYXRF9+U=y`Qbn$f@%ETP zv)UfpX0d?k+b*Vgc2TbuvC4`sx9L)P?7@syH}JR*dv-0W{IaXB#n|#2u~YLh@oaKN 
zws4(aYOPq0t?x3s?!R@WtSNJH5Ri(@5fDJL%_87j^UXZx-1E*p2MzSlI^)c%h7b?v zn2D^xNbS>5hiDu%R}v64UsnrxgKoS?jSk6QlepS~No5_T*})38QqeyH@N?03upPA9 zZ`16y&~SSQ8qy42=-tx4n2busD_`vLoZptJHVIY}-80>I+g-QMb;u1g9%$6dGVkpsfK?a%AY8sB95hv9m{i%z`3>takX`2H?VNA8nM?|k&Mx2^t! zRAViyk1z5zQ~lo0LU{S<9=@-^=F1uMGS@z#-3@j&8=%|@I5gJv2PpYFnW{|3FgB$Q zUDyg>^P(3!1j>wodGnwGAJ>ME`7T!c+ux}wXsP5`Yg)eZhR6=_J`s9QdK{!)|KMNvb6zrZH^=t{eX0 z+@6?2+|5pjEz#aBN_V^zPB4RFppb?72E3+q&n#gqqZluTJ{>x4k48)5h>V50iz#e{ z$eUms!!gI~fsT8k1Jk3H__ijp4Um${p(T07Ln-_!aq2Q$5EoXwFleYkMl9m!e%BYL zQK>{xgJmpRCoQzO&6ZmlgBII3t?VqmHqnG<9kN6?#u`Ds_W5rKn5)=uMNVl&Kh{>PxR`Q=cw0pBhc4 zjL_OmumV-5sKE?dg-Xzx1{Iu9b*oXc>O`oj4VQrRl{y1U*lI-yW@7DYQ@>ih!@6p& z^wg^X;p*7hz_X^@RGMYS+ym0fshq#V@s>N)P_{DQ!OWJExXyY z@)fmyo$XFPn@PsLw4=J+>1mPL)4k3#xN&vuFclkC*&>%Sm+d1+{+El}hCVm0yp5+> ztIEFXcBV{b8Uc96E8g;s$-Ln`uX@wVUhR`T?KQ8d^4mi68j8Q^ z-LHFfcVAeMKuq8rTnP|d4hA21R^?4_E+p(=_!hVk{uOVUGJHA!=V8DW1Mq(@d>2JI zIJ_f9aefCC;Q%w3yeWRMhDlsv5ff6x8(!~!L44x@TbRbwk@0);dtw&D7{WphFN@Dx zVIV_?!${_^fYJM5|K@nbPG*aVTfAcxGuawRX4i>jY-A$;ILn%_afd;CW%4eWzf)fC znbC~J2TS z%#3a{JL=2nOlw+TUv~4P!CU4~Z+OfTjx>QkJ(WSXx6ta1GkaY5%evNz9`d1&jS*s3MGz-eu$}$9=MeW<+AUtSvk{u?C?}iQh_-UK zQ@rg~Kib!*ezTCbeB?{Ny4=q;_MyFv&?)9|Lksq3w3VVio_%2qYL^X*9}huGdw zruB?%JaB@mrq_@@H<*8|YZZ67*9~`gk>qV~TX*@}_trQ~J6&Iq%V?V@$1BZhmhuC# zJZBhLxy<91a+YV@rDT;XjzVRs*~O7S|4ZCXTJ89W4-Emc>CDfp7gX6;cjB@ zxzK$c_p=MI>w6FT(_d%zFFjrAS8saMpKkWICm!yF-#X)dj*68Zz4DYNyX!J<_RH5j z^8IN#)Z31Dxv#wMYhQfXL7&FGAHDB@&qdyG7<;3io$aiT;^D1s`qA%R@1nQq?mwUV z2_1d)v&Veixz6~(qrLb5={@OxAAIUhfA!F}4%+Bj^6Wp<`j1EY^<^LY@SooM-}m_} zEbn*nvtQ?%XMgAYPJHz5qWZQgzx1tPdj8d;y#Dq#F0OZWu-AOahj-3bfVdZcnb3G{ zhkyPzb>O#u05~4@g<-e%f6jM)wKsOL=YT|4fz~4$F=Rqp^F=cNGS*}tWno_z$VwY` zeG$le)rWfGM|~ow1FCdKHW48^7)kfhdK>6|3Fvb(*MY!Sdv%vZ@v}utXg`5tM>8@K zT$3JMV_04kg^%${F%d&{z%{@EHbLNkr3ZoIM}{;weMz@Vj+9D4xJt|6HA&GMydw)# z_(;H%hxqXk=plu8q=ydyIPo!g@|SsF*M3D8fAa8t&$nsyfJ$v>GFg}(M*)a@m>VPl zNq|Thmk5W!G)bLfSa4v56Ig?!{@8nKs34HI8b7#+)6!3Z$ce?kf_UhMwip_n$b)!< 
zg1_^EFo1w!r+^xGfCIR5!iZOn7>JR`im?%eym*3ics;ZDDGeevKxmD2$PCXY7%0ew z+Ex%GS6GV#BGjZsB*7l|(*u22OSw^nyqJ!-c!^#3HJf0C?&yWM79+m+1p26jV3dQ` zWr8wP1_G&qA|sFtn2-wD9j~;I4tX3S6FU2-9F+J1D%g#%_>emIhDI`xN;3!?35pt7 zA}6$tPnb=a2!oZl7%X{`?f8t?b3}#5hO5Xq9|=5hD2iM7MY(8`4gww?nU9RbOU!ta zD-x8Rm>BX17(bbk40uZZjZ~CPS&^SpkCr%%CHRI%36MZSlTqoD{u$m7r3GnOKu`G>LT-hJ1pRuc?wJ(J8`NAh$V` z%jlEX*qg;^7g7lvY*~%G$(oLtjY;{3qVbg~@|=(Om2Nni*9bGxIh<63mbS>9b{U)A zNt*Jxn&Ro4nv?z^=GjY~NuA2Gp6vmN;Lwcr>6x4wXRgi}1 zp`nQ!hB=sv*`rg_kUkou9nzQ@0Yi{9nLH|_JqR9^iJmc8iOQ*@I2xaFh?`-fpG=xG zD=MWR37puupD-AcIjJ=T+M-+vp4u6p)0C!S>68wdqg;BO?Af4UnwMj0qY8Sa-5H@@ zno3K9rEaRFD58Q8`hY9*QSbBL7dRh7k8iFZJHb2 zZr-(i>}fI#!WRR85(w%$ctTXF2J8wVd5wkVj3Kpxuhg!2n!^(}TT1EUcxvd0kZQ7+|sa{of>d&mXeEke12IH9Zk;U9u`B~FK=apZDAE@$q}c|+)p;6a;CCsb>xa-Ubg zPW{aV?+YSN;OWeK3aH`5kAL(Omb|xd`<8t`-Z6FK%l-oct|i%LqmiQ@Dvz``P(KQy zHXdi)y@i&2pk23Mg%)0j8ipEHW8Fk*1i(fB0T>|S86^&I;)wh?$XIIhgm&Lv_~|8p z00z{!7mfqq2tbM>q6i|8B>pG>8#dl($B{c8VB?1e{D@+RPli^S9Y6Z`;gwMK7axT6 zu{I%t1?JH258=x zW|3$ndSjqNTKTAuha%Z1rAvkeC7hW)NggSHz6mL)ohq>?qK(GMsELu5*rT8-UTP5u&4#`k3pkNT6!!Dr%bO{%VlG?r0>FNji%pu6stAYqi2&u}QL7 zMmp#s#h#gJjoY^A>xpr~+9bMmN;zVl?M`{E71_F4E}VXZtLU-JuBs`&XDA79vzE@W zsjAgd+b6Zl)+*|~;ntxq!@#z>s;se|J8Prw=K3nfx>mu{i&Qf6si@0#90svF9*M`X z6u&v{iFX40tflD|jBvs6-Vjt`eSs4ymY|M2#O(P(u9vinaHPJPI zX$yz-m7^Y?8#0*k*J5Wlc7+szCxcaI;Wf_BT?4JGpc4PN^B(#tx^l5|Kl*Zt?a-Vq zk{Q=J^{bnr%yXD|txaGeZ+qtE#3y(DTXe)mGTOJ6N>{4xztAozZKhIMtApAC4yG5^ zou$EwwnJ0CkI|9fyLrxX#~Q7=QG4v}w1_9JU1IVXC?nYewN9_UuycWQqGa67w80b8 z%sKUB>Ws6S%tyu0rwb>$JSotttmEFlx@z&P7^6!#p4ZzNzB#@6n|ZK)^3fsG`Z8OK|C<)w^!xyN@LdL}hy+p^Ava zB#w;=@eJn-1>>y)N>vb}a*73aQ4zoUt&F zEm)D`-r%}rZ4e6$%pdKBCPBWfPdpaf7X~2-C95@oiiNr37R4aKyvZ?5AJoOadx+G`;KRT>7Mhb^91=%pK88o^b5^z1NBmC@l zxS~Q6oJvI>P;mLs}J) z{B+qOhvrq5dGm~tB;8#x$|k8!p_AqNBuW>i(x)j?YSVHOOclF_)QC-?Rcxql6=TJS z<}{f3{A4ld{&z=Yst>Y|qU-=Kdyx5=Y_k-_sb^7ZE}q^pcO4DqX}KyW)k=Z2qJ^Ma z={HKyy%UUdwXI#N6TvE3XZnDyKO}PapIxw z)nz!nFNpISS~Fr#tS?p=FdHUJ&GZasMs5QN-!06g12tT-XaA6>HD;m_u 
zX4}9OcWH@dn%=7Jw1ZBwx`t02K(<(gVFtarj+cbs8PB!In*$$@pIbC9M-O9sacBNY zQ~VqLMhUP3E^PI%Cf&ydxo%gTXr|yC-{_f-mm1BSLP7g@KZAL-<%hPbm!{st=~Akx zUOZ!8Tg{J)%ADg}bqW+)>+m6VT;7dxQAF}N z(cGafAi&>U-l?m*$=A(nZ&OdOla!qyXHR_VW8fIN8@taI_lRlUt_t})PTv1U$K~;{`o(E2@qtc0T#zDs z!Qb5XEXsSunGgBNbGqcjr-wjJhjv0b&Mu5Y-Kk%`5mtvh_=*`#yD794?uJWW?wf}T zp9633VsCuxh^YSKvHyQ~mv*MNbpFSC;0J(Tr*{Z=Y56dG{MUb$7hGRLYV3Cn1?Y55 zHyQjEd5q>WKz9$rCOjQjZlAY%eIRwshIpgbOS;E`F9?G%D1$RdgEeS_H;98dsDnGm dgFWbjKL~_DD1<{ughgnCM~H+;r~w5406WobPVfK# literal 0 HcmV?d00001 diff --git a/doxygen/img/ppatt_figa.gif b/doxygen/img/ppatt_figa.gif new file mode 100644 index 0000000000000000000000000000000000000000..5c86c93855fce2b129e4a0e158c3396448953b08 GIT binary patch literal 2359 zcmV-73CQ+GNk%v~VN3zI0Pz3-000000RR7H0RR90EC2ui089b600091oR6u??GK}z zwAzca-n{z{hT=$;=82~2%C_zc$MQ_q_KoNI&iDQg3<`(DqVb4KDwoWr^9hYgr_`$T zip^@b+^+Wv4vWX+viW=ir~!tx8V#qo*KPNgKCs94l6StI$L58n$7k5^CWzqI#YU(& zN9Xq#8F*-=C+WzDwJA91xfz)FS?YL+c?xL9i8%^5${K1(`-)oCN;;?Yc>wAwDhoS& zE0sIDB^hg~xy;;KY+M+*8;$s9oxRtp-R=Di9xgskUT%Jlp02*m-tPVmA1^;oUvGbp zpRd2q-|znqFrdI}0t6f=XaL~=g$xZYbg1xQLWmS6T6|bhqQ?G)7YE*<1~R0`kOD@U zBx%wk5NdT4MG#c5qMD2a*>HRaGa~?+B{go$d2#2(m@ZZFU?i*$MT#?N0`)0$qrsg{ zC+g&x=Vnc#Kae(4>hPz{t3w@9Eju>oQj2G$riIGYhgVV)-fHy2v2DYiTAw0p>sD$) zzh>k1s6@AqRkl$ZzI9x-4Ntoa2}>^9)9{XCwIDQhtQVr?%Ys7>UVIm;X`Grj7ft*! 
zV&%tXv1XktI4kPETKQ(g);bLyU_~ELHfwn`(cGEy%4W)Zba60R0~`ooeE{~z8n*uF)t6x5*Uuxg32A3pm!jKXpMvt0U#iP1CFTB zfeOmFAd48{cB6MLDg>j96t1XaF&SP*A%_7{_+gRY&}deP+nvZpirEqAZWd!{y8eC zWbFCrf~w{bUao`s*{7_iB+9Cdj!xR=sn(VB{wJ}S`FcsFf^I4&n6-wd2&J(y3r3sa z-R3B-MMb*lo!4SJNUP?$bZd~)?iwndbeOo#O4C4tBnJmEAg`vLoBQqN8)&+gSZlmY7<@&<5{g3uS@I1 z76%HPt{Uo!E6gveY;Y7`jtugY)m52molKnES<2fg`ZBmk2ih{y+CtngtC=basmnP- zAuxI>BgY0JFv}v(7ZMRxq8~_!!%zFg@a)xS!sGCV@J1e)5JQn zxixQ#D&Y|ex^2{*CU9!iiwMqf-%fn~+~bw1c_{7B(}~Rh+$(R~G?H*rt9Ia@%DuSR zXN$43);(KC<&7bmygA?;J6v(e5+|N9)VUAN^2V?S+biqIbxGiPx{E0}4TZEc>cgO` z?(Dg256^bX6OW!Y+RYCQFx~>!{N0e4-u||+g%jU4!|LDMc<?xoG9qQCxmWw83a`DD8<4Ql7V6lYZS4n79M0}aDp;CU+`L(y6m-URXr@#(!eA_ z{!J}z%7Rr9OGQ0iMGJKrgW>+yp16et?v8`R0+sr7Xpu>w5Q|6)IN}n8zX5?RD+BiMo(izd-UrjfB)qX)xD7MQUM= zc=H)eP~^2MN>U10LnMN@cDFxnk_&^ZTp@L`w?vK-7a%-H7e&Q5$i3=utIWax@imfr z?aPb@L=3xG*`9Mk;unf} zW5}inPxG0QeBfc|hW^|+PP}Q77g21N3#VsAZ@PeJ6J10qdDbI&Cdi&Rg6P09$+v}) zbarFVqSNx1LC3iXj1mze?Vbm?XQs47D^(FbH-ycei4=BpRAsq-cq?(EML3xfLgKf-qbsq}@}8lQM()bgK(ZsY)y9&}~X-sKxVG zMKO3$neFg_0_8+CoS9V<&9#vZrK&{TSUIltRA^$LA>xRs#N8d_pS)7$7!X^bw>HwG zJ55qhLrO@_x-~wwS!i!wr_fUIa<#1etH%n6O9a-^3X}}39iOSu+1{X&e9Vm>ORC!| zlrmt_No8(>{#yge@$_c=NDrBTe znJeDvl9vV1edu(vsowRfpkDkj;2P!y68Wm|g5p%_=d7jQ$$d3d37ASOg$B?V2DprO z6(<$lWD_ZNbiqDwEYxhcDt#ievhk&G3BZYsq~7emBlK_?^~tDL_LG5@8O-Vo)T2q!_{i({P4t$)7BM4Jh;q?t;22L>*+}*jL%mF`cA)DL9)R-pKz)6iT zo{L=m8;io(x*UaS0wSEmnE;WdZ$`A8&)gh40piSlcA#bBJkUAQImyFNRi%>!=;C?W zm7@Cbi<3IxRWei4QB5W-8vW*OeVT`5-f;dHQA|gJdck5d^qV9spZ?~#PrHgkCYo$$i<3pwM%j!gSEk?cPNMmr=ZuYdP zC#&p1FB#1J3q+eE{mNa9`Q3qr$Yf`&w_%Y5cEuer@{j`JCu d>wM=t@43%^4)mZ4edt6ly3vo0^d1xd06T%1n4SOt literal 0 HcmV?d00001 diff --git a/doxygen/img/ppatt_figb.gif b/doxygen/img/ppatt_figb.gif new file mode 100644 index 0000000000000000000000000000000000000000..fe4e350ac9cae9910d0d94d6a3fa0c512435754d GIT binary patch literal 2431 zcmV-_34r!TNk%v~VKf1<0Pz3-000000RR7H0RR90EC2ui05k!z00091oR6u??GK}z zwAzca-n{z{hT=$;=82~2%C_zc$MQ_q_KoNI&iDQg3<`(DqVb4KDwoWr^9hYgr_`$T 
zip^@b+-?Q{1{lI(uNgrGkI837cwKI{)q}E{E=SYf`SV?Y*H>3}XIJ>PcbG;f2*gGx zcGwqL$XKap8EM(*sJRA7nMWDAc!?Ma$Jv>n$Tv6k8E9&%r}`6^iR<^rmNt8e7iYWk z25cQbcI6A;$iU5Q#|q=b$3UkQkpq zsK~M7kW#2F@)_A+5iEtpQmR~az~DtzCIJeA8PgN6PN zpbMui_&b!S+^=!#kbPWs?c&LM-C907ckUp*WZ^RYT-Wm5n@#X$hQ;@*Y0#-Hi^)5d zZP_uf<%(%-n6wbjle-=rD>~$DZC7a_8pM(~Hhc-p6Xn#s1qmhaK8>ogo*SfV9zf+icL~ zR|Ic)9jBgf-AUG#di$Z3V1;AsK&cVTh{ehA_Y0hl!- z0$$yCfB-b^7*<#~j>V&8Kc+=wjcx^rV`I+6xDrxLk#W>hQiieClvXn7QIIqKM`U9G z8ad-@Id;ibkYXyinU;8UxgeIefJtLtXga25k3^24<$PnR24tUdjtQfiG~9-yXK3n3 zsGn!vnH`+VjfvxzJQ@mVpn}R!rlO2CcATTUZJMT^T=rO3o?_Z*>7_783gnV(;^}9Y zjQW`Ak+2MjTAh%#sVY&9j%TM{zk;`?W3lQP{%oyA#(`$4y|$33t+c(G)^EQu`)aAN zZn!I}wo3^OGJeHqBN!LJq!iR*3zx)mB{>TA6X470v*t2~m6URDuAV)nQhkCajqKQVM3_KuZQ*d_iIg}pOhzE9e;#(5u zT$bF<*UiG+{P7LBs9G=SHjW~w|wM6 zPbDdaDx2H{NteG#?XOtxD^O^Sb^l2-EQbgX&FE^Vr#NbY(V6AAKdiORzP zzqquo*ZIpiI^^NNXjCw?`Nek*+zuLDrz;73v4ZzXA+}biE6wpsYMYXw3W-F>6R1y* zCxP66xHm+jjZbjDS_=;m1-?av%aBA&;ub+w#U2Q7F?l0i-t?yr{zdWxjud0{FvrC^ z^66ahf|D)Vcmw#QGK~)eRs_QcJ0t@09(i(A9Ff@=T3vBbXaFD7el$xHFmH30Yt0`I z!$Lu>iBwrK;vwP1LPh3@k>Q#q6~CE-NiI_?kjr5v*C;TLba0>iiMP%tz3(B;cjiah5fwZ*Pp)RW^*yb?ZK+*eIw1h*YBw0luF|Yn^tP}_- zK9Ts+x2k5LB;hMz9vVWx>Qw~7%waLTsnjzg)>xUXt@C8;1djT|q^|Cs z6Sbpd;p#-G5^$ha?FUvr8-l{FuB((oCR6E!+SKy&dp@nGWRXW%xyIH3n026i5F5%J zj&!#jfGT3~{zqCVw$z}+?SN|g=rfecOR@WEt_Nlt0o)|buMFU#ZmpXD69y!(f(1x$ z9eP6WUck6WMOqa5lsT8GR~cKaWaCt7Q`h3Ny7R?=V^`}(+oo%>!K!S3`P3e%qED&P z3hO2hTmklG*1gGvZ$TRgVXM>^AT%{*ey2M@{$}{9!h>#w1WYj87WljU?d?kIx!I-e z7f>UV3 zv|;WuAjs@y)}0x|GajR%(a>hJE*Z|ZwegDS{MNX(@-XNfuOLy$;`xeK zCge4;l1p1;Pd|EEQF7ZrlcHB7gcFweBVuD|c!x%SMxJ{+?pYNRG zo}&6Br|q@l7z69~N!ORep0%_8YvO8By2+M)va{`M-vtZX$|A+`w{MK>=vMe!7jCDs zpG_PtM+nx`4zabrZR-hWx6`B+DtE;^Y7Yh($cg6cp7VKbfKHfe?`8LLG5qf50$SRl zt#_m8+G}jzI>1a$akuA$X|jTw!@L8oT{}D6ULTvbu57rv+Rem>BiePv#IJp0eP>}c z(^}(P<@mNeF3wB8d*r7MHYR0^@?x)g<>p=61OcH~s7L(YfLIE@1wL?9zkBOk@4DB& x4)(B%ee7f}yV=i<_Oz>g?QCzm+usiNxXXR+bg#SJ?~eDp>wWKhA4mZJ06P`c*Ps9Z literal 0 HcmV?d00001 diff --git a/doxygen/img/ppatt_figc.gif 
b/doxygen/img/ppatt_figc.gif new file mode 100644 index 0000000000000000000000000000000000000000..aca8ef9d9bbf52c75e64beb351a55f5b1133afd6 GIT binary patch literal 2616 zcmV-83di+FNk%v~VLt)H0Q3L=0000@O-*QMXncHp00960W&n(gjGCI7r>Cc~va-Fs zy~oGL(9qD?+1cUY;pyq=@bK{e|Nj6000000000000000000000000000000000000 z000000000000000EC2ui06zi5000I4U?h%YX`X1Ru59bRa4gSsZQppV?|kq7z@TtQ zEE8PUeJY(8O6RO~z@oaFIdv*Fo?XL=9jk&X*|TQPj!mo9>({noyTK(J zaBf_-cJm&}=Z65_zkmn%3q6hynX+HYkSAZp%o%`Y&44~< z4lSB;^XN-iFK3h&P-_dY|gb?>lVG7c5KYA zY5$HG{1$S}&y(ja4m+k_c-)#ea)?Q`uiT;LOGBYgbMuEfuc-(x(IdtGW|m220sZL}#Zy3G31ggIuJ%U(R7P2t zVQw}7r&P05G~{j);mGW{I0Yy2e{E zvVfbKHr)nz7$*uC8=A^E5WXh|o;2rxZf_)83 ztQ@BT+n=$i{+5}cKF^{fE335!{5Y?)=! zQn74SKvH9!uhun_9P`4TnxJ&MLnR64QG^02cur>f9W>hv7rip%_YF=s%hMfBl-;ZZ zHqN{LfW<49;%~Q5)s@^})lSoe(#Ykl$fBmKf3|UD`O{#P{y9KP#^kNHygC{#?5^@= zD3P0UsXGzBr%OAF->TO;>dOp+&zy<7xV#RaD2wi_jVf3wSRg<3r}A2DPfE(ia_T&N zp#Nlk_UMPLJ`l}I>l?M;yE=O3Sy6{IuE9FD-ERlY1K+aPqrk@PW_msN%J>e5smE#R zd-@ZPj3`Jk_L<9l4Fr|#w8z0N5F;`BQdsB!Cordxu!LRM6X^1Rr_c$`fG`vT(GEtn z66gyR7V{WXXkxh>CLuh2`<2krRn6;Ck{xDQe+6O;r9_ z;L0|+CvwStUL?Z~frc@wl@Vsquwo0vmPR#_aZyV&h1$f(xjL#5X!sJM3bA-GEw=Dd zeiQ>3{gtpKj!|+CLmDCzsY8zaqG}ogBFTivq#{ys2r~@j5I@OJV{C* zY^zh?lX7wRdT?D2SOj#}ACG1$ufgkCV znT1!TDwfgP-!!R-gTvPFU!Af>e?`v9` zU<$aF5E+JY5H8EAd9S!4|Qnl68_jog+7NM zv0+L)n;ygHNPc#Zh+Bgi=Ind@ zup<+N`dS65$^0CLDIUxXfX;b^q%sL*kj#WWCRrxT0uN{;vl$sIr79_*|Vs z+!?<8r4l;f*==Zf>MZfXfwgABCTxWT9lx5;d!50K^Hvlg_ill?Z=@wapW7Y#Rug#5 z6rUfy3m*K?&7#Pg2znRj%R8;tIkV;M);el~#u}uv`SBbba3>GM_OYe=KY7O%}C=k461yIqpC(tS1R+)!LEh6tF!cK~~}ZoC&TXG#i;6v1aj{k~1rn ziD$KQQYpO&(so!Q10?j}{$u(gUa>l?~?x-UtaVHL0rG94$J14E|=!&DmONeQL7 z--No<*L9&{jz=U?kGFd9>V(}8S7Gc`b)Vcbwmrr?I9ub0*tN!Xo|yo)8g+ZykrJr4 zg$?c}kU62L-9uCtM_L3kRwIe8riwe1j%vFy=v)8N;y z7V^Gf?QDB{?IJxMH?VOTV%MXT&uzx=ym*`o==rv7AO7=>9KEvr9`r>G zhurZIoNzi+xJ?+Ia#}z9!j#IcMW2oY#t~ZLk>)kVXMJU*dq~&mHZ9zbNOhJMx>D4Q zdPbSe*si-PcTc^Bc74i&ELm@ExV1af$(wCR2l~MD@bz*33S*Qy+_;M8;Q2CVShi!n 
z@komGqdzZmBZAuO2*&PK3NNSYPMOMAnfT7BZBO1cedoB(z3z9<``-Kh_rMRn@P|+Q a;v4_?$WOlVm(Tp>JOBC6kG@(%0028vLM?{? literal 0 HcmV?d00001 diff --git a/doxygen/img/ppatt_figd.gif b/doxygen/img/ppatt_figd.gif new file mode 100644 index 0000000000000000000000000000000000000000..e6c55c09fd42fdcccb01afde03ecdf58a9d37a55 GIT binary patch literal 2505 zcmV;)2{!geNk%v~VLt)G0Pz3-000000RR7H0RR90EC2ui06zi400091oR6u??GK}z zwAzca-n{z{hT=$;=82~2%C_zc$MQ_q_KoNI&O5*l90U(QfT7q+DwDSLsoBe0I4g`} zs;4^~F^9a&ynNdX9W6agU2T1hovppi-R=Di9xgskUT%Jlp02*m-tPVmA1^;oUvGbp zpRd2q-|znqFrdJJ1PdBGh%lkTg$x@ydjDy1!T5)B6+KmY?$ zsjA$$swgH(GGo1j@Rey&qhFnV3t*(`0JT@4W`#NF>C>i0kHQJ+3sXlkZMC7LI(6;T zzToNrT0~Ol+>kD=64n+KFyP1k+EQ*9HL%*hD zO}qNK?Hu@OJx#B4Z}hT_=RPl2JjeBd<*Rd#p_W$#_Wlc-&Mr`Yb={(pfZTwGoq^{R zw}~DCFbH9U5>7}SGwMmEpicrAAi#zLbjTT1YZ2k06dk6;##Da+&>@E_X zg9xZtS2WG_mSH(8)>tD^F@|;`jT}}o;t@8^*rFLjcERL}B8u3f690t=kP%T5f#Z-5 z{s3egX~C!@ltBtdCW|hv85xdNSwmcLzZpsAm{We}#FkDTFc_X|0yqYdVbW-4lI&U7 zg=tlCG$@FPk~u}AUk(Xl7=w-(s483fNv3bs?ATg>n{Jrt6GDvo=$&f*$YzXzn)zp` zBQDCrb(_(O3t#v#00>;t(K#bpZcY)vl3_a0{w1G3YI!D~sthKn5W}j7YYqsqB@A6N z`5IpZu-0kl7&Zbssk2ma8f>abjcOyMIW}`4Yt@?6D^T|8$_0Fn@g-@9$dVXqy9rQx zWUP=%g6fS)&Pxn^QZY<|Sa9LnUANpWaL1@gmCCA-EUN2jiO0G-#HOj%*o;0 z%U_o$warjg%_Ub-4_-B52_k;?HMVO0#`iQQ-CBg*R13fx5Or_fc;T86B{!)yW2iaX zmB(YNI9x0nnuA&&*lQ>HGw6me zor>KI*DJgT%)@;g;0}`x`Q)?*zdf|Kk4wGmi1!%t#U_Xkpz$dEcdGOswjNCBM_j2X z;-oqZ1k(j!T)etVDGcK>_mKd9*@I3?{-dSa5wCt*!yR_i!n^QY$!qJ|4pJm09Hk`C zEGC%X>OujF*szOI!&;%Cx)iNWnTuQ%OxDKCgA=i|j4TR_81haCl41}@P7Ko7&Zuxi z36Uf*rosy=aM+7?`4E8e@L>K%++F%}?CiVH%L zzWy|$LdA%8Bb#BLun00GNlK0=Iio0UW{h?)hF5avSBk>5M?;B-GbV$Sj2d;w&;YS9 zKOE0%NC8GW>Jg54R1zHjXUA{FF_e>}Bo*6;o0DuxT8>N}4TW^DOtO%IWK2_3N~y-& zJ5hXcjk!7&B|AZJ8Ybk3AAL~BPa1PrV+0Z(3HHh{QHoidgxbO+ zSINTk2a}D0<&=fwOA|)e-;1rNL8@ zZ;ewM9Q15bL92bLhyl7_$n3{CU1Cdbg!^7bL07)|rLLqYFzBb2NXSF5(QG#Ds03ds zui#njKN;K_{NP5uo^h0@>Dl#w{L|)_qUrpp8 z3BhvG^X*Tiw+m`Z8^=-b`Lh~V<*K*JavB{D60FHV>w{8vzqLAbsCjLxUm=CWg=;5}O) 
zX==%cmN!55DC~MOa9lJP^}Xy(4|U_4CH!guuJldLGZ&)Wvdz{8G2T{yQZ8HLu{95ujS?hb%+-K~4_>%jYEM&)D^gYro=2S; zTuYj%%5Ju^pAGG3i^9wHA+WS_JwjxI#n#t;AEOVLXuE2gD=O~xAl?$fH%ba&qveKaI_Ru|$)u)QO}?0*Y<-~=B}T(}+Zf-j*gv@TDuh5hXYgpSjI%j`ORW zmkp_AUhH9YzJeI1S+rg?G|-cQz}u#bX>l<~ESB&an@-@_l z;_Iw$02HpsZIu^ebxp%u>^hiqyV(x+xXXR+&5)UjTP&TLzgp;nF8P=BerTGMebGB! z^}C)9YP$2>t7H1*vr1Z1poj$=r_pB literal 0 HcmV?d00001 diff --git a/doxygen/img/preg_figa.gif b/doxygen/img/preg_figa.gif new file mode 100644 index 0000000000000000000000000000000000000000..0929bf4d505ad2a1b0f11b5151d08d6f1cfbe21e GIT binary patch literal 2359 zcmV-73CQ+GNk%v~VLk!I0Pz3-000000RR7H0RR90EC2ui06qc600091oR6u??GK}z zwAzca-n{z{hT=$;=82~2%C_zc$MQ_q_KoNI&iDQg3<`(DqVb4KDwoWr^9hYgr&Jx_ zip^@b+^+Wv4sWaEC4hV`tBq-DdkqJ^rSrJmioWZ2b!dMN2va9USZH`?7^qk>`1F_X zxG0Iy=oEQyIjJd8*_1i(xhcxf$@CdInu@7_Di_hJI$COgSZjN08=E={igfy_E6X~3 z^a}wST*ixRWBkCJ>_&_LP2=oP9UZmo3w*3S$zAOI3@$0oMviR`7!UyJKJX525DySv zbls3msRf$FF2$HF0lE;;N9x5DTK^gb0r+p>lY;W*^|DA2AI1J52p90{*NfW66Sq9a$V>mYU7!WE_zEsCbgxu^Z;)-3BO4F@cunXZ}ZcC9P#(pG2iu9Lw3fk zrWJYz0vDcG2ue{NZgV-v*m4uenU=LzmPQm^*`$%)AqnG8TizI^m>+=nQh-6$xq)tJT$z$=Thb^bkvQ7P zk!o3S_oFM_0Wjo?a@9Czl0hvwC!dIFP+^g-QHj%twQLGRhb#G%;&gndx#W-lz9}Q2 zy)daMsaEZYYO1wT!Xlt3zBFS>H5%Hctv_6a{;HavI*JdZ27n2joRxOjAg#vsV5V7R z8YG96kroFXrq5Da(3g^4%Y&(_MTcv*N@6*#o8=BFE2ea!>k(iChJfd*x&qm#uWq&x zY>=SXh~=)kuyvVNibh*udOe6bD!%L$xUUDD%JpfZRqn}VmZ*x`BDu~YtgtK$XI6r` zAD&p{yKKHX@3Zw*TCBzUt))W0x%%u%zy%K+YrO~`s;wvm!4@ZBP0L9u5pMPOQRPve$6lY;Dpv0Bt1SrScSM z0v63UXp~rfiM8IWJe*a;iBCW=B8u1k(Div9%MJ3kS`u#drD#7sxpzUHx?tQB&pmUp z&f4tuteJ*xjlT7v$w#k4$7r+B#DX3=wcGhv7m$wzKJ}4XTAerSZ{6mp=$D_Kx!B+? z#<|I#D{tHA>;4;rvUCfusBw2k$z z3zc8^yfj5GVVmO;tFG5S7Gx`1reWLZWacW>aSm_%OCU+o$FB7OEO>Malyz=5EW}xG zCOm@J*Pxa^so~FflG@+BMwo*Cb&P`_TVKeg=eltn@Nj8^VF+9Xw|1$=eT49#KrqO? z6$uSSf&yWoL}aUP_ow4=F=u=<2NB;_NOmAEg1Na{9

    2McR&u zAB@uz6Bfx3u*qiSD_{Mh1}#9A4|x^LUBjOFB6clJ2_H_c#VC!U40I=cE8J&t=S`5prUa+)gERhs=6bplN4v zU|Hb!$G(knkZ4TWHvZ*F&TnGQhMF5^^<-I0fl?ry-i%co_n9VM=FFEKg(EvPYR~)S z^O-}u<`+;pP-$|~buUapbzq9lR;{9X5-mn1GL|u%CV-<&T4ylxVbJ{;NT>r%L#SF>(5uC1*s776A}%mNmzlhxa11k%^4 z#Wu9P^-b^C{=%{m1kSZGA{IVXTU_yA?=%f7fny;>(5WV8wvL%Dav>Yd+s3M>x|J<& zxvMz)fL8;0WSv$)n<4ZXcXGj5F9yVVTC%1#v*+#aar1+M*OnB%+x2gEVYfEz-pR7L z4d`xZD__!Xa=bWmt9c=8UwM=t@43%^4)mZ4edt6ly3vo0^t2WL06X{4n1}!X literal 0 HcmV?d00001 diff --git a/doxygen/img/preg_figb.gif b/doxygen/img/preg_figb.gif new file mode 100644 index 0000000000000000000000000000000000000000..33e57fc56c3dd108556ce6e2ce428975f5b74403 GIT binary patch literal 2033 zcmV{<(xLV=X3~saIfq4c)VQhho&aD1~+K9#;1rxNa*+Y z*XZ~MxoCOR2&twSIGLDt85&d=_jnN5$$6S6+FC}rdFUCN$Le~kC5XG5Ys>o!94tIc zTx@)doUFXe-0b`e9W6agU2T1hovppi-R=Di9xgskUT)rS80UNbksfFXNXt$VU${U%Hs@XkUnarW6fQC9vHle+F6UOXvX*0*QPmK7`1DBE$YY3)Nwa2&&`9kt9o+ zJc%-;%9Sizx_k*Urp%c%YueN!K!Aaq2Y3$9>C@*=oj-@}3`+FqQKCYH$a)Gjs?@0( zp;5hxHS0Yc00IOs;1!V90RhmACEFG3*EMImekEI$?OV5Mw=zQ$6aZ75a`Nu&V-yck zKs?O^1q{UO+qjFvs!ek?Y~wzW%``3x7p&c7Ykcb6OPJ{Aqk5I%Wonr8=~GT0`-zNa zvSQ7VF=u|=Sgty)TB-1s^>_8p)JBC17(KMJXynJ0r*SHKHssu{rI(FGp+eT_w%W;vt+Ka9;md^~uFLMaL-CXofA22!F1+#1+o%w*)mB@exlQJpjh6*n z?y&}^dawkhE*zCp3_pAj2(}cwL%N6B8{WOmVSL)88q0g{fRiG*o|W547aMiA9f;jo z$rUDDM6g9_<=FK_>ZMWS$3tj1lLeJd8 zfov;S-@n_DJ@|p$$sBVIbIUvt$5m_oopIJ)yDPG*m_xU;%XGnYHp`tGeYk#%6AAfN zk8d<|>WUt_df*VxUamt>(=2uCu+#pqHn!7^`|W_=gu(BSI}v;1H5sov1HAX{eD1>& zZvpeu8|>idv~8#Mj_{SfK=RZtuKPU_h0Zp8)1|F)Tk@$KzVyn|WIa_cuk|@&hgU|n zY~7=U4*8AGbj@>~A?70;%d~G;D`U_C3)sBa4RBS*3*RRYr@cLKZ+{Z%ANT|)ya_G~ zcnYgd`aVj30&mz)+m3~tr8AWlbxJlA^0>X1d*;+H{w!|`1B(~ zR*{v3Owq1%)g@1I3xK=Q)-Q#rOFHthkjpw?utJF?FG(g`#|)Nsdi6{@QgWGgI$x(= z^Ql3d(wN2k=Aep|#Z@jdnRXDRk3?0@Y|03oN~=*kRhh|kvh$3Ve33Re$<2)DEoV$p z5^EU6w`X;2LuibkKK@-uAxpWbS`2;Fv#OLNR!P(zn<=9{YsVy8d&oi?gbk>Jy%+uUKK>~=SjQtOc&x*MFJ#kwW(G@m1k6|1ae z(Kj7+XhiJ^wvZT;+V#^#1YGJ%ZMdKe-Vbe{TOt4W7g4VQ)PP^2j)N|vxCR<7Gi>|n 
zCcWCt1G;r>a3vcHLFc~@4oR(@;A#WKTG#v$j!+#$Ymo?BqUu$9TNT}Rzx?g5fBy^M01J4)1TL_F4~*ahD|o>S zZm@$N4B-e%c)}E}u!YN!j)-0O70U8cf literal 0 HcmV?d00001 diff --git a/doxygen/img/preg_figc.gif b/doxygen/img/preg_figc.gif new file mode 100644 index 0000000000000000000000000000000000000000..a4f98ffb1ed3dba608b2f8efa42a8d292264a5d4 GIT binary patch literal 3242 zcmV;b3{~?-Nk%v~VLt)M0Pz3-000000RR7H0RR90EC2ui06ziA00091oR6u??GK}z zwAzca-n{z{hT=$;=82~2%C_zc$MQ_q_KoNI&iDQg3<`(DqVb4KDwoWr^9hYgr_`$T zip^@b+^+Wv4vWX+viXcotJmzd8w7~U2lDv)9=6lz06wd30EjoJV%LYDw=&0fNa$z+ zIA@p$skiuuDPkDd7ui_SImuZk$~fwoiAgz0iV_;?hl(1znfkc;`g!t7+sX>-XKL#x zNNVgF$Qew_2%Jbc9Ni+>+$?%ry(hix)XDw51Pwk;UT%Jlp02*m-tPVmA1^;oUvGbp zpRd2q-`{q52?8`h%LvX8N6=D0e=cxU7}&_*vV*z`1{BATL^1wPi+puVs0(5TkF0wA z`XurI#*85pDpMIT<{_7eLfYi;%w?%-6;r-s32LU$nl(=jC5jX#PLxA;!a{gcDbS!V z9VR=az|zmH6EWm$>g?%Esbsq;W6#q@8<$)@ONcH7ASiia5KqOW~KdmIpEgi+#Y9){rI2k+`DHt z5Fh|}@dE+~Fd*POAoAr8us?4Ne)#bR(x10KApK=>DE{^5hp%3K0>Zb)eFNr49UPSy z)$Ni85Yl=a&>__34IpKA9(* ze{5Q6ct_@FWTE1b$Eb^AmKtHKj@CF%Pj#wS{;8Dz-KSrsg7rzJp{>@KW{*2^*(QmW zF8iOH)nyfEiq;~kW1h}taIOW1L<=a2gSw|=hOd&UXt0gSN-Q{ug}Y|5UZ$7s1p&`i ztBL$R8ERys7JMqRsSeDBY1mowpmYirQt@ONXBQ!Nh(KD~v2R|htd|ha25qfo=7=V| z*7o~rl-tJn@wXw@sc@6!HhS^|Jqx60AjiIoz^;etOmDV}3hOO@tR771wI@2TIvc!mYis zq%9ZvGT$+~=_$>R;*AK@I}0i7MT<z8#}|#f zlLW49o_pyDeX9yxokrKB(kbpYKiMAdn4v!HUGD+xV^9U_(6R40uYk2X)2q-Xz~CKa zBDgS$S>Uq01_)14#Pd(CFvqJYIf;Da>74F1$PHsfk7RwLo(LN_!N^END?lV+@VfWH z?-h!0;=@+7W_CUtZo^nO%bosnnn%ISoMe7zxd>CD!oBXb>q3`1UJ~c^H7r(dicdRW zzl27;)8PO%?Mo8nR2QhNEzxxegq`1{D2<*>OHBu|;s7fHz!pNzjP7BS|K=7Y8`=$! 
z(a>Sr_NJlfh3#)D8|2{HbGz|%4~onZTlyMT#T`LTfcTQax=J<1|6ER2M_XjQ>UPL{ zaqx<6Y@!0AIEAE<36J7J9*cOmxgs@;Tc+IQRlX-TL$)#sD^!{y-3G(TMYDU7%;g#b z`J9Pe@Mk+%o5+mWI)4T2LR6EZE^`R3RLbyilMH4BjWkXONr;le5TxkdXsLD%%avFo z<_yun@pe~ zHkF9ZP&%epZ{HI)f_szNyjHfFvRc*l55Fg}}&A{LfUyCYjTuX91i zRFGn$&4qT{{?}Qg*tUMTwTN>=qmj3YY@}S8D_ae_&d3@Md|e^Kh<403iP^* zNu)(z7%J-W$efy7){Shyn&v#jz<-6NSTpNblzNh<_MNAr*u}~Vf8n|zMUsSjxXH?7 zce)8X`>JZWl0JjEB0siSY2!uQAxt!=-(VK5MUY(NA9^p`M2WJM1K zVXnQ{c(sDDg=4w}C_hwi8qKPcWnfqDc@V5S^B<1?lwvT0GjEraD{9ZIml^A=#tFU( znRIOax@O9>D8L;q6#zKM^I)`41o9~o8QWxs@>0saL@J&SbI;-$BZCv0YAOV2(a0SX z#N7~as7cJy55AdKarQJ-p;u=fRv15*e!;3McS$~<_SS&zkf4e17BOumxxQ5iedlRr z`3}>_Bz`J>mn_X+1N&zVQSVKc<8&ztSrYH*^4DS9V zZrS8ZV`$1%-k|f^#kj~VOR=Bn!EKVd329EE2j7Kj#7+o8!CM2DdW9)ifzM zecb-qV&m>bHe+kukkxYh$TjU_t*9>NN*;JpeWr4133_ELWm)Iqrf^BCev5Y}{k`%I zrm4SY?+?l<-zblJKMn3;g!^!E?4ohQo2ZRH+kK#DxB8FW>*&H0w8$dopDv#L_GmkP z>~jxP-D_^;n}2(cF+?WM%k56Fe_HSmRr-((Pw!Jhou1edW2@iEMmir{*4CvwEG_7Qiq`@e{!alL#n(|j_PQ+sxK$---!2YAGgeZ>@dXQynOl31WJa$kaatY3wnGAV zXU+#%&}U52mw^AWfW*)r^0Zh>S6APsedt7TGQ@M`hkS!pATvig>xXwD=zg?^Ygi?J zGeU3FC(dTgArGQgtBVNZFTjhs= zs6oMTZ9OJ1=n`}9@KGvgSnJklgfWPUs0q1&R)}a1m6$~lLx|`S{&fAPVPkM+pO_Dr zmooh3ig2u5P?igkru{#HcDNMOa7jNr(6 zH(*=-_6!-4=K- zaF4rFSO%$(@^FqoG>x>Qh@GZx$5oLNX^pOU6eyGy)X;BS!I7mH59{c19wBrj`HrXL z4k-C=z_XHFQIRhBl0j!&kzfkN5r!5ikc5U<|Hyzp85=~8S(HYJlu46A~o z6vkK-DwL1D2$fdnEXE;|#dwv%B~Co?KDo7#3E7pqP$eZKL?0OzEID3f>6Ig-Syhpf zY{`{wxm6OB44xs5ddZD;`Ia)lMfA8u_-Ji@sS5sBl7^|6i^-Uc>6niRnUN`(lS!GC cX_=RanVG4Xo5`7->6xDinxQG07*GHJJEfpV4*&oF literal 0 HcmV?d00001 diff --git a/doxygen/img/preg_figd.gif b/doxygen/img/preg_figd.gif new file mode 100644 index 0000000000000000000000000000000000000000..fe345fbca8171cd36da4da658e66004643d68f36 GIT binary patch literal 2367 zcmV-F3BdM8Nk%v~VLt)B0Pz3-000000RR7H0RR90EC2ui06zh~00091oR6u??GK}z zwAzca-n{z{hT=$;=82~2%C_zc$MQ_q_KoNI&iDQg3<`(DqVb4KDwoWr^9hYgr_`$T 
zip^@b+^+Wv4vWX+viXcotJmzd`wfrF=k&V$j?e4&{J#GW7&r$&SU6yqPzYe4Cs#DwOMIvsqiV_fs{|^H8T*WDd}+$u%xbI&{5ySMD-2p2EBiXmy)vEss5wcl3JslZ zTHd~UOAEf|X`jFu>;@V9*VjWb@4nrebV1;Ma}6~NRyt6m%v(Ce z2DO*$e0n7`K8!q>=)tk!c4ul~c@Oz@;Rkwe9}fQfZvOV$_pb$jObJ-jJYW!bAR7jL zfgpnlQg=mzUPwp3f zjFHqxh>I}NXuys=&gc#uv7wRTkBro~B9AgIgJdN;(x~GhNT#SGiykso$AT?@^G^fQ z`4wYbRTkOgl%(LOWRfcCxMq(~{&=OAS(Ye*YZTzMm1JU8c_xr$8kr-TeZDv-lWYb# zr#q3U_svwZ{pQVAWV$FNphil`XPc5#S?Q96#>oJv7xILfINFs8W@&i3dFh)ra;j;H zg9h3orf|ZFXhqI}iC%G?+4Ws`Bo0e#uqTMx{>QF@<#`+W>em2t;N=_xZRNvMi(<89^3Vzrxt z+98Z1cik)=9_HbCyNS3ki0AlXUUXL@*ucY+eL0U=V!J!_u_fi`U;SoYy5}4 z86TiJzpLX+ZR}Cs}LX&+9$o`IbEXx%zu|mi_qYs+_OOP)FVH z)lfez1ot3sfW`s{IjnZFsfA2s_WPE=z^ASNx}|^KvR?(S6|RR>kb)^=og^}ttqo%D z4kE)J%3?G!f3eJ57DM0&bwHg{BoKr$TvrBb_(9f1f`%Xr+7Fa6G+vn}f+=ia_aOE? z6N>MInTyYBa(2VmU9ehH8`hPV&>CusNd96h;}3!A1WX^Bo#Ua*uF92o1)$x9k6 z=PD3wpQR?6(OA`UQ+$#rAMzN`WQs7LIW^|FcAC?jGSmhu-4mQF8dG^rsvnqT z%>m+Id9O(-JdX;~c`~(CPEFQ0vzXI^7Ib9_6>C{{D$U%5N&+oC(n}*6&#ihYrZzR8 z_NJB4ZQ&Jz4%DmhVz*bk@=UPS0<1t)8mjk1b+LB!>G-x7P_v#jvLak)Bb#9_3OE*n zRdgm=!+KCHb~d3eltWbf3ckGhm9HryEEF9muEMUCAlunbYFCC@*0K<=|4JxpD;8Vb z4v&y+C}CK`{)$$#PS&&0Ol~(msaejN6tc{9CTIsLUCS~`Sc^^MRPUu%k_9%lyX6^c z^T%7)3ir7}GY>6tOJ4L2mb~J%7Px#H-}|yMDc*f$Zn}HT7ud;(0n4s|lN;IVX7Rw3 z8GW*?0Co;FtP#h#N-nX>nABDDoXs> zrJ|XpDJNOa4^#RUlWe1Wk0{BI$lPM9lqk!QQY(Z<66N^}assR3B>ig4pDnr9#MpV1 zS_Vx1Tps6{RPGIqZmwe+Q>ALHgWggMQFZ9!#29N-PI9ARmgVgD(l+Ms6`(I&gEN<+ zSA9MYq~jImjr63=rQMj7#fX#XYC>t9FQ}WGr{DzBpOg-?tA7pb zWuax*zf!cZ*@bK&2m3F~1~RXkXzMFSM$Pt^SEMzwi)haW)&kx$x9Pmw_%Y5cEuer@{j`Ja$ l>wM=t@43%^4)mZ4edt6ly3vo0^rS0&=}d3B(-~3#06TV5otyvw literal 0 HcmV?d00001 diff --git a/examples/CMakeTests.cmake b/examples/CMakeTests.cmake index 30f73c436b2..e2cd8266db0 100644 --- a/examples/CMakeTests.cmake +++ b/examples/CMakeTests.cmake @@ -117,8 +117,8 @@ if (H5_HAVE_PARALLEL AND HDF5_TEST_PARALLEL AND NOT WIN32) add_test (NAME MPI_TEST_EXAMPLES-${parallel_example} COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} ${NUMPROCS} ${MPIEXEC_PREFLAGS} $ 
${MPIEXEC_POSTFLAGS}) else () add_test (NAME MPI_TEST_EXAMPLES-${parallel_example} COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=${MPIEXEC_EXECUTABLE};${MPIEXEC_NUMPROC_FLAG};${NUMPROCS};${MPIEXEC_PREFLAGS};$;${MPIEXEC_POSTFLAGS}" - -D "TEST_ARGS:STRING=" + -D "TEST_PROGRAM=${MPIEXEC_EXECUTABLE}" + -D "TEST_ARGS:STRING=${MPIEXEC_NUMPROC_FLAG};${NUMPROCS};${MPIEXEC_PREFLAGS};$;${MPIEXEC_POSTFLAGS}" -D "TEST_EXPECT=0" -D "TEST_SKIP_COMPARE=TRUE" -D "TEST_OUTPUT=${parallel_example}.out" diff --git a/java/examples/datasets/CMakeLists.txt b/java/examples/datasets/CMakeLists.txt index 6ed03cabce0..7542e8e1288 100644 --- a/java/examples/datasets/CMakeLists.txt +++ b/java/examples/datasets/CMakeLists.txt @@ -80,7 +80,7 @@ endforeach () if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) get_property (target_name TARGET ${HDF5_JAVA_JNI_LIB_TARGET} PROPERTY OUTPUT_NAME) - set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$:${CMAKE_DEBUG_POSTFIX}>;") + set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$,$>:${CMAKE_DEBUG_POSTFIX}>;") set (last_test "") foreach (example ${HDF_JAVA_EXAMPLES}) diff --git a/tools/test/h5diff/CMakeTests.cmake b/tools/test/h5diff/CMakeTests.cmake index 7e437af09ca..ccb938088f4 100644 --- a/tools/test/h5diff/CMakeTests.cmake +++ b/tools/test/h5diff/CMakeTests.cmake @@ -422,8 +422,8 @@ add_test ( NAME MPI_TEST_H5DIFF-${resultfile} COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=${MPIEXEC_EXECUTABLE};${MPIEXEC_NUMPROC_FLAG};${MPIEXEC_MAX_NUMPROCS};${MPIEXEC_PREFLAGS};$;${MPIEXEC_POSTFLAGS}" - -D "TEST_ARGS:STRING=${ARGN}" + -D "TEST_PROGRAM=${MPIEXEC_EXECUTABLE}" + -D "TEST_ARGS:STRING=${MPIEXEC_NUMPROC_FLAG};${MPIEXEC_MAX_NUMPROCS};${MPIEXEC_PREFLAGS};$;${MPIEXEC_POSTFLAGS};${ARGN}" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/PAR/testfiles" -D "TEST_OUTPUT=${resultfile}.out" -D "TEST_EXPECT=0" diff --git a/tools/test/h5dump/CMakeTests.cmake b/tools/test/h5dump/CMakeTests.cmake index 0ae7bbdb3ab..cdc3be83bdf 100644 --- 
a/tools/test/h5dump/CMakeTests.cmake +++ b/tools/test/h5dump/CMakeTests.cmake @@ -580,7 +580,7 @@ ) add_test ( NAME H5DUMP-${resultfile}-output-cmp - COMMAND ${CMAKE_COMMAND} -E compare_files --ignore-eol ${resultfile}.txt ${resultfile}.exp + COMMAND ${CMAKE_COMMAND} -E compare_files ${CMAKE_IGNORE_EOL} ${resultfile}.txt ${resultfile}.exp ) set_tests_properties (H5DUMP-${resultfile}-output-cmp PROPERTIES DEPENDS H5DUMP-${resultfile} @@ -645,7 +645,7 @@ ) add_test ( NAME H5DUMP-${resultfile}-output-cmp - COMMAND ${CMAKE_COMMAND} -E compare_files --ignore-eol ${resultfile}.txt ${resultfile}.exp + COMMAND ${CMAKE_COMMAND} -E compare_files ${CMAKE_IGNORE_EOL} ${resultfile}.txt ${resultfile}.exp ) set_tests_properties (H5DUMP-${resultfile}-output-cmp PROPERTIES DEPENDS H5DUMP-${resultfile} @@ -653,7 +653,7 @@ ) add_test ( NAME H5DUMP-${resultfile}-output-cmp-ddl - COMMAND ${CMAKE_COMMAND} -E compare_files --ignore-eol ${ddlfile}.txt ${ddlfile}.exp + COMMAND ${CMAKE_COMMAND} -E compare_files ${CMAKE_IGNORE_EOL} ${ddlfile}.txt ${ddlfile}.exp ) set_tests_properties (H5DUMP-${resultfile}-output-cmp-ddl PROPERTIES DEPENDS H5DUMP-${resultfile}-output-cmp @@ -699,7 +699,7 @@ ) add_test ( NAME H5DUMP-output-cmp-${resultfile} - COMMAND ${CMAKE_COMMAND} -E compare_files --ignore-eol ${resultfile}.txt ${resultfile}.exp + COMMAND ${CMAKE_COMMAND} -E compare_files ${CMAKE_IGNORE_EOL} ${resultfile}.txt ${resultfile}.exp ) set_tests_properties (H5DUMP-output-cmp-${resultfile} PROPERTIES DEPENDS H5DUMP-output-${resultfile} diff --git a/tools/test/misc/CMakeTestsClear.cmake b/tools/test/misc/CMakeTestsClear.cmake index 5e307aa3fcc..a5549724cf4 100644 --- a/tools/test/misc/CMakeTestsClear.cmake +++ b/tools/test/misc/CMakeTestsClear.cmake @@ -99,6 +99,10 @@ -D "TEST_REFERENCE=${resultfile}.ddl" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) + if (last_test) + set_tests_properties (H5CLEAR_CMP-${testname} PROPERTIES DEPENDS ${last_test}) + endif () + set (last_test "H5CLEAR_CMP-${testname}") 
endif () endmacro () @@ -117,6 +121,10 @@ -D "TEST_ERRREF=${resultfile}.err" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) + if (last_test) + set_tests_properties (H5CLEAR_CMP-${testname} PROPERTIES DEPENDS ${last_test}) + endif () + set (last_test "H5CLEAR_CMP-${testname}") endif () endmacro () From a4107095a9114667552f4b4cece8c952605484d7 Mon Sep 17 00:00:00 2001 From: Larry Knox Date: Wed, 27 Sep 2023 12:38:17 -0500 Subject: [PATCH 104/108] Update bin/release (#3608) * Remove hpc-cmake-tgz * Improve sha256 handling * Use hdf5-examples-master.zip instead of HDF5Examples-2.0.3-Source.zip --- bin/release | 131 +++++++++------------------------------------------- 1 file changed, 21 insertions(+), 110 deletions(-) diff --git a/bin/release b/bin/release index 1226d33d8a1..31060d4bef9 100755 --- a/bin/release +++ b/bin/release @@ -42,36 +42,36 @@ for compressing the resulting tar archive (if none are given then using CMake on unix machines. cmake-zip -- convert all text files to DOS style and create a zip file including cmake scripts and .bat files to build HDF5 source using CMake on Windows. - hpc-cmake-tgz - -- create a tar file using the gzip default level with a build-unix.sh - command file and all other CMake files needed to build HDF5 source - using CMake on unix machines, with HDF5options.cmake files for serial - and parallel builds on machines requiring batch jobs to run tests. - The default is for parallel build, with serial only build by changing - the HDF5options.cmake symlink to ser-HDF5options.cmake. More - information is available in the README_HPC file. doc -- produce the latest doc tree in addition to the archive. -A sha256 checksum is produced for each archive created and stored in the sha256 file. +A sha256 checksum is produced for each archive created and stored in a corresponding sha256 file. 
Examples: $ bin/release -d /tmp /tmp/hdf5-1.8.13-RELEASE.txt - /tmp/hdf5-1.8.13.sha256 /tmp/hdf5-1.8.13.tar + /tmp/hdf5-1.8.13.tar.sha256 $ bin/release -d /tmp gzip /tmp/hdf5-1.8.13-RELEASE.txt - /tmp/hdf5-1.8.13.sha256 + /tmp/hdf5-1.8.13.tar + /tmp/hdf5-1.8.13.tar.sha256 /tmp/hdf5-1.8.13.tar.gz + /tmp/hdf5-1.8.13.tar.gz.sha256 $ bin/release -d /tmp tar gzip zip /tmp/hdf5-1.8.13-RELEASE.txt - /tmp/hdf5-1.8.13.sha256 /tmp/hdf5-1.8.13.tar + /tmp/hdf5-1.8.13.tar.sha256 /tmp/hdf5-1.8.13.tar.gz - /tmp/hdf5-1.8.13.tar.zip + /tmp/hdf5-1.8.13.tar.gz.sha256 + /tmp/hdf5-1.8.13.zip + /tmp/hdf5-1.8.13.zip.sha256 + +The integrity of a downloaded file can be verified on Linux platforms by running +"sha256sum --check .sha256, which will display 'OK' if the calculated +checksum of matches the checksum in .sha256. EOF @@ -205,7 +205,7 @@ tar2cmakezip() # step 3: add LIBAEC.tar.gz, ZLib.tar.gz and cmake files cp /mnt/scr1/pre-release/hdf5/CMake/LIBAEC.tar.gz $cmziptmpsubdir cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmziptmpsubdir - cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-2.0.3-Source.zip $cmziptmpsubdir + cp /mnt/scr1/pre-release/hdf5/CMake/hdf5-examples-master.zip $cmziptmpsubdir cp /mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-master.zip $cmziptmpsubdir cp $cmziptmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmziptmpsubdir cp $cmziptmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmziptmpsubdir @@ -297,7 +297,7 @@ tar2cmaketgz() # step 3: add LIBAEC.tar.gz, ZLib.tar.gz and cmake files cp /mnt/scr1/pre-release/hdf5/CMake/LIBAEC.tar.gz $cmgztmpsubdir cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmgztmpsubdir - cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-2.0.3-Source.tar.gz $cmgztmpsubdir + cp /mnt/scr1/pre-release/hdf5/CMake/hdf5-examples-master.tar.gz $cmgztmpsubdir cp /mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-master.tar.gz $cmgztmpsubdir cp $cmgztmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmgztmpsubdir cp 
$cmgztmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmgztmpsubdir @@ -308,88 +308,6 @@ tar2cmaketgz() rm -rf $cmgztmpdir } -# Function name: tar2hpccmaketgz -# Convert the release tarball to a gzipped tarfile with files to run CMake build -# and HDF5options.cmake files for parallel or serial only builds where build -# tests are run on compute nodes using batch scripts. -# -# Steps: -# 1. untar the tarball in a temporary directory; -# Note: do this in a temporary directory to avoid changing -# the original source directory which may be around. -# 2. add build-unix.sh script. -# 3. add LIBAEC.tar.gz, ZLib.tar.gz and cmake files to top level directory. -# 4. create gzipped tar file with these contents: -# build-unix.sh script -# hdf5- source code directory extracted from tar file -# CTestScript.cmake cmake file copied from /config/cmake/scripts -# HDF5config.cmake cmake file copied from /config/cmake/scripts -# HDF5options.cmake cmake file copied from /config/cmake/scripts -# LIBAEC.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake -# ZLib.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake -# -# 5. For HPC-CMake tgz file the following are also needed in the top-level directory: -# README_HPC copied from release_docs -# ser-HDF5options.cmake copied from /config/cmake/scripts/HPC -# par-HDF5options.cmake copied from /config/cmake/scripts/HPC -# HDF5options.cmake symlink to par-HDF5options.cmake -# - -# Parameters: -# $1 version -# $2 release tarball -# $3 output zipball file name -# -# Returns 0 if successful; 1 otherwise -# - # need function to create another temporary directory, extract the - # $tmpdir/$HDF5_VERS.tar into it, create build-unix.sh, - # add CTestScript.cmake, HDF5config.cmake, LIBAEC.tar.gz - # ZLib.tar.gz, HDF5 examples, and then tar.gz it. 
-tar2hpccmaketgz() -{ - if [ $# -ne 3 ]; then - echo "usage: tar2hpccmaketgz " - return 1 - fi - cmgztmpdir=/tmp/cmgztmpdir$$ - cmgztmpsubdir=$cmgztmpdir/HPC-CMake-$HDF5_VERS - mkdir -p $cmgztmpsubdir - version=$1 - tarfile=$2 - tgzfile=$3 - - # step 1: untar tarball in cmgztmpdir - (cd $cmgztmpsubdir; tar xf -) < $tarfile - # sanity check - if [ ! -d $cmgztmpsubdir/$version ]; then - echo "untar did not create $cmgztmpsubdir/$version source dir" - # cleanup - rm -rf $cmgztmpdir - return 1 - fi - - - # step 2: add build-unix.sh script - (cd $cmgztmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=Unix -C Release -V -O hdf5.log" > build-unix.sh; chmod 755 build-unix.sh) - - # step 3: add LIBAEC.tar.gz, ZLib.tar.gz and cmake files - cp /mnt/scr1/pre-release/hdf5/CMake/LIBAEC.tar.gz $cmgztmpsubdir - cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmgztmpsubdir - cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-2.0.3-Source.tar.gz $cmgztmpsubdir - cp /mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-master.tar.gz $cmgztmpsubdir - cp $cmgztmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmgztmpsubdir - cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmgztmpsubdir - - cp $cmgztmpsubdir/$version/release_docs/README_HPC $cmgztmpsubdir - cp $cmgztmpsubdir/$version/config/cmake/scripts/HPC/ser-HDF5options.cmake $cmgztmpsubdir - cp $cmgztmpsubdir/$version/config/cmake/scripts/HPC/par-HDF5options.cmake $cmgztmpsubdir - (cd $cmgztmpsubdir; ln -s par-HDF5options.cmake HDF5options.cmake) - tar czf $DEST/HPC-CMake-$HDF5_VERS.tar.gz -C $cmgztmpdir . || exit 1 - - # cleanup - rm -rf $cmgztmpdir -} # This command must be run at the top level of the hdf5 source directory. # Verify this requirement. @@ -551,43 +469,36 @@ test "$verbose" && echo " Running tar..." 
1>&2 (cd "$tmpdir" && exec tar -ch --exclude-vcs -f "$HDF5_VERS.tar" "./$HDF5_IN_VERS" || exit 1 ) # Compress -SHA256=$HDF5_VERS.sha256 -cp /dev/null $DEST/$SHA256 for comp in $methods; do case $comp in tar) cp -p $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.tar - (cd $DEST; sha256sum $HDF5_VERS.tar >> $SHA256) + (cd $DEST; sha256sum $HDF5_VERS.tar > $HDF5_VERS.tar.sha256) ;; gzip) test "$verbose" && echo " Running gzip..." 1>&2 gzip -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.gz - (cd $DEST; sha256sum $HDF5_VERS.tar.gz >> $SHA256) + (cd $DEST; sha256sum $HDF5_VERS.tar.gz > $HDF5_VERS.tar.gz.sha256) ;; cmake-tgz) test "$verbose" && echo " Creating CMake tar.gz file..." 1>&2 tar2cmaketgz $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/CMake-$HDF5_VERS.tar.gz 1>&2 - (cd $DEST; sha256sum CMake-$HDF5_VERS.tar.gz >> $SHA256) - ;; - hpc-cmake-tgz) - test "$verbose" && echo " Creating HPC-CMake tar.gz file..." 1>&2 - tar2hpccmaketgz $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/HPC-CMake-$HDF5_VERS.tar.gz 1>&2 - (cd $DEST; sha256sum HPC-CMake-$HDF5_VERS.tar.gz >> $SHA256) + (cd $DEST; sha256sum CMake-$HDF5_VERS.tar.gz > CMake-$HDF5_VERS.tar.gz.sha256) ;; bzip2) test "$verbose" && echo " Running bzip2..." 1>&2 bzip2 -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.bz2 - (cd $DEST; sha256sum $HDF5_VERS.tar.bz2 >> $SHA256) + (cd $DEST; sha256sum $HDF5_VERS.tar.bz2 > $HDF5_VERS.tar.bz2.sha256) ;; zip) test "$verbose" && echo " Creating zip ball..." 1>&2 tar2zip $HDF5_IN_VERS $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.zip 1>&2 - (cd $DEST; sha256sum $HDF5_VERS.zip >> $SHA256) + (cd $DEST; sha256sum $HDF5_VERS.zip > $HDF5_VERS.zip.sha256) ;; cmake-zip) test "$verbose" && echo " Creating CMake-zip ball..." 
1>&2 tar2cmakezip $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/CMake-$HDF5_VERS.zip 1>&2 - (cd $DEST; sha256sum CMake-$HDF5_VERS.zip >> $SHA256) + (cd $DEST; sha256sum CMake-$HDF5_VERS.zip > CMake-$HDF5_VERS.zip.sha256) ;; doc) if [ "${DOCVERSION}" = "" ]; then From 9f00c099dbcd1e5e287bc6c35cb028dfac4118dc Mon Sep 17 00:00:00 2001 From: Lori Cooper Date: Wed, 27 Sep 2023 17:04:49 -0500 Subject: [PATCH 105/108] Edit RELEASE.txt for consistency. (#3610) --- release_docs/RELEASE.txt | 389 +++++++++++++++++++++------------------ 1 file changed, 205 insertions(+), 184 deletions(-) diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 02e0c731b45..b237ad13640 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -35,11 +35,11 @@ CONTENTS ======== - New Features -- Support for new platforms and languages -- Bug Fixes since HDF5-1.12.2 +- Support for New Platforms and Languages +- Bug Fixes Since HDF5-1.12.2 - Platforms Tested - Known Problems -- CMake vs. Autotools installations +- CMake vs. Autotools Installations New Features @@ -47,13 +47,13 @@ New Features Configuration: ------------- - - The minimum required version of Autoconf has been bumped to 2.71 + - Bumped the minimum required version of Autoconf to 2.71. This fixes a problem with the Intel oneAPI Fortran compiler's -loopopt option being interpreted as a linker option (see bug fixes, below). This should only affect building the library from a maintenance branch - using the Autotools, when autogen.sh is used to generate the Autotools + using Autotools, when autogen.sh is used to generate the Autotools files. It does NOT affect the binaries or building from the source tarballs @@ -63,13 +63,12 @@ New Features CMake supports two main files, CMakePresets.json and CMakeUserPresets.json, that allow users to specify common configure options and share them with others. 
- HDF added a CMakePresets.json file of a typical configuration and support - file, config/cmake-presets/hidden-presets.json. - Also added a section to INSTALL_CMake.txt with very basic explanation of the - process to use CMakePresets. + This release added a CMakePresets.json file of a typical configuration and support + file, config/cmake-presets/hidden-presets.json. It also added a section to + INSTALL_CMake.txt with very basic explanation of the process to use CMakePresets. - Enabled instrumentation of the library by default in CMake for parallel - debug builds + debug builds. HDF5 can be configured to instrument portions of the parallel library to aid in debugging. Autotools builds of HDF5 turn this capability on by @@ -99,37 +98,37 @@ New Features See the CMakeFilters.cmake and config/cmake/cacheinit.cmake files for usage. - - Add new CMake configuration variable HDF5_USE_GNU_DIRS + - Added new CMake configuration variable HDF5_USE_GNU_DIRS. HDF5_USE_GNU_DIRS (default OFF) selects the use of GNU Coding Standard install - directory variables by including the CMake module, GNUInstallDirs(see CMake + directory variables by including the CMake module, GNUInstallDirs (see CMake documentation for details). The HDF_DIR_PATHS macro in the HDFMacros.cmake file - sets various PATH variables for use during the build, test and install processes. + sets various PATH variables for use during the build, test, and install processes. By default, the historical settings for these variables will be used. - - Correct the usage of CMAKE_Fortran_MODULE_DIRECTORY and where to + - Corrected the usage of CMAKE_Fortran_MODULE_DIRECTORY and where to install Fortran mod files. The Fortran modules files, ending in .mod are files describing a Fortran 90 (and above) module API and ABI. These are not like C header files describing an API, they are compiler dependent and - arch dependent, and not easily readable by a human being. 
They are + architecture dependent, and not easily readable by a human being. They are nevertheless searched for in the includes directories by gfortran (in directories specified with -I). - + Autotools configure uses the -fmoddir option to specify the folder. CMake will use "mod" folder by default unless overridden by the CMake - variable; HDF5_INSTALL_MODULE_DIR. + variable HDF5_INSTALL_MODULE_DIR. Library: -------- - - Change the error handling for a not found path in the find plugin process. + - Changed the error handling for a not found path in the find plugin process. - While attempting to load a plugin the HDF5 library will fail if one of the + While attempting to load a plugin, the HDF5 library will fail if one of the directories in the plugin paths does not exist, even if there are more paths - to check. Instead of exiting the function with an error, just logged the error - and continue processing the list of paths to check. + to check. Instead of exiting the function with an error, it now just logs the error + and continues processing the list of paths to check. Parallel Library: @@ -149,17 +148,18 @@ New Features Java Library: ------------- - - Fixed memory leaks that could occur when reading a dataset from a - malformed file + - Fixed memory leaks that can occur when reading a dataset from a + malformed file. When attempting to read layout, pline, and efl information for a dataset, memory leaks could occur if attempting to read pline/efl - information threw an error, which is due to the memory that was - allocated for pline and efl not being properly cleaned up on error. + information throws an error. The leaks would occur because the memory + that was allocated for pline and efl not being properly cleaned up by + error handling. Fixes Github issue #2602 - - HDF5GroupInfo class has been deprecated. + - Deprecated the HDF5GroupInfo class. 
This class assumes that an object can contain four values which uniquely identify an object among those HDF5 files which are open. This is no longer valid in future @@ -167,25 +167,26 @@ New Features - Added version of H5Rget_name to return the name as a Java string. - Other functions that get_name process the get_size then get the name - within the JNI implementation. Now H5Rget_name has a H5Rget_name_string. + Other functions that use the "get_name" process use "get_size" before + retrieving the name. This happens in the JNI wrapper implementation of + the function. Now H5Rget_name has a H5Rget_name_string which returns the + string like similar functions. - Added reference support to H5A and H5D read write vlen JNI functions. - Added the implementation to handle VL references as an Array of Lists + This release implemented a change to handle VL references as an Array of Lists of byte arrays. The JNI wrappers translate the Array of Lists to/from the hvl_t vlen structures. The wrappers use the specified datatype arguments for the - List type translation, it is expected that the Java type is correct. + List type translation; it is expected that the Java type is correct. Fixes Jira issue HDFFV-11318 - - H5A and H5D read write vlen JNI functions were incorrect. + - Corrected H5A and H5D read write vlen JNI functions. - Corrected the vlen function implementations for the basic primitive types. - The VLStrings functions now correctly use the implementation that had been - the VL functions. (VLStrings functions did not have an implementation.) + The VLStrings functions now correctly use the implementation previously used + by the VL functions. (VLStrings functions did not have an implementation.) The new VL functions implementation now expect an Array of Lists between Java and the JNI wrapper. @@ -195,16 +196,17 @@ New Features Fixes Jira issue HDFFV-11310 - - H5A and H5D read write JNI functions had flawed vlen datatype check. 
+ - Corrected H5A and H5D read write JNI functions with a flawed vlen datatype check. - Adapted tools function for JNI utils file. This reduced multiple calls + Adapted tools function for JNI utils file. This reduces multiple calls to a single check and variable. The variable can then be used to call - the H5Treclaim function. Adjusted existing test and added new test. + the H5Treclaim function. This release also adjusts the existing test and adds + a new test. Tools: ------ - - 1.10 References in containers were not displayed properly by h5dump. + - Fixed an issue with h5dump not properly displaying 1.10 References in containers. Ported 1.10 tools display function to provide ability to inspect and display 1.10 reference data. @@ -227,8 +229,8 @@ New Features Documentation: -------------- - - Doxygen User Guide documentation is available when configured and generated. - The resulting documentation files will be in the share/html subdirectory + - Doxygen User Guide documentation can be configured and generated. The + resulting documentation files will be in the share/html subdirectory of the HDF5 install directory. @@ -241,7 +243,7 @@ Bug Fixes since HDF5-1.12.2 release =================================== Library ------- - - Fixed CVE-2018-11202 + - Fixed memory leak during shut down. A malformed file could result in chunk index memory leaks. Under most conditions (i.e., when the --enable-using-memchecker option is NOT @@ -255,7 +257,9 @@ Bug Fixes since HDF5-1.12.2 release The chunk index resources are now correctly cleaned up when reading misparsed files and valgrind confirms no memory leaks. - - Fixed an assertion in a previous fix for CVE-2016-4332 + Fixes CVE-2018-11202 + + - Fixed an assertion in a previous fix for CVE-2016-4332. An assert could fail when processing corrupt files that have invalid shared message flags (as in CVE-2016-4332). 
@@ -268,7 +272,7 @@ Bug Fixes since HDF5-1.12.2 release Fixes CVE-2016-4332 and HDFFV-9950 (confirmed via the cve_hdf5 repo) - Fixed a file space allocation bug in the parallel library for chunked - datasets + datasets. With the addition of support for incremental file space allocation for chunked datasets with filters applied to them that are created/accessed @@ -279,7 +283,7 @@ Bug Fixes since HDF5-1.12.2 release cause parallel writes to those datasets to place incorrect data in the file. - Fixed an assertion failure in Parallel HDF5 when a file can't be created - due to an invalid library version bounds setting + due to an invalid library version bounds setting. An assertion failure could occur in H5MF_settle_raw_data_fsm when a file can't be created with Parallel HDF5 due to specifying the use of a paged, @@ -289,7 +293,7 @@ Bug Fixes since HDF5-1.12.2 release (H5Pset_libver_bounds(..., H5F_LIBVER_EARLIEST, H5F_LIBVER_V18)). This has now been fixed. - - Fixed a bug in H5Ocopy that could generate invalid HDF5 files + - Fixed a bug in H5Ocopy that could generate invalid HDF5 files. H5Ocopy was missing a check to determine whether the new object's object header version is greater than version 1. Without this check, @@ -301,109 +305,100 @@ Bug Fixes since HDF5-1.12.2 release Fixes GitHub issue #2653 - - Fixed potential heap buffer overflow in decoding of link info message + - Fixed potential heap buffer overflow in decoding of link info message. Detections of buffer overflow were added for decoding version, index - flags, link creation order value, and the next three addresses. The + flags, link creation order value, and the next three addresses. The checkings will remove the potential invalid read of any of these values that could be triggered by a malformed file. Fixes GitHub issue #2603 - - Fixed potential buffer overrun issues in some object header decode routines + - Fixed potential buffer overrun issues in some object header decode routines. 
Several checks were added to H5O__layout_decode and H5O__sdspace_decode to ensure that memory buffers don't get overrun when decoding buffers read from - a (possibly corrupted) HDF5 file. - - - Fixed a heap buffer overflow that occurs when reading from - a dataset with a compact layout within a malformed HDF5 file - - During opening of a dataset that has a compact layout, the - library allocates a buffer that stores the dataset's raw data. - The dataset's object header that gets written to the file - contains information about how large of a buffer the library - should allocate. If this object header is malformed such that - it causes the library to allocate a buffer that is too small - to hold the dataset's raw data, future I/O to the dataset can - result in heap buffer overflows. To fix this issue, an extra - check is now performed for compact datasets to ensure that - the size of the allocated buffer matches the expected size - of the dataset's raw data (as calculated from the dataset's - dataspace and datatype information). If the two sizes do not - match, opening of the dataset will fail. + a possibly corrupted HDF5 file. + + - Fixed a heap buffer overflow that occurs when reading from a dataset with + a compact layout within a malformed HDF5 file. + + During opening of a dataset that has a compact layout, the library allocates + a buffer that stores the dataset's raw data. The dataset's object header that + gets written to the file contains information about how large of a buffer the + library should allocate. If this object header is malformed such that it causes + the library to allocate a buffer that is too small to hold the dataset's raw data, + future I/O to the dataset can result in heap buffer overflows. To fix this issue, + an extra check is now performed for compact datasets to ensure that the size of the + allocated buffer matches the expected size of the dataset's raw data (as calculated + from the dataset's dataspace and datatype information). 
If the two sizes do not match, + opening of the dataset will fail. Fixes GitHub issue #2606 - - Fix for CVE-2019-8396 + - Fixed an issue with malformed HDF5 files causing heap overflow. - Malformed HDF5 files may have truncated content which does not match - the expected size. When H5O__pline_decode() attempts to decode these it - may read past the end of the allocated space leading to heap overflows - as bounds checking is incomplete. + Malformed HDF5 files may have truncated content which does not match the expected size. + When H5O__pline_decode() attempts to decode these it may read past the end of the + allocated space leading to heap overflows as bounds checking is incomplete. The fix ensures each element is within bounds before reading. Fixes Jira issue HDFFV-10712, CVE-2019-8396, GitHub issue #2209 - - Memory leak + - Fixed memory leak when running h5dump with proof of vulnerability file. - Memory leak was detected when running h5dump with "pov". The memory was allocated - via H5FL__malloc() in hdf5/src/H5FL.c + The fuzzed file "pov" was an HDF5 file containing an illegal continuation + message. When deserializing the object header chunks for the file, memory + was allocated for the array of continuation messages, but this memory was + never freed when an error occurred. - The fuzzed file "pov" was an HDF5 file containing an illegal continuation message. - When deserializing the object header chunks for the file, memory is allocated for the - array of continuation messages (cont_msg_info->msgs) in continuation message info struct. - As error is encountered in loading the illegal message, the memory allocated for - cont_msg_info->msgs needs to be freed. + The allocated memory is now properly released. 
Fixes GitHub issue #2599 - - Fixed a memory corruption issue that can occur when reading - from a dataset using a hyperslab selection in the file - dataspace and a point selection in the memory dataspace + - Fixed a memory corruption issue that can occur when reading from a dataset + using a hyperslab selection in the file dataspace and a point selection in + the memory dataspace. - When reading from a dataset using a hyperslab selection in - the dataset's file dataspace and a point selection in the - dataset's memory dataspace where the file dataspace's "rank" - is greater than the memory dataspace's "rank", memory corruption - could occur due to an incorrect number of selection points - being copied when projecting the point selection onto the - hyperslab selection's dataspace. + When reading from a dataset using a hyperslab selection in the dataset's file + dataspace and a point selection in the dataset's memory dataspace where the file + dataspace's "rank" is greater than the memory dataspace's "rank", memory + corruption could occur due to an incorrect number of selection points being copied + when projecting the point selection onto the hyperslab selection's dataspace. - - Fix CVE-2021-37501 / GHSA-rfgw-5vq3-wrjf + Fixes CVE-2021-37501 and GHSA-rfgw-5vq3-wrjf - Check for overflow when calculating on-disk attribute data size. + - Added a check for overflow when calculating on-disk attribute data size. A bogus hdf5 file may contain dataspace messages with sizes which lead to the on-disk data sizes to exceed what is addressable. - When calculating the size, make sure, the multiplication does not - overflow. - The test case was crafted in a way that the overflow caused the - size to be 0. + When calculating the size, this update now ensures that the + multiplication does not overflow. The test case was crafted in a way + that the overflow caused the size to be 0. Fixes GitHub issue #2458 - - Seg fault on file close + - Fixed seg fault on file close. 
h5debug fails at file close with core dump on a file that has an - illegal file size in its cache image. In H5F_dest(), the library + illegal file size in its cache image. In H5F_dest(), the library performs all the closing operations for the file and keeps track of - the error encountered when reading the file cache image. - At the end of the routine, it frees the file's file structure and - returns error. Due to the error return, the file object is not removed - from the ID node table. This eventually causes assertion failure in + the error encountered when reading the file cache image. At the end of + the routine, it frees the file's file structure and returns an error. + Due to the error return, the file object is not removed from the ID + node table. This eventually causes assertion failure in H5VL__native_file_close() when the library finally exits and tries to access that file object in the table for closing. The closing routine, H5F_dest(), will not free the file structure if - there is error, keeping a valid file structure in the ID node table. - It will be freed later in H5VL__native_file_close() when the - library exits and terminates the file package. + there is an error, keeping a valid file structure in the ID node table. + It will be freed later in H5VL__native_file_close() when the library + exits and terminates the file package. Fixes Jira issue HDFFV-11052, CVE-2020-10812 - - Fixed an issue with variable length attributes + - Fixed an issue with variable length attributes. Previously, if a variable length attribute was held open while its file was opened through another handle, the same attribute was opened through @@ -411,12 +406,12 @@ Bug Fixes since HDF5-1.12.2 release closed, attempting to write to the attribute through the first handle would cause an error. - - Fixed an issue with hyperslab selections + - Fixed an issue with hyperslab selections. 
Previously, when combining hyperslab selections, it was possible for the library to produce an incorrect combined selection. - - Fixed an issue with attribute type conversion with compound datatypes + - Fixed an issue with attribute type conversion with compound datatypes. Previously, when performing type conversion for attribute I/O with a compound datatype, the library would not fill the background buffer with @@ -425,18 +420,18 @@ Bug Fixes since HDF5-1.12.2 release Fixes GitHub issue #2016 - - Modified H5Fstart_swmr_write() to preserve DAPL properties + - Modified H5Fstart_swmr_write() to preserve DAPL properties. Internally, H5Fstart_swmr_write() closes and reopens the file in question - as part of its process for making the file SWMR-safe. Previously, when + as part of its process for making the file SWMR-safe. Previously, when the library reopened the file it would simply use the default access - properties. Modified the library to instead save these properties and use - them when reopening the file. + properties. This release modifies the library to instead save these + properties and use them when reopening the file. Fixes Jira issue HDFFV-11308 - Converted an assertion on (possibly corrupt) file contents to a normal - error check + error check. Previously, the library contained an assertion check that a read superblock doesn't contain a superblock extension message when the superblock @@ -446,7 +441,7 @@ Bug Fixes since HDF5-1.12.2 release Fixes Jira issues HDFFV-11316 & HDFFV-11317 - - Memory leak + - Fixed memory leak with variable-length fill values. A memory leak was observed with variable-length fill value in H5O_fill_convert() function in H5Ofill.c. The leak is @@ -454,7 +449,7 @@ Bug Fixes since HDF5-1.12.2 release Previously, fill->buf is used for datatype conversion if it is large enough and the variable-length information - is therefore lost. A buffer is now allocated regardless + is therefore lost. 
A buffer is now allocated regardless so that the element in fill->buf can later be reclaimed. Fixes Jira issue HDFFV-10840 @@ -475,7 +470,7 @@ Bug Fixes since HDF5-1.12.2 release Configuration ------------- - - Fix Intel oneAPI -loopopt Fortran option being detected as a linker flag + - Fix Intel oneAPI -loopopt Fortran option being detected as a linker flag. Intel's new oneAPI Fortran compiler takes a -loopopt flag that is interpreted as a linker flag by Autoconf 2.69 and earlier. This bug @@ -486,7 +481,7 @@ Bug Fixes since HDF5-1.12.2 release to 2.71 in configure.ac. The release source code will be processed with Autoconf 2.71 or later. - - The accum test now passes on macOS 12+ (Monterey) w/ CMake + - The accum test now passes on macOS 12+ (Monterey) with CMake. Due to changes in the way macOS handles LD_LIBRARY_PATH, the accum test started failing on macOS 12+ when building with CMake. CMake has been @@ -494,7 +489,7 @@ Bug Fixes since HDF5-1.12.2 release Fixes GitHub #2994, #2261, and #1289 - - Fixed syntax of generator expressions used by CMake + - Fixed syntax of generator expressions used by CMake. Adding quotes around the generator expression should allow CMake to correctly parse the expression. Generator expressions are typically @@ -510,25 +505,26 @@ Bug Fixes since HDF5-1.12.2 release - Correct the CMake generated pkg-config file The pkg-config file generated by CMake had the order and placement of the - libraries wrong. Also added support for debug library names. + libraries wrong. This release adds support for debug library names. - Changed the order of Libs.private libraries so that dependencies come after - dependents. Did not move the compression libraries into Requires.private - because there was not a way to determine if the compression libraries had - supported pkconfig files. Still recommend that the CMake config file method - be used for building projects with CMake. 
+      This release also changes the order of Libs.private libraries so that
+      dependencies come after dependents. However, the release did not move the
+      compression libraries into Requires.private because there was not a way to
+      determine if the compression libraries had supported pkg-config files. It is
+      still recommended that the CMake config file method be used for building
+      projects with CMake.
 
       Fixes GitHub issues #1546 and #2259
 
-    - Change the settings of the *pc files to use the correct format
+    - Change the settings of the *pc files to use the correct format.
 
       The pkg-config files generated by CMake uses incorrect syntax for the
       'Requires' settings.
 
       Changing the set to use 'lib-name = version' instead 'lib-name-version'
-      fixes the issue
+      fixes the issue.
 
       Fixes Jira issue HDFFV-11355
 
-    - Move MPI libraries link from PRIVATE to PUBLIC
+    - Move MPI libraries link from PRIVATE to PUBLIC.
 
       The install dependencies were not including the need for MPI libraries when
       an application or library was built with the C library. Also updated the
@@ -537,21 +533,25 @@ Bug Fixes since HDF5-1.12.2 release
 
     Tools
     -----
-    - Names of objects with square brackets will have trouble without the
-      special argument, --no-compact-subset, on the h5dump command line.
+    - Added option --no-compact-subset to h5diff.
+
+      When the name of an object contains square brackets, h5diff was not able
+      to parse it correctly because "[ ]" were interpreted as the compact form
+      of subsetting.
 
-      h5diff did not have this option and now it has been added.
+      h5dump has the special argument, --no-compact-subset to allow the use of
+      '[' and ']' in dataset names. h5diff now has this option as well.
 
       Fixes GitHub issue #2682
 
-    - In the tools traverse function - an error in either visit call
-      will bypass the cleanup of the local data variables.
+    - Replaced the H5TOOLS_GOTO_ERROR with just H5TOOLS_ERROR.
+ In the tools traverse function, an error in either visit call + will bypass the cleanup of the local data variables. Fixes GitHub issue #2598 - - Fix h5repack to only print output when verbose option is selected + - Fixed h5repack to only print output when verbose option is selected. When timing option was added to h5repack, the check for verbose was incorrectly implemented. @@ -602,42 +602,53 @@ Bug Fixes since HDF5-1.12.2 release Platforms Tested =================== - Linux 5.13.14-200.fc34 GNU gcc (GCC) 11.2.1 2021078 (Red Hat 11.2.1-1) - #1 SMP x86_64 GNU/Linux GNU Fortran (GCC) 11.2.1 2021078 (Red Hat 11.2.1-1) - Fedora34 clang version 12.0.1 (Fedora 12.0.1-1.fc34) + Linux 5.19.0-1023-aws GNU gcc, gfortran, g++ + #24-Ubuntu SMP x86_64 GNU/Linux (Ubuntu 11.3.0-1ubuntu1~22.04) 11.3.0 + Ubuntu 22.04 Ubuntu clang version 14.0.0-1ubuntu1 + Intel(R) oneAPI DPC++/C++ Compiler 2023.1.0 + ifort (IFORT) 2021.9.0 20230302 (cmake and autotools) - Linux 5.11.0-34-generic GNU gcc (GCC) 9.3.0-17ubuntu1 - #36-Ubuntu SMP x86_64 GNU/Linux GNU Fortran (GCC) 9.3.0-17ubuntu1 - Ubuntu 20.04 Ubuntu clang version 10.0.0-4 + Linux 5.16.14-200.fc35 GNU gcc (GCC) 11.2.1 20220127 (Red Hat 11.2.1-9) + #1 SMP x86_64 GNU/Linux GNU Fortran (GCC) 11.2.1 20220127 (Red Hat 11.2.1-9) + Fedora35 clang version 13.0.0 (Fedora 13.0.0-3.fc35) (cmake and autotools) - Linux 5.8.0-63-generic GNU gcc (GCC) 10.3.0-1ubuntu1 - #71-Ubuntu SMP x86_64 GNU/Linux GNU Fortran (GCC) 10.3.0-1ubuntu1 - Ubuntu20.10 Ubuntu clang version 11.0.0-2 - (cmake and autotools) + Linux 5.14.21-cray_shasta_c cray-mpich/8.1.23 + #1 SMP x86_64 GNU/Linux cce/15.0.0 + (frontier) gcc/12.2.0 + (cmake) - Linux 5.3.18-22-default GNU gcc (SUSE Linux) 7.5.0 - #1 SMP x86_64 GNU/Linux GNU Fortran (SUSE Linux) 7.5.0 - SUSE15sp2 clang version 7.0.1 (tags/RELEASE_701/final 349238) + Linux 5.11.0-34-generic GNU gcc (GCC) 9.4.0-1ubuntu1 + #36-Ubuntu SMP x86_64 GNU/Linux GNU Fortran (GCC) 9.4.0-1ubuntu1 + Ubuntu 20.04 Ubuntu clang version 
10.0.0-4ubuntu1 + Intel(R) oneAPI DPC++/C++ Compiler 2023.1.0 + ifort (IFORT) 2021.9.0 20230302 (cmake and autotools) - Linux-4.14.0-115.21.2 spectrum-mpi/rolling-release - #1 SMP ppc64le GNU/Linux clang 8.0.1, 11.0.1 - (lassen) GCC 7.3.1 - XL 16.1.1.2 + Linux 4.14.0-115.35.1.1chaos aue/openmpi/4.1.4-arm-22.1.0.12 + #1 SMP aarch64 GNU/Linux Arm C/C++/Fortran Compiler version 22.1 + (stria) (based on LLVM 13.0.1) (cmake) - Linux-4.12.14-150.75-default cray-mpich/7.7.10 - #1 SMP x86_64 GNU/Linux GCC 7.3.0, 8.2.0 - (cori) Intel (R) Version 19.0.3.199 + Linux 4.14.0-115.35.1.3chaos spectrum-mpi/rolling-release + #1 SMP ppc64le GNU/Linux clang 12.0.1 + (vortex) GCC 8.3.1 + XL 2021.09.22 (cmake) - Linux-4.12.14-197.86-default cray-mpich/7.7.6 - # 1SMP x86_64 GNU/Linux GCC 7.3.0, 9.3.0, 10.2.0 - (mutrino) Intel (R) Version 17.0.4, 18.0.5, 19.1.3 + Linux-4.14.0-115.21.2 spectrum-mpi/rolling-release + #1 SMP ppc64le GNU/Linux clang 12.0.1, 14.0.5 + (lassen) GCC 8.3.1 + XL 16.1.1.2, 2021.09.22, 2022.08.05 (cmake) + Linux-4.12.14-197.99-default cray-mpich/7.7.14 + #1 SMP x86_64 GNU/Linux cce 12.0.3 + (theta) GCC 11.2.0 + llvm 9.0 + Intel 19.1.2 + Linux 3.10.0-1160.36.2.el7.ppc64 gcc (GCC) 4.8.5 20150623 (Red Hat 4.8.5-39) #1 SMP ppc64be GNU/Linux g++ (GCC) 4.8.5 20150623 (Red Hat 4.8.5-39) Power8 (echidna) GNU Fortran (GCC) 4.8.5 20150623 (Red Hat 4.8.5-39) @@ -645,48 +656,58 @@ Platforms Tested Linux 3.10.0-1160.24.1.el7 GNU C (gcc), Fortran (gfortran), C++ (g++) #1 SMP x86_64 GNU/Linux compilers: Centos7 Version 4.8.5 20150623 (Red Hat 4.8.5-4) - (jelly/kituo/moohan) Version 4.9.3, Version 5.3.0, Version 6.3.0, - Version 7.2.0, Version 8.3.0, Version 9.1.0 + (jelly/kituo/moohan) Version 4.9.3, Version 7.2.0, Version 8.3.0, + Version 9.1.0, Version 10.2.0 Intel(R) C (icc), C++ (icpc), Fortran (icc) compilers: Version 17.0.0.098 Build 20160721 GNU C (gcc) and C++ (g++) 4.8.5 compilers - with NAG Fortran Compiler Release 6.1(Tozai) + with NAG Fortran Compiler Release 
7.1(Hanzomon)
                                      Intel(R) C (icc) and C++ (icpc) 17.0.0.098 compilers
-                                         with NAG Fortran Compiler Release 6.1(Tozai)
+                                         with NAG Fortran Compiler Release 7.1(Hanzomon)
                                      MPICH 3.1.4 compiled with GCC 4.9.3
                                      MPICH 3.3 compiled with GCC 7.2.0
-                                     OpenMPI 2.1.6 compiled with icc 18.0.1
-                                     OpenMPI 3.1.3 and 4.0.0 compiled with GCC 7.2.0
+                                     OpenMPI 3.1.3 compiled with GCC 7.2.0 and 4.1.2
+                                         compiled with GCC 9.1.0
                                      PGI C, Fortran, C++ for 64-bit target on
                                      x86_64;
-                                         Version 19.10-0
+                                         Versions 18.4.0 and 19.10-0
+                                     NVIDIA nvc, nvfortran and nvc++ version 22.5-0
+                                     (autotools and cmake)
+
-    Linux-3.10.0-1127.0.0.1chaos      openmpi-4.0.0
+    Linux-3.10.0-1160.0.0.1chaos      openmpi-4.1.2
     #1 SMP x86_64 GNU/Linux           clang 6.0.0, 11.0.1
     (quartz)                          GCC 7.3.0, 8.1.0
-                                      Intel 16.0.4, 18.0.2, 19.0.4
+                                      Intel 19.0.4, 2022.2, oneapi.2022.2
+
+    Linux-3.10.0-1160.90.1.1chaos     openmpi/4.1
+    #1 SMP x86_64 GNU/Linux           GCC 7.2.0
+    (skybridge)                       Intel/19.1
+                                      (cmake)
+
+    Linux-3.10.0-1160.90.1.1chaos     openmpi/4.1
+    #1 SMP x86_64 GNU/Linux           GCC 7.2.0
+    (attaway)                         Intel/19.1
+                                      (cmake)
+
+    Linux-3.10.0-1160.90.1.1chaos     openmpi-intel/4.1
+    #1 SMP x86_64 GNU/Linux           Intel/19.1.2, 21.3.0 and 22.2.0
+    (chama)                           (cmake)
 
     macOS Apple M1 11.6               Apple clang version 12.0.5 (clang-1205.0.22.11)
     Darwin 20.6.0 arm64               gfortran GNU Fortran (Homebrew GCC 11.2.0) 11.1.0
-    (macmini-m1)                      Intel icc/icpc/ifort version 2021.3.0 20210609
+    (macmini-m1)                      Intel icc/icpc/ifort version 2021.3.0 20210609
 
     macOS Big Sur 11.3.1              Apple clang version 12.0.5 (clang-1205.0.22.9)
     Darwin 20.4.0 x86_64              gfortran GNU Fortran (Homebrew GCC 10.2.0_3) 10.2.0
     (bigsur-1)                        Intel icc/icpc/ifort version 2021.2.0 20210228
 
-    macOS High Sierra 10.13.6         Apple LLVM version 10.0.0 (clang-1000.10.44.4)
-    64-bit                            gfortran GNU Fortran (GCC) 6.3.0
-    (bear)                            Intel icc/icpc/ifort version 19.0.4.233 20190416
-
-    macOS Sierra 10.12.6              Apple LLVM version 9.0.0 (clang-900.39.2)
-    64-bit                            gfortran GNU Fortran (GCC) 7.4.0
-    (kite)                            Intel icc/icpc/ifort version 17.0.2
-
     Mac OS X El Capitan 10.11.6       
Apple clang version 7.3.0 from Xcode 7.3 64-bit gfortran GNU Fortran (GCC) 5.2.0 (osx1011test) Intel icc/icpc/ifort version 16.0.2 + Linux 2.6.32-573.22.1.el6 GNU C (gcc), Fortran (gfortran), C++ (g++) #1 SMP x86_64 GNU/Linux compilers: Centos6 Version 4.4.7 20120313 @@ -696,12 +717,12 @@ Platforms Tested x86_64; Version 19.10-0 - Windows 10 x64 Visual Studio 2015 w/ Intel C/C++/Fortran 18 (cmake) - Visual Studio 2017 w/ Intel C/C++/Fortran 19 (cmake) - Visual Studio 2019 w/ clang 12.0.0 + Windows 10 x64 Visual Studio 2019 w/ clang 12.0.0 + with MSVC-like command-line (C/C++ only - cmake) + Visual Studio 2019 w/ Intel C/C++ only cmake) + Visual Studio 2022 w/ clang 15.0.1 with MSVC-like command-line (C/C++ only - cmake) - Visual Studio 2019 - w/ Intel C/C++/Fortran oneAPI 2021 (cmake) + Visual Studio 2022 w/ Intel C/C++/Fortran oneAPI 2023 (cmake) Visual Studio 2019 w/ MSMPI 10.1 (C only - cmake) @@ -709,8 +730,8 @@ Known Problems ============== testflushrefresh.sh will fail when run with "make check-passthrough-vol" - on centos7, with 3 Errors/Segmentation faults. These will not occur when - run with "make check". See https://github.com/HDFGroup/hdf5/issues/673 + on centos7, with 3 Errors/Segmentation faults. These will not occur when + run with "make check". See https://github.com/HDFGroup/hdf5/issues/673 for details. The t_bigio test fails on several HPC platforms, generally by timeout with @@ -724,12 +745,12 @@ Known Problems ADB - 2019/05/07 At present, metadata cache images may not be generated by parallel - applications. Parallel applications can read files with metadata cache + applications. Parallel applications can read files with metadata cache images, but since this is a collective operation, a deadlock is possible if one or more processes do not participate. - CPP ptable test fails on both VS2017 and VS2019 with Intel compiler, JIRA - issue: HDFFV-10628. This test will pass with VS2015 with Intel compiler. 
+ CPP ptable test fails on both VS2017 and VS2019 with Intel and Clang compilers, + JIRA issue: HDFFV-10628. This test will pass with VS2015 with Intel compiler. The subsetting option in ph5diff currently will fail and should be avoided. The subsetting option works correctly in serial h5diff. From c423e74fbfbfe84497210e5b15db20bee8c84c3c Mon Sep 17 00:00:00 2001 From: Larry Knox Date: Fri, 29 Sep 2023 19:50:27 -0500 Subject: [PATCH 106/108] Removed the use of -commons linking option on Darwin (#3581) (#3618) --- configure.ac | 8 -------- fortran/src/H5f90global.F90 | 2 -- release_docs/RELEASE.txt | 5 ++++- 3 files changed, 4 insertions(+), 11 deletions(-) diff --git a/configure.ac b/configure.ac index 0fad0ae411b..c5e08f93171 100644 --- a/configure.ac +++ b/configure.ac @@ -1077,14 +1077,6 @@ H5_FORTRAN_SHARED="no" if test "X${HDF_FORTRAN}" = "Xyes" && test "X${enable_shared}" != "Xno"; then AC_MSG_CHECKING([if shared Fortran libraries are supported]) H5_FORTRAN_SHARED="yes" - ## tell libtool to do the right thing with COMMON symbols, this fixes - ## corrupt values with COMMON and EQUIVALENCE when building shared - ## Fortran libraries on OSX with gnu and Intel compilers (HDFFV-2772). - case "`uname`" in - Darwin*) - H5_LDFLAGS="$H5_LDFLAGS -Wl,-commons,use_dylibs" - ;; - esac ## Report results of check(s) diff --git a/fortran/src/H5f90global.F90 b/fortran/src/H5f90global.F90 index 718b95dcd33..fe575507e41 100644 --- a/fortran/src/H5f90global.F90 +++ b/fortran/src/H5f90global.F90 @@ -9,8 +9,6 @@ ! PURPOSE ! This module is used to pass C stubs for H5 Fortran APIs. The C stubs are ! packed into arrays in H5_f.c and these arrays are then passed to Fortran. -! This module then uses EQUIVALENCE to assign elements of the arrays to -! Fortran equivalent C stubs. ! ! NOTES ! 
The size of the C arrays in H5_f.c has to match the values of the variables diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index b237ad13640..4287fcfdd74 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -138,7 +138,10 @@ New Features Fortran Library: ---------------- - - + - Removed "-commons" linking option on Darwin, as COMMON and EQUIVALENCE + are no longer used in the Fortran source. + + Fixes GitHub issue #3571 C++ Library: From 069688c3faf01607a9a7a1ddb8f26961d8b993a7 Mon Sep 17 00:00:00 2001 From: Dana Robinson <43805+derobins@users.noreply.github.com> Date: Wed, 4 Oct 2023 02:55:15 -0700 Subject: [PATCH 107/108] Fix spelling (#3633) --- CMakeInstallation.cmake | 2 +- config/cmake/UseJava.cmake | 2 +- doxygen/examples/H5.format.1.0.html | 2 +- doxygen/examples/H5.format.1.1.html | 2 +- doxygen/examples/H5.format.2.0.html | 2 +- doxygen/examples/H5.format.html | 2 +- fortran/src/H5Pff.F90 | 2 +- hl/fortran/src/H5IMff.F90 | 2 +- hl/src/H5IM.c | 2 +- hl/src/H5IMpublic.h | 6 +++--- java/src/hdf/hdf5lib/H5.java | 4 ++-- release_docs/HISTORY-1_0-1_8_0.txt | 2 +- release_docs/HISTORY-1_10_0-1_12_0.txt | 2 +- release_docs/HISTORY-1_8_0-1_10_0.txt | 4 ++-- src/H5ACmpio.c | 2 +- src/H5ACprivate.h | 2 +- src/H5Cimage.c | 2 +- src/H5Cprivate.h | 2 +- src/H5FDhdfs.c | 2 +- src/H5FDmpio.c | 2 +- src/H5FDpublic.h | 2 +- src/H5FDros3.c | 4 ++-- src/H5FDs3comms.c | 4 ++-- src/H5FDs3comms.h | 4 ++-- src/H5FLprivate.h | 2 +- src/H5Gpublic.h | 2 +- src/H5HFcache.c | 8 ++++---- src/H5MFaggr.c | 4 ++-- src/H5Oalloc.c | 2 +- src/H5Oefl.c | 2 +- src/H5Pfapl.c | 2 +- src/H5Tcommit.c | 4 ++-- src/H5Tnative.c | 6 +++--- src/H5WB.c | 4 ++-- src/H5Zfletcher32.c | 2 +- src/H5detect.c | 2 +- src/H5private.h | 6 +++--- test/cache_api.c | 4 ++-- test/direct_chunk.c | 2 +- test/external.c | 2 +- test/mf.c | 2 +- test/swmr.c | 4 ++-- test/tfile.c | 2 +- test/tselect.c | 4 ++-- test/ttsafe_rec_rw_lock.c | 2 +- testpar/t_2Gio.c | 2 +- 
testpar/t_cache_image.c | 2 +- testpar/t_dset.c | 2 +- testpar/t_shapesame.c | 6 +++--- testpar/t_span_tree.c | 6 +++--- testpar/testphdf5.c | 2 +- tools/src/h5perf/perf.c | 2 +- tools/src/h5repack/h5repack_copy.c | 2 +- tools/test/h5dump/h5dumpgentest.c | 2 +- tools/test/h5repack/h5repackgentest.c | 2 +- tools/test/h5stat/CMakeTests.cmake | 2 +- tools/test/h5stat/testh5stat.sh.in | 2 +- utils/mirror_vfd/mirror_writer.c | 4 ++-- 58 files changed, 83 insertions(+), 83 deletions(-) diff --git a/CMakeInstallation.cmake b/CMakeInstallation.cmake index 3aa79816c8c..64641ce2c16 100644 --- a/CMakeInstallation.cmake +++ b/CMakeInstallation.cmake @@ -331,7 +331,7 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES) set (CPACK_NSIS_INSTALL_ROOT "$PROGRAMFILES") set (CPACK_PACKAGE_INSTALL_REGISTRY_KEY "${CPACK_PACKAGE_NAME}-${CPACK_PACKAGE_VERSION}") endif () - # set the install/unistall icon used for the installer itself + # set the install/uninstall icon used for the installer itself # There is a bug in NSI that does not handle full unix paths properly. set (CPACK_NSIS_MUI_ICON "${HDF_RESOURCES_DIR}\\\\hdf.ico") set (CPACK_NSIS_MUI_UNIICON "${HDF_RESOURCES_DIR}\\\\hdf.ico") diff --git a/config/cmake/UseJava.cmake b/config/cmake/UseJava.cmake index 1de08db8d40..2783cb638e4 100644 --- a/config/cmake/UseJava.cmake +++ b/config/cmake/UseJava.cmake @@ -1474,7 +1474,7 @@ function (create_javah) "CLASSES;CLASSPATH;DEPENDS" ${ARGN}) - # ckeck parameters + # check parameters if (NOT _create_javah_TARGET AND NOT _create_javah_GENERATED_FILES) message (FATAL_ERROR "create_javah: TARGET or GENERATED_FILES must be specified.") endif() diff --git a/doxygen/examples/H5.format.1.0.html b/doxygen/examples/H5.format.1.0.html index 26d04213d84..5002695de76 100644 --- a/doxygen/examples/H5.format.1.0.html +++ b/doxygen/examples/H5.format.1.0.html @@ -2020,7 +2020,7 @@

    Name: Complex Dataspace (Fiber Bundle?)

    Mesh Type: (unsigned 32-bit integer)
    This value indicates whether the grid is - polar/spherical/cartesion, + polar/spherical/cartesian, structured/unstructured and regular/irregular.
    The mesh type value is broken up as follows:
    diff --git a/doxygen/examples/H5.format.1.1.html b/doxygen/examples/H5.format.1.1.html index 3af50d66194..f5e4c4e0fe5 100644 --- a/doxygen/examples/H5.format.1.1.html +++ b/doxygen/examples/H5.format.1.1.html @@ -2596,7 +2596,7 @@

    Name: Complex Dataspace (Fiber Bundle?)

    Mesh Type: (unsigned 32-bit integer)
    This value indicates whether the grid is - polar/spherical/cartesion, + polar/spherical/cartesian, structured/unstructured and regular/irregular.
    The mesh type value is broken up as follows:
    diff --git a/doxygen/examples/H5.format.2.0.html b/doxygen/examples/H5.format.2.0.html index d2979e18ba1..bde030f3853 100644 --- a/doxygen/examples/H5.format.2.0.html +++ b/doxygen/examples/H5.format.2.0.html @@ -8458,7 +8458,7 @@

    Header Message Name: Complex Dataspace (Fiber Bun
    Mesh Type: (unsigned 32-bit integer)
    This value indicates whether the grid is - polar/spherical/cartesion, + polar/spherical/cartesian, structured/unstructured and regular/irregular.
    The mesh type value is broken up as follows:
    diff --git a/doxygen/examples/H5.format.html b/doxygen/examples/H5.format.html index c52e8ea3b8c..832e3fcd79b 100644 --- a/doxygen/examples/H5.format.html +++ b/doxygen/examples/H5.format.html @@ -9123,7 +9123,7 @@

    Header Message Name: Complex Dataspace (Fiber Bun
    Mesh Type: (unsigned 32-bit integer)
    This value indicates whether the grid is - polar/spherical/cartesion, + polar/spherical/cartesian, structured/unstructured and regular/irregular.
    The mesh type value is broken up as follows:
    diff --git a/fortran/src/H5Pff.F90 b/fortran/src/H5Pff.F90 index 60b89891bf4..36ca9316e52 100644 --- a/fortran/src/H5Pff.F90 +++ b/fortran/src/H5Pff.F90 @@ -466,7 +466,7 @@ END SUBROUTINE h5pset_deflate_f !! !! \brief Retrieves the version information of various objects for a file creation property list. !! -!! \param prp_id File createion property list identifier. +!! \param prp_id File creation property list identifier. !! \param boot Super block version number. !! \param freelist Global freelist version number. !! \param stab Symbol table version number. diff --git a/hl/fortran/src/H5IMff.F90 b/hl/fortran/src/H5IMff.F90 index e082e2ffd0d..c5f47018a6b 100644 --- a/hl/fortran/src/H5IMff.F90 +++ b/hl/fortran/src/H5IMff.F90 @@ -366,7 +366,7 @@ END SUBROUTINE h5imlink_palette_f !> !! \ingroup FH5IM !! -!! \brief This function dettaches a palette to an existing image dataset. +!! \brief This function detaches a palette to an existing image dataset. !! !! \param loc_id Location identifier. The identifier may be that of a file or group. !! \param image_name The name of the image dataset. diff --git a/hl/src/H5IM.c b/hl/src/H5IM.c index aa12315543c..952bbedca65 100644 --- a/hl/src/H5IM.c +++ b/hl/src/H5IM.c @@ -649,7 +649,7 @@ H5IMlink_palette(hid_t loc_id, const char *image_name, const char *pal_name) /*------------------------------------------------------------------------- * Function: H5IMunlink_palette * - * Purpose: This function dettaches a palette from an existing image dataset + * Purpose: This function detaches a palette from an existing image dataset * * Return: Success: 0, Failure: -1 * diff --git a/hl/src/H5IMpublic.h b/hl/src/H5IMpublic.h index 81dbb623740..0ba9d648cff 100644 --- a/hl/src/H5IMpublic.h +++ b/hl/src/H5IMpublic.h @@ -66,7 +66,7 @@ extern "C" { * - \ref H5IMread_image * \n Reads image data from disk. * - \ref H5IMunlink_palette - * \n Dettaches a palette from an image. + * \n Detaches a palette from an image. 
* */ @@ -229,7 +229,7 @@ H5_HLDLL herr_t H5IMlink_palette(hid_t loc_id, const char *image_name, const cha * -------------------------------------------------------------------------- * \ingroup H5IM * - * \brief Dettaches a palette from an image. + * \brief Detaches a palette from an image. * * \fg_loc_id * \param[in] image_name The name of the image dataset @@ -237,7 +237,7 @@ H5_HLDLL herr_t H5IMlink_palette(hid_t loc_id, const char *image_name, const cha * * \return \herr_t * - * \details H5IMunlink_palette() dettaches a palette from an image + * \details H5IMunlink_palette() detaches a palette from an image * specified by \p image_name. * */ diff --git a/java/src/hdf/hdf5lib/H5.java b/java/src/hdf/hdf5lib/H5.java index 78ce2a0aa08..1ad1eebcb92 100644 --- a/java/src/hdf/hdf5lib/H5.java +++ b/java/src/hdf/hdf5lib/H5.java @@ -9404,7 +9404,7 @@ public synchronized static native void H5Pset_meta_block_size(long fapl_id, long /** * @ingroup JH5P * - * H5Pset_sieve_buf_size Sets the maximum size of the data seive buffer used for file + * H5Pset_sieve_buf_size Sets the maximum size of the data sieve buffer used for file * drivers which are capable of using data sieving. The data sieve * buffer is used when performing I/O on datasets in the file. Using a * buffer which is large anough to hold several pieces of the dataset @@ -9418,7 +9418,7 @@ public synchronized static native void H5Pset_meta_block_size(long fapl_id, long * @param fapl_id * IN: Identifier of property list to modify. * @param size - * IN: maximum size of the data seive buffer. + * IN: maximum size of the data sieve buffer. * * @exception HDF5LibraryException * Error from the HDF5 Library. diff --git a/release_docs/HISTORY-1_0-1_8_0.txt b/release_docs/HISTORY-1_0-1_8_0.txt index 2f05815205c..c5b508d0e19 100644 --- a/release_docs/HISTORY-1_0-1_8_0.txt +++ b/release_docs/HISTORY-1_0-1_8_0.txt @@ -1443,7 +1443,7 @@ Known Problems filter script. 
* H5Ocopy() does not copy reg_ref attributes correctly when shared-message - is turn on. The value of the reference in the destination attriubte is + is turn on. The value of the reference in the destination attribute is wrong. This H5Ocopy problem will affect h5copy tool %%%%1.6.7%%%% Release Information for hdf5-1.6.7 (31/January/08) diff --git a/release_docs/HISTORY-1_10_0-1_12_0.txt b/release_docs/HISTORY-1_10_0-1_12_0.txt index a83e58db858..4649a319523 100644 --- a/release_docs/HISTORY-1_10_0-1_12_0.txt +++ b/release_docs/HISTORY-1_10_0-1_12_0.txt @@ -409,7 +409,7 @@ Bug Fixes since HDF5-1.10.3 release - Fixed a bug caused by bad tag value when condensing object header messages - There was an assertion failure when moving meessages from running a + There was an assertion failure when moving messages from running a user test program with library release hdf5.1.10.4. It was because the tag value (object header's address) was not set up when entering the library routine H5O__chunk_update_idx(), which will eventually diff --git a/release_docs/HISTORY-1_8_0-1_10_0.txt b/release_docs/HISTORY-1_8_0-1_10_0.txt index 21aa0531195..d18bc665c40 100644 --- a/release_docs/HISTORY-1_8_0-1_10_0.txt +++ b/release_docs/HISTORY-1_8_0-1_10_0.txt @@ -837,7 +837,7 @@ Bug Fixes since HDF5-1.8.0 release - Support for TFLOPS, config/intel-osf1, is removed since the TFLOPS machine has long retired. AKC - 2009/10/06. - Added $(EXEEXT) extension to H5detect when it's executed in the - src/Makfile to generate H5Tinit.c so it works correctly on platforms + src/Makefile to generate H5Tinit.c so it works correctly on platforms that require the full extension when running executables. MAM - 2009/10/01 - BZ #1613 - Configure will now set FC and CXX to "no" when fortran and c++ @@ -1734,6 +1734,6 @@ Known Problems filter script. * H5Ocopy() does not copy reg_ref attributes correctly when shared-message - is turn on. The value of the reference in the destination attriubte is + is turn on. 
The value of the reference in the destination attribute is wrong. This H5Ocopy problem will affect h5copy tool diff --git a/src/H5ACmpio.c b/src/H5ACmpio.c index c0a9856b86a..cbd00fc2866 100644 --- a/src/H5ACmpio.c +++ b/src/H5ACmpio.c @@ -1019,7 +1019,7 @@ H5AC__log_inserted_entry(const H5AC_info_t *entry_ptr) * dirty bytes count. * * The rank 0 process then removes any references to the - * entry under its old address from the cleands and dirtied + * entry under its old address from the cleaned and dirtied * lists, and inserts an entry in the dirtied list under the * new address. * diff --git a/src/H5ACprivate.h b/src/H5ACprivate.h index bc6bfa681a7..6beec5d6c27 100644 --- a/src/H5ACprivate.h +++ b/src/H5ACprivate.h @@ -27,7 +27,7 @@ #include "H5ACpublic.h" /*public prototypes */ -/* Pivate headers needed by this header */ +/* Private headers needed by this header */ #include "H5private.h" /* Generic Functions */ #include "H5Cprivate.h" /* Cache */ #include "H5Fprivate.h" /* File access */ diff --git a/src/H5Cimage.c b/src/H5Cimage.c index 446dc8801fe..ae18a24fbdc 100644 --- a/src/H5Cimage.c +++ b/src/H5Cimage.c @@ -443,7 +443,7 @@ H5C__generate_cache_image(H5F_t *f, H5C_t *cache_ptr) * deserialized entry after it is inserted in the cache. * * Since deserializing a prefetched entry is semantically - * equivalent to a load, issue an entry loaded nofification + * equivalent to a load, issue an entry loaded notification * if the notify callback is defined. * * Return: SUCCEED on success, and FAIL on failure. 
diff --git a/src/H5Cprivate.h b/src/H5Cprivate.h index 4a1d7253839..e424964393f 100644 --- a/src/H5Cprivate.h +++ b/src/H5Cprivate.h @@ -541,7 +541,7 @@ typedef struct H5C_t H5C_t; * ensure that the entry is ready to be flushed -- in particular, * if the entry contains references to other entries that are in * temporary file space, the pre-serialize callback must move those - * entries into real file space so that the serialzed entry will + * entries into real file space so that the serialized entry will * contain no invalid data. * * One would think that the base address and length of diff --git a/src/H5FDhdfs.c b/src/H5FDhdfs.c index 7524c6b3687..b53d5da3d25 100644 --- a/src/H5FDhdfs.c +++ b/src/H5FDhdfs.c @@ -1010,7 +1010,7 @@ hdfs__fprint_stats(FILE *stream, const H5FD_hdfs_t *file) unsigned long long max_raw = 0; unsigned long long bytes_raw = 0; unsigned long long bytes_meta = 0; - double re_dub = 0.0; /* re-usable double variable */ + double re_dub = 0.0; /* reusable double variable */ unsigned suffix_i = 0; const char suffixes[] = {' ', 'K', 'M', 'G', 'T', 'P'}; diff --git a/src/H5FDmpio.c b/src/H5FDmpio.c index 05d196f192e..e783eec935d 100644 --- a/src/H5FDmpio.c +++ b/src/H5FDmpio.c @@ -1762,7 +1762,7 @@ H5FD__mpio_truncate(H5FD_t *_file, hid_t H5_ATTR_UNUSED dxpl_id, hbool_t H5_ATTR /* In principle, it is possible for the size returned by the * call to MPI_File_get_size() to depend on whether writes from - * all proceeses have completed at the time process 0 makes the + * all processes have completed at the time process 0 makes the * call. * * In practice, most (all?) 
truncate calls will come after a barrier diff --git a/src/H5FDpublic.h b/src/H5FDpublic.h index 317299c6426..7464eb6fe21 100644 --- a/src/H5FDpublic.h +++ b/src/H5FDpublic.h @@ -184,7 +184,7 @@ typedef enum H5F_mem_t H5FD_mem_t; /* * Defining H5FD_FEAT_DATA_SIEVE for a VFL driver means that * the library will attempt to cache raw data as it is read from/written to - * a file in a "data seive" buffer. See Rajeev Thakur's papers: + * a file in a "data sieve" buffer. See Rajeev Thakur's papers: * http://www.mcs.anl.gov/~thakur/papers/romio-coll.ps.gz * http://www.mcs.anl.gov/~thakur/papers/mpio-high-perf.ps.gz */ diff --git a/src/H5FDros3.c b/src/H5FDros3.c index 09eeccd3318..1314b1ca007 100644 --- a/src/H5FDros3.c +++ b/src/H5FDros3.c @@ -743,7 +743,7 @@ H5FD__ros3_open(const char *url, unsigned flags, hid_t fapl_id, haddr_t maxaddr) if (fa.authenticate == TRUE) { /* compute signing key (part of AWS/S3 REST API) * can be re-used by user/key for 7 days after creation. - * find way to re-use/share + * find way to reuse/share */ now = gmnow(); HDassert(now != NULL); @@ -865,7 +865,7 @@ ros3_fprint_stats(FILE *stream, const H5FD_ros3_t *file) unsigned long long max_raw = 0; unsigned long long bytes_raw = 0; unsigned long long bytes_meta = 0; - double re_dub = 0.0; /* re-usable double variable */ + double re_dub = 0.0; /* reusable double variable */ unsigned suffix_i = 0; const char suffixes[] = {' ', 'K', 'M', 'G', 'T', 'P'}; diff --git a/src/H5FDs3comms.c b/src/H5FDs3comms.c index caca51333ad..aad7ff3e23b 100644 --- a/src/H5FDs3comms.c +++ b/src/H5FDs3comms.c @@ -579,7 +579,7 @@ H5FD_s3comms_hrb_node_set(hrb_node_t **L, const char *name, const char *value) * * Headers list at `first_header` is not touched. * - * - Programmer should re-use or destroy `first_header` pointer + * - Programmer should reuse or destroy `first_header` pointer * (hrb_node_t *) as suits their purposes. 
* - Recommend fetching prior to destroy() * e.g., `reuse_node = hrb_to_die->first_header; destroy(hrb_to_die);` @@ -2527,7 +2527,7 @@ H5FD_s3comms_percent_encode_char(char *repr, const unsigned char c, size_t *repr * Create AWS4 "Signing Key" from secret key, AWS region, and timestamp. * * Sequentially runs HMAC_SHA256 on strings in specified order, - * generating re-usable checksum (according to documentation, valid for + * generating reusable checksum (according to documentation, valid for * 7 days from time given). * * `secret` is `access key id` for targeted service/bucket/resource. diff --git a/src/H5FDs3comms.h b/src/H5FDs3comms.h index aa354d97366..4f006a953ec 100644 --- a/src/H5FDs3comms.h +++ b/src/H5FDs3comms.h @@ -216,7 +216,7 @@ * * `magic` (unsigned long) * - * "unique" idenfier number for the structure type + * "unique" identifier number for the structure type * * `name` (char *) * @@ -463,7 +463,7 @@ typedef struct { * * `signing_key` (unsigned char *) * - * Pointer to `SHA256_DIGEST_LENGTH`-long string for "re-usable" signing + * Pointer to `SHA256_DIGEST_LENGTH`-long string for "reusable" signing * key, generated via * `HMAC-SHA256(HMAC-SHA256(HMAC-SHA256(HMAC-SHA256("AWS4", * ""), ""), "aws4_request")` diff --git a/src/H5FLprivate.h b/src/H5FLprivate.h index bd84f60d56a..41bb1b70e95 100644 --- a/src/H5FLprivate.h +++ b/src/H5FLprivate.h @@ -206,7 +206,7 @@ typedef struct H5FL_blk_head_t { #define H5FL_BLK_REALLOC(t, blk, new_size) \ (uint8_t *)H5FL_blk_realloc(&(H5FL_BLK_NAME(t)), blk, new_size H5FL_TRACK_INFO) -/* Check if there is a free block available to re-use */ +/* Check if there is a free block available to reuse */ #define H5FL_BLK_AVAIL(t, size) H5FL_blk_free_block_avail(&(H5FL_BLK_NAME(t)), size) #else /* H5_NO_BLK_FREE_LISTS */ diff --git a/src/H5Gpublic.h b/src/H5Gpublic.h index 65cb1d661a0..5ffa72a029b 100644 --- a/src/H5Gpublic.h +++ b/src/H5Gpublic.h @@ -712,7 +712,7 @@ H5_DLL herr_t H5Gmove2(hid_t src_loc_id, const char *src_name, 
hid_t dst_loc_id, * any object identifier is open for the object, the space will not be * released until after the object identifier is closed. * - * Note that space identified as freespace is available for re-use only + * Note that space identified as freespace is available for reuse only * as long as the file remains open; once a file has been closed, the * HDF5 library loses track of freespace. See “Freespace Management” in * the \ref UG for further details. diff --git a/src/H5HFcache.c b/src/H5HFcache.c index bdf220211e4..9dbf52790ae 100644 --- a/src/H5HFcache.c +++ b/src/H5HFcache.c @@ -2621,7 +2621,7 @@ H5HF__cache_dblock_fsf_size(const void *_thing, hsize_t *fsf_size) * Further, metadata cache entries are now allowed to have * multiple flush dependency parents. * - * This means that the fractal heap is no longer ncessarily + * This means that the fractal heap is no longer necessarily * flushed from the bottom up. * * For example, it is now possible for a dirty fractal heap @@ -2986,7 +2986,7 @@ H5HF__cache_verify_hdr_descendants_clean(H5F_t *f, H5HF_hdr_t *hdr, hbool_t *fd_ * Further, metadata cache entries are now allowed to have * multiple flush dependency parents. * - * This means that the fractal heap is no longer ncessarily + * This means that the fractal heap is no longer necessarily * flushed from the bottom up. * * For example, it is now possible for a dirty fractal heap @@ -3103,7 +3103,7 @@ H5HF__cache_verify_iblock_descendants_clean(H5F_t *f, haddr_t fd_parent_addr, H5 * Further, metadata cache entries are now allowed to have * multiple flush dependency parents. * - * This means that the fractal heap is no longer ncessarily + * This means that the fractal heap is no longer necessarily * flushed from the bottom up. 
* * For example, it is now possible for a dirty fractal heap @@ -3266,7 +3266,7 @@ H5HF__cache_verify_iblocks_dblocks_clean(H5F_t *f, haddr_t fd_parent_addr, H5HF_ * Further, metadata cache entries are now allowed to have * multiple flush dependency parents. * - * This means that the fractal heap is no longer ncessarily + * This means that the fractal heap is no longer necessarily * flushed from the bottom up. * * For example, it is now possible for a dirty fractal heap diff --git a/src/H5MFaggr.c b/src/H5MFaggr.c index 2c3a6727ba8..0afc82dc5bb 100644 --- a/src/H5MFaggr.c +++ b/src/H5MFaggr.c @@ -192,7 +192,7 @@ H5MF__aggr_alloc(H5F_t *f, H5F_blk_aggr_t *aggr, H5F_blk_aggr_t *other_aggr, H5F if ((f->shared->feature_flags & aggr->feature_flag) && f->shared->fs_strategy != H5F_FSPACE_STRATEGY_NONE && !f->shared->closing) { #endif - haddr_t aggr_frag_addr = HADDR_UNDEF; /* Address of aggregrator fragment */ + haddr_t aggr_frag_addr = HADDR_UNDEF; /* Address of aggregator fragment */ hsize_t aggr_frag_size = 0; /* Size of aggregator fragment */ hsize_t alignment; /* Alignment of this section */ hsize_t aggr_mis_align = 0; /* Misalignment of aggregator */ @@ -483,7 +483,7 @@ H5MF__aggr_alloc(H5F_t *f, H5F_blk_aggr_t *aggr, H5F_blk_aggr_t *other_aggr, H5F } /* end else */ } /* end if */ else { - /* The aggreator is not at end of file */ + /* The aggregator is not at end of file */ /* Check if aggregator has enough internal space to satisfy the extension. 
*/ if (aggr->size >= extra_requested) { /* Extend block into aggregator */ diff --git a/src/H5Oalloc.c b/src/H5Oalloc.c index 8a7d5773ec2..8a6abbecdc5 100644 --- a/src/H5Oalloc.c +++ b/src/H5Oalloc.c @@ -1848,7 +1848,7 @@ H5O__move_msgs_forward(H5F_t *f, H5O_t *oh) null_msg->raw + null_msg->raw_size, gap_size) < 0) HGOTO_ERROR(H5E_OHDR, H5E_CANTINSERT, FAIL, "can't insert gap in chunk") - /* Re-use message # for new null message taking place of non-null message */ + /* Reuse message # for new null message taking place of non-null message */ new_null_msg = v; } /* end if */ else { diff --git a/src/H5Oefl.c b/src/H5Oefl.c index 90ef7cf7f5a..394e775b4c1 100644 --- a/src/H5Oefl.c +++ b/src/H5Oefl.c @@ -354,7 +354,7 @@ H5O__efl_size(const H5F_t *f, hbool_t H5_ATTR_UNUSED disable_shared, const void * Function: H5O__efl_reset * * Purpose: Frees internal pointers and resets the message to an - * initialial state. + * initial state. * * Return: Non-negative on success/Negative on failure * diff --git a/src/H5Pfapl.c b/src/H5Pfapl.c index 2e0a59f0232..655eb470f73 100644 --- a/src/H5Pfapl.c +++ b/src/H5Pfapl.c @@ -2165,7 +2165,7 @@ H5Pget_meta_block_size(hid_t plist_id, hsize_t *size /*out*/) /*------------------------------------------------------------------------- * Function: H5Pset_sieve_buf_size * - * Purpose: Sets the maximum size of the data seive buffer used for file + * Purpose: Sets the maximum size of the data sieve buffer used for file * drivers which are capable of using data sieving. The data sieve * buffer is used when performing I/O on datasets in the file. 
Using a * buffer which is large anough to hold several pieces of the dataset diff --git a/src/H5Tcommit.c b/src/H5Tcommit.c index 2a53731d27a..df83f0b9d2c 100644 --- a/src/H5Tcommit.c +++ b/src/H5Tcommit.c @@ -1172,7 +1172,7 @@ H5T_get_actual_type(H5T_t *dt) /*------------------------------------------------------------------------- * Function: H5T_save_refresh_state * - * Purpose: Save state for datatype reconstuction after a refresh. + * Purpose: Save state for datatype reconstruction after a refresh. * * Return: SUCCEED/FAIL * @@ -1212,7 +1212,7 @@ H5T_save_refresh_state(hid_t tid, H5O_shared_t *cached_H5O_shared) /*------------------------------------------------------------------------- * Function: H5T_restore_refresh_state * - * Purpose: Restore state for datatype reconstuction after a refresh. + * Purpose: Restore state for datatype reconstruction after a refresh. * * Return: SUCCEED/FAIL * diff --git a/src/H5Tnative.c b/src/H5Tnative.c index e2e8cff5822..7ccae70f479 100644 --- a/src/H5Tnative.c +++ b/src/H5Tnative.c @@ -42,7 +42,7 @@ static herr_t H5T__cmp_offset(size_t *comp_size, size_t *offset, size_t elem_siz * * Purpose: High-level API to return the native type of a datatype. * The native type is chosen by matching the size and class of - * querried datatype from the following native premitive + * queried datatype from the following native premitive * datatypes: * H5T_NATIVE_CHAR H5T_NATIVE_UCHAR * H5T_NATIVE_SHORT H5T_NATIVE_USHORT @@ -588,7 +588,7 @@ H5T__get_native_integer(size_t prec, H5T_sign_t sign, H5T_direction_t direction, match = H5T_NATIVE_INT_MATCH_LLONG; native_size = sizeof(long long); } - else { /* If no native type matches the querried datatype, simply choose the type of biggest size. */ + else { /* If no native type matches the queried datatype, simply choose the type of biggest size. 
*/ match = H5T_NATIVE_INT_MATCH_LLONG; native_size = sizeof(long long); } @@ -853,7 +853,7 @@ H5T__get_native_bitfield(size_t prec, H5T_direction_t direction, size_t *struct_ native_size = 8; align = H5T_NATIVE_UINT64_ALIGN_g; } - else { /* If no native type matches the querried datatype, simply choose the type of biggest size. */ + else { /* If no native type matches the queried datatype, simply choose the type of biggest size. */ tid = H5T_NATIVE_B64; native_size = 8; align = H5T_NATIVE_UINT64_ALIGN_g; diff --git a/src/H5WB.c b/src/H5WB.c index 2f898071e10..260e4f74d27 100644 --- a/src/H5WB.c +++ b/src/H5WB.c @@ -161,10 +161,10 @@ H5WB_actual(H5WB_t *wb, size_t need) /* Sanity check */ HDassert(wb->actual_size > wb->wrapped_size); - /* Check if we can re-use existing buffer */ + /* Check if we can reuse existing buffer */ if (need <= wb->alloc_size) HGOTO_DONE(wb->actual_buf) - /* Can't re-use existing buffer, free it and proceed */ + /* Can't reuse existing buffer, free it and proceed */ else wb->actual_buf = H5FL_BLK_FREE(extra_buf, wb->actual_buf); } /* end if */ diff --git a/src/H5Zfletcher32.c b/src/H5Zfletcher32.c index 0aeb49e88f9..19dd36e19b3 100644 --- a/src/H5Zfletcher32.c +++ b/src/H5Zfletcher32.c @@ -112,7 +112,7 @@ H5Z__filter_fletcher32(unsigned flags, size_t H5_ATTR_UNUSED cd_nelmts, } /* Set return values */ - /* (Re-use the input buffer, just note that the size is smaller by the size of the checksum) */ + /* (Reuse the input buffer, just note that the size is smaller by the size of the checksum) */ ret_value = nbytes - FLETCHER_LEN; } else { /* Write */ diff --git a/src/H5detect.c b/src/H5detect.c index 39cbddd07e6..d975a95f481 100644 --- a/src/H5detect.c +++ b/src/H5detect.c @@ -767,7 +767,7 @@ done:\n\ "/* ALIGNMENT and signal-handling status */\n" "/****************************************/\n"); if (align_status_g & STA_NoALIGNMENT) - fprintf(rawoutstream, "/* ALIGNAMENT test is not available */\n"); + fprintf(rawoutstream, "/* ALIGNMENT test 
is not available */\n"); if (align_status_g & STA_NoHandlerVerify) fprintf(rawoutstream, "/* Signal handlers verify test is not available */\n"); /* The following is available in H5pubconf.h. Printing them here for */ diff --git a/src/H5private.h b/src/H5private.h index 89bc272d36b..b2316f34562 100644 --- a/src/H5private.h +++ b/src/H5private.h @@ -2410,7 +2410,7 @@ H5_DLL herr_t H5CX_pop(void); H5_PUSH_FUNC \ if (H5_PKG_INIT_VAR || !H5_TERM_GLOBAL) { -/* Use this macro for package-level functions which propgate errors, but don't issue them */ +/* Use this macro for package-level functions which propagate errors, but don't issue them */ #define FUNC_ENTER_PACKAGE_NOERR \ { \ FUNC_ENTER_COMMON_NOERR(H5_IS_PKG(FUNC)); \ @@ -2435,14 +2435,14 @@ H5_DLL herr_t H5CX_pop(void); H5_PUSH_FUNC \ if (H5_PKG_INIT_VAR || !H5_TERM_GLOBAL) { -/* Use this macro for staticly-scoped functions which propgate errors, but don't issue them */ +/* Use this macro for staticly-scoped functions which propagate errors, but don't issue them */ #define FUNC_ENTER_STATIC_NOERR \ { \ FUNC_ENTER_COMMON_NOERR(H5_IS_PKG(FUNC)); \ H5_PUSH_FUNC \ if (H5_PKG_INIT_VAR || !H5_TERM_GLOBAL) { -/* Use this macro for staticly-scoped functions which propgate errors, but don't issue them */ +/* Use this macro for staticly-scoped functions which propagate errors, but don't issue them */ /* And that shouldn't push their name on the function stack */ #define FUNC_ENTER_STATIC_NOERR_NOFS \ { \ diff --git a/test/cache_api.c b/test/cache_api.c index d9d6abe0d78..1885d5ee40e 100644 --- a/test/cache_api.c +++ b/test/cache_api.c @@ -249,7 +249,7 @@ check_fapl_mdc_api_calls(unsigned paged, hid_t fcpl_id) } } - /* conpare the cache's internal configuration with the expected value */ + /* compare the cache's internal configuration with the expected value */ if (pass) { if (!resize_configs_are_equal(&default_auto_size_ctl, &cache_ptr->resize_ctl, TRUE)) { @@ -393,7 +393,7 @@ check_fapl_mdc_api_calls(unsigned paged, hid_t 
fcpl_id) } } - /* conpare the cache's internal configuration with the expected value */ + /* compare the cache's internal configuration with the expected value */ if (pass) { if (!resize_configs_are_equal(&mod_auto_size_ctl, &cache_ptr->resize_ctl, TRUE)) { diff --git a/test/direct_chunk.c b/test/direct_chunk.c index 5a9cade8861..e5ec4f25d22 100644 --- a/test/direct_chunk.c +++ b/test/direct_chunk.c @@ -413,7 +413,7 @@ test_direct_chunk_overwrite_data(hid_t fid) if ((sid = H5Screate_simple(OVERWRITE_NDIMS, dset_dims, dset_max_dims)) < 0) FAIL_STACK_ERROR - /* Set chunk size and filll value */ + /* Set chunk size and fill value */ if ((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0) FAIL_STACK_ERROR if (H5Pset_fill_value(dcpl_id, tid, &fill_value) < 0) diff --git a/test/external.c b/test/external.c index ed9640cfa1c..3e46e5005c2 100644 --- a/test/external.c +++ b/test/external.c @@ -541,7 +541,7 @@ test_multiple_files(hid_t file) FAIL_STACK_ERROR if (H5Pclose(dcpl) < 0) FAIL_STACK_ERROR - /* Re-use space below */ + /* Reuse space below */ /* ---------------------------------------------- * Verify that too-small external files will fail diff --git a/test/mf.c b/test/mf.c index b899bde5452..735ecb15667 100644 --- a/test/mf.c +++ b/test/mf.c @@ -1978,7 +1978,7 @@ test_mf_fs_extend(hid_t fapl) *------------------------------------------------------------------------- * To verify that an aggregator is absorbed into a section. 
* - * Test 1: To aborb the aggregator onto the beginning of the section + * Test 1: To absorb the aggregator onto the beginning of the section * Allocate block A from meta_aggr * Create a free-space section node with an address that adjoins * the end of meta_aggr and a size to make the aggregator diff --git a/test/swmr.c b/test/swmr.c index bbd55f01842..9bc164d8c02 100644 --- a/test/swmr.c +++ b/test/swmr.c @@ -4054,7 +4054,7 @@ test_append_flush_dataset_chunked(hid_t in_fapl) hsize_t dims[2] = {100, 0}; /* The dataset dimension sizes */ hsize_t maxdims[2] = {100, H5S_UNLIMITED}; /* The dataset maximum dimension sizes */ - hsize_t chunk_dims[2] = {5, 2}; /* The chunk dimesion sizes */ + hsize_t chunk_dims[2] = {5, 2}; /* The chunk dimension sizes */ TESTING("H5Fget/set_append_flush() for a chunked dataset's access property list"); @@ -4498,7 +4498,7 @@ test_append_flush_dataset_multiple(hid_t in_fapl) hsize_t dims[2] = {0, 0}; /* The dataset dimension sizes */ hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED}; /* The dataset maximum dimension sizes */ - hsize_t chunk_dims[2] = {5, 2}; /* The chunk dimesion sizes */ + hsize_t chunk_dims[2] = {5, 2}; /* The chunk dimension sizes */ TESTING("H5Fget/set_append_flush() for multiple opens of a chunked dataset"); diff --git a/test/tfile.c b/test/tfile.c index 44c43877173..4250c7da119 100644 --- a/test/tfile.c +++ b/test/tfile.c @@ -2502,7 +2502,7 @@ test_file_double_file_dataset_open(hbool_t new_format) hsize_t e_ext_dims[1] = {7}; /* Expanded dimension sizes */ hsize_t s_ext_dims[1] = {3}; /* Shrunk dimension sizes */ hsize_t max_dims0[1] = {8}; /* Maximum dimension sizes */ - hsize_t max_dims1[1] = {H5S_UNLIMITED}; /* Maximum dimesion sizes for extensible array index */ + hsize_t max_dims1[1] = {H5S_UNLIMITED}; /* Maximum dimension sizes for extensible array index */ hsize_t max_dims2[2] = {H5S_UNLIMITED, H5S_UNLIMITED}; /* Maximum dimension sizes for v2 B-tree index */ hsize_t chunks[1] = {2}, chunks2[2] = {4, 5}; /* 
Chunk dimension sizes */ hsize_t size; /* File size */ diff --git a/test/tselect.c b/test/tselect.c index 2b0fd8987d5..36cfea60c26 100644 --- a/test/tselect.c +++ b/test/tselect.c @@ -11277,7 +11277,7 @@ test_shape_same_dr__checkerboard(int test_num, int small_rank, int large_rank, i /* Now select the checkerboard selection in the (possibly larger) n-cube. * * Since we have already calculated the base start, stride, count, - * and block, re-use the values in setting up start, stride, count, + * and block, reuse the values in setting up start, stride, count, * and block. */ for (i = 0; i < SS_DR_MAX_RANK; i++) { @@ -12718,7 +12718,7 @@ test_space_update_diminfo(void) { hid_t space_id; /* Dataspace id */ H5S_diminfo_valid_t diminfo_valid; /* Diminfo status */ - H5S_diminfo_valid_t rebuild_status; /* Diminfo status after rebuid */ + H5S_diminfo_valid_t rebuild_status; /* Diminfo status after rebuild */ H5S_sel_type sel_type; /* Selection type */ herr_t ret; /* Return value */ diff --git a/test/ttsafe_rec_rw_lock.c b/test/ttsafe_rec_rw_lock.c index a38dd933d39..457b9676ff8 100644 --- a/test/ttsafe_rec_rw_lock.c +++ b/test/ttsafe_rec_rw_lock.c @@ -42,7 +42,7 @@ #define MAX_NUM_THREADS 32 #define MAX_LOCK_CYCLES 1000000 -/* structure used to configure test threads in the recurive +/* structure used to configure test threads in the recursive * R/W/ lock tests. */ /*********************************************************************** diff --git a/testpar/t_2Gio.c b/testpar/t_2Gio.c index 62a9759cef6..b6f8d251a2e 100644 --- a/testpar/t_2Gio.c +++ b/testpar/t_2Gio.c @@ -3323,7 +3323,7 @@ none_selection_chunk(void) * * TEST_ACTUAL_IO_RESET: * Performs collective and then independent I/O with the same dxpl to - * make sure the peroperty is correctly reset to the default on each use. + * make sure the property is correctly reset to the default on each use. 
* Specifically, this test runs TEST_ACTUAL_IO_MULTI_CHUNK_NO_OPT_MIX_DISAGREE * (The most complex case that works on all builds) and then performs * an independent read and write with the same dxpls. diff --git a/testpar/t_cache_image.c b/testpar/t_cache_image.c index 59958aa60f7..e41a22d4326 100644 --- a/testpar/t_cache_image.c +++ b/testpar/t_cache_image.c @@ -2130,7 +2130,7 @@ par_verify_dataset(int dset_num, hid_t file_id, int mpi_rank) * On failure, print an appropriate error message and * return FALSE. * - * Return: TRUE if succussful, FALSE otherwise. + * Return: TRUE if successful, FALSE otherwise. * * Programmer: John Mainzer * 3/8/17 diff --git a/testpar/t_dset.c b/testpar/t_dset.c index b9450eeee42..582d1d1e678 100644 --- a/testpar/t_dset.c +++ b/testpar/t_dset.c @@ -2881,7 +2881,7 @@ none_selection_chunk(void) * * TEST_ACTUAL_IO_RESET: * Performs collective and then independent I/O with the same dxpl to - * make sure the peroperty is correctly reset to the default on each use. + * make sure the property is correctly reset to the default on each use. * Specifically, this test runs TEST_ACTUAL_IO_MULTI_CHUNK_NO_OPT_MIX_DISAGREE * (The most complex case that works on all builds) and then performs * an independent read and write with the same dxpls. diff --git a/testpar/t_shapesame.c b/testpar/t_shapesame.c index cbae5e11299..3a1bbcae8e4 100644 --- a/testpar/t_shapesame.c +++ b/testpar/t_shapesame.c @@ -2468,14 +2468,14 @@ do { good_data = FALSE; } - /* zero out buffer for re-use */ + /* zero out buffer for reuse */ *val_ptr = 0; } else if (*val_ptr != 0) { good_data = FALSE; - /* zero out buffer for re-use */ + /* zero out buffer for reuse */ *val_ptr = 0; } @@ -3969,7 +3969,7 @@ hid_t fapl; /* file access property list */ #ifdef USE_PAUSE /* pause the process for a moment to allow debugger to attach if desired. 
*/ -/* Will pause more if greenlight file is not persent but will eventually */ +/* Will pause more if greenlight file is not present but will eventually */ /* continue. */ #include #include diff --git a/testpar/t_span_tree.c b/testpar/t_span_tree.c index aab2b598184..bbb653b1d96 100644 --- a/testpar/t_span_tree.c +++ b/testpar/t_span_tree.c @@ -1379,14 +1379,14 @@ lower_dim_size_comp_test__verify_data(uint32_t *buf_ptr, good_data = FALSE; } - /* zero out buffer for re-use */ + /* zero out buffer for reuse */ *val_ptr = 0; } else if (*val_ptr != 0) { good_data = FALSE; - /* zero out buffer for re-use */ + /* zero out buffer for reuse */ *val_ptr = 0; } @@ -2315,7 +2315,7 @@ lower_dim_size_comp_test(void) * 2) Has no in memory buffer for any other chunk. * * The test differers from Rob Latham's bug report in - * that is runs with an arbitrary number of proceeses, + * that is runs with an arbitrary number of processes, * and uses a 1 dimensional dataset. * * Return: void diff --git a/testpar/testphdf5.c b/testpar/testphdf5.c index ceaeff3290b..40b3e3ed761 100644 --- a/testpar/testphdf5.c +++ b/testpar/testphdf5.c @@ -47,7 +47,7 @@ hid_t fapl; /* file access property list */ #ifdef USE_PAUSE /* pause the process for a moment to allow debugger to attach if desired. */ -/* Will pause more if greenlight file is not persent but will eventually */ +/* Will pause more if greenlight file is not present but will eventually */ /* continue. */ #include #include diff --git a/tools/src/h5perf/perf.c b/tools/src/h5perf/perf.c index 3ef486ee2d3..741f4839c46 100644 --- a/tools/src/h5perf/perf.c +++ b/tools/src/h5perf/perf.c @@ -289,7 +289,7 @@ main(int argc, char **argv) dataset = H5Dopen2(fid, "Dataset1", H5P_DEFAULT); VRFY((dataset >= 0), "H5Dopen succeeded", H5FATAL); - /* we can re-use the same mem_dataspace and file_dataspace + /* we can reuse the same mem_dataspace and file_dataspace * the H5Dwrite used since the dimension size is the same. 
*/ diff --git a/tools/src/h5repack/h5repack_copy.c b/tools/src/h5repack/h5repack_copy.c index b92046f61be..39c21fbb08a 100644 --- a/tools/src/h5repack/h5repack_copy.c +++ b/tools/src/h5repack/h5repack_copy.c @@ -416,7 +416,7 @@ copy_objects(const char *fnamein, const char *fnameout, pack_opt_t *options) * * hslab_nbytes_p : [OUT] total byte of the hyperslab * * Update: - * The hyperslab calucation would be depend on if the dataset is chunked + * The hyperslab calculation would be depend on if the dataset is chunked * or not. * * There care 3 conditions to cover: diff --git a/tools/test/h5dump/h5dumpgentest.c b/tools/test/h5dump/h5dumpgentest.c index 5e86d06c98b..5c3075372dc 100644 --- a/tools/test/h5dump/h5dumpgentest.c +++ b/tools/test/h5dump/h5dumpgentest.c @@ -11163,7 +11163,7 @@ gent_err_attr_dspace(void) hid_t fcpl = H5I_INVALID_HID; /* File access property list */ hid_t sid = H5I_INVALID_HID; /* Dataspace identifier */ hid_t aid = H5I_INVALID_HID; /* Attribute identifier */ - hsize_t dims = 2; /* Dimensino size */ + hsize_t dims = 2; /* Dimension size */ int wdata[2] = {7, 42}; /* The buffer to write */ int fd = -1; /* The file descriptor */ char val = 6; /* An invalid version */ diff --git a/tools/test/h5repack/h5repackgentest.c b/tools/test/h5repack/h5repackgentest.c index bc22e8d8349..d0b70adcea7 100644 --- a/tools/test/h5repack/h5repackgentest.c +++ b/tools/test/h5repack/h5repackgentest.c @@ -16,7 +16,7 @@ * + h5repack_.h5 * + h5repack__ex.h5 * + h5repack__ex-.dat - * ...where NAME idenfities the type, and N is a positive decimal number; + * ...where NAME identifies the type, and N is a positive decimal number; * multiple external files (*.dat) are allowed per file, but they must * follow the pattern and be in contiguous numerical sequence starting at 0. 
* diff --git a/tools/test/h5stat/CMakeTests.cmake b/tools/test/h5stat/CMakeTests.cmake index 14bb5ea3a8c..8802d0d0f6f 100644 --- a/tools/test/h5stat/CMakeTests.cmake +++ b/tools/test/h5stat/CMakeTests.cmake @@ -160,7 +160,7 @@ ADD_H5_TEST (h5stat_notexist 1 notexist.h5) ADD_H5_TEST (h5stat_nofile 1 '') -# Test file with groups, compressed datasets, user-applied fileters, etc. +# Test file with groups, compressed datasets, user-applied filters, etc. # h5stat_filters.h5 is a copy of ../../testfiles/tfilters.h5 as of release 1.8.0-alpha4 ADD_H5_TEST (h5stat_filters 0 h5stat_filters.h5) ADD_H5_TEST (h5stat_filters-file 0 -f h5stat_filters.h5) diff --git a/tools/test/h5stat/testh5stat.sh.in b/tools/test/h5stat/testh5stat.sh.in index 65e40c4d820..9ea6adc075d 100644 --- a/tools/test/h5stat/testh5stat.sh.in +++ b/tools/test/h5stat/testh5stat.sh.in @@ -256,7 +256,7 @@ TOOLTEST h5stat_help2.ddl --help TOOLTEST h5stat_notexist.ddl notexist.h5 TOOLTEST h5stat_nofile.ddl '' -# Test file with groups, compressed datasets, user-applied fileters, etc. +# Test file with groups, compressed datasets, user-applied filters, etc. # h5stat_filters.h5 is a copy of ../../testfiles/tfilters.h5 as of release 1.8.0-alpha4 TOOLTEST h5stat_filters.ddl h5stat_filters.h5 TOOLTEST h5stat_filters-file.ddl -f h5stat_filters.h5 diff --git a/utils/mirror_vfd/mirror_writer.c b/utils/mirror_vfd/mirror_writer.c index ad7cd911985..80c45725ca2 100644 --- a/utils/mirror_vfd/mirror_writer.c +++ b/utils/mirror_vfd/mirror_writer.c @@ -57,7 +57,7 @@ * guard against commands from the wrong entity. * * xmit_count (uint32_t) - * Record of trasmissions received from the Driver. While the transmission + * Record of transmissions received from the Driver. While the transmission * protocol should be trustworthy, this serves as an additional guard. * Starts a 0 and should be incremented for each one-way transmission. 
* @@ -761,7 +761,7 @@ do_write(struct mirror_session *session, const unsigned char *xmit_buf) addr = (haddr_t)xmit_write.offset; type = (H5FD_mem_t)xmit_write.type; - /* Allocate the buffer once -- re-use between loops. + /* Allocate the buffer once -- reuse between loops. */ buf = (char *)HDmalloc(sizeof(char) * H5FD_MIRROR_DATA_BUFFER_MAX); if (NULL == buf) { From fc550202c28cea0ac2fd4a4182a6585fa01a0eca Mon Sep 17 00:00:00 2001 From: Dana Robinson <43805+derobins@users.noreply.github.com> Date: Wed, 4 Oct 2023 09:10:57 -0700 Subject: [PATCH 108/108] Disable static + thread-safe on Windows w/ CMake (#3622) (#3631) * Disable static + thread-safe on Windows w/ CMake (#3622) The thread-safety feature on Windows requires a hook in DllMain() and thus is only available when HDF5 is built as a shared library. This was previously a warning, but has now been elevated to a fatal error that cannot be overridden with ALLOW_UNSUPPORTED. Fixes GitHub #3613 --- .github/workflows/main.yml | 2 ++ CMakeLists.txt | 5 ++--- release_docs/RELEASE.txt | 11 +++++++++++ 3 files changed, 15 insertions(+), 3 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 10b3a9cf2de..031783be225 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -455,6 +455,7 @@ jobs: if: (matrix.generator != 'autogen') && ! 
(matrix.thread_safety.enabled) + # NOTE: Windows does not support static + thread-safe - name: CMake Configure (Thread-Safe) run: | mkdir "${{ runner.workspace }}/build" @@ -464,6 +465,7 @@ jobs: -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} \ -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} \ -DBUILD_SHARED_LIBS=ON \ + -DBUILD_STATIC_LIBS=OFF \ -DHDF5_ENABLE_ALL_WARNINGS=ON \ -DHDF5_ENABLE_THREADSAFE:BOOL=ON \ -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} \ diff --git a/CMakeLists.txt b/CMakeLists.txt index 28fe23dc510..ad2c9d4060b 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -820,9 +820,8 @@ option (HDF5_ENABLE_THREADSAFE "Enable thread-safety" OFF) if (HDF5_ENABLE_THREADSAFE) # check for unsupported options if (WIN32) - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE " **** thread-safety option not supported with static library **** ") - message (VERBOSE " **** thread-safety option will not be used building static library **** ") + if (BUILD_STATIC_LIBS) + message (FATAL_ERROR " **** thread-safety option not supported with static library **** ") endif () endif () if (HDF5_ENABLE_PARALLEL) diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 4287fcfdd74..b5f8acb30a6 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -47,6 +47,17 @@ New Features Configuration: ------------- + - Thread-safety + static library disabled on Windows w/ CMake + + The thread-safety feature requires hooks in DllMain(), which is only + present in the shared library. + + We previously just warned about this, but now any CMake configuration + that tries to build thread-safety and the static library will fail. + This cannot be overridden with ALLOW_UNSUPPORTED. + + Fixes GitHub issue #3613 + - Bumped the minimum required version of Autoconf to 2.71. This fixes a problem with the Intel oneAPI Fortran compiler's -loopopt