From 04d11bfb2bf430226af75c302f69e2d284ba395b Mon Sep 17 00:00:00 2001
From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com>
Date: Thu, 6 Oct 2022 18:40:27 -0500
Subject: [PATCH 001/108] 1.12 Fix some Java warnings (#2146)
* Fix some Java warnings
* Fix accidental change
---
java/src/hdf/hdf5lib/HDF5Constants.java | 2 +-
.../exceptions/HDF5LibraryException.java | 2 +-
java/src/jni/h5aImp.c | 30 +-
java/src/jni/h5dImp.c | 28 +-
java/src/jni/h5util.c | 296 +++++++++---------
java/test/TestH5Fparams.java | 1 +
java/test/TestH5Ocreate.java | 4 +
java/test/TestH5Oparams.java | 3 +
8 files changed, 183 insertions(+), 183 deletions(-)
diff --git a/java/src/hdf/hdf5lib/HDF5Constants.java b/java/src/hdf/hdf5lib/HDF5Constants.java
index 67b94492ee8..e3973976b3d 100644
--- a/java/src/hdf/hdf5lib/HDF5Constants.java
+++ b/java/src/hdf/hdf5lib/HDF5Constants.java
@@ -19,7 +19,7 @@
* @page HDF5CONST Constants and Enumerated Types
* This class contains C constants and enumerated types of HDF5 library. The
* values of these constants are obtained from the library by calling
- * the JNI function jconstant, where jconstant is any of the private constants
+ * the JNI function jconstant, where jconstant is used for any of the private constants
* which start their name with "H5" need to be converted.
*
* Do not edit this file!
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5LibraryException.java b/java/src/hdf/hdf5lib/exceptions/HDF5LibraryException.java
index 3de2a281869..d8f9346d232 100644
--- a/java/src/hdf/hdf5lib/exceptions/HDF5LibraryException.java
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5LibraryException.java
@@ -108,7 +108,7 @@ public HDF5LibraryException(String s)
/**
* @ingroup JERRLIB
*
- * Return a error message for the minor error number.
+ * Return an error message for the minor error number.
*
* These messages come from @ref H5E.
*
diff --git a/java/src/jni/h5aImp.c b/java/src/jni/h5aImp.c
index e5e8462e5e9..086fea87450 100644
--- a/java/src/jni/h5aImp.c
+++ b/java/src/jni/h5aImp.c
@@ -1084,7 +1084,7 @@ Java_hdf_hdf5lib_H5_H5AreadVL(JNIEnv *env, jclass clss, jlong attr_id, jlong mem
/* Get size of data array */
if ((n = ENVPTR->GetArrayLength(ENVONLY, buf)) < 0) {
CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
- H5_BAD_ARGUMENT_ERROR(ENVONLY, "H5Aread: readBuf length < 0");
+ H5_BAD_ARGUMENT_ERROR(ENVONLY, "H5AreadVL: readBuf length < 0");
}
dims[0] = (hsize_t)n;
@@ -1095,12 +1095,12 @@ Java_hdf_hdf5lib_H5_H5AreadVL(JNIEnv *env, jclass clss, jlong attr_id, jlong mem
if ((type_class = H5Tget_class((hid_t)mem_type_id)) < 0)
H5_LIBRARY_ERROR(ENVONLY);
if (type_class == H5T_VLEN) {
- size_t typeSize;
- hid_t memb = H5I_INVALID_HID;
- H5T_class_t vlClass;
- size_t vlSize;
- void *rawBuf = NULL;
- jobject *jList = NULL;
+ size_t typeSize;
+ hid_t memb = H5I_INVALID_HID;
+ H5T_class_t vlClass;
+ size_t vlSize;
+ void *rawBuf = NULL;
+ jobjectArray jList = NULL;
size_t i, j, x;
@@ -1234,7 +1234,7 @@ Java_hdf_hdf5lib_H5_H5AreadVL(JNIEnv *env, jclass clss, jlong attr_id, jlong mem
}
jobj = ENVPTR->CallStaticObjectMethod(ENVONLY, cFloat, floatValueMid,
- floatValue);
+ (double)floatValue);
CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
break;
}
@@ -1321,7 +1321,7 @@ Java_hdf_hdf5lib_H5_H5AwriteVL(JNIEnv *env, jclass clss, jlong attr_id, jlong me
htri_t vl_data_class;
herr_t status = FAIL;
jboolean writeBufIsCopy;
- jbyteArray *writeBuf = NULL;
+ jbyteArray writeBuf = NULL;
UNUSED(clss);
@@ -1344,12 +1344,12 @@ Java_hdf_hdf5lib_H5_H5AwriteVL(JNIEnv *env, jclass clss, jlong attr_id, jlong me
if ((type_class = H5Tget_class((hid_t)mem_type_id)) < 0)
H5_LIBRARY_ERROR(ENVONLY);
if (type_class == H5T_VLEN) {
- size_t typeSize;
- hid_t memb = H5I_INVALID_HID;
- H5T_class_t vlClass;
- size_t vlSize;
- void *rawBuf = NULL;
- jobject *jList = NULL;
+ size_t typeSize;
+ hid_t memb = H5I_INVALID_HID;
+ H5T_class_t vlClass;
+ size_t vlSize;
+ void *rawBuf = NULL;
+ jobjectArray jList = NULL;
size_t i, j, x;
diff --git a/java/src/jni/h5dImp.c b/java/src/jni/h5dImp.c
index e6e9a76927b..59775e3b838 100644
--- a/java/src/jni/h5dImp.c
+++ b/java/src/jni/h5dImp.c
@@ -1110,12 +1110,12 @@ Java_hdf_hdf5lib_H5_H5DreadVL(JNIEnv *env, jclass clss, jlong dataset_id, jlong
if ((type_class = H5Tget_class((hid_t)mem_type_id)) < 0)
H5_LIBRARY_ERROR(ENVONLY);
if (type_class == H5T_VLEN) {
- size_t typeSize;
- hid_t memb = H5I_INVALID_HID;
- H5T_class_t vlClass;
- size_t vlSize;
- void *rawBuf = NULL;
- jobject *jList = NULL;
+ size_t typeSize;
+ hid_t memb = H5I_INVALID_HID;
+ H5T_class_t vlClass;
+ size_t vlSize;
+ void *rawBuf = NULL;
+ jobjectArray jList = NULL;
size_t i, j, x;
@@ -1250,7 +1250,7 @@ Java_hdf_hdf5lib_H5_H5DreadVL(JNIEnv *env, jclass clss, jlong dataset_id, jlong
}
jobj = ENVPTR->CallStaticObjectMethod(ENVONLY, cFloat, floatValueMid,
- floatValue);
+ (double)floatValue);
CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
break;
}
@@ -1334,7 +1334,7 @@ Java_hdf_hdf5lib_H5_H5DwriteVL(JNIEnv *env, jclass clss, jlong dataset_id, jlong
htri_t vl_data_class;
herr_t status = FAIL;
jboolean writeBufIsCopy;
- jbyteArray *writeBuf = NULL;
+ jbyteArray writeBuf = NULL;
UNUSED(clss);
@@ -1353,12 +1353,12 @@ Java_hdf_hdf5lib_H5_H5DwriteVL(JNIEnv *env, jclass clss, jlong dataset_id, jlong
if ((type_class = H5Tget_class((hid_t)mem_type_id)) < 0)
H5_LIBRARY_ERROR(ENVONLY);
if (type_class == H5T_VLEN) {
- size_t typeSize;
- hid_t memb = H5I_INVALID_HID;
- H5T_class_t vlClass;
- size_t vlSize;
- void *rawBuf = NULL;
- jobject *jList = NULL;
+ size_t typeSize;
+ hid_t memb = H5I_INVALID_HID;
+ H5T_class_t vlClass;
+ size_t vlSize;
+ void *rawBuf = NULL;
+ jobjectArray jList = NULL;
size_t i, j, x;
diff --git a/java/src/jni/h5util.c b/java/src/jni/h5util.c
index 4140a9ec527..76a050a40ef 100644
--- a/java/src/jni/h5util.c
+++ b/java/src/jni/h5util.c
@@ -436,7 +436,7 @@ h5str_convert(JNIEnv *env, char **in_str, hid_t container, hid_t tid, void *out_
break;
case H5T_ENUM: {
- void *value;
+ void *value = NULL;
token = HDstrtok(this_str, delimiter);
@@ -865,7 +865,7 @@ h5str_sprintf(JNIEnv *env, h5str_t *out_str, hid_t container, hid_t tid, void *i
if (NULL == (this_str = (char *)HDmalloc(this_len)))
H5_OUT_OF_MEMORY_ERROR(ENVONLY, "h5str_sprintf: failed to allocate string buffer");
- if (HDsnprintf(this_str, this_len, "%g", tmp_float) < 0)
+ if (HDsnprintf(this_str, this_len, "%g", (double)tmp_float) < 0)
H5_JNI_FATAL_ERROR(ENVONLY, "h5str_sprintf: HDsnprintf failure");
break;
@@ -2273,10 +2273,10 @@ h5str_render_bin_output(FILE *stream, hid_t container, hid_t tid, void *_mem, hs
}
case H5T_STRING: {
- unsigned char tempuchar;
unsigned int i;
H5T_str_t pad;
- char *s;
+ char *s = NULL;
+ unsigned char tempuchar;
if ((pad = H5Tget_strpad(tid)) < 0) {
ret_value = FAIL;
@@ -2287,7 +2287,7 @@ h5str_render_bin_output(FILE *stream, hid_t container, hid_t tid, void *_mem, hs
mem = ((unsigned char *)_mem) + block_index * size;
if (H5Tis_variable_str(tid)) {
- s = *(char **)mem;
+ s = *(char **)((void *)mem);
if (s != NULL)
size = HDstrlen(s);
}
@@ -2919,24 +2919,20 @@ h5str_dump_region_attribute(JNIEnv *env, h5str_t *str, hid_t region_id)
int
h5str_dump_simple_dset(JNIEnv *env, FILE *stream, hid_t dset, int binary_order)
{
+ hid_t f_space = H5I_INVALID_HID; /* file data space */
hsize_t elmtno; /* counter */
+ size_t i = 0; /* counter */
+ int sndims; /* rank of dataspace */
+ int carry; /* counter carry value */
hsize_t zero[8]; /* vector of zeros */
hsize_t total_size[H5S_MAX_RANK]; /* total size of dataset*/
- size_t i; /* counter */
- hid_t f_space = H5I_INVALID_HID; /* file data space */
- int ndims;
- int carry; /* counter carry value */
-
- /* Print info */
- hssize_t p_nelmts; /* total selected elmts */
- size_t p_type_nbytes; /* size of memory type */
/* Stripmine info */
- void *sm_buf = NULL; /* buffer for raw data */
- hsize_t sm_size[H5S_MAX_RANK]; /* stripmine size */
- hsize_t sm_nbytes; /* bytes per stripmine */
- hsize_t sm_nelmts; /* elements per stripmine */
- hid_t sm_space = H5I_INVALID_HID; /* stripmine data space */
+ hsize_t sm_size[H5S_MAX_RANK]; /* stripmine size */
+ hsize_t sm_nbytes; /* bytes per stripmine */
+ hsize_t sm_nelmts; /* elements per stripmine */
+ unsigned char *sm_buf = NULL; /* buffer for raw data */
+ hid_t sm_space = H5I_INVALID_HID; /* stripmine data space */
/* Hyperslab info */
hsize_t hs_offset[H5S_MAX_RANK]; /* starting offset */
@@ -2944,11 +2940,11 @@ h5str_dump_simple_dset(JNIEnv *env, FILE *stream, hid_t dset, int binary_order)
hsize_t hs_nelmts; /* elements in request */
/* VL data special information */
- unsigned int vl_data = 0; /* contains VL datatypes */
- hid_t p_type = H5I_INVALID_HID;
- hid_t f_type = H5I_INVALID_HID;
+ unsigned int vl_data = 0; /* contains VL datatypes */
+ int ret_value = FAIL;
- int ret_value = FAIL;
+ hid_t p_type = H5I_INVALID_HID;
+ hid_t f_type = H5I_INVALID_HID;
if (dset < 0)
H5_BAD_ARGUMENT_ERROR(ENVONLY, "h5str_dump_simple_dset: dset ID < 0");
@@ -2959,18 +2955,21 @@ h5str_dump_simple_dset(JNIEnv *env, FILE *stream, hid_t dset, int binary_order)
if ((f_space = H5Dget_space(dset)) < 0)
H5_LIBRARY_ERROR(ENVONLY);
- if ((ndims = H5Sget_simple_extent_ndims(f_space)) < 0)
+ if ((sndims = H5Sget_simple_extent_ndims(f_space)) < 0)
H5_LIBRARY_ERROR(ENVONLY);
+ /* Assume entire data space to be printed */
if (H5Sget_simple_extent_dims(f_space, total_size, NULL) < 0)
H5_LIBRARY_ERROR(ENVONLY);
if (H5Tequal(f_type, H5T_STD_REF_DSETREG)) {
+ hssize_t p_nelmts; /* total selected elmts */
+
if ((p_nelmts = H5Sget_simple_extent_npoints(f_space)) < 0)
H5_LIBRARY_ERROR(ENVONLY);
if (NULL ==
- (sm_buf = (H5R_ref_t *)HDcalloc(MAX(sizeof(unsigned), sizeof(H5R_ref_t)), (size_t)p_nelmts)))
+ (sm_buf = (unsigned char *)HDcalloc(MAX(sizeof(unsigned), sizeof(H5R_ref_t)), (size_t)p_nelmts)))
H5_OUT_OF_MEMORY_ERROR(ENVONLY, "h5str_dump_simple_mem: failed to allocate sm_buf");
/* Read the data */
@@ -2987,6 +2986,10 @@ h5str_dump_simple_dset(JNIEnv *env, FILE *stream, hid_t dset, int binary_order)
}
}
else {
+ /* Print info */
+ size_t p_type_nbytes; /* size of memory type */
+ hsize_t p_nelmts; /* total selected elmts */
+
switch (binary_order) {
case 1: {
if ((p_type = h5str_get_native_type(f_type)) < 0)
@@ -3017,109 +3020,102 @@ h5str_dump_simple_dset(JNIEnv *env, FILE *stream, hid_t dset, int binary_order)
}
}
- if ((size_t)ndims <= (sizeof(sm_size) / sizeof(sm_size[0]))) {
- if (H5Sget_simple_extent_dims(f_space, total_size, NULL) < 0)
- H5_LIBRARY_ERROR(ENVONLY);
+ /* Calculate the number of elements we're going to print */
+ p_nelmts = 1;
- /* Calculate the number of elements we're going to print */
- p_nelmts = 1;
-
- if (ndims > 0) {
- for (i = 0; i < (size_t)ndims; i++)
- p_nelmts *= total_size[i];
- } /* end if */
+ if (sndims > 0) {
+ for (i = 0; i < (size_t)sndims; i++)
+ p_nelmts *= total_size[i];
+ } /* end if */
- if (p_nelmts > 0) {
- /* Check if we have VL data in the dataset's datatype */
- if (h5str_detect_vlen(p_type) != 0)
- vl_data = 1;
+ if (p_nelmts > 0) {
+ /* Check if we have VL data in the dataset's datatype */
+ if (h5str_detect_vlen(p_type) != 0)
+ vl_data = 1;
- /*
- * Determine the strip mine size and allocate a buffer. The strip mine is
- * a hyperslab whose size is manageable.
- */
- if (!(sm_nbytes = p_type_nbytes = H5Tget_size(p_type)))
- H5_LIBRARY_ERROR(ENVONLY);
+ /*
+ * Determine the strip mine size and allocate a buffer. The strip mine is
+ * a hyperslab whose size is manageable.
+ */
+ if (!(sm_nbytes = p_type_nbytes = H5Tget_size(p_type)))
+ H5_LIBRARY_ERROR(ENVONLY);
- if (ndims > 0) {
- for (i = (size_t)ndims; i > 0; --i) {
- hsize_t size = H5TOOLS_BUFSIZE / sm_nbytes;
- if (size == 0) /* datum size > H5TOOLS_BUFSIZE */
- size = 1;
- sm_size[i - 1] = (((total_size[i - 1]) < (size)) ? (total_size[i - 1]) : (size));
- sm_nbytes *= sm_size[i - 1];
- }
+ if (sndims > 0) {
+ for (i = (size_t)sndims; i > 0; --i) {
+ hsize_t size = H5TOOLS_BUFSIZE / sm_nbytes;
+ if (size == 0) /* datum size > H5TOOLS_BUFSIZE */
+ size = 1;
+ sm_size[i - 1] = (((total_size[i - 1]) < (size)) ? (total_size[i - 1]) : (size));
+ sm_nbytes *= sm_size[i - 1];
}
+ }
- if (sm_nbytes > 0) {
- if (NULL == (sm_buf = (unsigned char *)HDmalloc((size_t)sm_nbytes)))
- H5_OUT_OF_MEMORY_ERROR(ENVONLY, "h5str_dump_simple_dset: failed to allocate sm_buf");
+ if (sm_nbytes > 0) {
+ if (NULL == (sm_buf = (unsigned char *)HDmalloc((size_t)sm_nbytes)))
+ H5_OUT_OF_MEMORY_ERROR(ENVONLY, "h5str_dump_simple_dset: failed to allocate sm_buf");
- sm_nelmts = sm_nbytes / p_type_nbytes;
+ sm_nelmts = sm_nbytes / p_type_nbytes;
- if ((sm_space = H5Screate_simple(1, &sm_nelmts, NULL)) < 0)
- H5_LIBRARY_ERROR(ENVONLY);
-
- /* The stripmine loop */
- HDmemset(hs_offset, 0, sizeof hs_offset);
- HDmemset(zero, 0, sizeof zero);
-
- for (elmtno = 0; elmtno < (hsize_t)p_nelmts; elmtno += hs_nelmts) {
- /* Calculate the hyperslab size */
- if (ndims > 0) {
- for (i = 0, hs_nelmts = 1; i < (size_t)ndims; i++) {
- hs_size[i] = (((total_size[i] - hs_offset[i]) < (sm_size[i]))
- ? (total_size[i] - hs_offset[i])
- : (sm_size[i]));
- hs_nelmts *= hs_size[i];
- }
-
- if (H5Sselect_hyperslab(f_space, H5S_SELECT_SET, hs_offset, NULL, hs_size, NULL) <
- 0)
- H5_LIBRARY_ERROR(ENVONLY);
+ if ((sm_space = H5Screate_simple(1, &sm_nelmts, NULL)) < 0)
+ H5_LIBRARY_ERROR(ENVONLY);
- if (H5Sselect_hyperslab(sm_space, H5S_SELECT_SET, zero, NULL, &hs_nelmts, NULL) <
- 0)
- H5_LIBRARY_ERROR(ENVONLY);
+ /* The stripmine loop */
+ HDmemset(hs_offset, 0, sizeof hs_offset);
+ HDmemset(zero, 0, sizeof zero);
+
+ for (elmtno = 0; elmtno < (hsize_t)p_nelmts; elmtno += hs_nelmts) {
+ /* Calculate the hyperslab size */
+ if (sndims > 0) {
+ for (i = 0, hs_nelmts = 1; i < (size_t)sndims; i++) {
+ hs_size[i] = (((total_size[i] - hs_offset[i]) < (sm_size[i]))
+ ? (total_size[i] - hs_offset[i])
+ : (sm_size[i]));
+ hs_nelmts *= hs_size[i];
}
- else {
- if (H5Sselect_all(f_space) < 0)
- H5_LIBRARY_ERROR(ENVONLY);
- if (H5Sselect_all(sm_space) < 0)
- H5_LIBRARY_ERROR(ENVONLY);
+ if (H5Sselect_hyperslab(f_space, H5S_SELECT_SET, hs_offset, NULL, hs_size, NULL) < 0)
+ H5_LIBRARY_ERROR(ENVONLY);
- hs_nelmts = 1;
- }
+ if (H5Sselect_hyperslab(sm_space, H5S_SELECT_SET, zero, NULL, &hs_nelmts, NULL) < 0)
+ H5_LIBRARY_ERROR(ENVONLY);
+ }
+ else {
+ if (H5Sselect_all(f_space) < 0)
+ H5_LIBRARY_ERROR(ENVONLY);
- /* Read the data */
- if (H5Dread(dset, p_type, sm_space, f_space, H5P_DEFAULT, sm_buf) < 0)
+ if (H5Sselect_all(sm_space) < 0)
H5_LIBRARY_ERROR(ENVONLY);
- if (binary_order == 99) {
- if (h5str_dump_simple_data(ENVONLY, stream, dset, p_type, sm_buf, hs_nelmts) < 0)
- CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
- }
- else {
- if (h5str_render_bin_output(stream, dset, p_type, sm_buf, hs_nelmts) < 0)
- CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
- }
+ hs_nelmts = 1;
+ }
- /* Reclaim any VL memory, if necessary */
- if (vl_data) {
- if (H5Treclaim(p_type, sm_space, H5P_DEFAULT, sm_buf) < 0)
- H5_LIBRARY_ERROR(ENVONLY);
- }
+ /* Read the data */
+ if (H5Dread(dset, p_type, sm_space, f_space, H5P_DEFAULT, sm_buf) < 0)
+ H5_LIBRARY_ERROR(ENVONLY);
+
+ if (binary_order == 99) {
+ if (h5str_dump_simple_data(ENVONLY, stream, dset, p_type, sm_buf, hs_nelmts) < 0)
+ CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
+ }
+ else {
+ if (h5str_render_bin_output(stream, dset, p_type, sm_buf, hs_nelmts) < 0)
+ CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
+ }
- /* Calculate the next hyperslab offset */
- for (i = (size_t)ndims, carry = 1; i > 0 && carry; --i) {
- hs_offset[i - 1] += hs_size[i - 1];
+ /* Reclaim any VL memory, if necessary */
+ if (vl_data) {
+ if (H5Treclaim(p_type, sm_space, H5P_DEFAULT, sm_buf) < 0)
+ H5_LIBRARY_ERROR(ENVONLY);
+ }
- if (hs_offset[i - 1] == total_size[i - 1])
- hs_offset[i - 1] = 0;
- else
- carry = 0;
- }
+ /* Calculate the next hyperslab offset */
+ for (i = (size_t)sndims, carry = 1; i > 0 && carry; --i) {
+ hs_offset[i - 1] += hs_size[i - 1];
+
+ if (hs_offset[i - 1] == total_size[i - 1])
+ hs_offset[i - 1] = 0;
+ else
+ carry = 0;
}
}
}
@@ -3148,15 +3144,12 @@ h5str_dump_simple_mem(JNIEnv *env, FILE *stream, hid_t attr_id, int binary_order
{
hid_t f_space = H5I_INVALID_HID; /* file data space */
hsize_t alloc_size;
- int ndims; /* rank of dataspace */
- unsigned i; /* counters */
- hsize_t total_size[H5S_MAX_RANK]; /* total size of dataset*/
- hssize_t p_nelmts; /* total selected elmts */
+ int sndims; /* rank of dataspace */
+ unsigned i; /* counters */
+ hsize_t total_size[H5S_MAX_RANK]; /* total size of dataset */
+ int ret_value = 0;
- void *sm_buf = NULL; /* buffer for raw data */
- hsize_t sm_size[H5S_MAX_RANK]; /* stripmine size */
-
- int ret_value = 0;
+ unsigned char *sm_buf = NULL; /* buffer for raw data */
/* VL data special information */
unsigned int vl_data = 0; /* contains VL datatypes */
@@ -3172,18 +3165,20 @@ h5str_dump_simple_mem(JNIEnv *env, FILE *stream, hid_t attr_id, int binary_order
if (H5I_INVALID_HID == (f_space = H5Aget_space(attr_id)))
H5_LIBRARY_ERROR(ENVONLY);
- if ((ndims = H5Sget_simple_extent_ndims(f_space)) < 0)
+ if ((sndims = H5Sget_simple_extent_ndims(f_space)) < 0)
H5_LIBRARY_ERROR(ENVONLY);
if (H5Sget_simple_extent_dims(f_space, total_size, NULL) < 0)
H5_LIBRARY_ERROR(ENVONLY);
if (H5Tequal(f_type, H5T_STD_REF_DSETREG)) {
+ hssize_t p_nelmts; /* total selected elmts */
+
if ((p_nelmts = H5Sget_simple_extent_npoints(f_space)) < 0)
H5_LIBRARY_ERROR(ENVONLY);
if (NULL ==
- (sm_buf = (H5R_ref_t *)HDcalloc(MAX(sizeof(unsigned), sizeof(H5R_ref_t)), (size_t)p_nelmts)))
+ (sm_buf = (unsigned char *)HDcalloc(MAX(sizeof(unsigned), sizeof(H5R_ref_t)), (size_t)p_nelmts)))
H5_OUT_OF_MEMORY_ERROR(ENVONLY, "h5str_dump_simple_mem: failed to allocate sm_buf");
/* Read the data */
@@ -3200,6 +3195,8 @@ h5str_dump_simple_mem(JNIEnv *env, FILE *stream, hid_t attr_id, int binary_order
}
}
else {
+ hsize_t p_nelmts; /* total selected elmts */
+
switch (binary_order) {
case 1: {
if ((p_type = h5str_get_native_type(f_type)) < 0)
@@ -3230,45 +3227,40 @@ h5str_dump_simple_mem(JNIEnv *env, FILE *stream, hid_t attr_id, int binary_order
}
}
- if ((size_t)ndims <= (sizeof(sm_size) / sizeof(sm_size[0]))) {
- if (H5Sget_simple_extent_dims(f_space, total_size, NULL) < 0)
- H5_LIBRARY_ERROR(ENVONLY);
- /* Calculate the number of elements we're going to print */
- p_nelmts = 1;
+ /* Calculate the number of elements we're going to print */
+ p_nelmts = 1;
- if (ndims > 0) {
- for (i = 0; i < (size_t)ndims; i++)
- p_nelmts *= total_size[i];
- } /* end if */
+ if (sndims > 0) {
+ for (i = 0; i < (size_t)sndims; i++)
+ p_nelmts *= total_size[i];
+ } /* end if */
- if (p_nelmts > 0) {
- /* Check if we have VL data in the dataset's datatype */
- if (h5str_detect_vlen(p_type) != 0)
- vl_data = 1;
+ if (p_nelmts > 0) {
+ /* Check if we have VL data in the dataset's datatype */
+ if (h5str_detect_vlen(p_type) != 0)
+ vl_data = 1;
- alloc_size = (size_t)p_nelmts * H5Tget_size(p_type);
- if (NULL == (sm_buf = (unsigned char *)HDmalloc((size_t)alloc_size)))
- H5_OUT_OF_MEMORY_ERROR(ENVONLY, "h5str_dump_simple_mem: failed to allocate sm_buf");
+ alloc_size = (size_t)p_nelmts * H5Tget_size(p_type);
+ if (NULL == (sm_buf = (unsigned char *)HDmalloc((size_t)alloc_size)))
+ H5_OUT_OF_MEMORY_ERROR(ENVONLY, "h5str_dump_simple_mem: failed to allocate sm_buf");
- /* Read the data */
- if (H5Aread(attr_id, p_type, sm_buf) < 0)
- H5_LIBRARY_ERROR(ENVONLY);
+ /* Read the data */
+ if (H5Aread(attr_id, p_type, sm_buf) < 0)
+ H5_LIBRARY_ERROR(ENVONLY);
- if (binary_order == 99) {
- if (h5str_dump_simple_data(ENVONLY, stream, attr_id, p_type, sm_buf, (size_t)p_nelmts) <
- 0)
- CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
- }
- else {
- if (h5str_render_bin_output(stream, attr_id, p_type, sm_buf, (size_t)p_nelmts) < 0)
- CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
- }
+ if (binary_order == 99) {
+ if (h5str_dump_simple_data(ENVONLY, stream, attr_id, p_type, sm_buf, (size_t)p_nelmts) < 0)
+ CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
+ }
+ else {
+ if (h5str_render_bin_output(stream, attr_id, p_type, sm_buf, (size_t)p_nelmts) < 0)
+ CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
+ }
- /* Reclaim any VL memory, if necessary */
- if (vl_data) {
- if (H5Treclaim(p_type, f_space, H5P_DEFAULT, sm_buf) < 0)
- H5_LIBRARY_ERROR(ENVONLY);
- }
+ /* Reclaim any VL memory, if necessary */
+ if (vl_data) {
+ if (H5Treclaim(p_type, f_space, H5P_DEFAULT, sm_buf) < 0)
+ H5_LIBRARY_ERROR(ENVONLY);
}
}
}
diff --git a/java/test/TestH5Fparams.java b/java/test/TestH5Fparams.java
index 6accc3b3dc3..3fcfe3bf57c 100644
--- a/java/test/TestH5Fparams.java
+++ b/java/test/TestH5Fparams.java
@@ -77,6 +77,7 @@ public void testH5Funmount_null() throws Throwable
H5.H5Funmount(-1, null);
}
+ @SuppressWarnings("deprecation")
@Ignore
public void testH5Fis_hdf5_text()
{
diff --git a/java/test/TestH5Ocreate.java b/java/test/TestH5Ocreate.java
index 096abcd2183..eb57938d299 100644
--- a/java/test/TestH5Ocreate.java
+++ b/java/test/TestH5Ocreate.java
@@ -449,6 +449,7 @@ public int callback(long group, String name, H5O_info_t info, H5O_iterate_opdata
((idata)((H5O_iter_data)iter_data).iterdata.get(3)).link_name.compareToIgnoreCase("G1/DS2") == 0);
}
+ @SuppressWarnings("deprecation")
@Test
public void testH5Ocomment()
{
@@ -479,6 +480,7 @@ public void testH5Ocomment()
assertTrue("H5Oget_comment: ", obj_comment.compareTo("Test Comment") == 0);
}
+ @SuppressWarnings("deprecation")
@Test
public void testH5Ocomment_clear()
{
@@ -525,6 +527,7 @@ public void testH5Ocomment_clear()
assertTrue("H5Oget_comment: ", obj_comment == null);
}
+ @SuppressWarnings("deprecation")
@Test
public void testH5Ocomment_by_name()
{
@@ -548,6 +551,7 @@ public void testH5Ocomment_by_name()
assertTrue("H5Oget_comment_by_name: ", obj_comment.compareTo("Test Comment") == 0);
}
+ @SuppressWarnings("deprecation")
@Test
public void testH5Ocomment_by_name_clear()
{
diff --git a/java/test/TestH5Oparams.java b/java/test/TestH5Oparams.java
index 280e9fd8497..296babceb68 100644
--- a/java/test/TestH5Oparams.java
+++ b/java/test/TestH5Oparams.java
@@ -168,6 +168,7 @@ public void testH5Ovisit_by_name_null() throws Throwable
H5.H5Ovisit_by_name(-1, "Bogus", -1, -1, null, null, 0, -1);
}
+ @SuppressWarnings("deprecation")
@Test(expected = HDF5LibraryException.class)
public void testH5Oset_comment_invalid() throws Throwable
{
@@ -180,12 +181,14 @@ public void testH5Oget_comment_invalid() throws Throwable
H5.H5Oget_comment(-1);
}
+ @SuppressWarnings("deprecation")
@Test(expected = HDF5LibraryException.class)
public void testH5Oset_comment_by_name_invalid() throws Throwable
{
H5.H5Oset_comment_by_name(-1, "Bogus", null, -1);
}
+ @SuppressWarnings("deprecation")
@Test(expected = NullPointerException.class)
public void testH5Oset_comment_by_name_null() throws Throwable
{
From 1a81091b35bd4452338cc908c2c1fdb97dc1773d Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Thu, 6 Oct 2022 18:41:07 -0500
Subject: [PATCH 002/108] fixed fortran parallel testing with cmake (#2148)
---
CMakeLists.txt | 1 +
fortran/testpar/CMakeLists.txt | 2 +-
2 files changed, 2 insertions(+), 1 deletion(-)
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 6ef148e16fb..73bf6bf4b53 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1083,6 +1083,7 @@ if (EXISTS "${HDF5_SOURCE_DIR}/fortran" AND IS_DIRECTORY "${HDF5_SOURCE_DIR}/for
# Parallel IO usage requires MPI to be Linked and Included
if (H5_HAVE_PARALLEL)
+ find_package(MPI REQUIRED COMPONENTS Fortran)
set (LINK_Fortran_LIBS ${LINK_Fortran_LIBS} ${MPI_Fortran_LIBRARIES})
if (MPI_Fortran_LINK_FLAGS)
set (CMAKE_Fortran_EXE_LINKER_FLAGS "${MPI_Fortran_LINK_FLAGS} ${CMAKE_EXE_LINKER_FLAGS}")
diff --git a/fortran/testpar/CMakeLists.txt b/fortran/testpar/CMakeLists.txt
index 12489528ff9..d41343c36ed 100644
--- a/fortran/testpar/CMakeLists.txt
+++ b/fortran/testpar/CMakeLists.txt
@@ -4,7 +4,7 @@ project (HDF5_FORTRAN_TESTPAR C Fortran)
#-----------------------------------------------------------------------------
# Setup include Directories
#-----------------------------------------------------------------------------
-set (TESTPAR_INCLUDES ${MPI_Fortran_INCLUDE_DIRS} ${HDF5_F90_BINARY_DIR} ${HDF5_F90_SRC_DIR}/src))
+set (TESTPAR_INCLUDES ${MPI_Fortran_INCLUDE_DIRS} ${HDF5_F90_BINARY_DIR} ${HDF5_F90_SRC_DIR}/src)
if (NOT BUILD_SHARED_LIBS)
set (TESTPAR_INCLUDES ${TESTPAR_INCLUDES} ${CMAKE_Fortran_MODULE_DIRECTORY}/static)
else ()
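As context for the two changes above, the pattern they enable is: MPI must be located with its Fortran component before the MPI_Fortran_* variables used by the parallel Fortran tests are populated. A minimal stand-alone sketch (project, target, and source names are hypothetical):

    cmake_minimum_required (VERSION 3.12)
    project (parallel_fortran_demo Fortran)

    # Locate MPI with its Fortran component so MPI_Fortran_INCLUDE_DIRS and
    # MPI_Fortran_LIBRARIES are defined, as the patched CMakeLists.txt now does.
    find_package (MPI REQUIRED COMPONENTS Fortran)

    add_executable (t_demo t_demo.F90)
    target_include_directories (t_demo PRIVATE ${MPI_Fortran_INCLUDE_DIRS})
    target_link_libraries (t_demo PRIVATE ${MPI_Fortran_LIBRARIES})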
From 66c8a487221e354d5e8e820176e9a08887caa052 Mon Sep 17 00:00:00 2001
From: Neil Fortner
Date: Fri, 4 Nov 2022 07:51:21 -0500
Subject: [PATCH 003/108] Fix problem with variable length attributes being
accessed through multiple file handles (#2181) (#2207)
---
release_docs/RELEASE.txt | 10 +++++
src/H5Aint.c | 4 ++
test/tattr.c | 79 +++++++++++++++++++++++++++++++++++++++-
3 files changed, 92 insertions(+), 1 deletion(-)
diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt
index 26c62b432a4..23641e2da2f 100644
--- a/release_docs/RELEASE.txt
+++ b/release_docs/RELEASE.txt
@@ -170,6 +170,16 @@ Bug Fixes since HDF5-1.12.1 release
===================================
Library
-------
+ - Fixed an issue with variable length attributes
+
+ Previously, if a variable length attribute was held open while its file
+ was opened through another handle, the same attribute was opened through
+ the second file handle, and the second file and attribute handles were
+ closed, attempting to write to the attribute through the first handle
+ would cause an error.
+
+ (NAF - 2022/10/24)
+
- Fixed an issue with hyperslab selections
Previously, when combining hyperslab selections, it was possible for the
diff --git a/src/H5Aint.c b/src/H5Aint.c
index 0bb95766253..99e81fa6537 100644
--- a/src/H5Aint.c
+++ b/src/H5Aint.c
@@ -723,6 +723,10 @@ H5A__write(H5A_t *attr, const H5T_t *mem_type, const void *buf)
HDassert(mem_type);
HDassert(buf);
+ /* Patch the top level file pointer in attr->shared->dt->shared->u.vlen.f if needed */
+ if (H5T_patch_vlen_file(attr->shared->dt, H5F_VOL_OBJ(attr->oloc.file)) < 0)
+ HGOTO_ERROR(H5E_ATTR, H5E_CANTINIT, FAIL, "can't patch VL datatype file pointer")
+
/* Get # of elements for attribute's dataspace */
if ((snelmts = H5S_GET_EXTENT_NPOINTS(attr->shared->ds)) < 0)
HGOTO_ERROR(H5E_ATTR, H5E_CANTCOUNT, FAIL, "dataspace is invalid")
diff --git a/test/tattr.c b/test/tattr.c
index 5c630920264..61e528614a9 100644
--- a/test/tattr.c
+++ b/test/tattr.c
@@ -11073,6 +11073,81 @@ test_attr_bug9(hid_t fcpl, hid_t fapl)
CHECK(ret, FAIL, "H5Sclose");
} /* test_attr_bug9() */
+/****************************************************************
+**
+** test_attr_bug10(): Test basic H5A (attribute) code.
+** Attempts to trigger a bug which would result in a
+** segfault. Create a vlen attribute through a file
+** handle, then open the same file through a different
+** handle, open the same attribute through the second file
+** handle, then close the second file and attribute
+** handles, then write to the attribute through the first
+** handle.
+**
+****************************************************************/
+static void
+test_attr_bug10(hid_t fcpl, hid_t fapl)
+{
+ hid_t fid1, fid2; /* File IDs */
+ hid_t aid1, aid2; /* Attribute IDs */
+ hid_t sid; /* Dataspace ID */
+ hid_t tid; /* Datatype ID */
+ hsize_t dims[1] = {1}; /* Attribute dimensions */
+ const char *wbuf[1] = {"foo"}; /* Write buffer */
+ herr_t ret; /* Generic return status */
+
+ /* Output message about test being performed */
+ MESSAGE(5, ("Testing that vlen attributes can be written to after a second file handle is closed\n"));
+
+ /* Create dataspace */
+ sid = H5Screate_simple(1, dims, NULL);
+ CHECK(sid, FAIL, "H5Screate_simple");
+
+ /* Create VL string datatype */
+ tid = H5Tcopy(H5T_C_S1);
+ CHECK(tid, FAIL, "H5Tcopy");
+ ret = H5Tset_size(tid, H5T_VARIABLE);
+ CHECK(ret, FAIL, "H5Tset_size");
+
+ /* Create file */
+ fid1 = H5Fcreate(FILENAME, H5F_ACC_TRUNC, fcpl, fapl);
+ CHECK(fid1, FAIL, "H5Fcreate");
+
+ /* Create attribute on root group */
+ aid1 = H5Acreate2(fid1, "attr", tid, sid, H5P_DEFAULT, H5P_DEFAULT);
+ CHECK(aid1, FAIL, "H5Acreate2");
+
+ /* Open the same file again */
+ fid2 = H5Fopen(FILENAME, H5F_ACC_RDWR, fapl);
+ CHECK(fid2, FAIL, "H5Fopen");
+
+ /* Open the same attribute through the second file handle */
+ aid2 = H5Aopen(fid2, "attr", H5P_DEFAULT);
+ CHECK(aid2, FAIL, "H5Aopen");
+
+ /* Close the second attribute and file handles */
+ ret = H5Aclose(aid2);
+ CHECK(ret, FAIL, "H5Aclose");
+ ret = H5Fclose(fid2);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ /* Write to the attribute through the first handle */
+ ret = H5Awrite(aid1, tid, wbuf);
+ CHECK(ret, FAIL, "H5Awrite");
+
+ /* Close IDs */
+ ret = H5Aclose(aid1);
+ CHECK(ret, FAIL, "H5Aclose");
+
+ ret = H5Fclose(fid1);
+ CHECK(ret, FAIL, "H5Fclose");
+
+ ret = H5Tclose(tid);
+ CHECK(ret, FAIL, "H5Tclose");
+
+ ret = H5Sclose(sid);
+ CHECK(ret, FAIL, "H5Sclose");
+} /* test_attr_bug10() */
+
/****************************************************************
**
** test_attr_delete_dense():
@@ -11301,7 +11376,9 @@ test_attr(void)
* attributes being larger than 64K */
test_attr_bug8(my_fcpl,
my_fapl); /* Test attribute expanding object header with undecoded messages */
- test_attr_bug9(my_fcpl, my_fapl); /* Test large attributes converting to dense storage */
+ test_attr_bug9(my_fcpl, my_fapl); /* Test large attributes converting to dense storage */
+ test_attr_bug10(my_fcpl, my_fapl); /* Test writing an attribute after opening and closing
+ through a different file handle */
/* tests specific to the "new format" */
if (new_format == TRUE) {
From 7a1885451213244da0b244338435830fc4dd6ea1 Mon Sep 17 00:00:00 2001
From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com>
Date: Fri, 4 Nov 2022 08:13:08 -0500
Subject: [PATCH 004/108] Merge #2198 correct CMake MPI linking (#2215)
---
config/cmake/scripts/CTestScript.cmake | 4 ++--
examples/CMakeLists.txt | 4 ++--
release_docs/RELEASE.txt | 8 +++++++-
src/CMakeLists.txt | 12 ++++++------
testpar/CMakeLists.txt | 8 ++++----
tools/lib/CMakeLists.txt | 4 ++--
tools/src/h5diff/CMakeLists.txt | 4 ++--
tools/src/h5perf/CMakeLists.txt | 4 ++--
8 files changed, 27 insertions(+), 21 deletions(-)
diff --git a/config/cmake/scripts/CTestScript.cmake b/config/cmake/scripts/CTestScript.cmake
index b142ade8b4e..fa149c3fa92 100644
--- a/config/cmake/scripts/CTestScript.cmake
+++ b/config/cmake/scripts/CTestScript.cmake
@@ -51,7 +51,7 @@ endif ()
set (BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DSITE:STRING=${CTEST_SITE} -DBUILDNAME:STRING=${CTEST_BUILD_NAME}")
# Launchers work only with Makefile and Ninja generators.
-if(NOT "${CTEST_CMAKE_GENERATOR}" MATCHES "Make|Ninja")
+if(NOT "${CTEST_CMAKE_GENERATOR}" MATCHES "Make|Ninja" OR LOCAL_SKIP_TEST)
set(CTEST_USE_LAUNCHERS 0)
set(ENV{CTEST_USE_LAUNCHERS_DEFAULT} 0)
set(BUILD_OPTIONS "${BUILD_OPTIONS} -DCTEST_USE_LAUNCHERS:BOOL=OFF")
@@ -269,7 +269,7 @@ endif ()
## -- LOCAL_MEMCHECK_TEST executes the Valgrind testing
## -- LOCAL_COVERAGE_TEST executes code coverage process
## --------------------------
- ctest_start (${MODEL} TRACK ${MODEL})
+ ctest_start (${MODEL} GROUP ${MODEL})
if (LOCAL_UPDATE)
ctest_update (SOURCE "${CTEST_SOURCE_DIRECTORY}")
endif ()
diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt
index 3f329c1aca7..f453467f10d 100644
--- a/examples/CMakeLists.txt
+++ b/examples/CMakeLists.txt
@@ -76,10 +76,10 @@ if (H5_HAVE_PARALLEL)
target_include_directories (${parallel_example} PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>")
if (NOT BUILD_SHARED_LIBS)
TARGET_C_PROPERTIES (${parallel_example} STATIC)
- target_link_libraries (${parallel_example} PRIVATE ${HDF5_LIB_TARGET} ${MPI_C_LIBRARIES})
+ target_link_libraries (${parallel_example} PRIVATE ${HDF5_LIB_TARGET} MPI::MPI_C)
else ()
TARGET_C_PROPERTIES (${parallel_example} SHARED)
- target_link_libraries (${parallel_example} PRIVATE ${HDF5_LIBSH_TARGET} ${MPI_C_LIBRARIES})
+ target_link_libraries (${parallel_example} PRIVATE ${HDF5_LIBSH_TARGET} MPI::MPI_C)
endif ()
set_target_properties (${parallel_example} PROPERTIES FOLDER examples)
diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt
index 23641e2da2f..ce18ed90920 100644
--- a/release_docs/RELEASE.txt
+++ b/release_docs/RELEASE.txt
@@ -225,7 +225,13 @@ Bug Fixes since HDF5-1.12.1 release
Configuration
-------------
- -
+ - Move MPI libraries link from PRIVATE to PUBLIC
+
+ The install dependencies were not including the need for MPI libraries when
+ an application or library was built with the C library. Also updated the
+ CMake target link command to use the newer style MPI::MPI_C link variable.
+
+ (ADB - 2022/10/27)
Tools
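What the PRIVATE-to-PUBLIC move buys downstream: an application that links the HDF5 library target now inherits the MPI usage requirements transitively instead of linking MPI itself. A minimal consumer-side sketch (the hdf5::hdf5-shared target name is an assumption about the installed package and may differ per configuration):

    cmake_minimum_required (VERSION 3.12)
    project (parallel_app C)

    find_package (HDF5 REQUIRED COMPONENTS C)

    add_executable (parallel_app main.c)
    # MPI::MPI_C travels along as a PUBLIC dependency of the HDF5 target,
    # so no explicit MPI link is needed here.
    target_link_libraries (parallel_app PRIVATE hdf5::hdf5-shared)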
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index df3eadfa07e..a9802c162b9 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -1014,7 +1014,7 @@ target_include_directories (H5detect PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_
target_compile_definitions(H5detect PUBLIC ${HDF_EXTRA_C_FLAGS} ${HDF_EXTRA_FLAGS})
TARGET_C_PROPERTIES (H5detect STATIC)
target_link_libraries (H5detect
- PRIVATE "$<$:${MPI_C_LIBRARIES}>" $<$,$>:ws2_32.lib>
+ PRIVATE "$<$:MPI::MPI_C>" $<$,$>:ws2_32.lib>
)
target_compile_options(H5detect
PRIVATE "$<$:-O0>"
@@ -1108,7 +1108,7 @@ target_include_directories (H5make_libsettings PRIVATE "${HDF5_SRC_DIR};${HDF5_S
target_compile_definitions(H5make_libsettings PUBLIC ${HDF_EXTRA_C_FLAGS} ${HDF_EXTRA_FLAGS})
TARGET_C_PROPERTIES (H5make_libsettings STATIC)
target_link_libraries (H5make_libsettings
- PRIVATE "$<$:${MPI_C_LIBRARIES}>" $<$,$>:ws2_32.lib>
+ PRIVATE "$<$:MPI::MPI_C>" $<$,$>:ws2_32.lib>
)
target_compile_options(H5make_libsettings
PRIVATE "$<$:-O0>"
@@ -1179,8 +1179,8 @@ if (NOT ONLY_SHARED_LIBS)
)
TARGET_C_PROPERTIES (${HDF5_LIB_TARGET} STATIC)
target_link_libraries (${HDF5_LIB_TARGET}
- PRIVATE ${LINK_LIBS} ${LINK_COMP_LIBS} "$<$:${MPI_C_LIBRARIES}>"
- PUBLIC $<$>:${CMAKE_DL_LIBS}>
+ PRIVATE ${LINK_LIBS} ${LINK_COMP_LIBS}
+ PUBLIC $<$>:${CMAKE_DL_LIBS}> "$<$:MPI::MPI_C>"
)
if (NOT WIN32)
target_link_libraries (${HDF5_LIB_TARGET}
@@ -1221,8 +1221,8 @@ if (BUILD_SHARED_LIBS)
)
TARGET_C_PROPERTIES (${HDF5_LIBSH_TARGET} SHARED)
target_link_libraries (${HDF5_LIBSH_TARGET}
- PRIVATE ${LINK_LIBS} ${LINK_COMP_LIBS} "$<$:${MPI_C_LIBRARIES}>" $<$:Threads::Threads>
- PUBLIC $<$>:${CMAKE_DL_LIBS}>
+ PRIVATE ${LINK_LIBS} ${LINK_COMP_LIBS} $<$:Threads::Threads>
+ PUBLIC $<$>:${CMAKE_DL_LIBS}> "$<$:MPI::MPI_C>"
)
set_global_variable (HDF5_LIBRARIES_TO_EXPORT "${HDF5_LIBRARIES_TO_EXPORT};${HDF5_LIBSH_TARGET}")
H5_SET_LIB_OPTIONS (${HDF5_LIBSH_TARGET} ${HDF5_LIB_NAME} SHARED "LIB")
diff --git a/testpar/CMakeLists.txt b/testpar/CMakeLists.txt
index ff4446ce974..4d9e65bd49e 100644
--- a/testpar/CMakeLists.txt
+++ b/testpar/CMakeLists.txt
@@ -29,12 +29,12 @@ target_include_directories (testphdf5
if (NOT BUILD_SHARED_LIBS)
TARGET_C_PROPERTIES (testphdf5 STATIC)
target_link_libraries (testphdf5
- PRIVATE ${HDF5_TEST_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:${MPI_C_LIBRARIES}>"
+ PRIVATE ${HDF5_TEST_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:MPI::MPI_C>"
)
else ()
TARGET_C_PROPERTIES (testphdf5 SHARED)
target_link_libraries (testphdf5
- PRIVATE ${HDF5_TEST_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:${MPI_C_LIBRARIES}>"
+ PRIVATE ${HDF5_TEST_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:MPI::MPI_C>"
)
endif ()
set_target_properties (testphdf5 PROPERTIES FOLDER test/par)
@@ -55,13 +55,13 @@ macro (ADD_H5P_EXE file)
if (NOT BUILD_SHARED_LIBS)
TARGET_C_PROPERTIES (${file} STATIC)
target_link_libraries (${file}
- PRIVATE ${HDF5_TEST_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:${MPI_C_LIBRARIES}>"
+ PRIVATE ${HDF5_TEST_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:MPI::MPI_C>"
$<$,$>:ws2_32.lib>
)
else ()
TARGET_C_PROPERTIES (${file} SHARED)
target_link_libraries (${file}
- PRIVATE ${HDF5_TEST_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:${MPI_C_LIBRARIES}>"
+ PRIVATE ${HDF5_TEST_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:MPI::MPI_C>"
$<$,$>:ws2_32.lib>
)
endif ()
diff --git a/tools/lib/CMakeLists.txt b/tools/lib/CMakeLists.txt
index 6e4d3e63755..a49b53bbfe8 100644
--- a/tools/lib/CMakeLists.txt
+++ b/tools/lib/CMakeLists.txt
@@ -46,7 +46,7 @@ if (NOT ONLY_SHARED_LIBS)
TARGET_C_PROPERTIES (${HDF5_TOOLS_LIB_TARGET} STATIC)
target_link_libraries (${HDF5_TOOLS_LIB_TARGET}
PUBLIC ${HDF5_LIB_TARGET}
- PRIVATE "$<$:${MPI_C_LIBRARIES}>"
+ PRIVATE "$<$:MPI::MPI_C>"
)
set_global_variable (HDF5_LIBRARIES_TO_EXPORT "${HDF5_LIBRARIES_TO_EXPORT};${HDF5_TOOLS_LIB_TARGET}")
H5_SET_LIB_OPTIONS (${HDF5_TOOLS_LIB_TARGET} ${HDF5_TOOLS_LIB_NAME} STATIC 0)
@@ -69,7 +69,7 @@ if (BUILD_SHARED_LIBS)
TARGET_C_PROPERTIES (${HDF5_TOOLS_LIBSH_TARGET} SHARED)
target_link_libraries (${HDF5_TOOLS_LIBSH_TARGET}
PUBLIC ${HDF5_LIBSH_TARGET}
- PRIVATE "$<$:${MPI_C_LIBRARIES}>"
+ PRIVATE "$<$:MPI::MPI_C>"
)
set_global_variable (HDF5_LIBRARIES_TO_EXPORT "${HDF5_LIBRARIES_TO_EXPORT};${HDF5_TOOLS_LIBSH_TARGET}")
H5_SET_LIB_OPTIONS (${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_TOOLS_LIB_NAME} SHARED "TOOLS")
diff --git a/tools/src/h5diff/CMakeLists.txt b/tools/src/h5diff/CMakeLists.txt
index e54b2da2806..b3402a898f7 100644
--- a/tools/src/h5diff/CMakeLists.txt
+++ b/tools/src/h5diff/CMakeLists.txt
@@ -57,7 +57,7 @@ if (H5_HAVE_PARALLEL)
target_include_directories (ph5diff PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>")
target_compile_options(ph5diff PRIVATE "${HDF5_CMAKE_C_FLAGS}")
TARGET_C_PROPERTIES (ph5diff STATIC)
- target_link_libraries (ph5diff PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:${MPI_C_LIBRARIES}>")
+ target_link_libraries (ph5diff PRIVATE ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:MPI::MPI_C>")
set_target_properties (ph5diff PROPERTIES FOLDER tools)
set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};ph5diff")
set (H5_DEP_EXECUTABLES ${H5_DEP_EXECUTABLES} ph5diff)
@@ -70,7 +70,7 @@ if (H5_HAVE_PARALLEL)
target_include_directories (ph5diff-shared PRIVATE "${HDF5_TOOLS_DIR}/lib;${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>")
target_compile_options(ph5diff-shared PRIVATE "${HDF5_CMAKE_C_FLAGS}")
TARGET_C_PROPERTIES (ph5diff-shared SHARED)
- target_link_libraries (ph5diff-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:${MPI_C_LIBRARIES}>")
+ target_link_libraries (ph5diff-shared PRIVATE ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:MPI::MPI_C>")
set_target_properties (ph5diff-shared PROPERTIES FOLDER tools)
set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};ph5diff-shared")
set (H5_DEP_EXECUTABLES ${H5_DEP_EXECUTABLES} ph5diff-shared)
diff --git a/tools/src/h5perf/CMakeLists.txt b/tools/src/h5perf/CMakeLists.txt
index 36b0b2f37d5..4caef5978f1 100644
--- a/tools/src/h5perf/CMakeLists.txt
+++ b/tools/src/h5perf/CMakeLists.txt
@@ -67,10 +67,10 @@ if (H5_HAVE_PARALLEL)
target_include_directories (h5perf PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$:${MPI_C_INCLUDE_DIRS}>")
if (NOT ONLY_SHARED_LIBS)
TARGET_C_PROPERTIES (h5perf STATIC)
- target_link_libraries (h5perf PRIVATE ${LINK_LIBS} ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:${MPI_C_LIBRARIES}>")
+ target_link_libraries (h5perf PRIVATE ${LINK_LIBS} ${HDF5_TOOLS_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$:MPI::MPI_C>")
else ()
TARGET_C_PROPERTIES (h5perf SHARED)
- target_link_libraries (h5perf PRIVATE ${LINK_LIBS} ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:${MPI_C_LIBRARIES}>")
+ target_link_libraries (h5perf PRIVATE ${LINK_LIBS} ${HDF5_TOOLS_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$:MPI::MPI_C>")
endif ()
set_target_properties (h5perf PROPERTIES FOLDER perform)
set_global_variable (HDF5_UTILS_TO_EXPORT "${HDF5_UTILS_TO_EXPORT};h5perf")
From 52854566ecad891ee60458777932539d3edab6e0 Mon Sep 17 00:00:00 2001
From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com>
Date: Fri, 4 Nov 2022 10:37:17 -0500
Subject: [PATCH 005/108] 1.12 Merge HDFGroup#2175 Implement option for using
CMake GNUInstallDirs (#2221)
---
CMakeInstallation.cmake | 2 +-
c++/src/CMakeLists.txt | 4 +-
config/cmake/CTestScript.cmake | 2 +-
config/cmake/HDF5PluginCache.cmake | 32 ++++-----
config/cmake/HDF5PluginMacros.cmake | 5 ++
config/cmake/cacheinit.cmake | 11 ++++
config/cmake/scripts/HDF5options.cmake | 3 +-
config/cmake_ext_mod/HDFLibMacros.cmake | 22 ++++++-
config/cmake_ext_mod/HDFMacros.cmake | 86 +++++++++++++++++--------
doxygen/CMakeLists.txt | 2 +-
fortran/src/CMakeLists.txt | 6 +-
hl/c++/src/CMakeLists.txt | 4 +-
hl/fortran/src/CMakeLists.txt | 6 +-
hl/src/CMakeLists.txt | 4 +-
java/src/hdf/hdf5lib/CMakeLists.txt | 2 +-
release_docs/INSTALL_CMake.txt | 19 +++++-
release_docs/RELEASE.txt | 10 +++
src/CMakeLists.txt | 4 +-
src/H5Dmodule.h | 8 +--
19 files changed, 166 insertions(+), 66 deletions(-)
diff --git a/CMakeInstallation.cmake b/CMakeInstallation.cmake
index 1fe516c039a..2c4c304964c 100644
--- a/CMakeInstallation.cmake
+++ b/CMakeInstallation.cmake
@@ -238,7 +238,7 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED)
endif ()
install (
FILES ${release_files}
- DESTINATION ${HDF5_INSTALL_DATA_DIR}
+ DESTINATION ${HDF5_INSTALL_DOC_DIR}
COMPONENT hdfdocuments
)
endif ()
diff --git a/c++/src/CMakeLists.txt b/c++/src/CMakeLists.txt
index 2a37dea8bf3..c8fc82df70b 100644
--- a/c++/src/CMakeLists.txt
+++ b/c++/src/CMakeLists.txt
@@ -173,8 +173,8 @@ endif ()
#-----------------------------------------------------------------------------
set (_PKG_CONFIG_PREFIX ${CMAKE_INSTALL_PREFIX})
set (_PKG_CONFIG_EXEC_PREFIX \${prefix})
-set (_PKG_CONFIG_LIBDIR \${exec_prefix}/lib)
-set (_PKG_CONFIG_INCLUDEDIR \${prefix}/include)
+set (_PKG_CONFIG_LIBDIR \${exec_prefix}/${HDF5_INSTALL_LIB_DIR})
+set (_PKG_CONFIG_INCLUDEDIR \${prefix}/${HDF5_INSTALL_INCLUDE_DIR})
set (_PKG_CONFIG_LIBNAME "${HDF5_CPP_LIB_CORENAME}")
set (_PKG_CONFIG_VERSION "${HDF5_PACKAGE_VERSION}")
diff --git a/config/cmake/CTestScript.cmake b/config/cmake/CTestScript.cmake
index 0ed90635087..dcf26851278 100644
--- a/config/cmake/CTestScript.cmake
+++ b/config/cmake/CTestScript.cmake
@@ -54,7 +54,7 @@ else ()
endif ()
#-----------------------------------------------------------------------------
-# MAC machines need special option
+# MacOS machines need special options
#-----------------------------------------------------------------------------
if (APPLE)
# Compiler choice
diff --git a/config/cmake/HDF5PluginCache.cmake b/config/cmake/HDF5PluginCache.cmake
index 3b085ddf54b..f5334bba740 100644
--- a/config/cmake/HDF5PluginCache.cmake
+++ b/config/cmake/HDF5PluginCache.cmake
@@ -1,29 +1,31 @@
-# This is the CMakeCache file.
+# CMake cache file for external HDF5 filter plugins
-########################
+#########################
# EXTERNAL cache entries
-########################
+#########################
# examples are the tests for plugins
-set (H5PL_BUILD_TESTING ON CACHE BOOL "Enable h5pl testing" FORCE)
-set (BUILD_EXAMPLES ON CACHE BOOL "Build h5pl Examples" FORCE)
+set (H5PL_BUILD_TESTING ON CACHE BOOL "Enable H5PL testing" FORCE)
+set (BUILD_EXAMPLES ON CACHE BOOL "Build H5PL Examples" FORCE)
-set (HDF5_HDF5_HEADER "h5pubconf.h" CACHE STRING "Name of HDF5 header" FORCE)
-set (HDF5_LINK_LIBS ${HDF5_LIBSH_TARGET} CACHE STRING "hdf5 target" FORCE)
-#set (HDF5_INCLUDE_DIR $ CACHE PATH "hdf5 include dirs" FORCE)
-set (HDF5_INCLUDE_DIR "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR}" CACHE PATH "hdf5 include dirs" FORCE)
-set (HDF5_INCLUDE_DIRS "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR}" CACHE PATH "hdf5 include dirs" FORCE)
-set (HDF5_DIR ${CMAKE_CURRENT_BINARY_DIR} CACHE STRING "hdf5 build folder" FORCE)
+set (HDF5_HDF5_HEADER "H5pubconf.h" CACHE STRING "Name of HDF5 header" FORCE)
+set (HDF5_LINK_LIBS ${HDF5_LIBSH_TARGET} CACHE STRING "HDF5 target" FORCE)
+#set (HDF5_INCLUDE_DIR $ CACHE PATH "HDF5 include dirs" FORCE)
+set (HDF5_INCLUDE_DIR "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR}" CACHE PATH "HDF5 include dirs" FORCE)
+set (HDF5_INCLUDE_DIRS "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR}" CACHE PATH "HDF5 include dirs" FORCE)
+set (HDF5_DIR ${CMAKE_CURRENT_BINARY_DIR} CACHE STRING "HDF5 build folder" FORCE)
-set (HDF5_DUMP_EXECUTABLE $ CACHE STRING "hdf5 h5dump target" FORCE)
-set (HDF5_REPACK_EXECUTABLE $ CACHE STRING "hdf5 h5repack target" FORCE)
+set (HDF5_DUMP_EXECUTABLE $ CACHE STRING "HDF5 h5dump target" FORCE)
+set (HDF5_REPACK_EXECUTABLE $ CACHE STRING "HDF5 h5repack target" FORCE)
set (H5PL_ALLOW_EXTERNAL_SUPPORT "${HDF5_ALLOW_EXTERNAL_SUPPORT}" CACHE STRING "Allow External Library Building (NO GIT TGZ)" FORCE)
-set (H5PL_GIT_URL "https://github.com/HDFGroup/hdf5_plugins.git" CACHE STRING "Use plugins from HDF repository" FORCE)
+set (H5PL_GIT_URL "https://github.com/HDFGroup/hdf5_plugins.git" CACHE STRING "Use plugins from HDF Group repository" FORCE)
set (H5PL_GIT_BRANCH "master" CACHE STRING "" FORCE)
set (H5PL_TGZ_NAME "${PLUGIN_TGZ_NAME}" CACHE STRING "Use plugins from compressed file" FORCE)
set (PL_PACKAGE_NAME "${PLUGIN_PACKAGE_NAME}" CACHE STRING "Name of plugins package" FORCE)
-set (H5PL_CPACK_ENABLE OFF CACHE BOOL "Enable the CPACK include and components" FORCE)
+set (H5PL_CPACK_ENABLE OFF CACHE BOOL "Enable CPack include and components" FORCE)
+
+set (H5PL_USE_GNU_DIRS ${HDF5_USE_GNU_DIRS} CACHE BOOL "TRUE to use GNU Coding Standard install directory variables" FORCE)
diff --git a/config/cmake/HDF5PluginMacros.cmake b/config/cmake/HDF5PluginMacros.cmake
index dbed15c6ea2..5cb5f99b057 100644
--- a/config/cmake/HDF5PluginMacros.cmake
+++ b/config/cmake/HDF5PluginMacros.cmake
@@ -67,6 +67,11 @@ macro (EXTERNAL_PLUGIN_LIBRARY compress_type)
add_dependencies (h5ex_d_zfp ${HDF5_LIBSH_TARGET})
target_include_directories (h5ex_d_zfp PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR}")
endif ()
+ if (ENABLE_ZSTD)
+ add_dependencies (h5zstd ${HDF5_LIBSH_TARGET})
+ add_dependencies (h5ex_d_zstd ${HDF5_LIBSH_TARGET})
+ target_include_directories (h5ex_d_zstd PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR}")
+ endif ()
endif ()
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
message (VERBOSE "HDF5_INCLUDE_DIR=${HDF5_INCLUDE_DIR}")
diff --git a/config/cmake/cacheinit.cmake b/config/cmake/cacheinit.cmake
index de6f92f89f1..faa05339260 100644
--- a/config/cmake/cacheinit.cmake
+++ b/config/cmake/cacheinit.cmake
@@ -181,3 +181,14 @@ set (ZFP_TGZ_NAME "zfp.tar.gz" CACHE STRING "Use ZFP from compressed file" FORCE
set (ZFP_PACKAGE_NAME "zfp" CACHE STRING "Name of ZFP package" FORCE)
+######
+# zstd
+######
+
+set (ZSTD_GIT_URL "https://github.com/facebook/zstd" CACHE STRING "Use ZSTD from repository" FORCE)
+set (ZSTD_GIT_BRANCH "dev" CACHE STRING "" FORCE)
+
+set (ZSTD_TGZ_NAME "zstd.tar.gz" CACHE STRING "Use ZSTD from compressed file" FORCE)
+
+set (ZSTD_PACKAGE_NAME "zstd" CACHE STRING "Name of ZSTD package" FORCE)
+
diff --git a/config/cmake/scripts/HDF5options.cmake b/config/cmake/scripts/HDF5options.cmake
index bab60de617e..e14dfba94a1 100644
--- a/config/cmake/scripts/HDF5options.cmake
+++ b/config/cmake/scripts/HDF5options.cmake
@@ -74,7 +74,7 @@ set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ALLOW_EXTERNAL_SUPPORT:STRIN
#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_ENCODING:BOOL=OFF")
#### package examples ####
-#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_PACK_EXAMPLES:BOOL=ON -DHDF5_EXAMPLES_COMPRESSED:STRING=HDF5Examples-2.0.1-Source.tar.gz -DHDF5_EXAMPLES_COMPRESSED_DIR:PATH=${CTEST_SCRIPT_DIRECTORY}")
+#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_PACK_EXAMPLES:BOOL=ON -DHDF5_EXAMPLES_COMPRESSED:STRING=HDF5Examples-2.0.2-Source.tar.gz -DHDF5_EXAMPLES_COMPRESSED_DIR:PATH=${CTEST_SCRIPT_DIRECTORY}")
#############################################################################################
### enable parallel builds
@@ -96,6 +96,7 @@ endif()
#############################################################################################
### disable test program builds
+#If using CTestScript.cmake file be sure to uncomment set (LOCAL_SKIP_TEST "TRUE")
#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DBUILD_TESTING:BOOL=OFF")
#############################################################################################
diff --git a/config/cmake_ext_mod/HDFLibMacros.cmake b/config/cmake_ext_mod/HDFLibMacros.cmake
index 2c5a9bf4b50..740ddae7eaa 100644
--- a/config/cmake_ext_mod/HDFLibMacros.cmake
+++ b/config/cmake_ext_mod/HDFLibMacros.cmake
@@ -24,12 +24,15 @@ macro (EXTERNAL_JPEG_LIBRARY compress_type jpeg_pic)
-DJPEG_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
-DJPEG_EXTERNALLY_CONFIGURED:BOOL=OFF
-DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
+ -DCMAKE_DEBUG_POSTFIX:STRING=${CMAKE_DEBUG_POSTFIX}
-DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX}
-DCMAKE_RUNTIME_OUTPUT_DIRECTORY:PATH=${CMAKE_RUNTIME_OUTPUT_DIRECTORY}
-DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY}
-DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:PATH=${CMAKE_ARCHIVE_OUTPUT_DIRECTORY}
-DCMAKE_PDB_OUTPUT_DIRECTORY:PATH=${CMAKE_PDB_OUTPUT_DIRECTORY}
-DCMAKE_ANSI_CFLAGS:STRING=${jpeg_pic}
+ -DHDF_USE_GNU_DIRS:STRING=${HDF5_USE_GNU_DIRS}
+ -DCMAKE_OSX_ARCHITECTURES:STRING=${CMAKE_OSX_ARCHITECTURES}
-DCMAKE_TOOLCHAIN_FILE:STRING=${CMAKE_TOOLCHAIN_FILE}
-DPACKAGE_NAMESPACE=${HDF_PACKAGE_NAMESPACE}
)
@@ -43,12 +46,15 @@ macro (EXTERNAL_JPEG_LIBRARY compress_type jpeg_pic)
-DJPEG_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
-DJPEG_EXTERNALLY_CONFIGURED:BOOL=OFF
-DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
+ -DCMAKE_DEBUG_POSTFIX:STRING=${CMAKE_DEBUG_POSTFIX}
-DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX}
-DCMAKE_RUNTIME_OUTPUT_DIRECTORY:PATH=${CMAKE_RUNTIME_OUTPUT_DIRECTORY}
-DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY}
-DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:PATH=${CMAKE_ARCHIVE_OUTPUT_DIRECTORY}
-DCMAKE_PDB_OUTPUT_DIRECTORY:PATH=${CMAKE_PDB_OUTPUT_DIRECTORY}
-DCMAKE_ANSI_CFLAGS:STRING=${jpeg_pic}
+ -DHDF_USE_GNU_DIRS:STRING=${HDF5_USE_GNU_DIRS}
+ -DCMAKE_OSX_ARCHITECTURES:STRING=${CMAKE_OSX_ARCHITECTURES}
-DCMAKE_TOOLCHAIN_FILE:STRING=${CMAKE_TOOLCHAIN_FILE}
-DPACKAGE_NAMESPACE=${HDF_PACKAGE_NAMESPACE}
)
@@ -93,6 +99,7 @@ macro (EXTERNAL_SZIP_LIBRARY compress_type encoding)
-DSZIP_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
-DSZIP_EXTERNALLY_CONFIGURED:BOOL=OFF
-DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
+ -DCMAKE_DEBUG_POSTFIX:STRING=${CMAKE_DEBUG_POSTFIX}
-DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX}
-DCMAKE_RUNTIME_OUTPUT_DIRECTORY:PATH=${CMAKE_RUNTIME_OUTPUT_DIRECTORY}
-DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY}
@@ -100,6 +107,8 @@ macro (EXTERNAL_SZIP_LIBRARY compress_type encoding)
-DCMAKE_PDB_OUTPUT_DIRECTORY:PATH=${CMAKE_PDB_OUTPUT_DIRECTORY}
-DCMAKE_ANSI_CFLAGS:STRING=${CMAKE_ANSI_CFLAGS}
-DSZIP_ENABLE_ENCODING:BOOL=${encoding}
+ -DHDF_USE_GNU_DIRS:STRING=${HDF5_USE_GNU_DIRS}
+ -DCMAKE_OSX_ARCHITECTURES:STRING=${CMAKE_OSX_ARCHITECTURES}
-DCMAKE_TOOLCHAIN_FILE:STRING=${CMAKE_TOOLCHAIN_FILE}
-DPACKAGE_NAMESPACE=${HDF_PACKAGE_NAMESPACE}
)
@@ -113,6 +122,7 @@ macro (EXTERNAL_SZIP_LIBRARY compress_type encoding)
-DSZIP_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
-DSZIP_EXTERNALLY_CONFIGURED:BOOL=OFF
-DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
+ -DCMAKE_DEBUG_POSTFIX:STRING=${CMAKE_DEBUG_POSTFIX}
-DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX}
-DCMAKE_RUNTIME_OUTPUT_DIRECTORY:PATH=${CMAKE_RUNTIME_OUTPUT_DIRECTORY}
-DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY}
@@ -120,6 +130,8 @@ macro (EXTERNAL_SZIP_LIBRARY compress_type encoding)
-DCMAKE_PDB_OUTPUT_DIRECTORY:PATH=${CMAKE_PDB_OUTPUT_DIRECTORY}
-DCMAKE_ANSI_CFLAGS:STRING=${CMAKE_ANSI_CFLAGS}
-DSZIP_ENABLE_ENCODING:BOOL=${encoding}
+ -DHDF_USE_GNU_DIRS:STRING=${HDF5_USE_GNU_DIRS}
+ -DCMAKE_OSX_ARCHITECTURES:STRING=${CMAKE_OSX_ARCHITECTURES}
-DCMAKE_TOOLCHAIN_FILE:STRING=${CMAKE_TOOLCHAIN_FILE}
-DPACKAGE_NAMESPACE=${HDF_PACKAGE_NAMESPACE}
)
@@ -184,12 +196,15 @@ macro (EXTERNAL_ZLIB_LIBRARY compress_type)
-DZLIB_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
-DZLIB_EXTERNALLY_CONFIGURED:BOOL=OFF
-DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
+ -DCMAKE_DEBUG_POSTFIX:STRING=${CMAKE_DEBUG_POSTFIX}
-DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX}
-DCMAKE_RUNTIME_OUTPUT_DIRECTORY:PATH=${CMAKE_RUNTIME_OUTPUT_DIRECTORY}
-DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY}
-DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:PATH=${CMAKE_ARCHIVE_OUTPUT_DIRECTORY}
-DCMAKE_PDB_OUTPUT_DIRECTORY:PATH=${CMAKE_PDB_OUTPUT_DIRECTORY}
-DCMAKE_ANSI_CFLAGS:STRING=${CMAKE_ANSI_CFLAGS}
+ -DHDF_USE_GNU_DIRS:STRING=${HDF5_USE_GNU_DIRS}
+ -DCMAKE_OSX_ARCHITECTURES:STRING=${CMAKE_OSX_ARCHITECTURES}
-DCMAKE_TOOLCHAIN_FILE:STRING=${CMAKE_TOOLCHAIN_FILE}
-DPACKAGE_NAMESPACE=${HDF_PACKAGE_NAMESPACE}
)
@@ -203,19 +218,24 @@ macro (EXTERNAL_ZLIB_LIBRARY compress_type)
-DZLIB_PACKAGE_EXT:STRING=${HDF_PACKAGE_EXT}
-DZLIB_EXTERNALLY_CONFIGURED:BOOL=OFF
-DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
+ -DCMAKE_DEBUG_POSTFIX:STRING=${CMAKE_DEBUG_POSTFIX}
-DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_INSTALL_PREFIX}
-DCMAKE_RUNTIME_OUTPUT_DIRECTORY:PATH=${CMAKE_RUNTIME_OUTPUT_DIRECTORY}
-DCMAKE_LIBRARY_OUTPUT_DIRECTORY:PATH=${CMAKE_LIBRARY_OUTPUT_DIRECTORY}
-DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:PATH=${CMAKE_ARCHIVE_OUTPUT_DIRECTORY}
-DCMAKE_PDB_OUTPUT_DIRECTORY:PATH=${CMAKE_PDB_OUTPUT_DIRECTORY}
-DCMAKE_ANSI_CFLAGS:STRING=${CMAKE_ANSI_CFLAGS}
+ -DHDF_USE_GNU_DIRS:STRING=${HDF5_USE_GNU_DIRS}
+ -DCMAKE_OSX_ARCHITECTURES:STRING=${CMAKE_OSX_ARCHITECTURES}
-DCMAKE_TOOLCHAIN_FILE:STRING=${CMAKE_TOOLCHAIN_FILE}
-DPACKAGE_NAMESPACE=${HDF_PACKAGE_NAMESPACE}
)
endif ()
externalproject_get_property (HDF5_ZLIB BINARY_DIR SOURCE_DIR)
- set (ZLIB_LIB_NAME "z")
+ if (NOT ZLIB_LIB_NAME)
+ set (ZLIB_LIB_NAME "z")
+ endif ()
##include (${BINARY_DIR}/${ZLIB_PACKAGE_NAME}${HDF_PACKAGE_EXT}-targets.cmake)
# Create imported target zlib-static
add_library(${HDF_PACKAGE_NAMESPACE}zlib-static STATIC IMPORTED)
diff --git a/config/cmake_ext_mod/HDFMacros.cmake b/config/cmake_ext_mod/HDFMacros.cmake
index 6f517bd7b5e..863b7ba2a68 100644
--- a/config/cmake_ext_mod/HDFMacros.cmake
+++ b/config/cmake_ext_mod/HDFMacros.cmake
@@ -12,30 +12,30 @@
#-------------------------------------------------------------------------------
macro (SET_HDF_BUILD_TYPE)
- get_property(_isMultiConfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
- if(_isMultiConfig)
- set(HDF_CFG_NAME ${CMAKE_BUILD_TYPE})
- set(HDF_BUILD_TYPE ${CMAKE_CFG_INTDIR})
- set(HDF_CFG_BUILD_TYPE \${CMAKE_INSTALL_CONFIG_NAME})
- else()
- set(HDF_CFG_BUILD_TYPE ".")
- if(CMAKE_BUILD_TYPE)
- set(HDF_CFG_NAME ${CMAKE_BUILD_TYPE})
- set(HDF_BUILD_TYPE ${CMAKE_BUILD_TYPE})
- else()
- set(HDF_CFG_NAME "Release")
- set(HDF_BUILD_TYPE "Release")
- endif()
- endif()
- if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES)
+ get_property (_isMultiConfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
+ if (_isMultiConfig)
+ set (HDF_CFG_NAME ${CMAKE_BUILD_TYPE})
+ set (HDF_BUILD_TYPE ${CMAKE_CFG_INTDIR})
+ set (HDF_CFG_BUILD_TYPE \${CMAKE_INSTALL_CONFIG_NAME})
+ else ()
+ set (HDF_CFG_BUILD_TYPE ".")
+ if (CMAKE_BUILD_TYPE)
+ set (HDF_CFG_NAME ${CMAKE_BUILD_TYPE})
+ set (HDF_BUILD_TYPE ${CMAKE_BUILD_TYPE})
+ else ()
+ set (HDF_CFG_NAME "Release")
+ set (HDF_BUILD_TYPE "Release")
+ endif ()
+ endif ()
+ if (NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES)
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
message (VERBOSE "Setting build type to 'RelWithDebInfo' as none was specified.")
endif()
- set(CMAKE_BUILD_TYPE RelWithDebInfo CACHE STRING "Choose the type of build." FORCE)
+ set (CMAKE_BUILD_TYPE RelWithDebInfo CACHE STRING "Choose the type of build." FORCE)
# Set the possible values of build type for cmake-gui
- set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Debug" "Release"
+ set_property (CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Debug" "Release"
"MinSizeRel" "RelWithDebInfo")
- endif()
+ endif ()
endmacro ()
#-------------------------------------------------------------------------------
@@ -45,7 +45,7 @@ endmacro ()
#-------------------------------------------------------------------------------
macro (IDE_GENERATED_PROPERTIES SOURCE_PATH HEADERS SOURCES)
- #set(source_group_path "Source/AIM/${NAME}")
+ #set (source_group_path "Source/AIM/${NAME}")
string (REPLACE "/" "\\\\" source_group_path ${SOURCE_PATH})
source_group (${source_group_path} FILES ${HEADERS} ${SOURCES})
@@ -321,6 +321,36 @@ macro (HDFTEST_COPY_FILE src dest target)
endmacro ()
macro (HDF_DIR_PATHS package_prefix)
+ option (HDF5_USE_GNU_DIRS "TRUE to use GNU Coding Standard install directory variables, FALSE to use historical settings" FALSE)
+ if (HDF5_USE_GNU_DIRS)
+ include(GNUInstallDirs)
+ if (NOT ${package_prefix}_INSTALL_BIN_DIR)
+ set (${package_prefix}_INSTALL_BIN_DIR ${CMAKE_INSTALL_BINDIR})
+ endif ()
+ if (NOT ${package_prefix}_INSTALL_LIB_DIR)
+ set (${package_prefix}_INSTALL_LIB_DIR ${CMAKE_INSTALL_LIBDIR})
+ endif ()
+ if (NOT ${package_prefix}_INSTALL_JAR_DIR)
+ set (${package_prefix}_INSTALL_JAR_DIR ${CMAKE_INSTALL_LIBDIR})
+ endif ()
+ if (NOT ${package_prefix}_INSTALL_INCLUDE_DIR)
+ set (${package_prefix}_INSTALL_INCLUDE_DIR ${CMAKE_INSTALL_INCLUDEDIR})
+ endif ()
+ if (NOT ${package_prefix}_INSTALL_MODULE_DIR)
+ set (${package_prefix}_INSTALL_MODULE_DIR ${CMAKE_INSTALL_INCLUDEDIR}/mod)
+ endif ()
+ if (NOT ${package_prefix}_INSTALL_DATA_DIR)
+ set (${package_prefix}_INSTALL_DATA_DIR ${CMAKE_INSTALL_DATADIR})
+ endif ()
+ if (NOT ${package_prefix}_INSTALL_CMAKE_DIR)
+ set (${package_prefix}_INSTALL_CMAKE_DIR ${CMAKE_INSTALL_LIBDIR}/cmake)
+ endif ()
+ if (NOT ${package_prefix}_INSTALL_DOC_DIR)
+ set (${package_prefix}_INSTALL_DOC_DIR ${CMAKE_INSTALL_DOCDIR})
+ endif ()
+ message(STATUS "GNU: ${${package_prefix}_INSTALL_DOC_DIR}")
+ endif ()
+
if (APPLE)
option (${package_prefix}_BUILD_FRAMEWORKS "TRUE to build as frameworks libraries, FALSE to build according to BUILD_SHARED_LIBS" FALSE)
endif ()
@@ -365,6 +395,10 @@ macro (HDF_DIR_PATHS package_prefix)
if (NOT ${package_prefix}_INSTALL_CMAKE_DIR)
set (${package_prefix}_INSTALL_CMAKE_DIR cmake)
endif ()
+ if (NOT ${package_prefix}_INSTALL_DOC_DIR)
+ set (${package_prefix}_INSTALL_DOC_DIR ${${package_prefix}_INSTALL_DATA_DIR})
+ endif ()
+ message(STATUS "Final: ${${package_prefix}_INSTALL_DOC_DIR}")
# Always use full RPATH, i.e. don't skip the full RPATH for the build tree
set (CMAKE_SKIP_BUILD_RPATH FALSE)
@@ -391,12 +425,12 @@ macro (HDF_DIR_PATHS package_prefix)
endif ()
#set the default debug suffix for all library targets
- if(NOT CMAKE_DEBUG_POSTFIX)
- if (WIN32)
- set (CMAKE_DEBUG_POSTFIX "_D")
- else ()
- set (CMAKE_DEBUG_POSTFIX "_debug")
- endif ()
+ if(NOT CMAKE_DEBUG_POSTFIX)
+ if (WIN32)
+ set (CMAKE_DEBUG_POSTFIX "_D")
+ else ()
+ set (CMAKE_DEBUG_POSTFIX "_debug")
+ endif ()
endif ()
SET_HDF_BUILD_TYPE()
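The new block above follows a guarded-default pattern: GNUInstallDirs is only included when the user opts in, and each install directory variable is set only if the user has not already supplied one. A minimal sketch of that pattern, using a hypothetical MY_PKG prefix in place of ${package_prefix} (only the CMake-provided CMAKE_INSTALL_* variables are real names here):

    option (MY_PKG_USE_GNU_DIRS "TRUE to use GNU Coding Standard install directory variables" FALSE)
    if (MY_PKG_USE_GNU_DIRS)
      include (GNUInstallDirs)              # defines CMAKE_INSTALL_BINDIR, _LIBDIR, _INCLUDEDIR, ...
      if (NOT MY_PKG_INSTALL_LIB_DIR)       # respect a value already set on the command line or in the cache
        set (MY_PKG_INSTALL_LIB_DIR ${CMAKE_INSTALL_LIBDIR})         # e.g. lib or lib64
      endif ()
      if (NOT MY_PKG_INSTALL_INCLUDE_DIR)
        set (MY_PKG_INSTALL_INCLUDE_DIR ${CMAKE_INSTALL_INCLUDEDIR}) # e.g. include
      endif ()
    endif ()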
diff --git a/doxygen/CMakeLists.txt b/doxygen/CMakeLists.txt
index 472c4dc9039..fda7e7d6e03 100644
--- a/doxygen/CMakeLists.txt
+++ b/doxygen/CMakeLists.txt
@@ -37,7 +37,7 @@ if (DOXYGEN_FOUND)
install (
DIRECTORY ${HDF5_BINARY_DIR}/hdf5lib_docs/html
- DESTINATION ${HDF5_INSTALL_DATA_DIR}
+ DESTINATION ${HDF5_INSTALL_DOC_DIR}
COMPONENT Documents
)
diff --git a/fortran/src/CMakeLists.txt b/fortran/src/CMakeLists.txt
index 0c84aa6d259..77ce82f8067 100644
--- a/fortran/src/CMakeLists.txt
+++ b/fortran/src/CMakeLists.txt
@@ -537,9 +537,9 @@ endif ()
#-----------------------------------------------------------------------------
set (_PKG_CONFIG_PREFIX ${CMAKE_INSTALL_PREFIX})
set (_PKG_CONFIG_EXEC_PREFIX \${prefix})
-set (_PKG_CONFIG_LIBDIR \${exec_prefix}/lib)
-set (_PKG_CONFIG_INCLUDEDIR \${prefix}/include)
-set (_PKG_CONFIG_MODULEDIR \${prefix}/mod)
+set (_PKG_CONFIG_LIBDIR \${exec_prefix}/${HDF5_INSTALL_LIB_DIR})
+set (_PKG_CONFIG_INCLUDEDIR \${prefix}/${HDF5_INSTALL_INCLUDE_DIR})
+set (_PKG_CONFIG_MODULEDIR \${prefix}/${HDF5_INSTALL_MODULE_DIR})
set (_PKG_CONFIG_LIBNAME "${HDF5_F90_LIB_CORENAME}")
set (_PKG_CONFIG_VERSION "${HDF5_PACKAGE_VERSION}")
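These _PKG_CONFIG_* variables are substituted into the pkg-config template for this library, so the installed .pc/.fpc file now follows the selected install layout instead of hard-coded 'lib', 'include', and 'mod' paths. A hypothetical fragment of the generated file (the template itself is not shown in this patch, and the paths are illustrative):

    prefix=/usr/local
    exec_prefix=${prefix}
    libdir=${exec_prefix}/lib64        # from HDF5_INSTALL_LIB_DIR
    includedir=${prefix}/include       # from HDF5_INSTALL_INCLUDE_DIR
    moduledir=${prefix}/include/mod    # from HDF5_INSTALL_MODULE_DIR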
diff --git a/hl/c++/src/CMakeLists.txt b/hl/c++/src/CMakeLists.txt
index c516df15e7e..e56d6279e55 100644
--- a/hl/c++/src/CMakeLists.txt
+++ b/hl/c++/src/CMakeLists.txt
@@ -91,8 +91,8 @@ endif ()
#-----------------------------------------------------------------------------
set (_PKG_CONFIG_PREFIX ${CMAKE_INSTALL_PREFIX})
set (_PKG_CONFIG_EXEC_PREFIX \${prefix})
-set (_PKG_CONFIG_LIBDIR \${exec_prefix}/lib)
-set (_PKG_CONFIG_INCLUDEDIR \${prefix}/include)
+set (_PKG_CONFIG_LIBDIR \${exec_prefix}/${HDF5_INSTALL_LIB_DIR})
+set (_PKG_CONFIG_INCLUDEDIR \${prefix}/${HDF5_INSTALL_INCLUDE_DIR})
set (_PKG_CONFIG_LIBNAME "${HDF5_HL_CPP_LIB_CORENAME}")
set (_PKG_CONFIG_VERSION "${HDF5_PACKAGE_VERSION}")
diff --git a/hl/fortran/src/CMakeLists.txt b/hl/fortran/src/CMakeLists.txt
index 4413424a9b2..7a484dcf150 100644
--- a/hl/fortran/src/CMakeLists.txt
+++ b/hl/fortran/src/CMakeLists.txt
@@ -324,9 +324,9 @@ endif ()
#-----------------------------------------------------------------------------
set (_PKG_CONFIG_PREFIX ${CMAKE_INSTALL_PREFIX})
set (_PKG_CONFIG_EXEC_PREFIX \${prefix})
-set (_PKG_CONFIG_LIBDIR \${exec_prefix}/lib)
-set (_PKG_CONFIG_INCLUDEDIR \${prefix}/include)
-set (_PKG_CONFIG_MODULEDIR \${prefix}/mod)
+set (_PKG_CONFIG_LIBDIR \${exec_prefix}/${HDF5_INSTALL_LIB_DIR})
+set (_PKG_CONFIG_INCLUDEDIR \${prefix}/${HDF5_INSTALL_INCLUDE_DIR})
+set (_PKG_CONFIG_MODULEDIR \${prefix}/${HDF5_INSTALL_MODULE_DIR})
set (_PKG_CONFIG_LIBNAME "${HDF5_HL_F90_LIB_CORENAME}")
set (_PKG_CONFIG_VERSION "${HDF5_PACKAGE_VERSION}")
diff --git a/hl/src/CMakeLists.txt b/hl/src/CMakeLists.txt
index a97d6fa3829..93a62bc7356 100644
--- a/hl/src/CMakeLists.txt
+++ b/hl/src/CMakeLists.txt
@@ -123,8 +123,8 @@ endif ()
#-----------------------------------------------------------------------------
set (_PKG_CONFIG_PREFIX ${CMAKE_INSTALL_PREFIX})
set (_PKG_CONFIG_EXEC_PREFIX \${prefix})
-set (_PKG_CONFIG_LIBDIR \${exec_prefix}/lib)
-set (_PKG_CONFIG_INCLUDEDIR \${prefix}/include)
+set (_PKG_CONFIG_LIBDIR \${exec_prefix}/${HDF5_INSTALL_LIB_DIR})
+set (_PKG_CONFIG_INCLUDEDIR \${prefix}/${HDF5_INSTALL_INCLUDE_DIR})
set (_PKG_CONFIG_LIBNAME "${HDF5_HL_LIB_CORENAME}")
set (_PKG_CONFIG_VERSION "${HDF5_PACKAGE_VERSION}")
diff --git a/java/src/hdf/hdf5lib/CMakeLists.txt b/java/src/hdf/hdf5lib/CMakeLists.txt
index 92471974cff..16745fa22c5 100644
--- a/java/src/hdf/hdf5lib/CMakeLists.txt
+++ b/java/src/hdf/hdf5lib/CMakeLists.txt
@@ -141,7 +141,7 @@ create_javadoc(hdf5_java_doc
CLASSPATH ${CMAKE_JAVA_INCLUDE_PATH}
WINDOWTITLE "HDF5 Java"
DOCTITLE "HDF5 Java Wrapper
"
- INSTALLPATH ${HDF5_INSTALL_DATA_DIR}
+ INSTALLPATH ${HDF5_INSTALL_DOC_DIR}
AUTHOR TRUE
USE TRUE
VERSION TRUE
diff --git a/release_docs/INSTALL_CMake.txt b/release_docs/INSTALL_CMake.txt
index 95576e181f0..44d32038413 100644
--- a/release_docs/INSTALL_CMake.txt
+++ b/release_docs/INSTALL_CMake.txt
@@ -32,7 +32,8 @@ CMake version
1. We suggest you obtain the latest CMake from the Kitware web site.
The HDF5 1.12."X" product requires a minimum CMake version 3.12,
where "X" is the current HDF5 release version. If you are using
- VS2019, the minimum version is 3.15.
+ VS2019, the minimum version is 3.15. For VS2022, the minimum
+ version is 3.21.
Note:
To change the install prefix from the platform defaults initialize
@@ -753,8 +754,20 @@ if (MSVC)
HDF5_INSTALL_DATA_DIR "."
else ()
HDF5_INSTALL_DATA_DIR "share"
+HDF5_INSTALL_DOC_DIR "HDF5_INSTALL_DATA_DIR"
+
+Defaults as defined by the `GNU Coding Standards`
+HDF5_INSTALL_BIN_DIR "bin"
+HDF5_INSTALL_LIB_DIR "lib"
+HDF5_INSTALL_INCLUDE_DIR "include"
+HDF5_INSTALL_MODULE_DIR "HDF5_INSTALL_INCLUDE_DIR/mod"
+HDF5_INSTALL_CMAKE_DIR "HDF5_INSTALL_LIB_DIR/cmake"
+HDF5_INSTALL_DATA_DIR "share"
+HDF5_INSTALL_DOC_DIR "HDF5_INSTALL_DATA_DIR/doc/hdf5"
---------------- HDF5 Advanced Options ---------------------
+HDF5_USE_GNU_DIRS "TRUE to use GNU Coding Standard install directory variables,
+ FALSE to use historical settings" FALSE
ONLY_SHARED_LIBS "Only Build Shared Libraries" OFF
ALLOW_UNSUPPORTED "Allow unsupported combinations of configure options" OFF
HDF5_EXTERNAL_LIB_PREFIX "Use prefix for custom library naming." ""
@@ -832,6 +845,10 @@ NOTE:
flag is not available on windows and some modern linux systems will
ignore the flag.
+NOTE:
+  The HDF5_USE_GNU_DIRS option is usually recommended for Linux platforms, but may
+  also be useful on other platforms. See the CMake documentation for more details.
+
---------------- Unsupported Library Options ---------------------
The threadsafe, C++ and Java interfaces are not compatible
with the HDF5_ENABLE_PARALLEL option.
diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt
index ce18ed90920..96e452a89d8 100644
--- a/release_docs/RELEASE.txt
+++ b/release_docs/RELEASE.txt
@@ -47,6 +47,16 @@ New Features
Configuration:
-------------
+ - Add new CMake configuration variable HDF5_USE_GNU_DIRS
+
+      HDF5_USE_GNU_DIRS (default OFF) selects the use of GNU Coding Standard install
+      directory variables by including the CMake module GNUInstallDirs (see the CMake
+      documentation for details). The HDF_DIR_PATHS macro in the HDFMacros.cmake file
+      sets various PATH variables for use during the build, test, and install processes.
+      By default, the historical settings for these variables will be used.
+
+ (ADB - 2022/10/21, GH-2175, GH-1716)
+
- Correct the usage of CMAKE_Fortran_MODULE_DIRECTORY and where to
install Fortran mod files.
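Assuming an out-of-source CMake build, opting into the new layout is a single cache option at configure time; the generator, install prefix, and source path below are illustrative:

    cmake -G Ninja -DCMAKE_INSTALL_PREFIX=/usr/local \
          -DHDF5_USE_GNU_DIRS:BOOL=ON \
          ../hdf5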
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index a9802c162b9..c6294c41af1 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -1288,8 +1288,8 @@ endif ()
#-----------------------------------------------------------------------------
set (_PKG_CONFIG_PREFIX ${CMAKE_INSTALL_PREFIX})
set (_PKG_CONFIG_EXEC_PREFIX \${prefix})
-set (_PKG_CONFIG_LIBDIR \${exec_prefix}/lib)
-set (_PKG_CONFIG_INCLUDEDIR \${prefix}/include)
+set (_PKG_CONFIG_LIBDIR \${exec_prefix}/${HDF5_INSTALL_LIB_DIR})
+set (_PKG_CONFIG_INCLUDEDIR \${prefix}/${HDF5_INSTALL_INCLUDE_DIR})
set (_PKG_CONFIG_LIBNAME "${HDF5_LIB_CORENAME}")
set (_PKG_CONFIG_VERSION "${HDF5_PACKAGE_VERSION}")
diff --git a/src/H5Dmodule.h b/src/H5Dmodule.h
index 4ad370924da..00751a91b3f 100644
--- a/src/H5Dmodule.h
+++ b/src/H5Dmodule.h
@@ -1897,10 +1897,10 @@ allocated if necessary.
* byte 0 |
*
*
- * ???????? |
- * ????SPPP |
- * PPPPPPPP |
- * PPPP???? |
+ * ???????? |
+ * ????SPPP |
+ * PPPPPPPP |
+ * PPPP???? |
*
*
*
From 1da36c90b636f6cd1889338a61602582e5187f00 Mon Sep 17 00:00:00 2001
From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com>
Date: Mon, 12 Dec 2022 09:32:01 -0600
Subject: [PATCH 006/108] 1.12 Correct requires setting for pkgconfig files
#2280 (#2283)
* Correct requires setting for pkgconfig files #2280
* Change macos for CI
---
.github/workflows/main.yml | 6 +++---
.github/workflows/pr-check.yml | 6 +++---
c++/src/CMakeLists.txt | 4 ++--
fortran/src/CMakeLists.txt | 4 ++--
hl/c++/src/CMakeLists.txt | 4 ++--
hl/fortran/src/CMakeLists.txt | 4 ++--
hl/src/CMakeLists.txt | 4 ++--
release_docs/RELEASE.txt | 8 ++++++++
8 files changed, 24 insertions(+), 16 deletions(-)
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 2282a097b14..5d3e14d68dd 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -48,7 +48,7 @@ jobs:
generator: "-G Ninja"
- name: "macOS Latest Clang"
artifact: "macOS.tar.xz"
- os: macos-latest
+ os: macos-11
build_type: "Release"
cpp: ON
fortran: OFF
@@ -109,7 +109,7 @@ jobs:
generator: "-G Ninja"
- name: "macOS TS Clang"
artifact: "macOSTS.tar.xz"
- os: macos-latest
+ os: macos-11
build_type: "Release"
cpp: OFF
fortran: OFF
@@ -171,7 +171,7 @@ jobs:
if: matrix.os == 'windows-latest'
- name: Install Dependencies (macOS)
run: brew install ninja
- if: matrix.os == 'macos-latest'
+ if: matrix.os == 'macos-11'
- name: Set environment for MSVC (Windows)
if: matrix.os == 'windows-latest'
run: |
diff --git a/.github/workflows/pr-check.yml b/.github/workflows/pr-check.yml
index 5dd923ff665..b48f835dbe1 100644
--- a/.github/workflows/pr-check.yml
+++ b/.github/workflows/pr-check.yml
@@ -47,7 +47,7 @@ jobs:
generator: "-G Ninja"
- name: "macOS Latest Clang"
artifact: "macOS.tar.xz"
- os: macos-latest
+ os: macos-11
build_type: "Release"
cpp: ON
fortran: OFF
@@ -108,7 +108,7 @@ jobs:
generator: "-G Ninja"
- name: "macOS TS Clang"
artifact: "macOSTS.tar.xz"
- os: macos-latest
+ os: macos-11
build_type: "Release"
cpp: OFF
fortran: OFF
@@ -170,7 +170,7 @@ jobs:
if: matrix.os == 'windows-latest'
- name: Install Dependencies (macOS)
run: brew install ninja
- if: matrix.os == 'macos-latest'
+ if: matrix.os == 'macos-11'
- name: Set environment for MSVC (Windows)
if: matrix.os == 'windows-latest'
run: |
diff --git a/c++/src/CMakeLists.txt b/c++/src/CMakeLists.txt
index c8fc82df70b..afb1c9fdbb3 100644
--- a/c++/src/CMakeLists.txt
+++ b/c++/src/CMakeLists.txt
@@ -187,8 +187,8 @@ if (BUILD_SHARED_LIBS)
set (_PKG_CONFIG_SH_LIBS "${_PKG_CONFIG_SH_LIBS} -l${HDF5_CPP_LIB_CORENAME}")
endif ()
-set (_PKG_CONFIG_REQUIRES "${HDF5_LIB_CORENAME}-${HDF5_PACKAGE_VERSION}")
-set (_PKG_CONFIG_REQUIRES_PRIVATE "${HDF5_LIB_CORENAME}-${HDF5_PACKAGE_VERSION}")
+set (_PKG_CONFIG_REQUIRES "${HDF5_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}")
+set (_PKG_CONFIG_REQUIRES_PRIVATE "${HDF5_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}")
configure_file (
${HDF_CONFIG_DIR}/libhdf5.pc.in
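In the generated .pc file this corrected setting produces a versioned dependency that pkg-config can actually resolve; with placeholder library name and version it reads:

    # before: treated as a single (nonexistent) package name
    Requires: hdf5-1.12.3
    # after: package name plus a version constraint
    Requires: hdf5 = 1.12.3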
diff --git a/fortran/src/CMakeLists.txt b/fortran/src/CMakeLists.txt
index 77ce82f8067..409af0e753d 100644
--- a/fortran/src/CMakeLists.txt
+++ b/fortran/src/CMakeLists.txt
@@ -552,8 +552,8 @@ if (BUILD_SHARED_LIBS)
set (_PKG_CONFIG_SH_LIBS "${_PKG_CONFIG_SH_LIBS} -l${HDF5_F90_LIB_CORENAME}")
endif ()
-set (_PKG_CONFIG_REQUIRES "${HDF5_LIB_CORENAME}-${HDF5_PACKAGE_VERSION}")
-set (_PKG_CONFIG_REQUIRES_PRIVATE "${HDF5_LIB_CORENAME}-${HDF5_PACKAGE_VERSION}")
+set (_PKG_CONFIG_REQUIRES "${HDF5_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}")
+set (_PKG_CONFIG_REQUIRES_PRIVATE "${HDF5_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}")
configure_file (
${HDF_CONFIG_DIR}/libhdf5.fpc.in
diff --git a/hl/c++/src/CMakeLists.txt b/hl/c++/src/CMakeLists.txt
index e56d6279e55..ab09d743bee 100644
--- a/hl/c++/src/CMakeLists.txt
+++ b/hl/c++/src/CMakeLists.txt
@@ -105,8 +105,8 @@ if (BUILD_SHARED_LIBS)
set (_PKG_CONFIG_SH_LIBS "${_PKG_CONFIG_SH_LIBS} -l${HDF5_HL_CPP_LIB_CORENAME}")
endif ()
-set (_PKG_CONFIG_REQUIRES "${HDF5_HL_LIB_CORENAME}-${HDF5_PACKAGE_VERSION}")
-set (_PKG_CONFIG_REQUIRES_PRIVATE "${HDF5_HL_LIB_CORENAME}-${HDF5_PACKAGE_VERSION}")
+set (_PKG_CONFIG_REQUIRES "${HDF5_HL_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}")
+set (_PKG_CONFIG_REQUIRES_PRIVATE "${HDF5_HL_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}")
configure_file (
${HDF_CONFIG_DIR}/libhdf5.pc.in
diff --git a/hl/fortran/src/CMakeLists.txt b/hl/fortran/src/CMakeLists.txt
index 7a484dcf150..ad462ea9d9f 100644
--- a/hl/fortran/src/CMakeLists.txt
+++ b/hl/fortran/src/CMakeLists.txt
@@ -339,8 +339,8 @@ if (BUILD_SHARED_LIBS)
set (_PKG_CONFIG_SH_LIBS "${_PKG_CONFIG_SH_LIBS} -l${HDF5_HL_F90_LIB_CORENAME}")
endif ()
-set (_PKG_CONFIG_REQUIRES "${HDF5_F90_LIB_CORENAME}-${HDF5_PACKAGE_VERSION}")
-set (_PKG_CONFIG_REQUIRES_PRIVATE "${HDF5_F90_LIB_CORENAME}-${HDF5_PACKAGE_VERSION}")
+set (_PKG_CONFIG_REQUIRES "${HDF5_F90_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}")
+set (_PKG_CONFIG_REQUIRES_PRIVATE "${HDF5_F90_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}")
configure_file (
${HDF_CONFIG_DIR}/libhdf5.fpc.in
diff --git a/hl/src/CMakeLists.txt b/hl/src/CMakeLists.txt
index 93a62bc7356..6dd5be70dc6 100644
--- a/hl/src/CMakeLists.txt
+++ b/hl/src/CMakeLists.txt
@@ -137,8 +137,8 @@ if (BUILD_SHARED_LIBS)
set (_PKG_CONFIG_SH_LIBS "${_PKG_CONFIG_SH_LIBS} -l${HDF5_HL_LIB_CORENAME}")
endif ()
-set (_PKG_CONFIG_REQUIRES "${HDF5_LIB_CORENAME}-${HDF5_PACKAGE_VERSION}")
-set (_PKG_CONFIG_REQUIRES_PRIVATE "${HDF5_LIB_CORENAME}-${HDF5_PACKAGE_VERSION}")
+set (_PKG_CONFIG_REQUIRES "${HDF5_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}")
+set (_PKG_CONFIG_REQUIRES_PRIVATE "${HDF5_LIB_CORENAME} = ${HDF5_PACKAGE_VERSION}")
configure_file (
${HDF_CONFIG_DIR}/libhdf5.pc.in
diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt
index 96e452a89d8..9042a06adf1 100644
--- a/release_docs/RELEASE.txt
+++ b/release_docs/RELEASE.txt
@@ -235,6 +235,14 @@ Bug Fixes since HDF5-1.12.1 release
Configuration
-------------
+    - Change the settings of the *.pc files to use the correct format
+
+      The pkg-config files generated by CMake use incorrect syntax for the 'Requires'
+      settings. Changing the setting to use 'lib-name = version' instead of
+      'lib-name-version' fixes the issue.
+
+ (ADB - 2022/12/06 HDFFV-11355)
+
- Move MPI libraries link from PRIVATE to PUBLIC
The install dependencies were not including the need for MPI libraries when
From 1dae8c805c61039b7f0c53021d0b828026077eba Mon Sep 17 00:00:00 2001
From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com>
Date: Mon, 12 Dec 2022 09:32:38 -0600
Subject: [PATCH 007/108] 1.12 h5repack should only print if verbose (#2274)
* h5repack should only print if verbose
Also change the dump test to use the long option
* Add note
* Update yaml workflows
---
.github/workflows/clang-format-check.yml | 2 +-
.github/workflows/clang-format-fix.yml | 2 +-
.github/workflows/codespell.yml | 2 +-
.github/workflows/main.yml | 13 ++---
release_docs/RELEASE.txt | 7 ++-
tools/src/h5repack/h5repack_copy.c | 62 ++++++++++++++----------
tools/test/h5dump/CMakeTests.cmake | 2 +-
7 files changed, 53 insertions(+), 37 deletions(-)
diff --git a/.github/workflows/clang-format-check.yml b/.github/workflows/clang-format-check.yml
index 2d7943c1f68..e8251f2ce7a 100644
--- a/.github/workflows/clang-format-check.yml
+++ b/.github/workflows/clang-format-check.yml
@@ -7,7 +7,7 @@ jobs:
runs-on: ubuntu-latest
if: "!contains(github.event.head_commit.message, 'skip-ci')"
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- name: Run clang-format style check for C and Java programs.
uses: DoozyX/clang-format-lint-action@v0.13
with:
diff --git a/.github/workflows/clang-format-fix.yml b/.github/workflows/clang-format-fix.yml
index a390a8bd795..c1110cf2b98 100644
--- a/.github/workflows/clang-format-fix.yml
+++ b/.github/workflows/clang-format-fix.yml
@@ -8,7 +8,7 @@ jobs:
runs-on: ubuntu-latest
if: "!contains(github.event.head_commit.message, 'skip-ci')"
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- name: Run clang-format style check for C and Java programs.
uses: DoozyX/clang-format-lint-action@v0.13
with:
diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml
index 6b7960a4eb5..a281fc7f1b4 100644
--- a/.github/workflows/codespell.yml
+++ b/.github/workflows/codespell.yml
@@ -8,7 +8,7 @@ jobs:
name: Check for spelling errors
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- uses: codespell-project/actions-codespell@master
with:
skip: ./bin/trace,./hl/tools/h5watch/h5watch.c,./tools/test/h5jam/tellub.c,./config/sanitizer/LICENSE
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 5d3e14d68dd..c112a1d4b37 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -6,12 +6,13 @@ on:
push:
branches: [ develop, hdf5_1_12, hdf5_1_10, hdf5_1_8 ]
paths-ignore:
- - '.github/**'
- - 'doc/**'
- - 'release_docs/**'
- - 'ACKNOWLEDGEMENTS'
- - 'COPYING**'
- - '**.md'
+ - '.github/CODEOWNERS'
+ - '.github/FUNDING.yml'
+ - 'doc/**'
+ - 'release_docs/**'
+ - 'ACKNOWLEDGEMENTS'
+ - 'COPYING**'
+ - '**.md'
# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt
index 9042a06adf1..0b6ca4eeefd 100644
--- a/release_docs/RELEASE.txt
+++ b/release_docs/RELEASE.txt
@@ -254,7 +254,12 @@ Bug Fixes since HDF5-1.12.1 release
Tools
-----
- -
+    - Fix h5repack to only print output when the verbose option is selected
+
+      When the timing option was added to h5repack, the check for verbose output was
+      incorrectly implemented.
+
+ (ADB - 2022/12/02, GH #2270)
Performance
diff --git a/tools/src/h5repack/h5repack_copy.c b/tools/src/h5repack/h5repack_copy.c
index d8a9e154e8a..f3ab22e31a3 100644
--- a/tools/src/h5repack/h5repack_copy.c
+++ b/tools/src/h5repack/h5repack_copy.c
@@ -666,15 +666,17 @@ do_copy_objects(hid_t fidin, hid_t fidout, trav_table_t *travt, pack_opt_t *opti
*-------------------------------------------------------------------------
*/
- if (options->verbose == 2) {
- HDprintf("-----------------------------------------------------------------\n");
- HDprintf(" Type Filter (Compression) Timing read/write Name\n");
- HDprintf("-----------------------------------------------------------------\n");
- }
- else {
- HDprintf("-----------------------------------------\n");
- HDprintf(" Type Filter (Compression) Name\n");
- HDprintf("-----------------------------------------\n");
+ if (options->verbose > 0) {
+ if (options->verbose == 2) {
+ HDprintf("-----------------------------------------------------------------\n");
+ HDprintf(" Type Filter (Compression) Timing read/write Name\n");
+ HDprintf("-----------------------------------------------------------------\n");
+ }
+ else {
+ HDprintf("-----------------------------------------\n");
+ HDprintf(" Type Filter (Compression) Name\n");
+ HDprintf("-----------------------------------------\n");
+ }
}
if (travt->objs) {
@@ -692,10 +694,12 @@ do_copy_objects(hid_t fidin, hid_t fidout, trav_table_t *travt, pack_opt_t *opti
*-------------------------------------------------------------------------
*/
case H5TRAV_TYPE_GROUP:
- if (options->verbose == 2)
- HDprintf(FORMAT_OBJ_NOTIME, "group", travt->objs[i].name);
- else
- HDprintf(FORMAT_OBJ, "group", travt->objs[i].name);
+ if (options->verbose > 0) {
+ if (options->verbose == 2)
+ HDprintf(FORMAT_OBJ_NOTIME, "group", travt->objs[i].name);
+ else
+ HDprintf(FORMAT_OBJ, "group", travt->objs[i].name);
+ }
/* open input group */
if ((grp_in = H5Gopen2(fidin, travt->objs[i].name, H5P_DEFAULT)) < 0)
@@ -1199,7 +1203,7 @@ do_copy_objects(hid_t fidin, hid_t fidout, trav_table_t *travt, pack_opt_t *opti
if (options->verbose > 0) {
double ratio = 0;
- /* only print the compression ration if there was a filter request */
+ /* only print the compression ratio if there was a filter request */
if (apply_s && apply_f && req_filter) {
/* get the storage size of the output dataset */
dsize_out = H5Dget_storage_size(dset_out);
@@ -1305,10 +1309,12 @@ do_copy_objects(hid_t fidin, hid_t fidout, trav_table_t *travt, pack_opt_t *opti
if (H5Dclose(dset_out) < 0)
H5TOOLS_GOTO_ERROR((-1), "H5Dclose failed");
- if (options->verbose == 2)
- HDprintf(FORMAT_OBJ_TIME, "dset", 0.0, write_time, travt->objs[i].name);
- else
- HDprintf(FORMAT_OBJ, "dset", travt->objs[i].name);
+ if (options->verbose > 0) {
+ if (options->verbose == 2)
+ HDprintf(FORMAT_OBJ_TIME, "dset", 0.0, write_time, travt->objs[i].name);
+ else
+ HDprintf(FORMAT_OBJ, "dset", travt->objs[i].name);
+ }
} /* end whether we have request for filter/chunking */
@@ -1320,10 +1326,12 @@ do_copy_objects(hid_t fidin, hid_t fidout, trav_table_t *travt, pack_opt_t *opti
*-------------------------------------------------------------------------
*/
case H5TRAV_TYPE_NAMED_DATATYPE:
- if (options->verbose == 2)
- HDprintf(FORMAT_OBJ_NOTIME, "type", travt->objs[i].name);
- else
- HDprintf(FORMAT_OBJ, "type", travt->objs[i].name);
+ if (options->verbose > 0) {
+ if (options->verbose == 2)
+ HDprintf(FORMAT_OBJ_NOTIME, "type", travt->objs[i].name);
+ else
+ HDprintf(FORMAT_OBJ, "type", travt->objs[i].name);
+ }
if ((type_in = H5Topen2(fidin, travt->objs[i].name, H5P_DEFAULT)) < 0)
H5TOOLS_GOTO_ERROR((-1), "H5Topen2 failed");
@@ -1362,10 +1370,12 @@ do_copy_objects(hid_t fidin, hid_t fidout, trav_table_t *travt, pack_opt_t *opti
*/
case H5TRAV_TYPE_LINK:
case H5TRAV_TYPE_UDLINK:
- if (options->verbose == 2)
- HDprintf(FORMAT_OBJ_NOTIME, "link", travt->objs[i].name);
- else
- HDprintf(FORMAT_OBJ, "link", travt->objs[i].name);
+ if (options->verbose > 0) {
+ if (options->verbose == 2)
+ HDprintf(FORMAT_OBJ_NOTIME, "link", travt->objs[i].name);
+ else
+ HDprintf(FORMAT_OBJ, "link", travt->objs[i].name);
+ }
/* Check -X option. */
if (options->merge) {
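All of the hunks above apply the same guard: nothing is printed unless verbose output was requested, and the timing column is only added at verbosity level 2. A self-contained sketch of that pattern (plain printf stands in for HDprintf, and the format strings are simplified placeholders):

    #include <stdio.h>

    /* Print one object line, but only when the user asked for verbose output. */
    static void
    print_obj_line(int verbose, const char *type, const char *name, double write_time)
    {
        if (verbose > 0) {      /* silent unless -v/--verbose was given */
            if (verbose == 2)   /* a second -v adds the read/write timing column */
                printf("%-10s %10.3f %s\n", type, write_time, name);
            else
                printf("%-10s %s\n", type, name);
        }
    }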
diff --git a/tools/test/h5dump/CMakeTests.cmake b/tools/test/h5dump/CMakeTests.cmake
index 933aba98241..2505e847bc6 100644
--- a/tools/test/h5dump/CMakeTests.cmake
+++ b/tools/test/h5dump/CMakeTests.cmake
@@ -1159,7 +1159,7 @@
ADD_H5ERR_MASK_TEST (torderlinks2 0 "unable to open external file, external link file name = 'fname'" --enable-error-stack --sort_by=name --sort_order=descending tfcontents1.h5)
# tests for floating point user defined printf format
- ADD_H5_TEST (tfpformat 0 --enable-error-stack -m %.7f tfpformat.h5)
+ ADD_H5_TEST (tfpformat 0 --enable-error-stack --format=%.7f tfpformat.h5)
# tests for traversal of external links
ADD_H5ERR_MASK_TEST (textlinksrc 0 "Too many soft links in path" --enable-error-stack textlinksrc.h5)
From 8ef713ae5dc454db66ea821635f6d6d6970858f0 Mon Sep 17 00:00:00 2001
From: vchoi-hdfgroup <55293060+vchoi-hdfgroup@users.noreply.github.com>
Date: Mon, 12 Dec 2022 10:40:30 -0600
Subject: [PATCH 008/108] Fix for HDFFV-10840: Instead of using fill->buf for
 datatype conversi… (#2277)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
* Fix for HDFFV-10840: Instead of using fill->buf for datatype conversion (#2153)
* Fix for HDFFV-10840: Instead of using fill->buf for datatype conversion
if it is large enough, a buffer is allocated regardless so that the element
in fill->buf can later be reclaimed.
Valgrind is run on test/set_extent.c and there is no memory leak.
* Add information about this fix to the release notes.
* Change macOS version for CI to macos-11 until the accum test failure is fixed for
macOS 12.
Co-authored-by: Larry Knox
---
release_docs/RELEASE.txt | 13 +++++++++++++
src/H5Ofill.c | 25 +++++++++++--------------
2 files changed, 24 insertions(+), 14 deletions(-)
diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt
index 0b6ca4eeefd..022010cfb8e 100644
--- a/release_docs/RELEASE.txt
+++ b/release_docs/RELEASE.txt
@@ -227,6 +227,19 @@ Bug Fixes since HDF5-1.12.1 release
(JTH - 2022/07/08, HDFFV-11316, HDFFV-11317)
+ - Memory leak
+
+      A memory leak was observed with a variable-length fill value in
+      the H5O_fill_convert() function in H5Ofill.c. The leak is
+      exposed by running valgrind on test/set_extent.c.
+
+      Previously, fill->buf was used for the datatype conversion
+      if it was large enough, and the variable-length information
+      was therefore lost. A buffer is now allocated regardless,
+      so that the element in fill->buf can later be reclaimed.
+
+ (VC - 2022/10/10, HDFFV-10840)
+
Java Library
------------
diff --git a/src/H5Ofill.c b/src/H5Ofill.c
index 4106056e420..2197c09f0f8 100644
--- a/src/H5Ofill.c
+++ b/src/H5Ofill.c
@@ -1006,6 +1006,8 @@ H5O_fill_convert(H5O_fill_t *fill, H5T_t *dset_type, hbool_t *fill_changed)
/* Don't bother doing anything if there will be no actual conversion */
if (!H5T_path_noop(tpath)) {
+ size_t fill_type_size;
+
if ((src_id = H5I_register(H5I_DATATYPE, H5T_copy(fill->type, H5T_COPY_ALL), FALSE)) < 0 ||
(dst_id = H5I_register(H5I_DATATYPE, H5T_copy(dset_type, H5T_COPY_ALL), FALSE)) < 0)
HGOTO_ERROR(H5E_OHDR, H5E_CANTINIT, FAIL, "unable to copy/register data type")
@@ -1014,13 +1016,11 @@ H5O_fill_convert(H5O_fill_t *fill, H5T_t *dset_type, hbool_t *fill_changed)
* Datatype conversions are always done in place, so we need a buffer
* that is large enough for both source and destination.
*/
- if (H5T_get_size(fill->type) >= H5T_get_size(dset_type))
- buf = fill->buf;
- else {
- if (NULL == (buf = H5MM_malloc(H5T_get_size(dset_type))))
- HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed for type conversion")
- H5MM_memcpy(buf, fill->buf, H5T_get_size(fill->type));
- } /* end else */
+ fill_type_size = H5T_get_size(fill->type);
+
+ if (NULL == (buf = H5MM_malloc(MAX(fill_type_size, H5T_get_size(dset_type)))))
+ HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed for type conversion")
+ H5MM_memcpy(buf, fill->buf, fill_type_size);
/* Use CALLOC here to clear the buffer in case later the library thinks there's
* data in the background. */
@@ -1032,11 +1032,10 @@ H5O_fill_convert(H5O_fill_t *fill, H5T_t *dset_type, hbool_t *fill_changed)
HGOTO_ERROR(H5E_OHDR, H5E_CANTINIT, FAIL, "datatype conversion failed")
/* Update the fill message */
- if (buf != fill->buf) {
- H5T_vlen_reclaim_elmt(fill->buf, fill->type);
- H5MM_xfree(fill->buf);
- fill->buf = buf;
- } /* end if */
+ H5T_vlen_reclaim_elmt(fill->buf, fill->type);
+ H5MM_xfree(fill->buf);
+ fill->buf = buf;
+
(void)H5T_close_real(fill->type);
fill->type = NULL;
H5_CHECKED_ASSIGN(fill->size, ssize_t, H5T_get_size(dset_type), size_t);
@@ -1050,8 +1049,6 @@ H5O_fill_convert(H5O_fill_t *fill, H5T_t *dset_type, hbool_t *fill_changed)
HDONE_ERROR(H5E_OHDR, H5E_CANTDEC, FAIL, "unable to decrement ref count for temp ID")
if (dst_id >= 0 && H5I_dec_ref(dst_id) < 0)
HDONE_ERROR(H5E_OHDR, H5E_CANTDEC, FAIL, "unable to decrement ref count for temp ID")
- if (buf != fill->buf)
- H5MM_xfree(buf);
if (bkg)
H5MM_xfree(bkg);
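The essence of the fix is that the conversion buffer is now always freshly allocated, sized for the larger of the two types, so fill->buf survives long enough for its variable-length elements to be reclaimed. A compilable sketch of that allocation step, with a hypothetical helper name and plain libc calls in place of H5MM_malloc/H5MM_memcpy:

    #include <stdlib.h>
    #include <string.h>

    #define MAX(a, b) (((a) > (b)) ? (a) : (b))

    /* Allocate a buffer large enough for an in-place type conversion and seed
     * it with the source bytes; the caller keeps old_buf so it can reclaim any
     * variable-length data in it before freeing it. */
    static void *
    alloc_conversion_buf(const void *old_buf, size_t src_size, size_t dst_size)
    {
        void *buf = malloc(MAX(src_size, dst_size));

        if (buf != NULL)
            memcpy(buf, old_buf, src_size);
        return buf;
    }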
From 91aa02817a957831ddd363af7e4e9232b7202f25 Mon Sep 17 00:00:00 2001
From: Dana Robinson <43805+derobins@users.noreply.github.com>
Date: Fri, 16 Dec 2022 13:42:48 -0800
Subject: [PATCH 009/108] 1.12: Brings the updated CI in from develop (#2311)
* Brings the updated CI in from develop
Does not include the -Werror checks, changes 1.14 API to 1.12
where needed.
* Fixes an API compat issue in the mirror VFD test
* Committing clang-format changes
* Removed pr-check.yml
Co-authored-by: github-actions <41898282+github-actions[bot]@users.noreply.github.com>
---
.github/workflows/main.yml | 508 ++++++++++++++++++++++-----------
.github/workflows/pr-check.yml | 220 --------------
test/mirror_vfd.c | 4 +-
3 files changed, 347 insertions(+), 385 deletions(-)
delete mode 100644 .github/workflows/pr-check.yml
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index c112a1d4b37..4c2159f9d09 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -4,7 +4,8 @@ name: hdf5 dev CI
on:
workflow_dispatch:
push:
- branches: [ develop, hdf5_1_12, hdf5_1_10, hdf5_1_8 ]
+ pull_request:
+ branches: [ develop, hdf5_1_14, hdf5_1_12, hdf5_1_10, hdf5_1_8 ]
paths-ignore:
- '.github/CODEOWNERS'
- '.github/FUNDING.yml'
@@ -14,209 +15,390 @@ on:
- 'COPYING**'
- '**.md'
-# A workflow run is made up of one or more jobs that can run sequentially or in parallel
+# A workflow run is made up of one or more jobs that can run sequentially or
+# in parallel. We just have one job, but the matrix items defined below will
+# run in parallel.
jobs:
- # This workflow contains a single job called "build"
- build:
+
+ # A workflow that builds the library and runs all the tests
+ build_and_test:
+
strategy:
-# fail-fast: false
+
+ # The current matrix has three dimensions:
+ #
+ # * config name
+ # * thread-safety on/off
+ # * release vs. debug build
+ #
+ # Most configuration information is added via the 'include' mechanism,
+ # which will append the key-value pairs in the configuration where the
+ # names match.
+
matrix:
- name: ["Windows Latest MSVC", "Ubuntu Latest GCC", "Ubuntu Debug GCC", "macOS Latest Clang", "Ubuntu Autotools GCC", "Windows TS MSVC", "Ubuntu TS GCC", "TS Debug GCC", "macOS TS Clang", "TS Autotools GCC"]
+
+ name:
+ - "Windows MSVC CMake"
+ - "Ubuntu gcc CMake"
+ - "Ubuntu gcc Autotools"
+ - "Ubuntu gcc Autotools parallel (build only)"
+ - "MacOS Clang CMake"
+
+ thread_safety:
+ - enabled: true
+ text: " TS"
+ - enabled: false
+ text: ""
+
+ build_mode:
+ - text: " REL"
+ cmake: "Release"
+ autotools: "production"
+ - text: " DBG"
+ cmake: "Debug"
+ autotools: "debug"
+
+ # This is where we list the bulk of the options for each configuration.
+ # The key-value pair values are usually appropriate for being CMake or
+ # Autotools configure values, so be aware of that.
+
include:
- - name: "Windows Latest MSVC"
- artifact: "Windows-MSVC.tar.xz"
+
+ # Windows w/ MSVC + CMake
+ #
+ # No Fortran, parallel, or VFDs that rely on POSIX things
+ - name: "Windows MSVC CMake"
os: windows-2022
- build_type: "Release"
toolchain: ""
cpp: ON
fortran: OFF
java: ON
- ts: OFF
- hl: ON
parallel: OFF
+ mirror_vfd: OFF
+ direct_vfd: OFF
generator: "-G \"Visual Studio 17 2022\" -A x64"
- - name: "Ubuntu Latest GCC"
- artifact: "Linux.tar.xz"
+ run_tests: true
+
+ # Linux (Ubuntu) w/ gcc + CMake
+ #
+ # We might think about adding Clang, but MacOS already tests that
+ # so it's not critical
+ - name: "Ubuntu gcc CMake"
os: ubuntu-latest
- build_type: "Release"
cpp: ON
fortran: ON
java: ON
- ts: OFF
- hl: ON
parallel: OFF
+ mirror_vfd: ON
+ direct_vfd: ON
toolchain: "config/toolchain/GCC.cmake"
generator: "-G Ninja"
- - name: "macOS Latest Clang"
- artifact: "macOS.tar.xz"
+ run_tests: true
+
+ # Linux (Ubuntu) w/ gcc + Autotools
+ #
+ # Keep this identical to the CMake configs. Note the difference in
+ # the values.
+ - name: "Ubuntu gcc Autotools"
+ os: ubuntu-latest
+ cpp: enable
+ fortran: enable
+ java: enable
+ parallel: disable
+ mirror_vfd: enable
+ direct_vfd: enable
+ deprec_sym: enable
+ default_api: v112
+ toolchain: ""
+ generator: "autogen"
+ flags: ""
+ run_tests: true
+
+ # Parallel Linux (Ubuntu) w/ gcc + Autotools
+ #
+ # The GitHub runners are inadequate for running parallel HDF5 tests,
+ # so we catch most issues in daily testing. What we have here is just
+ # a compile check to make sure nothing obvious is broken.
+ - name: "Ubuntu gcc Autotools parallel (build only)"
+ os: ubuntu-latest
+ cpp: disable
+ fortran: enable
+ java: disable
+ parallel: enable
+ mirror_vfd: disable
+ direct_vfd: disable
+ deprec_sym: enable
+ default_api: v112
+ toolchain: ""
+ generator: "autogen"
+ flags: "CC=mpicc"
+ run_tests: false
+
+ # MacOS w/ Clang + CMake
+ #
+ # We could also build with the Autotools via brew installing them,
+ # but that seems unnecessary
+ - name: "MacOS Clang CMake"
os: macos-11
- build_type: "Release"
cpp: ON
fortran: OFF
java: ON
- ts: OFF
- hl: ON
parallel: OFF
+ mirror_vfd: ON
+ direct_vfd: OFF
toolchain: "config/toolchain/clang.cmake"
generator: "-G Ninja"
- - name: "Ubuntu Debug GCC"
- artifact: "LinuxDBG.tar.xz"
- os: ubuntu-latest
- build_type: "Debug"
- cpp: ON
- fortran: OFF
- java: OFF
- ts: OFF
- hl: ON
- parallel: OFF
- toolchain: "config/toolchain/GCC.cmake"
- generator: "-G Ninja"
- - name: "Ubuntu Autotools GCC"
- artifact: "LinuxA.tar.xz"
+ run_tests: true
+
+
+ #
+ # SPECIAL AUTOTOOLS BUILDS
+ #
+ # These do not run tests and are not built into the matrix and instead
+ # become NEW configs as their name would clobber one of the matrix
+ # names (so make sure the names are UNIQUE).
+ #
+
+ - name: "Ubuntu gcc Autotools v1.6 default API (build only)"
os: ubuntu-latest
- build_type: "Release"
cpp: enable
fortran: enable
java: enable
- ts: disable
- hl: enable
parallel: disable
+ mirror_vfd: enable
+ direct_vfd: enable
+ deprec_sym: enable
+ default_api: v16
toolchain: ""
generator: "autogen"
-# Threadsafe runs
- - name: "Windows TS MSVC"
- artifact: "Windows-MSVCTS.tar.xz"
- os: windows-2019
- build_type: "Release"
+ flags: ""
+ run_tests: false
+ thread_safety:
+ enabled: false
+ text: ""
+ build_mode:
+ text: " DBG"
+ cmake: "Debug"
+ autotools: "debug"
+
+ - name: "Ubuntu gcc Autotools v1.8 default API (build only)"
+ os: ubuntu-latest
+ cpp: enable
+ fortran: enable
+ java: enable
+ parallel: disable
+ mirror_vfd: enable
+ direct_vfd: enable
+ deprec_sym: enable
+ default_api: v18
toolchain: ""
- cpp: OFF
- fortran: OFF
- java: OFF
- ts: ON
- hl: OFF
- parallel: OFF
- generator: "-G \"Visual Studio 16 2019\" -A x64"
- - name: "Ubuntu TS GCC"
- artifact: "LinuxTS.tar.xz"
+ generator: "autogen"
+ flags: ""
+ run_tests: false
+ thread_safety:
+ enabled: false
+ text: ""
+ build_mode:
+ text: " DBG"
+ cmake: "Debug"
+ autotools: "debug"
+
+ - name: "Ubuntu gcc Autotools v1.10 default API (build only)"
os: ubuntu-latest
- build_type: "Release"
- cpp: OFF
- fortran: OFF
- java: OFF
- ts: ON
- hl: OFF
- parallel: OFF
- toolchain: "config/toolchain/GCC.cmake"
- generator: "-G Ninja"
- - name: "macOS TS Clang"
- artifact: "macOSTS.tar.xz"
- os: macos-11
- build_type: "Release"
- cpp: OFF
- fortran: OFF
- java: OFF
- ts: ON
- hl: OFF
- parallel: OFF
- toolchain: "config/toolchain/clang.cmake"
- generator: "-G Ninja"
- - name: "TS Debug GCC"
- artifact: "LinuxTSDBG.tar.xz"
+ cpp: enable
+ fortran: enable
+ java: enable
+ parallel: disable
+ mirror_vfd: enable
+ direct_vfd: enable
+ deprec_sym: enable
+ default_api: v110
+ toolchain: ""
+ generator: "autogen"
+ flags: ""
+ run_tests: false
+ thread_safety:
+ enabled: false
+ text: ""
+ build_mode:
+ text: " DBG"
+ cmake: "Debug"
+ autotools: "debug"
+
+ - name: "Ubuntu gcc Autotools v1.12 default API (build only)"
os: ubuntu-latest
- build_type: "Debug"
- cpp: OFF
- fortran: OFF
- java: OFF
- ts: ON
- hl: OFF
- parallel: OFF
- toolchain: "config/toolchain/GCC.cmake"
- generator: "-G Ninja"
- - name: "TS Autotools GCC"
- artifact: "LinuxATS.tar.xz"
+ cpp: enable
+ fortran: enable
+ java: enable
+ parallel: disable
+ mirror_vfd: enable
+ direct_vfd: enable
+ deprec_sym: enable
+ default_api: v112
+ toolchain: ""
+ generator: "autogen"
+ flags: ""
+ run_tests: false
+ thread_safety:
+ enabled: false
+ text: ""
+ build_mode:
+ text: " DBG"
+ cmake: "Debug"
+ autotools: "debug"
+
+ - name: "Ubuntu gcc Autotools no deprecated symbols (build only)"
os: ubuntu-latest
- build_type: "Release"
- cpp: disable
- fortran: disable
- java: disable
- ts: enable
- hl: disable
+ cpp: enable
+ fortran: enable
+ java: enable
parallel: disable
+ mirror_vfd: enable
+ direct_vfd: enable
+ deprec_sym: disable
+ default_api: v112
toolchain: ""
generator: "autogen"
-# - name: "Ubuntu Parallel GCC"
-# artifact: "LinuxPar.tar.xz"
-# os: ubuntu-latest
-# build_type: "Release"
-# cpp: OFF
-# fortran: OFF
-# parallel: ON
-# toolchain: "config/toolchain/GCC.cmake"
-# generator: "-G Ninja"
-
- name: ${{ matrix.name }}
+ flags: ""
+ run_tests: false
+ thread_safety:
+ enabled: false
+ text: ""
+ build_mode:
+ text: " DBG"
+ cmake: "Debug"
+ autotools: "debug"
+
+ # Sets the job's name from the properties
+ name: "${{ matrix.name }}${{ matrix.build_mode.text }}${{ matrix.thread_safety.text }}"
+
+ # Don't run the action if the commit message says to skip CI
+ if: "!contains(github.event.head_commit.message, 'skip-ci')"
+
# The type of runner that the job will run on
runs-on: ${{ matrix.os }}
- if: "!contains(github.event.head_commit.message, 'skip-ci')"
-
+
# Steps represent a sequence of tasks that will be executed as part of the job
steps:
- - name: Install Dependencies (Linux)
- run: sudo apt-get install ninja-build
- if: matrix.os == 'ubuntu-latest'
- - name: Install Autotools Dependencies (Linux)
- run: sudo apt-get install automake autoconf libtool libtool-bin
- if: matrix.generator == 'autogen'
- - name: Install Dependencies (Windows)
- run: choco install ninja
- if: matrix.os == 'windows-latest'
- - name: Install Dependencies (macOS)
- run: brew install ninja
- if: matrix.os == 'macos-11'
- - name: Set environment for MSVC (Windows)
- if: matrix.os == 'windows-latest'
- run: |
- # Set these env vars so cmake picks the correct compiler
- echo "CXX=cl.exe" >> $GITHUB_ENV
- echo "CC=cl.exe" >> $GITHUB_ENV
+
+ #
+ # SETUP
+ #
+
+ #Useful for debugging
+ - name: Dump matrix context
+ run: echo '${{ toJSON(matrix) }}'
+
+ - name: Install CMake Dependencies (Linux)
+ run: sudo apt-get install ninja-build
+ if: matrix.os == 'ubuntu-latest'
+
+ - name: Install Autotools Dependencies (Linux, serial)
+ run: |
+ sudo apt update
+ sudo apt install automake autoconf libtool libtool-bin
+ sudo apt install gcc-11 g++-11 gfortran-11
+ echo "CC=gcc-11" >> $GITHUB_ENV
+ echo "CXX=g++-11" >> $GITHUB_ENV
+ echo "FC=gfortran-11" >> $GITHUB_ENV
+ if: (matrix.generator == 'autogen') && (matrix.parallel != 'enable')
+
+ - name: Install Autotools Dependencies (Linux, parallel)
+ run: |
+ sudo apt update
+ sudo apt install automake autoconf libtool libtool-bin
+ sudo apt install openmpi-bin openmpi-common mpi-default-dev
+ echo "CC=mpicc" >> $GITHUB_ENV
+ echo "FC=mpif90" >> $GITHUB_ENV
+ if: (matrix.generator == 'autogen') && (matrix.parallel == 'enable')
+
+ - name: Install Dependencies (Windows)
+ run: choco install ninja
+ if: matrix.os == 'windows-latest'
+
+ - name: Install Dependencies (macOS)
+ run: brew install ninja
+ if: matrix.os == 'macos-11'
+
+ - name: Set environment for MSVC (Windows)
+ run: |
+ # Set these environment variables so CMake picks the correct compiler
+ echo "CXX=cl.exe" >> $GITHUB_ENV
+ echo "CC=cl.exe" >> $GITHUB_ENV
+ if: matrix.os == 'windows-latest'
- # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- - name: Get Sources
- uses: actions/checkout@v2
-
- - name: Autotools Configure
- if: matrix.generator == 'autogen'
- run: |
- sh ./autogen.sh
- mkdir "${{ runner.workspace }}/build"
- cd "${{ runner.workspace }}/build"
- $GITHUB_WORKSPACE/configure --enable-shared --${{ matrix.ts }}-threadsafe --${{ matrix.hl }}-hl --${{ matrix.parallel }}-parallel --${{ matrix.cpp }}-cxx --${{ matrix.fortran }}-fortran --${{ matrix.java }}-java
- shell: bash
-
- - name: Configure
- if: matrix.generator != 'autogen'
- run: |
- mkdir "${{ runner.workspace }}/build"
- cd "${{ runner.workspace }}/build"
- cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_THREADSAFE:BOOL=${{ matrix.ts }} -DHDF5_BUILD_HL_LIB:BOOL=${{ matrix.hl }} -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=${{ matrix.cpp }} -DHDF5_BUILD_FORTRAN=${{ matrix.fortran }} -DHDF5_BUILD_JAVA=${{ matrix.java }} $GITHUB_WORKSPACE
- shell: bash
-
- - name: Autotools Build
- if: matrix.generator == 'autogen'
- run: make
- working-directory: ${{ runner.workspace }}/build
-
- - name: Build
- if: matrix.generator != 'autogen'
- run: cmake --build . --config ${{ matrix.build_type }}
- working-directory: ${{ runner.workspace }}/build
-
- - name: Autotools Test
- if: matrix.generator == 'autogen'
- run: make check
- working-directory: ${{ runner.workspace }}/build
-
- - name: Test
- if: matrix.generator != 'autogen'
- run: ctest --build . -C ${{ matrix.build_type }} -V
- working-directory: ${{ runner.workspace }}/build
+ # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
+ - name: Get Sources
+ uses: actions/checkout@v3
+
+ #
+ # AUTOTOOLS CONFIGURE
+ #
+
+ - name: Autotools Configure
+ run: |
+ sh ./autogen.sh
+ mkdir "${{ runner.workspace }}/build"
+ cd "${{ runner.workspace }}/build"
+ ${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --${{ matrix.deprec_sym }}-deprecated-symbols --with-default-api-version=${{ matrix.default_api }} --enable-shared --${{ matrix.parallel }}-parallel --${{ matrix.cpp }}-cxx --${{ matrix.fortran }}-fortran --${{ matrix.java }}-java --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd
+ shell: bash
+        if: (matrix.generator == 'autogen') && (! matrix.thread_safety.enabled)
+
+ - name: Autotools Configure (Thread-Safe)
+ run: |
+ sh ./autogen.sh
+ mkdir "${{ runner.workspace }}/build"
+ cd "${{ runner.workspace }}/build"
+ ${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --enable-shared --enable-threadsafe --disable-hl --${{ matrix.parallel }}-parallel --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd
+ shell: bash
+        if: (matrix.generator == 'autogen') && (matrix.thread_safety.enabled)
+
+ #
+ # CMAKE CONFIGURE
+ #
+
+ - name: CMake Configure
+ run: |
+ mkdir "${{ runner.workspace }}/build"
+ cd "${{ runner.workspace }}/build"
+ cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=${{ matrix.cpp }} -DHDF5_BUILD_FORTRAN=${{ matrix.fortran }} -DHDF5_BUILD_JAVA=${{ matrix.java }} -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} $GITHUB_WORKSPACE
+ shell: bash
+        if: (matrix.generator != 'autogen') && (! matrix.thread_safety.enabled)
+
+
+ - name: CMake Configure (Thread-Safe)
+ run: |
+ mkdir "${{ runner.workspace }}/build"
+ cd "${{ runner.workspace }}/build"
+ cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_HL_LIB:BOOL=OFF -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} $GITHUB_WORKSPACE
+ shell: bash
+        if: (matrix.generator != 'autogen') && (matrix.thread_safety.enabled)
+
+ #
+ # BUILD
+ #
+
+ - name: Autotools Build
+ run: make
+ working-directory: ${{ runner.workspace }}/build
+ if: matrix.generator == 'autogen'
+
+ - name: CMake Build
+ run: cmake --build . --config ${{ matrix.build_mode.cmake }}
+ working-directory: ${{ runner.workspace }}/build
+ if: matrix.generator != 'autogen'
+
+ #
+ # RUN TESTS
+ #
+
+ - name: Autotools Run Tests
+ run: make check
+ working-directory: ${{ runner.workspace }}/build
+ if: (matrix.generator == 'autogen') && (matrix.run_tests)
+
+ - name: CMake Run Tests
+ run: ctest --build . -C ${{ matrix.build_mode.cmake }} -V
+ working-directory: ${{ runner.workspace }}/build
+ # Skip Debug MSVC while we investigate H5L Java test timeouts
+ if: (matrix.generator != 'autogen') && (matrix.run_tests) && ! ((matrix.name == 'Windows MSVC CMake') && (matrix.build_mode.cmake == 'Debug'))
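The comments above describe a three-dimensional matrix (configuration name, thread-safety, build mode) whose per-configuration details are attached through 'include' entries that match on the 'name' key. A reduced, illustrative YAML sketch of that mechanism (the values here are examples, not the full workflow):

    strategy:
      matrix:
        name: ["Ubuntu gcc CMake", "MacOS Clang CMake"]
        build_mode:
          - { text: " REL", cmake: "Release" }
          - { text: " DBG", cmake: "Debug" }
        include:
          # key-value pairs are appended to every combination whose 'name' matches
          - name: "Ubuntu gcc CMake"
            os: ubuntu-latest
            generator: "-G Ninja"
          - name: "MacOS Clang CMake"
            os: macos-11
            generator: "-G Ninja"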
diff --git a/.github/workflows/pr-check.yml b/.github/workflows/pr-check.yml
deleted file mode 100644
index b48f835dbe1..00000000000
--- a/.github/workflows/pr-check.yml
+++ /dev/null
@@ -1,220 +0,0 @@
-name: PR hdf5 dev CI
-
-# Controls when the action will run. Triggers the workflow on push or pull request
-on:
- pull_request:
- branches: [ develop, hdf5_1_12, hdf5_1_10, hdf5_1_8 ]
- paths-ignore:
- - '.github/**'
- - 'doc/**'
- - 'release_docs/**'
- - 'ACKNOWLEDGEMENTS'
- - 'COPYING**'
- - '**.md'
-
-# A workflow run is made up of one or more jobs that can run sequentially or in parallel
-jobs:
- # This workflow contains a single job called "build"
- build:
- strategy:
-# fail-fast: false
- matrix:
- name: ["Windows Latest MSVC", "Ubuntu Latest GCC", "Ubuntu Debug GCC", "macOS Latest Clang", "Ubuntu Autotools GCC", "Windows TS MSVC", "Ubuntu TS GCC", "TS Debug GCC", "macOS TS Clang", "TS Autotools GCC"]
- include:
- - name: "Windows Latest MSVC"
- artifact: "Windows-MSVC.tar.xz"
- os: windows-2022
- build_type: "Release"
- toolchain: ""
- cpp: ON
- fortran: OFF
- java: ON
- ts: OFF
- hl: ON
- parallel: OFF
- generator: "-G \"Visual Studio 17 2022\" -A x64"
- - name: "Ubuntu Latest GCC"
- artifact: "Linux.tar.xz"
- os: ubuntu-latest
- build_type: "Release"
- cpp: ON
- fortran: ON
- java: ON
- ts: OFF
- hl: ON
- parallel: OFF
- toolchain: "config/toolchain/GCC.cmake"
- generator: "-G Ninja"
- - name: "macOS Latest Clang"
- artifact: "macOS.tar.xz"
- os: macos-11
- build_type: "Release"
- cpp: ON
- fortran: OFF
- java: ON
- ts: OFF
- hl: ON
- parallel: OFF
- toolchain: "config/toolchain/clang.cmake"
- generator: "-G Ninja"
- - name: "Ubuntu Debug GCC"
- artifact: "LinuxDBG.tar.xz"
- os: ubuntu-latest
- build_type: "Debug"
- cpp: ON
- fortran: OFF
- java: OFF
- ts: OFF
- hl: ON
- parallel: OFF
- toolchain: "config/toolchain/GCC.cmake"
- generator: "-G Ninja"
- - name: "Ubuntu Autotools GCC"
- artifact: "LinuxA.tar.xz"
- os: ubuntu-latest
- build_type: "Release"
- cpp: enable
- fortran: enable
- java: enable
- ts: disable
- hl: enable
- parallel: disable
- toolchain: ""
- generator: "autogen"
-# Threadsafe runs
- - name: "Windows TS MSVC"
- artifact: "Windows-MSVCTS.tar.xz"
- os: windows-2019
- build_type: "Release"
- toolchain: ""
- cpp: OFF
- fortran: OFF
- java: OFF
- ts: ON
- hl: OFF
- parallel: OFF
- generator: "-G \"Visual Studio 16 2019\" -A x64"
- - name: "Ubuntu TS GCC"
- artifact: "LinuxTS.tar.xz"
- os: ubuntu-latest
- build_type: "Release"
- cpp: OFF
- fortran: OFF
- java: OFF
- ts: ON
- hl: OFF
- parallel: OFF
- toolchain: "config/toolchain/GCC.cmake"
- generator: "-G Ninja"
- - name: "macOS TS Clang"
- artifact: "macOSTS.tar.xz"
- os: macos-11
- build_type: "Release"
- cpp: OFF
- fortran: OFF
- java: OFF
- ts: ON
- hl: OFF
- parallel: OFF
- toolchain: "config/toolchain/clang.cmake"
- generator: "-G Ninja"
- - name: "TS Debug GCC"
- artifact: "LinuxTSDBG.tar.xz"
- os: ubuntu-latest
- build_type: "Debug"
- cpp: OFF
- fortran: OFF
- java: OFF
- ts: ON
- hl: OFF
- parallel: OFF
- toolchain: "config/toolchain/GCC.cmake"
- generator: "-G Ninja"
- - name: "TS Autotools GCC"
- artifact: "LinuxATS.tar.xz"
- os: ubuntu-latest
- build_type: "Release"
- cpp: disable
- fortran: disable
- java: disable
- ts: enable
- hl: disable
- parallel: disable
- toolchain: ""
- generator: "autogen"
-# - name: "Ubuntu Parallel GCC"
-# artifact: "LinuxPar.tar.xz"
-# os: ubuntu-latest
-# build_type: "Release"
-# cpp: OFF
-# fortran: OFF
-# parallel: ON
-# toolchain: "config/toolchain/GCC.cmake"
-# generator: "-G Ninja"
-
- name: ${{ matrix.name }}
- # The type of runner that the job will run on
- runs-on: ${{ matrix.os }}
- if: "!contains(github.event.head_commit.message, 'skip-ci')"
-
- # Steps represent a sequence of tasks that will be executed as part of the job
- steps:
- - name: Install Dependencies (Linux)
- run: sudo apt-get install ninja-build
- if: matrix.os == 'ubuntu-latest'
- - name: Install Autotools Dependencies (Linux)
- run: sudo apt-get install automake autoconf libtool libtool-bin
- if: matrix.generator == 'autogen'
- - name: Install Dependencies (Windows)
- run: choco install ninja
- if: matrix.os == 'windows-latest'
- - name: Install Dependencies (macOS)
- run: brew install ninja
- if: matrix.os == 'macos-11'
- - name: Set environment for MSVC (Windows)
- if: matrix.os == 'windows-latest'
- run: |
- # Set these env vars so cmake picks the correct compiler
- echo "CXX=cl.exe" >> $GITHUB_ENV
- echo "CC=cl.exe" >> $GITHUB_ENV
-
- # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- - name: Get Sources
- uses: actions/checkout@v2
-
- - name: Autotools Configure
- if: matrix.generator == 'autogen'
- run: |
- sh ./autogen.sh
- mkdir "${{ runner.workspace }}/build"
- cd "${{ runner.workspace }}/build"
- $GITHUB_WORKSPACE/configure --enable-shared --${{ matrix.ts }}-threadsafe --${{ matrix.hl }}-hl --${{ matrix.parallel }}-parallel --${{ matrix.cpp }}-cxx --${{ matrix.fortran }}-fortran --${{ matrix.java }}-java
- shell: bash
-
- - name: Configure
- if: matrix.generator != 'autogen'
- run: |
- mkdir "${{ runner.workspace }}/build"
- cd "${{ runner.workspace }}/build"
- cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_THREADSAFE:BOOL=${{ matrix.ts }} -DHDF5_BUILD_HL_LIB:BOOL=${{ matrix.hl }} -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=${{ matrix.cpp }} -DHDF5_BUILD_FORTRAN=${{ matrix.fortran }} -DHDF5_BUILD_JAVA=${{ matrix.java }} $GITHUB_WORKSPACE
- shell: bash
-
- - name: Autotools Build
- if: matrix.generator == 'autogen'
- run: make
- working-directory: ${{ runner.workspace }}/build
-
- - name: Build
- if: matrix.generator != 'autogen'
- run: cmake --build . --config ${{ matrix.build_type }}
- working-directory: ${{ runner.workspace }}/build
-
- - name: Autotools Test
- if: matrix.generator == 'autogen'
- run: make check
- working-directory: ${{ runner.workspace }}/build
-
- - name: Test
- if: matrix.generator != 'autogen'
- run: ctest --build . -C ${{ matrix.build_type }} -V
- working-directory: ${{ runner.workspace }}/build
diff --git a/test/mirror_vfd.c b/test/mirror_vfd.c
index 1a224c4488e..f8caea8a57c 100644
--- a/test/mirror_vfd.c
+++ b/test/mirror_vfd.c
@@ -1573,8 +1573,8 @@ _create_chunking_ids(hid_t file_id, unsigned min_dset, unsigned max_dset, hsize_
FAIL_PUTS_ERROR(mesg);
}
- dataset_ids[m] =
- H5Dcreate(file_id, dset_name, H5T_STD_I32BE, dataspace_ids[m], H5P_DEFAULT, dcpl_id, H5P_DEFAULT);
+ dataset_ids[m] = H5Dcreate2(file_id, dset_name, H5T_STD_I32BE, dataspace_ids[m], H5P_DEFAULT, dcpl_id,
+ H5P_DEFAULT);
if (dataset_ids[m] < 0) {
HDsnprintf(mesg, MIRR_MESG_SIZE, "unable to create dset ID %d\n", m);
FAIL_PUTS_ERROR(mesg);
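For reference, H5Dcreate is a compatibility macro that maps to H5Dcreate1 or H5Dcreate2 depending on the configured default API version, which is why the test now calls H5Dcreate2 directly; the version-2 routine takes explicit link-creation, dataset-creation, and dataset-access property lists, any of which may be H5P_DEFAULT. A hypothetical standalone example of the same call shape:

    hid_t dset_id = H5Dcreate2(file_id, "dset", H5T_STD_I32BE, space_id,
                               H5P_DEFAULT, /* lcpl: link creation        */
                               dcpl_id,     /* dcpl: chunking/filters     */
                               H5P_DEFAULT  /* dapl: dataset access       */);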
From 6101a0cc836a42d99ac4fb9097ad396c007d3a05 Mon Sep 17 00:00:00 2001
From: Dana Robinson <43805+derobins@users.noreply.github.com>
Date: Wed, 4 Jan 2023 16:28:26 -0800
Subject: [PATCH 010/108] Brings README.md changes from develop (#2389)
---
README.md | 38 +++++++++++++++---------
doc/img/release-schedule.plantuml | 47 ++++++++++++------------------
doc/img/release-schedule.png | Bin 16991 -> 13977 bytes
3 files changed, 43 insertions(+), 42 deletions(-)
diff --git a/README.md b/README.md
index 961c1373eac..38e48ed1c8d 100644
--- a/README.md
+++ b/README.md
@@ -1,18 +1,24 @@
-HDF5 version 1.12.3-1 currently under development
+HDF5 version 1.15.0 currently under development
![HDF5 Logo](doxygen/img/HDF5.png)
-*Please refer to the release_docs/INSTALL file for installation instructions.*
+[![develop build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/main.yml?branch=develop&label=develop)](https://github.com/HDFGroup/hdf5/actions?query=branch%3Adevelop)
+[![1.14 build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/main.yml?branch=hdf5_1_14&label=1.14)](https://github.com/HDFGroup/hdf5/actions?query=branch%3Ahdf5_1_14)
+[![1.12 build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/main.yml?branch=hdf5_1_12&label=1.12)](https://github.com/HDFGroup/hdf5/actions?query=branch%3Ahdf5_1_12)
+[![1.10 build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/main.yml?branch=hdf5_1_10&label=1.10)](https://github.com/HDFGroup/hdf5/actions?query=branch%3Ahdf5_1_10)
+[![1.8 build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/main.yml?branch=hdf5_1_8&label=1.8)](https://github.com/HDFGroup/hdf5/actions?query=branch%3Ahdf5_1_8)
+[![BSD](https://img.shields.io/badge/License-BSD-blue.svg)](https://github.com/HDFGroup/hdf5/blob/develop/COPYING)
-THE HDF GROUP
----------------
+*Please refer to the release_docs/INSTALL file for installation instructions.*
-The HDF Group is the developer of HDF5®, a high-performance software library and
-data format that has been adopted across multiple industries and has become a
-de facto standard in scientific and research communities.
+This repository contains a high-performance library's source code and a file format
+specification that implement the HDF5® data model. The model has been adopted across
+many industries and this implementation has become a de facto data management standard
+in science, engineering, and research communities worldwide.
-More information about The HDF Group, the HDF5 Community and the HDF5 software
-project, tools and services can be found at the Group's website.
+The HDF Group is the developer, maintainer, and steward of HDF5 software. Find more
+information about The HDF Group, the HDF5 Community, and other HDF5 software projects,
+tools, and services at The HDF Group's website.
https://www.hdfgroup.org/
@@ -77,13 +83,19 @@ RELEASE SCHEDULE
HDF5 does not release on a regular schedule. Instead, releases are driven by
new features and bug fixes, though we try to have at least one release of each
maintenance branch per year. Future HDF5 releases indicated on this schedule
-are tentative.
+are tentative.
+
+**NOTE**: HDF5 1.12 is being retired early due to its incomplete and incompatible VOL
+layer.
| Release | New Features |
| ------- | ------------ |
-| 1.13.2 | VFD SWMR, Onion VFD, Selection I/O |
-| 1.13.3 | Multi-Dataset I/O |
-| 1.13.4 | Subfiling |
+| 1.8.23 | last HDF5 1.8 release |
+| 1.10.10 | CVE fixes, performance improvements, H5Dchunk\_iter() |
+| 1.12.3 | CVE fixes, performance improvements, H5Dchunk\_iter(), last HDF5 1.12 release |
+| 1.14.1 | selection I/O with datatype conversion |
+| 2.0.0 | TBD |
+| TBD | VFD SWMR |
This list of feature release versions is also tentative, and the specific release
in which a feature is introduced may change.
diff --git a/doc/img/release-schedule.plantuml b/doc/img/release-schedule.plantuml
index f5aa62a2856..c724dc98802 100644
--- a/doc/img/release-schedule.plantuml
+++ b/doc/img/release-schedule.plantuml
@@ -2,44 +2,33 @@ The release timeline was generated on PlantUML (https://plantuml.com)
The current script:
-@startuml
+@startgantt
+
title HDF5 Release Schedule
projectscale monthly
-Project starts 2021-01-01
+Project starts 2022-01-01
-[1.8] starts 2021-01-01 and lasts 114 weeks
-[1.8.22] happens 2021-02-05
-[1.8.23] happens 2022-12-31
-[1.8.23] displays on same row as [1.8.22]
-[1.8] is colored in #CC6677
+[1.8] starts 2022-01-01 and lasts 57 weeks
+[1.8.23] happens 2023-01-31
+[1.8] is colored in #F76969
-[1.10] starts 2021-01-01 and lasts 114 weeks
-[1.10.8] happens 2021-10-22
+[1.10] starts 2022-01-01 and lasts 104 weeks
[1.10.9] happens 2022-05-31
-[1.10.9] displays on same row as [1.10.8]
-[1.10] is colored in #DDCC77
+[1.10.10] happens 2023-02-28
+[1.10.10] displays on same row as [1.10.9]
+[1.10] is colored in #F6DD60
-[1.12] starts 2021-01-01 and lasts 114 weeks
-[1.12.1] happens 2021-07-01
+[1.12] starts 2022-01-01 and lasts 65 weeks
[1.12.2] happens 2022-04-30
-[1.12.2] displays on same row as [1.12.1]
+[1.12.3] happens 2023-03-31
+[1.12.3] displays on same row as [1.12.2]
[1.12] is colored in #88CCEE
-[1.13] starts 2021-01-01 and lasts 104 weeks
-[1.13.0] happens 2021-12-01
-[1.13.1] happens 2022-03-02
-[1.13.2] happens 2022-06-31
-[1.13.3] happens 2022-08-31
-[1.13.4] happens 2022-10-31
-[1.13.1] displays on same row as [1.13.0]
-[1.13.2] displays on same row as [1.13.0]
-[1.13.3] displays on same row as [1.13.0]
-[1.13.4] displays on same row as [1.13.0]
-[1.13] is colored in #44AA99
-
-[1.14] starts at 2022-12-31 and lasts 10 weeks
+[1.14] starts at 2023-01-01 and lasts 52 weeks
[1.14.0] happens at 2022-12-31
-[1.14] is colored in #AA4499
-@enduml
+[1.14.1] happens at 2023-04-30
+[1.14.1] displays on same row as [1.14.0]
+[1.14] is colored in #B187CF
+@endgantt
diff --git a/doc/img/release-schedule.png b/doc/img/release-schedule.png
index c2ed24142b06ff9b8753f0d8ab53ab2728b59233..b96f741a02953245d03f6420552621181d8aa608 100755
GIT binary patch
literal 13977
[base85-encoded binary patch data for doc/img/release-schedule.png omitted]
Date: Wed, 4 Jan 2023 17:18:35 -0800
Subject: [PATCH 011/108] Update README.md
Fixes incorrect version
---
README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 38e48ed1c8d..eedc4cfa030 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-HDF5 version 1.15.0 currently under development
+HDF5 version 1.12.3-1 currently under development
![HDF5 Logo](doxygen/img/HDF5.png)
From 363b26fa0d9dadcd96e611ed38956afeaa94851a Mon Sep 17 00:00:00 2001
From: Dana Robinson <43805+derobins@users.noreply.github.com>
Date: Thu, 5 Jan 2023 08:59:24 -0800
Subject: [PATCH 012/108] Minor change to H5private.h to bring in line with
develop (#2391)
---
src/H5private.h | 25 +++++--------------------
1 file changed, 5 insertions(+), 20 deletions(-)
diff --git a/src/H5private.h b/src/H5private.h
index e470f17f6c5..f54c6dff617 100644
--- a/src/H5private.h
+++ b/src/H5private.h
@@ -26,25 +26,6 @@
#include "H5public.h" /* Include Public Definitions */
-/* include the pthread header */
-#ifdef H5_HAVE_THREADSAFE
-#ifdef H5_HAVE_WIN32_API
-#ifndef H5_HAVE_WIN_THREADS
-#ifdef H5_HAVE_PTHREAD_H
-#include <pthread.h>
-#endif /* H5_HAVE_PTHREAD_H */
-#endif /* H5_HAVE_WIN_THREADS */
-#else /* H5_HAVE_WIN32_API */
-#ifdef H5_HAVE_PTHREAD_H
-#include <pthread.h>
-#endif /* H5_HAVE_PTHREAD_H */
-#endif /* H5_HAVE_WIN32_API */
-#endif /* H5_HAVE_THREADSAFE */
-
-/*
- * Include ANSI-C header files.
- */
-#ifdef H5_STDC_HEADERS
#include
#include
#include
@@ -56,7 +37,6 @@
#include
#include
#include
-#endif
/* POSIX headers */
#ifdef H5_HAVE_SYS_TIME_H
@@ -82,6 +62,11 @@
#endif
#endif
+/* Include the Pthreads header, if necessary */
+#if defined(H5_HAVE_THREADSAFE) && defined(H5_HAVE_PTHREAD_H)
+#include <pthread.h>
+#endif
+
/*
* The `struct stat' data type for stat() and fstat(). This is a POSIX file
* but often appears on non-POSIX systems also. The `struct stat' is required
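
[Editorial note: the `struct stat` usage referred to in the comment above follows the
ordinary POSIX pattern. A minimal, self-contained sketch is shown below; "example.h5"
is only a placeholder file name, not something taken from this patch.]

#include <stdio.h>
#include <sys/types.h>
#include <sys/stat.h>

/* Sketch only: query file metadata with stat(), the call the comment
 * above is describing; "example.h5" is a placeholder name. */
int
main(void)
{
    struct stat sb;

    if (stat("example.h5", &sb) != 0) {
        perror("stat");
        return 1;
    }

    printf("size: %lld bytes\n", (long long)sb.st_size);
    return 0;
}
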
From cedd2a41f5143834084a6e2f609929f19a55cb2b Mon Sep 17 00:00:00 2001
From: Dana Robinson <43805+derobins@users.noreply.github.com>
Date: Thu, 5 Jan 2023 12:51:25 -0800
Subject: [PATCH 013/108] Brings mirror VFD test fixes from develop (#2392)
---
src/H5FDmirror.c | 72 +-
test/mirror_vfd.c | 2913 ++++++++++++-------------
test/test_mirror.sh.in | 18 +-
test/use_append_chunk_mirror.c | 63 +-
utils/mirror_vfd/CMakeLists.txt | 4 +-
utils/mirror_vfd/mirror_server.c | 64 +-
utils/mirror_vfd/mirror_server_stop.c | 11 +
utils/mirror_vfd/mirror_writer.c | 37 +-
8 files changed, 1548 insertions(+), 1634 deletions(-)
diff --git a/src/H5FDmirror.c b/src/H5FDmirror.c
index f7f79fcecc5..c0efb96d10f 100644
--- a/src/H5FDmirror.c
+++ b/src/H5FDmirror.c
@@ -244,7 +244,7 @@ H5FD_mirror_init(void)
FUNC_ENTER_NOAPI(H5I_INVALID_HID)
- LOG_OP_CALL(FUNC);
+ LOG_OP_CALL(__func__);
if (H5I_VFL != H5I_get_type(H5FD_MIRROR_g)) {
H5FD_MIRROR_g = H5FD_register(&H5FD_mirror_g, sizeof(H5FD_class_t), FALSE);
@@ -268,12 +268,12 @@ H5FD_mirror_init(void)
static herr_t
H5FD__mirror_term(void)
{
- FUNC_ENTER_STATIC_NOERR
+ FUNC_ENTER_PACKAGE_NOERR
/* Reset VFL ID */
H5FD_MIRROR_g = 0;
- LOG_OP_CALL(FUNC);
+ LOG_OP_CALL(__func__);
FUNC_LEAVE_NOAPI(SUCCEED)
} /* end H5FD__mirror_term() */
@@ -337,7 +337,7 @@ H5FD__mirror_xmit_decode_uint32(uint32_t *out, const unsigned char *_buf)
/* ---------------------------------------------------------------------------
* Function: is_host_little_endian
*
- * Purpose: Determine whether the host machine is is little-endian.
+ * Purpose: Determine whether the host machine is little-endian.
*
* Store an integer with a known value, re-map the memory to a
* character array, and inspect the array's contents.
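
[Editorial note: for illustration, the probe described in the Purpose text above can be
sketched in a few lines of standalone C. This is a simplified stand-in, not the VFD's
actual implementation.]

#include <stdbool.h>

/* Store a known integer, view its storage as bytes, and check which
 * byte holds the low-order value; byte 0 == 1 means little-endian. */
static bool
is_little_endian_sketch(void)
{
    unsigned int         value = 1;
    const unsigned char *bytes = (const unsigned char *)&value;

    return bytes[0] == 1;
}
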
@@ -1127,9 +1127,9 @@ H5FD__mirror_verify_reply(H5FD_mirror_t *file)
ssize_t read_ret = 0;
herr_t ret_value = SUCCEED;
- FUNC_ENTER_STATIC
+ FUNC_ENTER_PACKAGE
- LOG_OP_CALL(FUNC);
+ LOG_OP_CALL(__func__);
HDassert(file && file->sock_fd);
@@ -1182,9 +1182,9 @@ H5FD__mirror_fapl_get(H5FD_t *_file)
H5FD_mirror_fapl_t *fa = NULL;
void *ret_value = NULL;
- FUNC_ENTER_STATIC
+ FUNC_ENTER_PACKAGE
- LOG_OP_CALL(FUNC);
+ LOG_OP_CALL(__func__);
fa = (H5FD_mirror_fapl_t *)H5MM_calloc(sizeof(H5FD_mirror_fapl_t));
if (NULL == fa)
@@ -1218,9 +1218,9 @@ H5FD__mirror_fapl_copy(const void *_old_fa)
H5FD_mirror_fapl_t *new_fa = NULL;
void *ret_value = NULL;
- FUNC_ENTER_STATIC
+ FUNC_ENTER_PACKAGE
- LOG_OP_CALL(FUNC);
+ LOG_OP_CALL(__func__);
new_fa = (H5FD_mirror_fapl_t *)H5MM_malloc(sizeof(H5FD_mirror_fapl_t));
if (new_fa == NULL)
@@ -1250,9 +1250,9 @@ H5FD__mirror_fapl_free(void *_fa)
{
H5FD_mirror_fapl_t *fa = (H5FD_mirror_fapl_t *)_fa;
- FUNC_ENTER_STATIC_NOERR
+ FUNC_ENTER_PACKAGE_NOERR
- LOG_OP_CALL(FUNC);
+ LOG_OP_CALL(__func__);
/* sanity check */
HDassert(fa != NULL);
@@ -1283,7 +1283,7 @@ H5Pget_fapl_mirror(hid_t fapl_id, H5FD_mirror_fapl_t *fa_dst)
FUNC_ENTER_API(FAIL)
H5TRACE2("e", "i*x", fapl_id, fa_dst);
- LOG_OP_CALL(FUNC);
+ LOG_OP_CALL(__func__);
if (NULL == fa_dst)
HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "fa_dst is NULL");
@@ -1324,7 +1324,7 @@ H5Pset_fapl_mirror(hid_t fapl_id, H5FD_mirror_fapl_t *fa)
FUNC_ENTER_API(FAIL)
H5TRACE2("e", "i*x", fapl_id, fa);
- LOG_OP_CALL(FUNC);
+ LOG_OP_CALL(__func__);
plist = H5P_object_verify(fapl_id, H5P_FILE_ACCESS);
if (NULL == plist)
@@ -1368,9 +1368,9 @@ H5FD__mirror_open(const char *name, unsigned flags, hid_t fapl_id, haddr_t maxad
H5FD_mirror_xmit_open_t *open_xmit = NULL;
H5FD_t *ret_value = NULL;
- FUNC_ENTER_STATIC
+ FUNC_ENTER_PACKAGE
- LOG_OP_CALL(FUNC);
+ LOG_OP_CALL(__func__);
/* --------------- */
/* Check arguments */
@@ -1493,9 +1493,9 @@ H5FD__mirror_close(H5FD_t *_file)
int xmit_encoded = 0; /* monitor point of failure */
herr_t ret_value = SUCCEED;
- FUNC_ENTER_STATIC
+ FUNC_ENTER_PACKAGE
- LOG_OP_CALL(FUNC);
+ LOG_OP_CALL(__func__);
/* Sanity check */
HDassert(file);
@@ -1565,9 +1565,9 @@ H5FD__mirror_close(H5FD_t *_file)
static herr_t
H5FD__mirror_query(const H5FD_t H5_ATTR_UNUSED *_file, unsigned long *flags)
{
- FUNC_ENTER_STATIC_NOERR;
+ FUNC_ENTER_PACKAGE_NOERR;
- LOG_OP_CALL(FUNC);
+ LOG_OP_CALL(__func__);
/* Notice: the Mirror VFD Writer currently uses only the Sec2 driver as
* the underlying driver -- as such, the Mirror VFD implementation copies
@@ -1602,9 +1602,9 @@ H5FD__mirror_get_eoa(const H5FD_t *_file, H5FD_mem_t H5_ATTR_UNUSED type)
{
const H5FD_mirror_t *file = (const H5FD_mirror_t *)_file;
- FUNC_ENTER_STATIC_NOERR
+ FUNC_ENTER_PACKAGE_NOERR
- LOG_OP_CALL(FUNC);
+ LOG_OP_CALL(__func__);
HDassert(file);
@@ -1629,9 +1629,9 @@ H5FD__mirror_set_eoa(H5FD_t *_file, H5FD_mem_t type, haddr_t addr)
H5FD_mirror_t *file = (H5FD_mirror_t *)_file;
herr_t ret_value = SUCCEED;
- FUNC_ENTER_STATIC
+ FUNC_ENTER_PACKAGE
- LOG_OP_CALL(FUNC);
+ LOG_OP_CALL(__func__);
HDassert(file);
@@ -1684,9 +1684,9 @@ H5FD__mirror_get_eof(const H5FD_t *_file, H5FD_mem_t H5_ATTR_UNUSED type)
{
const H5FD_mirror_t *file = (const H5FD_mirror_t *)_file;
- FUNC_ENTER_STATIC_NOERR
+ FUNC_ENTER_PACKAGE_NOERR
- LOG_OP_CALL(FUNC);
+ LOG_OP_CALL(__func__);
HDassert(file);
@@ -1705,9 +1705,9 @@ static herr_t
H5FD__mirror_read(H5FD_t H5_ATTR_UNUSED *_file, H5FD_mem_t H5_ATTR_UNUSED type, hid_t H5_ATTR_UNUSED fapl_id,
haddr_t H5_ATTR_UNUSED addr, size_t H5_ATTR_UNUSED size, void H5_ATTR_UNUSED *buf)
{
- FUNC_ENTER_STATIC_NOERR
+ FUNC_ENTER_PACKAGE_NOERR
- LOG_OP_CALL(FUNC);
+ LOG_OP_CALL(__func__);
FUNC_LEAVE_NOAPI(FAIL)
} /* end H5FD__mirror_read() */
@@ -1738,9 +1738,9 @@ H5FD__mirror_write(H5FD_t *_file, H5FD_mem_t type, hid_t H5_ATTR_UNUSED dxpl_id,
H5FD_mirror_t *file = (H5FD_mirror_t *)_file;
herr_t ret_value = SUCCEED;
- FUNC_ENTER_STATIC
+ FUNC_ENTER_PACKAGE
- LOG_OP_CALL(FUNC);
+ LOG_OP_CALL(__func__);
HDassert(file);
HDassert(buf);
@@ -1801,9 +1801,9 @@ H5FD__mirror_truncate(H5FD_t *_file, hid_t H5_ATTR_UNUSED dxpl_id, hbool_t H5_AT
H5FD_mirror_t *file = (H5FD_mirror_t *)_file;
herr_t ret_value = SUCCEED;
- FUNC_ENTER_STATIC
+ FUNC_ENTER_PACKAGE
- LOG_OP_CALL(FUNC);
+ LOG_OP_CALL(__func__);
file->xmit.xmit_count = (file->xmit_i)++;
file->xmit.op = H5FD_MIRROR_OP_TRUNCATE;
@@ -1849,9 +1849,9 @@ H5FD__mirror_lock(H5FD_t *_file, hbool_t rw)
H5FD_mirror_t *file = (H5FD_mirror_t *)_file;
herr_t ret_value = SUCCEED;
- FUNC_ENTER_STATIC
+ FUNC_ENTER_PACKAGE
- LOG_OP_CALL(FUNC);
+ LOG_OP_CALL(__func__);
file->xmit.xmit_count = (file->xmit_i)++;
file->xmit.op = H5FD_MIRROR_OP_LOCK;
@@ -1896,9 +1896,9 @@ H5FD__mirror_unlock(H5FD_t *_file)
H5FD_mirror_t *file = (H5FD_mirror_t *)_file;
herr_t ret_value = SUCCEED;
- FUNC_ENTER_STATIC
+ FUNC_ENTER_PACKAGE
- LOG_OP_CALL(FUNC);
+ LOG_OP_CALL(__func__);
file->xmit.xmit_count = (file->xmit_i)++;
file->xmit.op = H5FD_MIRROR_OP_UNLOCK;
diff --git a/test/mirror_vfd.c b/test/mirror_vfd.c
index f8caea8a57c..b4f4c2c185e 100644
--- a/test/mirror_vfd.c
+++ b/test/mirror_vfd.c
@@ -75,6 +75,25 @@ static unsigned int g_verbosity = DEFAULT_VERBOSITY;
#define MIRR_MESG_SIZE 128
static char mesg[MIRR_MESG_SIZE + 1];
+/* ----------------------------------------------------------------------------
+ * Structure: struct mt_opts
+ *
+ * Purpose: Convenience structure to hold options as parsed from the
+ * command line.
+ *
+ * `portno` (int)
+ * Port number, as received from arguments.
+ *
+ * `ip` (char *)
+ * IP address string as received from arguments.
+ *
+ * ----------------------------------------------------------------------------
+ */
+struct mt_opts {
+ int portno;
+ char ip[H5FD_MIRROR_MAX_IP_LEN + 1];
+};
+
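
[Editorial note: purely as an illustration of how an options structure like `mt_opts`
might be filled from the command line. The parser and the SKETCH_MAX_IP_LEN constant
below are hypothetical stand-ins, not code from this patch.]

#include <stdlib.h>
#include <string.h>

#define SKETCH_MAX_IP_LEN 32 /* stand-in for H5FD_MIRROR_MAX_IP_LEN */

struct opts_sketch {
    int  portno;                    /* port number from arguments */
    char ip[SKETCH_MAX_IP_LEN + 1]; /* IP address string          */
};

/* Hypothetical parser: accepts "--port=<n>" and "--ip=<addr>" arguments
 * and fills the structure; returns 0 on success, -1 on a bad argument. */
static int
parse_opts_sketch(int argc, char *argv[], struct opts_sketch *out)
{
    int i;

    out->portno = 0;
    out->ip[0]  = '\0';

    for (i = 1; i < argc; i++) {
        if (strncmp(argv[i], "--port=", 7) == 0)
            out->portno = atoi(argv[i] + 7);
        else if (strncmp(argv[i], "--ip=", 5) == 0) {
            strncpy(out->ip, argv[i] + 5, SKETCH_MAX_IP_LEN);
            out->ip[SKETCH_MAX_IP_LEN] = '\0';
        }
        else
            return -1;
    }
    return 0;
}
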
/* Convenience structure for passing file names via helper functions.
*/
struct mirrortest_filenames {
@@ -95,82 +114,57 @@ static herr_t _create_chunking_ids(hid_t file_id, unsigned min_dset, unsigned ma
static herr_t _close_chunking_ids(unsigned min_dset, unsigned max_dset, hid_t *dataspace_ids,
hid_t *filespace_ids, hid_t *dataset_ids, hid_t *memspace_id);
-static herr_t _populate_filepath(const char *dirname, const char *_basename, hid_t fapl_id, char *path_out,
- hbool_t h5suffix);
-
-static hid_t create_mirroring_split_fapl(const char *_basename, struct mirrortest_filenames *names);
-
-static void mybzero(void *dest, size_t size);
+static herr_t populate_filepath(const char *dirname, const char *_basename, hid_t fapl_id, char *path_out,
+ hbool_t h5suffix);
-/* ----------------------------------------------------------------------------
- * Function: mybzero
- *
- * Purpose: Have bzero simplicity and abstraction in (possible) absence of
- * it being available.
- *
- * Programmer: Jacob Smith
- * 2020-03-30
- * ----------------------------------------------------------------------------
- */
-static void
-mybzero(void *dest, size_t size)
-{
- size_t i = 0;
- char *s = NULL;
- HDassert(dest != NULL);
- s = (char *)dest;
- for (i = 0; i < size; i++) {
- *(s + i) = 0;
- }
-} /* end mybzero() */
+static hid_t create_mirroring_split_fapl(const char *_basename, struct mirrortest_filenames *names,
+ const struct mt_opts *opts);
/* ----------------------------------------------------------------------------
- * Function: _populate_filepath
+ * Function: populate_filepath
*
* Purpose: Given a directory name and a base name, concatenate the two and
* run h5fixname() to get the "actual" path to the intended target.
* `h5suffix' should be FALSE to keep the base name unaltered;
* TRUE will append the '.h5' h5suffix to the basename...
* FALSE -> h5fixname_no_suffix(), TRUE -> h5fixname()
- * / / <_basename>
+ * / / |
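
[Editorial note: to make the path-building behaviour described above concrete, here is a
rough standalone sketch of the concatenation-plus-optional-suffix step. It does not call
the real h5fixname()/h5fixname_no_suffix() helpers; all names below are placeholders.]

#include <stdbool.h>
#include <stdio.h>

/* Rough sketch: join dirname and basename and, when add_suffix is true,
 * append ".h5", mirroring the h5suffix flag described in the comment. */
static int
build_path_sketch(const char *dirname, const char *basename, bool add_suffix, char *out, size_t out_size)
{
    int n = snprintf(out, out_size, "%s/%s%s", dirname, basename, add_suffix ? ".h5" : "");

    return (n < 0 || (size_t)n >= out_size) ? -1 : 0;
}
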