Skip to content
Snippets Groups Projects

HDF5 1.10.2 parallel read workaround

Merged Matthieu Schaller requested to merge hdf5_1_10_2_workaround into master
1 file
+ 51
0
Compare changes
  • Side-by-side
  • Inline
+ 51
0
@@ -206,6 +206,57 @@ void readArray(hid_t grp, struct io_props props, size_t N, long long N_total,
@@ -206,6 +206,57 @@ void readArray(hid_t grp, struct io_props props, size_t N, long long N_total,
const hid_t h_data = H5Dopen2(grp, props.name, H5P_DEFAULT);
const hid_t h_data = H5Dopen2(grp, props.name, H5P_DEFAULT);
if (h_data < 0) error("Error while opening data space '%s'.", props.name);
if (h_data < 0) error("Error while opening data space '%s'.", props.name);
 
/* Parallel-HDF5 1.10.2 incorrectly reads data that was compressed */
 
/* We detect this here and crash with an error message instead of */
 
/* continuing with garbage data. */
 
#if H5_VERSION_LE(1, 10, 2) && H5_VERSION_GE(1, 10, 2)
 
if (mpi_rank == 0) {
 
 
/* Recover the list of filters that were applied to the data */
 
const hid_t h_plist = H5Dget_create_plist(h_data);
 
if (h_plist < 0)
 
error("Error getting property list for data set '%s'", props.name);
 
 
/* Recover the number of filters in the list */
 
const int n_filters = H5Pget_nfilters(h_plist);
 
 
for (int n = 0; n < n_filters; ++n) {
 
 
unsigned int flag;
 
size_t cd_nelmts = 32;
 
unsigned int* cd_values = malloc(cd_nelmts * sizeof(unsigned int));
 
size_t namelen = 256;
 
char* name = calloc(namelen, sizeof(char));
 
unsigned int filter_config;
 
 
/* Recover the n^th filter in the list */
 
const H5Z_filter_t filter =
 
H5Pget_filter(h_plist, n, &flag, &cd_nelmts, cd_values, namelen, name,
 
&filter_config);
 
if (filter < 0)
 
error("Error retrieving %d^th (%d) filter for data set '%s'", n,
 
n_filters, props.name);
 
 
/* Now check whether the deflate filter had been applied */
 
if (filter == H5Z_FILTER_DEFLATE)
 
error(
 
"HDF5 1.10.2 cannot correctly read data that was compressed with "
 
"the 'deflate' filter.\nThe field '%s' has had this filter applied "
 
"and the code would silently read garbage into the particle arrays "
 
"so we'd rather stop here. You can:\n - Recompile the code with an "
 
"earlier or older version of HDF5.\n - Use the 'h5repack' tool to "
 
"remove the filter from the ICs (e.g. h5repack -f NONE -i in_file "
 
"-o out_file).\n",
 
props.name);
 
 
free(name);
 
free(cd_values);
 
}
 
 
H5Pclose(h_plist);
 
}
 
#endif
 
/* Create property list for collective dataset read. */
/* Create property list for collective dataset read. */
const hid_t h_plist_id = H5Pcreate(H5P_DATASET_XFER);
const hid_t h_plist_id = H5Pcreate(H5P_DATASET_XFER);
H5Pset_dxpl_mpio(h_plist_id, H5FD_MPIO_COLLECTIVE);
H5Pset_dxpl_mpio(h_plist_id, H5FD_MPIO_COLLECTIVE);
Loading