diff --git a/src/lightcone/lightcone_particle_io.c b/src/lightcone/lightcone_particle_io.c
index 9d3584412a56ca60e077fb9f3d8d59e24f7276a7..10c0a8b9ca29d8125b542ff50f667723ecb92ac4 100644
--- a/src/lightcone/lightcone_particle_io.c
+++ b/src/lightcone/lightcone_particle_io.c
@@ -642,34 +642,33 @@ void append_dataset(const struct unit_system *snapshot_units,
                     hid_t mem_type_id, hsize_t chunk_size,
                     int lossy_compression,
                     enum lossy_compression_schemes compression_scheme,
-                    int gzip_level, const int rank, const hsize_t *dims,
+                    int gzip_level, const int rank, const hsize_t dims[2],
                     const hsize_t num_written, const void *data) {
 
-  const int max_rank = 2;
-  if (rank > max_rank)
-    error("HDF5 dataset has too may dimensions. Increase max_rank.");
+  if (rank > 2)
+    error("HDF5 dataset has too many dimensions.");
   if (rank < 1) error("HDF5 dataset must be at least one dimensional");
 
   /* If we have zero elements to append, there's nothing to do */
   if (dims[0] == 0) return;
 
   /* Determine size of the dataset after we append our data */
-  hsize_t full_dims[max_rank];
+  hsize_t full_dims[rank];
   for (int i = 0; i < rank; i += 1) full_dims[i] = dims[i];
   full_dims[0] += num_written;
 
   /* Determine maximum size in each dimension */
-  hsize_t max_dims[max_rank];
+  hsize_t max_dims[rank];
   for (int i = 1; i < rank; i += 1) max_dims[i] = full_dims[i];
   max_dims[0] = H5S_UNLIMITED;
 
   /* Determine chunk size in each dimension */
-  hsize_t chunk_dims[max_rank];
+  hsize_t chunk_dims[rank];
   for (int i = 1; i < rank; i += 1) chunk_dims[i] = full_dims[i];
   chunk_dims[0] = (hsize_t)chunk_size;
 
   /* Find offset to region to write in each dimension */
-  hsize_t offset[max_rank];
+  hsize_t offset[rank];
   for (int i = 1; i < rank; i += 1) offset[i] = 0;
   offset[0] = num_written;
 
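For context, the arrays that the patch now sizes by `rank` feed HDF5's machinery for chunked, extendible datasets. The sketch below is illustrative only: the function name `make_extendible_dataspace` and its signature are not part of the patch, and it assumes the standard HDF5 C API. It shows the same pattern of building the current extent, the unlimited maximum extent, and the chunk shape from rank-sized arrays.

#include <hdf5.h>

/* Illustrative sketch only: build the dataspace and creation property list
 * for a chunked dataset whose first dimension can grow without bound,
 * using rank-sized arrays as in the patched append_dataset(). */
static hid_t make_extendible_dataspace(const int rank, const hsize_t dims[2],
                                        hsize_t chunk_size, hid_t *prop_id_out) {

  /* Current extent in each dimension */
  hsize_t full_dims[rank];
  for (int i = 0; i < rank; i += 1) full_dims[i] = dims[i];

  /* Maximum extent: unlimited along the first (append) dimension */
  hsize_t max_dims[rank];
  for (int i = 1; i < rank; i += 1) max_dims[i] = full_dims[i];
  max_dims[0] = H5S_UNLIMITED;

  /* Chunk shape: HDF5 requires chunking for extendible datasets */
  hsize_t chunk_dims[rank];
  for (int i = 1; i < rank; i += 1) chunk_dims[i] = full_dims[i];
  chunk_dims[0] = chunk_size;

  hid_t prop_id = H5Pcreate(H5P_DATASET_CREATE);
  H5Pset_chunk(prop_id, rank, chunk_dims);
  *prop_id_out = prop_id;

  return H5Screate_simple(rank, full_dims, max_dims);
}

A caller would typically pass the returned dataspace and property list to H5Dcreate2() and close both handles afterwards; the patched routine manipulates the same kind of arrays when extending and appending to an existing dataset.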