/*******************************************************************************
 * This file is part of SWIFT.
 * Copyright (c) 2012 Pedro Gonnet (pedro.gonnet@durham.ac.uk),
 *                    Matthieu Schaller (matthieu.schaller@durham.ac.uk).
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 *
 ******************************************************************************/

/* Config parameters. */
#include "../config.h"

#if defined(HAVE_HDF5) && defined(WITH_MPI) && !defined(HAVE_PARALLEL_HDF5)

/* Some standard headers. */
#include <float.h>
#include <hdf5.h>
#include <math.h>
#include <mpi.h>
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>

/* This object's header. */
#include "serial_io.h"

/* Local includes. */
#include "black_holes_io.h"
#include "chemistry_io.h"
#include "common_io.h"
#include "cooling_io.h"
#include "dimension.h"
#include "engine.h"
#include "error.h"
#include "fof_io.h"
#include "gravity_io.h"
#include "gravity_properties.h"
#include "hydro_io.h"
#include "hydro_properties.h"
#include "io_properties.h"
#include "memuse.h"
#include "output_list.h"
#include "output_options.h"
#include "part.h"
#include "part_type.h"
#include "sink_io.h"
#include "star_formation_io.h"
#include "stars_io.h"
#include "tracers_io.h"
#include "units.h"
#include "velociraptor_io.h"
#include "xmf.h"
65 66 67 68 69

/**
 * @brief Reads a data array from a given HDF5 group.
 *
 * @param grp The group from which to read.
70 71 72 73 74 75
 * @param props The #io_props of the field to read
 * @param N The number of particles to read on this rank.
 * @param N_total The total number of particles on all ranks.
 * @param offset The offset position where this rank starts reading.
 * @param internal_units The #unit_system used internally
 * @param ic_units The #unit_system used in the ICs
76
 * @param cleanup_h Are we removing h-factors from the ICs?
77 78
 * @param cleanup_sqrt_a Are we cleaning-up the sqrt(a) factors in the Gadget
 * IC velocities?
79
 * @param h The value of the reduced Hubble constant to use for cleaning.
80
 * @param a The current value of the scale-factor.
81
 *
82
 * @todo A better version using HDF5 hyper-slabs to read the file directly into
83
 * the part array will be written once the structures have been stabilized.
84
 */
85 86 87 88 89
void read_array_serial(hid_t grp, const struct io_props props, size_t N,
                       long long N_total, long long offset,
                       const struct unit_system* internal_units,
                       const struct unit_system* ic_units, int cleanup_h,
                       int cleanup_sqrt_a, double h, double a) {
90

91
  const size_t typeSize = io_sizeof_type(props.type);
92 93
  const size_t copySize = typeSize * props.dimension;
  const size_t num_elements = N * props.dimension;
94 95

  /* Check whether the dataspace exists or not */
96
  const htri_t exist = H5Lexists(grp, props.name, 0);
97
  if (exist < 0) {
98
    error("Error while checking the existence of data set '%s'.", props.name);
99
  } else if (exist == 0) {
100 101
    if (props.importance == COMPULSORY) {
      error("Compulsory data set '%s' not present in the file.", props.name);
102
    } else {
103 104
      for (size_t i = 0; i < N; ++i)
        memset(props.field + i * props.partSize, 0, copySize);
105
      return;
106
    }
107
  }
108

109 110 111
  /* message( "Reading %s '%s' array...", importance == COMPULSORY ? */
  /* 	   "compulsory": "optional  ", name); */
  /* fflush(stdout); */
112 113

  /* Open data space */
114 115
  const hid_t h_data = H5Dopen(grp, props.name, H5P_DEFAULT);
  if (h_data < 0) error("Error while opening data space '%s'.", props.name);
116 117

  /* Allocate temporary buffer */
118
  void* temp = malloc(num_elements * typeSize);
119
  if (temp == NULL) error("Unable to allocate memory for temporary buffer");
120

121
  /* Prepare information for hyper-slab */
122 123 124
  hsize_t shape[2], offsets[2];
  int rank;
  if (props.dimension > 1) {
125 126
    rank = 2;
    shape[0] = N;
127
    shape[1] = props.dimension;
128 129 130
    offsets[0] = offset;
    offsets[1] = 0;
  } else {
131
    rank = 2;
132
    shape[0] = N;
133
    shape[1] = 1;
134 135 136
    offsets[0] = offset;
    offsets[1] = 0;
  }
137 138

  /* Create data space in memory */
139
  const hid_t h_memspace = H5Screate_simple(rank, shape, NULL);
140

141
  /* Select hyper-slab in file */
142
  const hid_t h_filespace = H5Dget_space(h_data);
143 144
  H5Sselect_hyperslab(h_filespace, H5S_SELECT_SET, offsets, NULL, shape, NULL);

145 146 147
  /* Read HDF5 dataspace in temporary buffer */
  /* Dirty version that happens to work for vectors but should be improved */
  /* Using HDF5 dataspaces would be better */
148
  const hid_t h_err = H5Dread(h_data, io_hdf5_type(props.type), h_memspace,
149
                              h_filespace, H5P_DEFAULT, temp);
150
  if (h_err < 0) error("Error while reading data array '%s'.", props.name);
151 152 153 154 155 156

  /* Unit conversion if necessary */
  const double factor =
      units_conversion_factor(ic_units, internal_units, props.units);
  if (factor != 1. && exist != 0) {

Matthieu Schaller's avatar
Matthieu Schaller committed
157
    /* message("Converting ! factor=%e", factor); */
158

159
    if (io_is_double_precision(props.type)) {
Matthieu Schaller's avatar
Matthieu Schaller committed
160
      double* temp_d = (double*)temp;
161 162
      for (size_t i = 0; i < num_elements; ++i) temp_d[i] *= factor;
    } else {
Matthieu Schaller's avatar
Matthieu Schaller committed
163
      float* temp_f = (float*)temp;
164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194

#ifdef SWIFT_DEBUG_CHECKS
      float maximum = 0.f;
      float minimum = FLT_MAX;
#endif

      /* Loop that converts the Units */
      for (size_t i = 0; i < num_elements; ++i) {

#ifdef SWIFT_DEBUG_CHECKS
        /* Find the absolute minimum and maximum values */
        const float abstemp_f = fabsf(temp_f[i]);
        if (abstemp_f != 0.f) {
          maximum = max(maximum, abstemp_f);
          minimum = min(minimum, abstemp_f);
        }
#endif

        /* Convert the float units */
        temp_f[i] *= factor;
      }

#ifdef SWIFT_DEBUG_CHECKS
      /* The two possible errors: larger than float or smaller
       * than float precision. */
      if (factor * maximum > FLT_MAX) {
        error("Unit conversion results in numbers larger than floats");
      } else if (factor * minimum < FLT_MIN) {
        error("Numbers smaller than float precision");
      }
#endif
195
    }
196
  }
197

198 199
  /* Clean-up h if necessary */
  const float h_factor_exp = units_h_factor(internal_units, props.units);
200
  if (cleanup_h && h_factor_exp != 0.f && exist != 0) {
201 202 203 204 205 206

    /* message("Multipltying '%s' by h^%f=%f", props.name, h_factor_exp,
     * h_factor); */

    if (io_is_double_precision(props.type)) {
      double* temp_d = (double*)temp;
207
      const double h_factor = pow(h, h_factor_exp);
208 209 210
      for (size_t i = 0; i < num_elements; ++i) temp_d[i] *= h_factor;
    } else {
      float* temp_f = (float*)temp;
211
      const float h_factor = pow(h, h_factor_exp);
212 213 214 215
      for (size_t i = 0; i < num_elements; ++i) temp_f[i] *= h_factor;
    }
  }

216 217 218 219 220 221 222 223 224 225 226 227 228 229
  /* Clean-up a if necessary */
  if (cleanup_sqrt_a && a != 1. && (strcmp(props.name, "Velocities") == 0)) {

    if (io_is_double_precision(props.type)) {
      double* temp_d = (double*)temp;
      const double vel_factor = sqrt(a);
      for (size_t i = 0; i < num_elements; ++i) temp_d[i] *= vel_factor;
    } else {
      float* temp_f = (float*)temp;
      const float vel_factor = sqrt(a);
      for (size_t i = 0; i < num_elements; ++i) temp_f[i] *= vel_factor;
    }
  }

230
  /* Copy temporary buffer to particle data */
Matthieu Schaller's avatar
Matthieu Schaller committed
231
  char* temp_c = (char*)temp;
232 233
  for (size_t i = 0; i < N; ++i)
    memcpy(props.field + i * props.partSize, &temp_c[i * copySize], copySize);
234

235 236
  /* Free and close everything */
  free(temp);
237 238
  H5Sclose(h_filespace);
  H5Sclose(h_memspace);
239 240 241
  H5Dclose(h_data);
}

242 243 244 245 246 247
void prepare_array_serial(const struct engine* e, hid_t grp, char* fileName,
                          FILE* xmfFile, char* partTypeGroupName,
                          const struct io_props props,
                          unsigned long long N_total,
                          const struct unit_system* internal_units,
                          const struct unit_system* snapshot_units) {
248 249

  /* Create data space */
250
  const hid_t h_space = H5Screate(H5S_SIMPLE);
251
  if (h_space < 0)
252
    error("Error while creating data space for field '%s'.", props.name);
253

254
  /* Decide what chunk size to use based on compression */
255
  int log2_chunk_size = e->snapshot_compression > 0 ? 12 : 18;
256

257 258 259 260
  int rank = 0;
  hsize_t shape[2];
  hsize_t chunk_shape[2];
  if (props.dimension > 1) {
261 262
    rank = 2;
    shape[0] = N_total;
263
    shape[1] = props.dimension;
264
    chunk_shape[0] = 1 << log2_chunk_size;
265
    chunk_shape[1] = props.dimension;
266 267 268 269
  } else {
    rank = 1;
    shape[0] = N_total;
    shape[1] = 0;
270
    chunk_shape[0] = 1 << log2_chunk_size;
271
    chunk_shape[1] = 0;
272 273
  }

274
  /* Make sure the chunks are not larger than the dataset */
275
  if (chunk_shape[0] > N_total) chunk_shape[0] = N_total;
276

277
  /* Change shape of data space */
278
  hid_t h_err = H5Sset_extent_simple(h_space, rank, shape, shape);
279
  if (h_err < 0)
280
    error("Error while changing data space shape for field '%s'.", props.name);
281

282
  /* Dataset properties */
283
  const hid_t h_prop = H5Pcreate(H5P_DATASET_CREATE);
284 285 286

  /* Set chunk size */
  h_err = H5Pset_chunk(h_prop, rank, chunk_shape);
287
  if (h_err < 0)
288
    error("Error while setting chunk size (%llu, %llu) for field '%s'.",
289
          chunk_shape[0], chunk_shape[1], props.name);
290

291 292 293 294
  /* Impose check-sum to verify data corruption */
  h_err = H5Pset_fletcher32(h_prop);
  if (h_err < 0)
    error("Error while setting checksum options for field '%s'.", props.name);
295 296

  /* Impose data compression */
297
  if (e->snapshot_compression > 0) {
298 299 300 301 302
    h_err = H5Pset_shuffle(h_prop);
    if (h_err < 0)
      error("Error while setting shuffling options for field '%s'.",
            props.name);

303
    h_err = H5Pset_deflate(h_prop, e->snapshot_compression);
304
    if (h_err < 0)
305
      error("Error while setting compression options for field '%s'.",
306
            props.name);
307 308
  }

309
  /* Create dataset */
310 311
  const hid_t h_data = H5Dcreate(grp, props.name, io_hdf5_type(props.type),
                                 h_space, H5P_DEFAULT, h_prop, H5P_DEFAULT);
312
  if (h_data < 0) error("Error while creating dataspace '%s'.", props.name);
313 314

  /* Write XMF description for this data set */
315 316
  if (xmfFile != NULL)
    xmf_write_line(xmfFile, fileName, partTypeGroupName, props.name, N_total,
Loic Hausammann's avatar
Loic Hausammann committed
317
                   props.dimension, props.type);
318 319

  /* Write unit conversion factors for this data set */
320
  char buffer[FIELD_BUFFER_SIZE] = {0};
321 322
  units_cgs_conversion_string(buffer, snapshot_units, props.units,
                              props.scale_factor_exponent);
323 324 325 326 327 328 329
  float baseUnitsExp[5];
  units_get_base_unit_exponents_array(baseUnitsExp, props.units);
  io_write_attribute_f(h_data, "U_M exponent", baseUnitsExp[UNIT_MASS]);
  io_write_attribute_f(h_data, "U_L exponent", baseUnitsExp[UNIT_LENGTH]);
  io_write_attribute_f(h_data, "U_t exponent", baseUnitsExp[UNIT_TIME]);
  io_write_attribute_f(h_data, "U_I exponent", baseUnitsExp[UNIT_CURRENT]);
  io_write_attribute_f(h_data, "U_T exponent", baseUnitsExp[UNIT_TEMPERATURE]);
330 331
  io_write_attribute_f(h_data, "h-scale exponent", 0.f);
  io_write_attribute_f(h_data, "a-scale exponent", props.scale_factor_exponent);
332 333 334
  io_write_attribute_s(h_data, "Expression for physical CGS units", buffer);

  /* Write the actual number this conversion factor corresponds to */
335 336
  const double factor =
      units_cgs_conversion_factor(snapshot_units, props.units);
337 338 339 340 341 342
  io_write_attribute_d(
      h_data,
      "Conversion factor to CGS (not including cosmological corrections)",
      factor);
  io_write_attribute_d(
      h_data,
343
      "Conversion factor to physical CGS (including cosmological corrections)",
344 345 346 347 348 349 350 351 352
      factor * pow(e->cosmology->a, props.scale_factor_exponent));

#ifdef SWIFT_DEBUG_CHECKS
  if (strlen(props.description) == 0)
    error("Invalid (empty) description of the field '%s'", props.name);
#endif

  /* Write the full description */
  io_write_attribute_s(h_data, "Description", props.description);
353

354
  /* Close everything */
355
  H5Pclose(h_prop);
356 357 358 359
  H5Dclose(h_data);
  H5Sclose(h_space);
}

360 361 362
/**
 * @brief Writes a data array in given HDF5 group.
 *
363
 * @param e The #engine we are writing from.
364
 * @param grp The group in which to write.
365 366
 * @param fileName The name of the file in which the data is written
 * @param xmfFile The FILE used to write the XMF description
367
 * @param partTypeGroupName The name of the group containing the particles in
368 369
 * the HDF5 file.
 * @param props The #io_props of the field to read
370
 * @param N The number of particles to write.
371 372 373 374 375 376 377 378
 * @param N_total The total number of particles on all ranks.
 * @param offset The offset position where this rank starts writing.
 * @param mpi_rank The MPI rank of this node
 * @param internal_units The #unit_system used internally
 * @param snapshot_units The #unit_system used in the snapshots
 *
 * @todo A better version using HDF5 hyper-slabs to write the file directly from
 * the part array will be written once the structures have been stabilized.
379
 */
380 381 382 383 384 385
void write_array_serial(const struct engine* e, hid_t grp, char* fileName,
                        FILE* xmfFile, char* partTypeGroupName,
                        const struct io_props props, size_t N,
                        long long N_total, int mpi_rank, long long offset,
                        const struct unit_system* internal_units,
                        const struct unit_system* snapshot_units) {
386

387
  const size_t typeSize = io_sizeof_type(props.type);
388
  const size_t num_elements = N * props.dimension;
389

Matthieu Schaller's avatar
Matthieu Schaller committed
390
  /* message("Writing '%s' array...", props.name); */
391

392
  /* Prepare the arrays in the file */
Matthieu Schaller's avatar
Matthieu Schaller committed
393
  if (mpi_rank == 0)
394 395
    prepare_array_serial(e, grp, fileName, xmfFile, partTypeGroupName, props,
                         N_total, internal_units, snapshot_units);
396

397
  /* Allocate temporary buffer */
398
  void* temp = NULL;
Peter W. Draper's avatar
Peter W. Draper committed
399
  if (swift_memalign("writebuff", (void**)&temp, IO_BUFFER_ALIGNMENT,
400
                     num_elements * typeSize) != 0)
401
    error("Unable to allocate temporary i/o buffer");
402

403 404
  /* Copy the particle data to the temporary buffer */
  io_copy_temp_buffer(temp, e, props, N, internal_units, snapshot_units);
405

406
  /* Construct information for the hyper-slab */
407 408 409 410
  int rank;
  hsize_t shape[2];
  hsize_t offsets[2];
  if (props.dimension > 1) {
411 412
    rank = 2;
    shape[0] = N;
413
    shape[1] = props.dimension;
414 415 416 417 418 419 420 421 422
    offsets[0] = offset;
    offsets[1] = 0;
  } else {
    rank = 1;
    shape[0] = N;
    shape[1] = 0;
    offsets[0] = offset;
    offsets[1] = 0;
  }
423 424

  /* Create data space in memory */
425
  const hid_t h_memspace = H5Screate(H5S_SIMPLE);
426
  if (h_memspace < 0)
427 428
    error("Error while creating data space (memory) for field '%s'.",
          props.name);
429 430

  /* Change shape of memory data space */
431
  hid_t h_err = H5Sset_extent_simple(h_memspace, rank, shape, NULL);
432 433
  if (h_err < 0)
    error("Error while changing data space (memory) shape for field '%s'.",
434
          props.name);
435

436
  /* Open pre-existing data set */
437 438
  const hid_t h_data = H5Dopen(grp, props.name, H5P_DEFAULT);
  if (h_data < 0) error("Error while opening dataset '%s'.", props.name);
439

440
  /* Select data space in that data set */
441
  const hid_t h_filespace = H5Dget_space(h_data);
442
  H5Sselect_hyperslab(h_filespace, H5S_SELECT_SET, offsets, NULL, shape, NULL);
443

444
  /* Write temporary buffer to HDF5 dataspace */
445
  h_err = H5Dwrite(h_data, io_hdf5_type(props.type), h_memspace, h_filespace,
446 447
                   H5P_DEFAULT, temp);
  if (h_err < 0) error("Error while writing data array '%s'.", props.name);
448

449
  /* Free and close everything */
Peter W. Draper's avatar
Peter W. Draper committed
450
  swift_free("writebuff", temp);
451
  H5Dclose(h_data);
452 453
  H5Sclose(h_memspace);
  H5Sclose(h_filespace);
454 455
}

456 457 458 459
/**
 * @brief Reads an HDF5 initial condition file (GADGET-3 type)
 *
 * @param fileName The file to read.
 * @param internal_units The system units used internally
 * @param dim (output) The dimension of the volume read from the file.
 * @param parts (output) The array of #part (gas particles) read from the file.
 * @param gparts (output) The array of #gpart read from the file.
 * @param sinks (output) Array of #sink particles.
 * @param sparts (output) Array of #spart particles.
 * @param bparts (output) Array of #bpart particles.
 * @param Ngas (output) The number of #part read from the file on that node.
 * @param Ngparts (output) The number of #gpart read from the file on that node.
 * @param Ngparts_background (output) The number of background #gpart (type 2)
 * read from the file on that node.
 * @param Nsinks (output) The number of #sink read from the file on that node.
 * @param Nstars (output) The number of #spart read from the file on that node.
 * @param Nblackholes (output) The number of #bpart read from the file on that
 * node.
 * @param flag_entropy (output) 1 if the ICs contained Entropy in the
 * InternalEnergy field
 * @param with_hydro Are we reading gas particles ?
 * @param with_gravity Are we reading/creating #gpart arrays ?
 * @param with_sink Are we reading sink particles ?
 * @param with_stars Are we reading star particles ?
 * @param with_black_holes Are we reading black hole particles ?
 * @param with_cosmology Are we running with cosmology ?
 * @param cleanup_h Are we cleaning-up h-factors from the quantities we read?
 * @param cleanup_sqrt_a Are we cleaning-up the sqrt(a) factors in the Gadget
 * IC velocities?
 * @param h The value of the reduced Hubble constant to use for correction.
 * @param a The current value of the scale-factor.
 * @param mpi_rank The MPI rank of this node
 * @param mpi_size The number of MPI ranks
 * @param comm The MPI communicator
 * @param info The MPI information object
 * @param n_threads The number of threads to use for local operations.
 * @param dry_run If 1, don't read the particle. Only allocates the arrays.
 * @param remap_ids Are we ignoring the IDs in the ICs and remapping them?
 *
 * Opens the HDF5 file fileName and reads the particles contained
 * in the parts array. N is the returned number of particles found
 * in the file.
 *
 * @warning Can not read snapshot distributed over more than 1 file !!!
 * @todo Read snapshots distributed in more than one file.
 *
 */
503
void read_ic_serial(char* fileName, const struct unit_system* internal_units,
504
                    double dim[3], struct part** parts, struct gpart** gparts,
Loic Hausammann's avatar
Loic Hausammann committed
505 506 507
                    struct sink** sinks, struct spart** sparts,
                    struct bpart** bparts, size_t* Ngas, size_t* Ngparts,
                    size_t* Ngparts_background, size_t* Nsinks, size_t* Nstars,
508
                    size_t* Nblackholes, int* flag_entropy, int with_hydro,
Loic Hausammann's avatar
Loic Hausammann committed
509 510 511 512
                    int with_gravity, int with_sink, int with_stars,
                    int with_black_holes, int with_cosmology, int cleanup_h,
                    int cleanup_sqrt_a, double h, double a, int mpi_rank,
                    int mpi_size, MPI_Comm comm, MPI_Info info, int n_threads,
513
                    int dry_run, int remap_ids) {
514

515 516
  hid_t h_file = 0, h_grp = 0;
  /* GADGET has only cubic boxes (in cosmological mode) */
517 518
  double boxSize[3] = {0.0, -1.0, -1.0};
  /* GADGET has 6 particle types. We only keep the type 0 & 1 for now*/
519 520 521 522 523
  long long numParticles[swift_type_count] = {0};
  long long numParticles_highWord[swift_type_count] = {0};
  size_t N[swift_type_count] = {0};
  long long N_total[swift_type_count] = {0};
  long long offset[swift_type_count] = {0};
524
  int dimension = 3; /* Assume 3D if nothing is specified */
525
  size_t Ndm = 0;
526
  size_t Ndm_background = 0;
Matthieu Schaller's avatar
Matthieu Schaller committed
527 528
  struct unit_system* ic_units =
      (struct unit_system*)malloc(sizeof(struct unit_system));
529

530
  /* Initialise counters */
531
  *Ngas = 0, *Ngparts = 0, *Ngparts_background = 0, *Nstars = 0,
Loic Hausammann's avatar
Loic Hausammann committed
532
  *Nblackholes = 0, *Nsinks = 0;
533

534 535 536 537 538 539 540 541 542 543 544
  /* First read some information about the content */
  if (mpi_rank == 0) {

    /* Open file */
    /* message("Opening file '%s' as IC.", fileName); */
    h_file = H5Fopen(fileName, H5F_ACC_RDONLY, H5P_DEFAULT);
    if (h_file < 0)
      error("Error while opening file '%s' for initial read.", fileName);

    /* Open header to read simulation properties */
    /* message("Reading file header..."); */
545
    h_grp = H5Gopen(h_file, "/Header", H5P_DEFAULT);
546 547
    if (h_grp < 0) error("Error while opening file header\n");

548 549 550 551
    /* Check the dimensionality of the ICs (if the info exists) */
    const hid_t hid_dim = H5Aexists(h_grp, "Dimension");
    if (hid_dim < 0)
      error("Error while testing existance of 'Dimension' attribute");
552
    if (hid_dim > 0) io_read_attribute(h_grp, "Dimension", INT, &dimension);
553 554 555 556
    if (dimension != hydro_dimension)
      error("ICs dimensionality (%dD) does not match code dimensionality (%dD)",
            dimension, (int)hydro_dimension);

557 558
    /* Check whether the number of files is specified (if the info exists) */
    const hid_t hid_files = H5Aexists(h_grp, "NumFilesPerSnapshot");
559
    int num_files = 1;
560 561 562 563 564 565 566 567 568 569 570 571 572 573
    if (hid_files < 0)
      error(
          "Error while testing the existance of 'NumFilesPerSnapshot' "
          "attribute");
    if (hid_files > 0)
      io_read_attribute(h_grp, "NumFilesPerSnapshot", INT, &num_files);
    if (num_files != 1)
      error(
          "ICs are split over multiples files (%d). SWIFT cannot handle this "
          "case. The script /tools/combine_ics.py is availalbe in the "
          "repository "
          "to combine files into a valid input file.",
          num_files);

574
    /* Read the relevant information and print status */
575
    int flag_entropy_temp[6];
576
    io_read_attribute(h_grp, "Flag_Entropy_ICs", INT, flag_entropy_temp);
577
    *flag_entropy = flag_entropy_temp[0];
578 579 580 581
    io_read_attribute(h_grp, "BoxSize", DOUBLE, boxSize);
    io_read_attribute(h_grp, "NumPart_Total", LONGLONG, numParticles);
    io_read_attribute(h_grp, "NumPart_Total_HighWord", LONGLONG,
                      numParticles_highWord);
582

Josh Borrow's avatar
Josh Borrow committed
583 584 585 586 587 588 589
    /* Check that the user is not doing something silly when they e.g. restart
     * from a snapshot by asserting that the current scale-factor (from
     * parameter file) and the redshift in the header are consistent */
    if (with_cosmology) {
      io_assert_valid_header_cosmology(h_grp, a);
    }

590
    for (int ptype = 0; ptype < swift_type_count; ++ptype)
Matthieu Schaller's avatar
Matthieu Schaller committed
591 592
      N_total[ptype] =
          (numParticles[ptype]) + (numParticles_highWord[ptype] << 32);
593

594
    /* Get the box size if not cubic */
595 596 597 598
    dim[0] = boxSize[0];
    dim[1] = (boxSize[1] < 0) ? boxSize[0] : boxSize[1];
    dim[2] = (boxSize[2] < 0) ? boxSize[0] : boxSize[2];

599 600 601 602 603 604
    /* Change box size in the 1D and 2D case */
    if (hydro_dimension == 2)
      dim[2] = min(dim[0], dim[1]);
    else if (hydro_dimension == 1)
      dim[2] = dim[1] = dim[0];

605 606 607 608 609 610 611
    /* Convert the box size if we want to clean-up h-factors */
    if (cleanup_h) {
      dim[0] /= h;
      dim[1] /= h;
      dim[2] /= h;
    }

612
    /* message("Found %lld particles in a %speriodic box of size [%f %f %f].",
613
       N_total, (periodic ? "": "non-"), dim[0], dim[1], dim[2]); */
614 615 616 617

    /* Close header */
    H5Gclose(h_grp);

618 619
    /* Read the unit system used in the ICs */
    if (ic_units == NULL) error("Unable to allocate memory for IC unit system");
620
    io_read_unit_system(h_file, ic_units, internal_units, mpi_rank);
621 622 623

    if (units_are_equal(ic_units, internal_units)) {

Matthieu Schaller's avatar
Matthieu Schaller committed
624
      message("IC and internal units match. No conversion needed.");
625 626 627 628 629 630 631 632 633 634 635 636 637 638 639 640 641 642 643 644 645 646 647 648 649

    } else {

      message("Conversion needed from:");
      message("(ICs) Unit system: U_M =      %e g.", ic_units->UnitMass_in_cgs);
      message("(ICs) Unit system: U_L =      %e cm.",
              ic_units->UnitLength_in_cgs);
      message("(ICs) Unit system: U_t =      %e s.", ic_units->UnitTime_in_cgs);
      message("(ICs) Unit system: U_I =      %e A.",
              ic_units->UnitCurrent_in_cgs);
      message("(ICs) Unit system: U_T =      %e K.",
              ic_units->UnitTemperature_in_cgs);
      message("to:");
      message("(internal) Unit system: U_M = %e g.",
              internal_units->UnitMass_in_cgs);
      message("(internal) Unit system: U_L = %e cm.",
              internal_units->UnitLength_in_cgs);
      message("(internal) Unit system: U_t = %e s.",
              internal_units->UnitTime_in_cgs);
      message("(internal) Unit system: U_I = %e A.",
              internal_units->UnitCurrent_in_cgs);
      message("(internal) Unit system: U_T = %e K.",
              internal_units->UnitTemperature_in_cgs);
    }

650 651 652 653
    /* Close file */
    H5Fclose(h_file);
  }

654
  /* Convert the dimensions of the box */
Matthieu Schaller's avatar
Matthieu Schaller committed
655 656 657
  for (int j = 0; j < 3; j++)
    dim[j] *=
        units_conversion_factor(ic_units, internal_units, UNIT_CONV_LENGTH);
658

659
  /* Now need to broadcast that information to all ranks. */
660
  MPI_Bcast(flag_entropy, 1, MPI_INT, 0, comm);
661
  MPI_Bcast(N_total, swift_type_count, MPI_LONG_LONG_INT, 0, comm);
662
  MPI_Bcast(dim, 3, MPI_DOUBLE, 0, comm);
663
  MPI_Bcast(ic_units, sizeof(struct unit_system), MPI_BYTE, 0, comm);
664 665

  /* Divide the particles among the tasks. */
666
  for (int ptype = 0; ptype < swift_type_count; ++ptype) {
667 668 669
    offset[ptype] = mpi_rank * N_total[ptype] / mpi_size;
    N[ptype] = (mpi_rank + 1) * N_total[ptype] / mpi_size - offset[ptype];
  }
670

671
  /* Allocate memory to store SPH particles */
672 673
  if (with_hydro) {
    *Ngas = N[0];
674
    if (swift_memalign("parts", (void**)parts, part_align,
Matthieu Schaller's avatar
Matthieu Schaller committed
675
                       *Ngas * sizeof(struct part)) != 0)
676 677 678 679
      error("Error while allocating memory for SPH particles");
    bzero(*parts, *Ngas * sizeof(struct part));
  }

Loic Hausammann's avatar
Loic Hausammann committed
680 681 682 683 684 685 686 687 688
  /* Allocate memory to store sinks particles */
  if (with_sink) {
    *Nsinks = N[swift_type_sink];
    if (swift_memalign("sinks", (void**)sinks, sink_align,
                       *Nsinks * sizeof(struct sink)) != 0)
      error("Error while allocating memory for sink particles");
    bzero(*sinks, *Nsinks * sizeof(struct sink));
  }

689
  /* Allocate memory to store stars particles */
690
  if (with_stars) {
691
    *Nstars = N[swift_type_stars];
692
    if (swift_memalign("sparts", (void**)sparts, spart_align,
693
                       *Nstars * sizeof(struct spart)) != 0)
694
      error("Error while allocating memory for stars particles");
695 696 697
    bzero(*sparts, *Nstars * sizeof(struct spart));
  }

698 699 700 701 702 703 704 705
  /* Allocate memory to store stars particles */
  if (with_black_holes) {
    *Nblackholes = N[swift_type_black_hole];
    if (swift_memalign("bparts", (void**)bparts, bpart_align,
                       *Nblackholes * sizeof(struct bpart)) != 0)
      error("Error while allocating memory for black hole particles");
    bzero(*bparts, *Nblackholes * sizeof(struct bpart));
  }
706

707 708
  /* Allocate memory to store all gravity  particles */
  if (with_gravity) {
709
    Ndm = N[swift_type_dark_matter];
710
    Ndm_background = N[swift_type_dark_matter_background];
711 712
    *Ngparts = (with_hydro ? N[swift_type_gas] : 0) +
               N[swift_type_dark_matter] +
713
               N[swift_type_dark_matter_background] +
Loic Hausammann's avatar
Loic Hausammann committed
714
               (with_sink ? N[swift_type_sink] : 0) +
715 716
               (with_stars ? N[swift_type_stars] : 0) +
               (with_black_holes ? N[swift_type_black_hole] : 0);
717
    *Ngparts_background = Ndm_background;
718
    if (swift_memalign("gparts", (void**)gparts, gpart_align,
719 720 721 722
                       *Ngparts * sizeof(struct gpart)) != 0)
      error("Error while allocating memory for gravity particles");
    bzero(*gparts, *Ngparts * sizeof(struct gpart));
  }
Matthieu Schaller's avatar
Matthieu Schaller committed
723

724 725
  /* message("Allocated %8.2f MB for particles.", *N * sizeof(struct part) / */
  /* 	  (1024.*1024.)); */
726 727
  /* message("BoxSize = %lf", dim[0]); */
  /* message("NumPart = [%zd, %zd] Total = %zd", *Ngas, Ndm, *Ngparts); */
Matthieu Schaller's avatar
Matthieu Schaller committed
728

729 730 731
  /* For dry runs, only need to do this on rank 0 */
  if (dry_run) mpi_size = 1;

732
  /* Now loop over ranks and read the data */
733
  for (int rank = 0; rank < mpi_size; ++rank) {
734 735 736

    /* Is it this rank's turn to read ? */
    if (rank == mpi_rank) {
Matthieu Schaller's avatar
Matthieu Schaller committed
737

738 739 740 741
      h_file = H5Fopen(fileName, H5F_ACC_RDONLY, H5P_DEFAULT);
      if (h_file < 0)
        error("Error while opening file '%s' on rank %d.", fileName, mpi_rank);

742
      /* Loop over all particle types */
743
      for (int ptype = 0; ptype < swift_type_count; ptype++) {
744

Matthieu Schaller's avatar
Matthieu Schaller committed
745 746 747 748 749 750 751 752
        /* Don't do anything if no particle of this kind */
        if (N[ptype] == 0) continue;

        /* Open the particle group in the file */
        char partTypeGroupName[PARTICLE_GROUP_BUFFER_SIZE];
        snprintf(partTypeGroupName, PARTICLE_GROUP_BUFFER_SIZE, "/PartType%d",
                 ptype);
        h_grp = H5Gopen(h_file, partTypeGroupName, H5P_DEFAULT);
753
        if (h_grp < 0)
Matthieu Schaller's avatar
Matthieu Schaller committed
754 755
          error("Error while opening particle group %s.", partTypeGroupName);

756 757
        int num_fields = 0;
        struct io_props list[100];
758
        size_t Nparticles = 0;
759

Matthieu Schaller's avatar
Matthieu Schaller committed
760 761 762
        /* Read particle fields into the particle structure */
        switch (ptype) {

763
          case swift_type_gas:
764 765 766
            if (with_hydro) {
              Nparticles = *Ngas;
              hydro_read_particles(*parts, list, &num_fields);
767
              num_fields += chemistry_read_particles(*parts, list + num_fields);
768
            }
Matthieu Schaller's avatar
Matthieu Schaller committed
769 770
            break;

771
          case swift_type_dark_matter:
772 773 774 775 776 777
            if (with_gravity) {
              Nparticles = Ndm;
              darkmatter_read_particles(*gparts, list, &num_fields);
            }
            break;

778 779 780 781 782 783 784
          case swift_type_dark_matter_background:
            if (with_gravity) {
              Nparticles = Ndm_background;
              darkmatter_read_particles(*gparts + Ndm, list, &num_fields);
            }
            break;

Loic Hausammann's avatar
Loic Hausammann committed
785 786 787 788 789 790 791
          case swift_type_sink:
            if (with_sink) {
              Nparticles = *Nsinks;
              sink_read_particles(*sinks, list, &num_fields);
            }
            break;

792
          case swift_type_stars:
793 794
            if (with_stars) {
              Nparticles = *Nstars;
795
              stars_read_particles(*sparts, list, &num_fields);
796
            }
Matthieu Schaller's avatar
Matthieu Schaller committed
797 798
            break;

799 800 801 802 803 804
          case swift_type_black_hole:
            if (with_black_holes) {
              Nparticles = *Nblackholes;
              black_holes_read_particles(*bparts, list, &num_fields);
            }
            break;
805

Matthieu Schaller's avatar
Matthieu Schaller committed
806
          default:
807 808 809
            if (mpi_rank == 0)
              message("Particle Type %d not yet supported. Particles ignored",
                      ptype);
Matthieu Schaller's avatar
Matthieu Schaller committed
810 811
        }

812 813
        /* Read everything */
        if (!dry_run)
814 815 816 817 818 819
          for (int i = 0; i < num_fields; ++i) {
            /* If we are remapping ParticleIDs later, don't need to read them.
             */
            if (remap_ids && strcmp(list[i].name, "ParticleIDs") == 0) continue;

            /* Read array. */
820 821 822
            read_array_serial(h_grp, list[i], Nparticles, N_total[ptype],
                              offset[ptype], internal_units, ic_units,
                              cleanup_h, cleanup_sqrt_a, h, a);
823
          }
824

Matthieu Schaller's avatar
Matthieu Schaller committed
825 826
        /* Close particle group */
        H5Gclose(h_grp);
827
      }
Matthieu Schaller's avatar
Matthieu Schaller committed
828

829 830 831 832 833 834 835 836
      /* Close file */
      H5Fclose(h_file);
    }

    /* Wait for the read of the reading to complete */
    MPI_Barrier(comm);
  }

837 838 839
  /* If we are remapping ParticleIDs later, start by setting them to 1. */
  if (remap_ids) set_ids_to_one(*gparts, *Ngparts);

840 841
  /* Duplicate the parts for gravity */
  if (!dry_run && with_gravity) {
842

843 844 845
    /* Let's initialise a bit of thread parallelism here */
    struct threadpool tp;
    threadpool_init(&tp, n_threads);
846

847 848 849
    /* Prepare the DM particles */
    io_prepare_dm_gparts(&tp, *gparts, Ndm);

850 851 852
    /* Prepare the DM background particles */
    io_prepare_dm_background_gparts(&tp, *gparts + Ndm, Ndm_background);

853
    /* Duplicate the hydro particles into gparts */
854 855 856
    if (with_hydro)
      io_duplicate_hydro_gparts(&tp, *parts, *gparts, *Ngas,
                                Ndm + Ndm_background);
857

Loic Hausammann's avatar
Loic Hausammann committed
858 859 860 861 862
    /* Duplicate the sink particles into gparts */
    if (with_sink)
      io_duplicate_sinks_gparts(&tp, *sinks, *gparts, *Nsinks,
                                Ndm + Ndm_background + *Ngas);

863
    /* Duplicate the stars particles into gparts */
864
    if (with_stars)
865
      io_duplicate_stars_gparts(&tp, *sparts, *gparts, *Nstars,
Loic Hausammann's avatar
Loic Hausammann committed
866
                                Ndm + Ndm_background + *Ngas + *Nsinks);
867

868 869
    /* Duplicate the black holes particles into gparts */
    if (with_black_holes)
Loic Hausammann's avatar
Loic Hausammann committed
870 871 872
      io_duplicate_black_holes_gparts(
          &tp, *bparts, *gparts, *Nblackholes,
          Ndm + Ndm_background + *Ngas + *Nsinks + *Nstars);
873

874 875
    threadpool_clean(&tp);
  }
876

877
  /* message("Done Reading particles..."); */
878 879 880

  /* Clean up */
  free(ic_units);
881 882
}

/**
 * @brief Writes an HDF5 output file (GADGET-3 type) with its XMF descriptor
 *
 * @param e The engine containing all the system.
 * @param internal_units The #unit_system used internally
 * @param snapshot_units The #unit_system used in the snapshots
 * @param mpi_rank The MPI rank of this node.
 * @param mpi_size The number of MPI ranks.
 * @param comm The MPI communicator.
 * @param info The MPI information object
 *
 * Creates an HDF5 output file and writes the particles contained
 * in the engine. If such a file already exists, it is erased and replaced
 * by the new one.
 * The companion XMF file is also updated accordingly.
 *
 * Calls #error() if an error occurs.
 *
 */
void write_output_serial(struct engine* e,
903 904
                         const struct unit_system* internal_units,
                         const struct unit_system* snapshot_units, int mpi_rank,
905 906
                         int mpi_size, MPI_Comm comm, MPI_Info info) {

907
  hid_t h_file = 0, h_grp = 0;
908
  int numFiles = 1;
909 910 911 912
  const struct part* parts = e->s->parts;
  const struct xpart* xparts = e->s->xparts;
  const struct gpart* gparts = e->s->gparts;
  const struct spart* sparts = e->s->sparts;
913
  const struct bpart* bparts = e->s->bparts;
Loic Hausammann's avatar
Loic Hausammann committed
914
  const struct sink* sinks = e->s->sinks;
915 916
  struct output_options* output_options = e->output_options;
  struct output_list* output_list = e->output_list_snapshots;
917
  const int with_cosmology = e->policy & engine_policy_cosmology;
918 919
  const int with_cooling = e->policy & engine_policy_cooling;
  const int with_temperature = e->policy & engine_policy_temperature;
920
  const int with_fof = e->policy & engine_policy_fof;
921
  const int with_DM_background = e->s->with_DM_background;
922 923 924 925 926 927
#ifdef HAVE_VELOCIRAPTOR
  const int with_stf = (e->policy & engine_policy_structure_finding) &&
                       (e->s->gpart_group_data != NULL);
#else
  const int with_stf = 0;
#endif
928

929
  FILE* xmfFile = 0;
930

931 932 933
  /* Number of particles currently in the arrays */
  const size_t Ntot = e->s->nr_gparts;
  const size_t Ngas = e->s->nr_parts;
Loic Hausammann's avatar
Loic Hausammann committed
934
  const size_t Nsinks = e->s->nr_sinks;
935
  const size_t Nstars = e->s->nr_sparts;
936
  const size_t Nblackholes = e->s->nr_bparts;
Matthieu Schaller's avatar
Matthieu Schaller committed
937 938
  // const size_t Nbaryons = Ngas + Nstars;
  // const size_t Ndm = Ntot > 0 ? Ntot - Nbaryons : 0;
939

940 941 942 943 944 945 946
  size_t Ndm_background = 0;
  if (with_DM_background) {
    Ndm_background = io_count_dm_background_gparts(gparts, Ntot);
  }

  /* Number of particles that we will write
   * Recall that background particles are never inhibited and have no extras */
947 948 949 950
  const size_t Ntot_written =
      e->s->nr_gparts - e->s->nr_inhibited_gparts - e->s->nr_extra_gparts;
  const size_t Ngas_written =
      e->s->nr_parts - e->s->nr_inhibited_parts - e->s->nr_extra_parts;
Loic Hausammann's avatar
Loic Hausammann committed
951 952
  const size_t Nsinks_written =
      e->s->nr_sinks - e->s->nr_inhibited_sinks - e->s->nr_extra_sinks;
953 954
  const size_t Nstars_written =
      e->s->nr_sparts - e->s->nr_inhibited_sparts - e->s->nr_extra_sparts;
955 956
  const size_t Nblackholes_written =
      e->s->nr_bparts - e->s->nr_inhibited_bparts - e->s->nr_extra_bparts;
957
  const size_t Nbaryons_written =
Loic Hausammann's avatar
Loic Hausammann committed
958
      Ngas_written + Nstars_written + Nblackholes_written + Nsinks_written;