/*******************************************************************************
 * This file is part of SWIFT.
 * Copyright (c) 2012 Pedro Gonnet (pedro.gonnet@durham.ac.uk),
 *                    Matthieu Schaller (matthieu.schaller@durham.ac.uk).
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 *
 ******************************************************************************/

/* Config parameters. */
#include "../config.h"

#if defined(HAVE_HDF5) && !defined(WITH_MPI)

/* Some standard headers. */
#include <float.h>
#include <hdf5.h>
#include <math.h>
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>

/* This object's header. */
#include "single_io.h"

/* Local includes. */
#include "black_holes_io.h"
#include "chemistry_io.h"
#include "common_io.h"
#include "cooling_io.h"
#include "dimension.h"
#include "engine.h"
#include "error.h"
#include "fof_io.h"
#include "gravity_io.h"
#include "gravity_properties.h"
#include "hydro_io.h"
#include "hydro_properties.h"
#include "io_properties.h"
#include "memuse.h"
#include "output_list.h"
#include "output_options.h"
#include "part.h"
#include "part_type.h"
#include "sink_io.h"
#include "star_formation_io.h"
#include "stars_io.h"
#include "tracers_io.h"
#include "units.h"
#include "velociraptor_io.h"
#include "xmf.h"

/**
 * @brief Reads a data array from a given HDF5 group.
 *
 * @param h_grp The group from which to read.
 * @param props The #io_props of the field to read
 * @param N The number of particles.
 * @param internal_units The #unit_system used internally
 * @param ic_units The #unit_system used in the ICs
 * @param cleanup_h Are we removing h-factors from the ICs?
 * @param cleanup_sqrt_a Are we cleaning-up the sqrt(a) factors in the Gadget
 * IC velocities?
 * @param h The value of the reduced Hubble constant.
 * @param a The current value of the scale-factor.
 *
 * @todo A better version using HDF5 hyper-slabs to read the file directly into
 * the part array will be written once the structures have been stabilized.
 */
void read_array_single(hid_t h_grp, const struct io_props props, size_t N,
                       const struct unit_system* internal_units,
                       const struct unit_system* ic_units, int cleanup_h,
                       int cleanup_sqrt_a, double h, double a) {

  const size_t typeSize = io_sizeof_type(props.type);
  const size_t copySize = typeSize * props.dimension;
  const size_t num_elements = N * props.dimension;

  /* Check whether the dataspace exists or not */
  const htri_t exist = H5Lexists(h_grp, props.name, 0);
  if (exist < 0) {
    error("Error while checking the existence of data set '%s'.", props.name);
  } else if (exist == 0) {
    if (props.importance == COMPULSORY) {
      error("Compulsory data set '%s' not present in the file.", props.name);
    } else {
      /* message("Optional data set '%s' not present. Zeroing this particle
       * props...", name); */

      for (size_t i = 0; i < N; ++i)
        memset(props.field + i * props.partSize, 0, copySize);

      return;
    }
  }

  /* message("Reading %s '%s' array...", */
  /*         props.importance == COMPULSORY ? "compulsory" : "optional  ", */
  /*         props.name); */

  /* Open data space */
  const hid_t h_data = H5Dopen(h_grp, props.name, H5P_DEFAULT);
  if (h_data < 0) error("Error while opening data set '%s'.", props.name);

  /* Allocate temporary buffer */
  void* temp = malloc(num_elements * typeSize);
  if (temp == NULL) error("Unable to allocate memory for temporary buffer");

  /* Read HDF5 dataspace in temporary buffer */
  /* Dirty version that happens to work for vectors but should be improved */
  /* Using HDF5 dataspaces would be better */
  const hid_t h_err = H5Dread(h_data, io_hdf5_type(props.type), H5S_ALL,
                              H5S_ALL, H5P_DEFAULT, temp);
  if (h_err < 0) error("Error while reading data array '%s'.", props.name);

  /* Unit conversion if necessary */
  const double unit_factor =
      units_conversion_factor(ic_units, internal_units, props.units);
  if (unit_factor != 1. && exist != 0) {

    /* message("Converting ! factor=%e", factor); */

    if (io_is_double_precision(props.type)) {
      double* temp_d = (double*)temp;
      for (size_t i = 0; i < num_elements; ++i) temp_d[i] *= unit_factor;

    } else {
      float* temp_f = (float*)temp;

#ifdef SWIFT_DEBUG_CHECKS
      float maximum = 0.f;
      float minimum = FLT_MAX;
#endif

      /* Loop that converts the Units */
      for (size_t i = 0; i < num_elements; ++i) {

#ifdef SWIFT_DEBUG_CHECKS
        /* Find the absolute minimum and maximum values */
        const float abstemp_f = fabsf(temp_f[i]);
        if (abstemp_f != 0.f) {
          maximum = max(maximum, abstemp_f);
          minimum = min(minimum, abstemp_f);
        }
#endif

        /* Convert the float units */
        temp_f[i] *= unit_factor;
      }

#ifdef SWIFT_DEBUG_CHECKS
      /* The two possible errors: larger than float or smaller
       * than float precision. */
      if (unit_factor * maximum > FLT_MAX) {
        error("Unit conversion results in numbers larger than floats");
      } else if (unit_factor * minimum < FLT_MIN) {
        error("Numbers smaller than float precision");
      }
#endif
    }
  }

  /* Clean-up h if necessary */
  const float h_factor_exp = units_h_factor(internal_units, props.units);
  if (cleanup_h && h_factor_exp != 0.f && exist != 0) {

    /* message("Multiplying '%s' by h^%f=%f", props.name, h_factor_exp,
     * h_factor); */

    if (io_is_double_precision(props.type)) {
      double* temp_d = (double*)temp;
      const double h_factor = pow(h, h_factor_exp);
      for (size_t i = 0; i < num_elements; ++i) temp_d[i] *= h_factor;
    } else {
      float* temp_f = (float*)temp;
      const float h_factor = pow(h, h_factor_exp);
      for (size_t i = 0; i < num_elements; ++i) temp_f[i] *= h_factor;
    }
  }

  /* Clean-up a if necessary */
  if (cleanup_sqrt_a && a != 1. && (strcmp(props.name, "Velocities") == 0)) {
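    /* Gadget-convention ICs store velocities with an extra 1/sqrt(a) factor;
     * multiplying by sqrt(a) recovers the internal velocity convention. */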

    if (io_is_double_precision(props.type)) {
      double* temp_d = (double*)temp;
      const double vel_factor = sqrt(a);
      for (size_t i = 0; i < num_elements; ++i) temp_d[i] *= vel_factor;
    } else {
      float* temp_f = (float*)temp;
      const float vel_factor = sqrt(a);
      for (size_t i = 0; i < num_elements; ++i) temp_f[i] *= vel_factor;
    }
  }

  /* Copy temporary buffer to particle data */
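  /* props.field points to the field inside the first particle and
   * props.partSize is the stride between particles, so this scatters the
   * contiguous temporary buffer back into the array-of-structs layout. */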
  char* temp_c = (char*)temp;
  for (size_t i = 0; i < N; ++i)
    memcpy(props.field + i * props.partSize, &temp_c[i * copySize], copySize);

  /* Free and close everything */
  free(temp);
  H5Dclose(h_data);
}

/**
 * @brief Writes a data array in a given HDF5 group.
 *
 * @param e The #engine we are writing from.
 * @param grp The group in which to write.
 * @param fileName The name of the file in which the data is written
 * @param xmfFile The FILE used to write the XMF description
 * @param partTypeGroupName The name of the group containing the particles in
 * the HDF5 file.
 * @param props The #io_props of the field to write
 * @param N The number of particles to write.
 * @param internal_units The #unit_system used internally
 * @param snapshot_units The #unit_system used in the snapshots
 *
 * @todo A better version using HDF5 hyper-slabs to write the file directly from
 * the part array will be written once the structures have been stabilized.
 */
void write_array_single(const struct engine* e, hid_t grp, char* fileName,
                        FILE* xmfFile, char* partTypeGroupName,
                        const struct io_props props, size_t N,
                        const struct unit_system* internal_units,
                        const struct unit_system* snapshot_units) {

  const size_t typeSize = io_sizeof_type(props.type);
  const size_t num_elements = N * props.dimension;

  /* message("Writing '%s' array...", props.name); */

  /* Allocate temporary buffer */
  void* temp = NULL;
  if (swift_memalign("writebuff", (void**)&temp, IO_BUFFER_ALIGNMENT,
                     num_elements * typeSize) != 0)
    error("Unable to allocate temporary i/o buffer");

  /* Copy the particle data to the temporary buffer */
  io_copy_temp_buffer(temp, e, props, N, internal_units, snapshot_units);

  /* Create data space */
  const hid_t h_space = H5Screate(H5S_SIMPLE);
  if (h_space < 0)
    error("Error while creating data space for field '%s'.", props.name);

  /* Decide what chunk size to use based on compression */
  int log2_chunk_size = e->snapshot_compression > 0 ? 12 : 18;
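  /* i.e. 2^12 elements per chunk when compressing, 2^18 otherwise; the chunk
   * is capped to the dataset size further down. */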

  int rank;
  hsize_t shape[2];
  hsize_t chunk_shape[2];

  if (props.dimension > 1) {
    rank = 2;
    shape[0] = N;
    shape[1] = props.dimension;
    chunk_shape[0] = 1 << log2_chunk_size;
    chunk_shape[1] = props.dimension;
  } else {
    rank = 1;
    shape[0] = N;
    shape[1] = 0;
    chunk_shape[0] = 1 << log2_chunk_size;
    chunk_shape[1] = 0;
  }

  /* Make sure the chunks are not larger than the dataset */
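  /* (HDF5 does not accept chunks larger than a fixed-size dataset) */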
  if (chunk_shape[0] > N) chunk_shape[0] = N;

  /* Change shape of data space */
  hid_t h_err = H5Sset_extent_simple(h_space, rank, shape, shape);
  if (h_err < 0)
    error("Error while changing data space shape for field '%s'.", props.name);

  /* Dataset properties */
  const hid_t h_prop = H5Pcreate(H5P_DATASET_CREATE);

  /* Set chunk size */
  h_err = H5Pset_chunk(h_prop, rank, chunk_shape);
  if (h_err < 0)
    error("Error while setting chunk size (%llu, %llu) for field '%s'.",
          chunk_shape[0], chunk_shape[1], props.name);

  /* Impose check-sum to verify data corruption */
  h_err = H5Pset_fletcher32(h_prop);
  if (h_err < 0)
    error("Error while setting checksum options for field '%s'.", props.name);

  /* Impose data compression */
  if (e->snapshot_compression > 0) {
    h_err = H5Pset_shuffle(h_prop);
    if (h_err < 0)
      error("Error while setting shuffling options for field '%s'.",
            props.name);

    h_err = H5Pset_deflate(h_prop, e->snapshot_compression);
    if (h_err < 0)
      error("Error while setting compression options for field '%s'.",
            props.name);
  }

  /* Create dataset */
  const hid_t h_data = H5Dcreate(grp, props.name, io_hdf5_type(props.type),
                                 h_space, H5P_DEFAULT, h_prop, H5P_DEFAULT);
  if (h_data < 0) error("Error while creating data set '%s'.", props.name);

  /* Write temporary buffer to HDF5 data set */
  h_err = H5Dwrite(h_data, io_hdf5_type(props.type), h_space, H5S_ALL,
                   H5P_DEFAULT, temp);
  if (h_err < 0) error("Error while writing data array '%s'.", props.name);

  /* Write XMF description for this data set */
  if (xmfFile != NULL)
    xmf_write_line(xmfFile, fileName, partTypeGroupName, props.name, N,
                   props.dimension, props.type);

  /* Write unit conversion factors for this data set */
  char buffer[FIELD_BUFFER_SIZE] = {0};
  units_cgs_conversion_string(buffer, snapshot_units, props.units,
                              props.scale_factor_exponent);
  float baseUnitsExp[5];
  units_get_base_unit_exponents_array(baseUnitsExp, props.units);
  io_write_attribute_f(h_data, "U_M exponent", baseUnitsExp[UNIT_MASS]);
  io_write_attribute_f(h_data, "U_L exponent", baseUnitsExp[UNIT_LENGTH]);
  io_write_attribute_f(h_data, "U_t exponent", baseUnitsExp[UNIT_TIME]);
  io_write_attribute_f(h_data, "U_I exponent", baseUnitsExp[UNIT_CURRENT]);
  io_write_attribute_f(h_data, "U_T exponent", baseUnitsExp[UNIT_TEMPERATURE]);
  io_write_attribute_f(h_data, "h-scale exponent", 0.f);
  io_write_attribute_f(h_data, "a-scale exponent", props.scale_factor_exponent);
  io_write_attribute_s(h_data, "Expression for physical CGS units", buffer);

  /* Write the actual number this conversion factor corresponds to */
  const double factor =
      units_cgs_conversion_factor(snapshot_units, props.units);
  io_write_attribute_d(
      h_data,
      "Conversion factor to CGS (not including cosmological corrections)",
      factor);
  io_write_attribute_d(
      h_data,
      "Conversion factor to physical CGS (including cosmological corrections)",
      factor * pow(e->cosmology->a, props.scale_factor_exponent));

#ifdef SWIFT_DEBUG_CHECKS
  if (strlen(props.description) == 0)
    error("Invalid (empty) description of the field '%s'", props.name);
#endif

  /* Write the full description */
  io_write_attribute_s(h_data, "Description", props.description);

  /* Free and close everything */
  swift_free("writebuff", temp);
  H5Pclose(h_prop);
  H5Dclose(h_data);
  H5Sclose(h_space);
}

/**
 * @brief Reads an HDF5 initial condition file (GADGET-3 type)
 *
 * @param fileName The file to read.
 * @param internal_units The system units used internally
 * @param dim (output) The dimension of the volume.
 * @param parts (output) Array of #part particles.
 * @param gparts (output) Array of #gpart particles.
 * @param sinks (output) Array of #sink particles.
 * @param sparts (output) Array of #spart particles.
 * @param bparts (output) Array of #bpart particles.
 * @param Ngas (output) The number of gas particles read.
 * @param Ngparts (output) The number of #gpart read.
 * @param Ngparts_background (output) The number of background #gpart read.
 * @param Nsinks (output) The number of #sink read.
 * @param Nstars (output) The number of #spart read.
 * @param Nblackholes (output) The number of #bpart read.
 * @param flag_entropy (output) 1 if the ICs contained Entropy in the
 * InternalEnergy field
 * @param with_hydro Are we reading gas particles ?
 * @param with_gravity Are we reading/creating #gpart arrays ?
 * @param with_sink Are we reading sink particles ?
 * @param with_stars Are we reading star particles ?
 * @param with_black_holes Are we reading black hole particles ?
 * @param with_cosmology Are we running with cosmology ?
 * @param cleanup_h Are we cleaning-up h-factors from the quantities we read?
 * @param cleanup_sqrt_a Are we cleaning-up the sqrt(a) factors in the Gadget
 * IC velocities?
 * @param h The value of the reduced Hubble constant to use for correction.
 * @param a The current value of the scale-factor.
 * @param n_threads The number of threads to use for the temporary threadpool.
 * @param dry_run If 1, don't read the particles. Only allocate the arrays.
 * @param remap_ids If 1, ignore the particle IDs found in the ICs; they are
 * remapped later.
 *
 * Opens the HDF5 file fileName and reads the particles contained
 * in the parts array. N is the returned number of particles found
 * in the file.
 *
 * @warning Cannot read snapshots distributed over more than one file!
 * @todo Read snapshots distributed in more than one file.
 */
void read_ic_single(const char* fileName,
                    const struct unit_system* internal_units, double dim[3],
                    struct part** parts, struct gpart** gparts,
                    struct sink** sinks, struct spart** sparts,
                    struct bpart** bparts, size_t* Ngas, size_t* Ngparts,
                    size_t* Ngparts_background, size_t* Nsinks, size_t* Nstars,
                    size_t* Nblackholes, int* flag_entropy, int with_hydro,
                    int with_gravity, int with_sink, int with_stars,
                    int with_black_holes, int with_cosmology, int cleanup_h,
                    int cleanup_sqrt_a, double h, double a, int n_threads,
                    int dry_run, int remap_ids) {

  hid_t h_file = 0, h_grp = 0;
  /* GADGET has only cubic boxes (in cosmological mode) */
  double boxSize[3] = {0.0, -1.0, -1.0};
  /* GADGET has 6 particle types. */
  long long numParticles[swift_type_count] = {0};
  long long numParticles_highWord[swift_type_count] = {0};
  size_t N[swift_type_count] = {0};
  int dimension = 3; /* Assume 3D if nothing is specified */
  size_t Ndm = 0;
  size_t Ndm_background = 0;

  /* Initialise counters */
  *Ngas = 0, *Ngparts = 0, *Ngparts_background = 0, *Nstars = 0,
  *Nblackholes = 0, *Nsinks = 0;

  /* Open file */
  /* message("Opening file '%s' as IC.", fileName); */
  h_file = H5Fopen(fileName, H5F_ACC_RDONLY, H5P_DEFAULT);
  if (h_file < 0) error("Error while opening file '%s'.", fileName);

  /* Open header to read simulation properties */
  /* message("Reading file header..."); */
  h_grp = H5Gopen(h_file, "/Header", H5P_DEFAULT);
  if (h_grp < 0) error("Error while opening file header\n");

  /* Check the dimensionality of the ICs (if the info exists) */
  const hid_t hid_dim = H5Aexists(h_grp, "Dimension");
  if (hid_dim < 0)
    error("Error while testing existence of 'Dimension' attribute");
  if (hid_dim > 0) io_read_attribute(h_grp, "Dimension", INT, &dimension);
  if (dimension != hydro_dimension)
    error("ICs dimensionality (%dD) does not match code dimensionality (%dD)",
          dimension, (int)hydro_dimension);

  /* Check whether the number of files is specified (if the info exists) */
  const hid_t hid_files = H5Aexists(h_grp, "NumFilesPerSnapshot");
  int num_files = 1;
  if (hid_files < 0)
    error(
        "Error while testing the existence of 'NumFilesPerSnapshot' attribute");
  if (hid_files > 0)
    io_read_attribute(h_grp, "NumFilesPerSnapshot", INT, &num_files);
  if (num_files != 1)
    error(
        "ICs are split over multiple files (%d). SWIFT cannot handle this "
        "case. The script /tools/combine_ics.py is available in the repository "
        "to combine files into a valid input file.",
        num_files);

  /* Read the relevant information and print status */
  int flag_entropy_temp[6];
  io_read_attribute(h_grp, "Flag_Entropy_ICs", INT, flag_entropy_temp);
  *flag_entropy = flag_entropy_temp[0];
  io_read_attribute(h_grp, "BoxSize", DOUBLE, boxSize);
  io_read_attribute(h_grp, "NumPart_Total", LONGLONG, numParticles);
  io_read_attribute(h_grp, "NumPart_Total_HighWord", LONGLONG,
                    numParticles_highWord);

  /* Check that the user is not doing something silly when they e.g. restart
   * from a snapshot by asserting that the current scale-factor (from
   * parameter file) and the redshift in the header are consistent */
  if (with_cosmology) {
    io_assert_valid_header_cosmology(h_grp, a);
  }

  for (int ptype = 0; ptype < swift_type_count; ++ptype)
    N[ptype] = (numParticles[ptype]) + (numParticles_highWord[ptype] << 32);

  /* Get the box size if not cubic */
  dim[0] = boxSize[0];
  dim[1] = (boxSize[1] < 0) ? boxSize[0] : boxSize[1];
  dim[2] = (boxSize[2] < 0) ? boxSize[0] : boxSize[2];

  /* Change box size in the 1D and 2D case */
  if (hydro_dimension == 2)
    dim[2] = min(dim[0], dim[1]);
  else if (hydro_dimension == 1)
    dim[2] = dim[1] = dim[0];

  /* Convert the box size if we want to clean-up h-factors */
  if (cleanup_h) {
    dim[0] /= h;
    dim[1] /= h;
    dim[2] /= h;
  }

  /* message("Found %d particles in a %speriodic box of size [%f %f %f].",  */
  /*    *N, (periodic ? "": "non-"), dim[0], dim[1], dim[2]);  */

  /* Close header */
  H5Gclose(h_grp);

  /* Read the unit system used in the ICs */
  struct unit_system* ic_units =
      (struct unit_system*)malloc(sizeof(struct unit_system));
  if (ic_units == NULL) error("Unable to allocate memory for IC unit system");
  io_read_unit_system(h_file, ic_units, internal_units, 0);

  /* Tell the user if a conversion will be needed */
  if (units_are_equal(ic_units, internal_units)) {

    message("IC and internal units match. No conversion needed.");

  } else {

    message("Conversion needed from:");
    message("(ICs) Unit system: U_M =      %e g.", ic_units->UnitMass_in_cgs);
    message("(ICs) Unit system: U_L =      %e cm.",
            ic_units->UnitLength_in_cgs);
    message("(ICs) Unit system: U_t =      %e s.", ic_units->UnitTime_in_cgs);
    message("(ICs) Unit system: U_I =      %e A.",
            ic_units->UnitCurrent_in_cgs);
    message("(ICs) Unit system: U_T =      %e K.",
            ic_units->UnitTemperature_in_cgs);
    message("to:");
    message("(internal) Unit system: U_M = %e g.",
            internal_units->UnitMass_in_cgs);
    message("(internal) Unit system: U_L = %e cm.",
            internal_units->UnitLength_in_cgs);
    message("(internal) Unit system: U_t = %e s.",
            internal_units->UnitTime_in_cgs);
    message("(internal) Unit system: U_I = %e A.",
            internal_units->UnitCurrent_in_cgs);
    message("(internal) Unit system: U_T = %e K.",
            internal_units->UnitTemperature_in_cgs);
  }

  /* Convert the dimensions of the box */
  for (int j = 0; j < 3; j++)
    dim[j] *=
        units_conversion_factor(ic_units, internal_units, UNIT_CONV_LENGTH);

  /* Allocate memory to store SPH particles */
  if (with_hydro) {
    *Ngas = N[swift_type_gas];
    if (swift_memalign("parts", (void**)parts, part_align,
                       *Ngas * sizeof(struct part)) != 0)
      error("Error while allocating memory for SPH particles");
    bzero(*parts, *Ngas * sizeof(struct part));
  }

  /* Allocate memory to store sink particles */
  if (with_sink) {
    *Nsinks = N[swift_type_sink];
    if (swift_memalign("sinks", (void**)sinks, sink_align,
                       *Nsinks * sizeof(struct sink)) != 0)
      error("Error while allocating memory for sink particles");
    bzero(*sinks, *Nsinks * sizeof(struct sink));
  }

  /* Allocate memory to store star particles */
  if (with_stars) {
    *Nstars = N[swift_type_stars];
    if (swift_memalign("sparts", (void**)sparts, spart_align,
                       *Nstars * sizeof(struct spart)) != 0)
      error("Error while allocating memory for star particles");
    bzero(*sparts, *Nstars * sizeof(struct spart));
  }

  /* Allocate memory to store black hole particles */
  if (with_black_holes) {
    *Nblackholes = N[swift_type_black_hole];
    if (swift_memalign("bparts", (void**)bparts, bpart_align,
                       *Nblackholes * sizeof(struct bpart)) != 0)
      error("Error while allocating memory for black hole particles");
    bzero(*bparts, *Nblackholes * sizeof(struct bpart));
  }

  /* Allocate memory to store all gravity particles */
  if (with_gravity) {
    Ndm = N[swift_type_dark_matter];
    Ndm_background = N[swift_type_dark_matter_background];
    *Ngparts = (with_hydro ? N[swift_type_gas] : 0) +
               N[swift_type_dark_matter] +
               N[swift_type_dark_matter_background] +
               (with_sink ? N[swift_type_sink] : 0) +
               (with_stars ? N[swift_type_stars] : 0) +
               (with_black_holes ? N[swift_type_black_hole] : 0);
    *Ngparts_background = Ndm_background;
    if (swift_memalign("gparts", (void**)gparts, gpart_align,
                       *Ngparts * sizeof(struct gpart)) != 0)
      error("Error while allocating memory for gravity particles");
    bzero(*gparts, *Ngparts * sizeof(struct gpart));
  }

  /* message("Allocated %8.2f MB for particles.", *N * sizeof(struct part) /
   * (1024.*1024.)); */

  /* message("BoxSize = %lf", dim[0]); */
  /* message("NumPart = [%zd, %zd] Total = %zd", *Ngas, Ndm, *Ngparts); */

  /* Loop over all particle types */
  for (int ptype = 0; ptype < swift_type_count; ptype++) {

    /* Don't do anything if no particle of this kind */
    if (N[ptype] == 0) continue;

    /* Open the particle group in the file */
    char partTypeGroupName[PARTICLE_GROUP_BUFFER_SIZE];
    snprintf(partTypeGroupName, PARTICLE_GROUP_BUFFER_SIZE, "/PartType%d",
             ptype);
    h_grp = H5Gopen(h_file, partTypeGroupName, H5P_DEFAULT);
    if (h_grp < 0)
      error("Error while opening particle group %s.", partTypeGroupName);

    int num_fields = 0;
    struct io_props list[100];
    size_t Nparticles = 0;

    /* Read particle fields into the structure */
    switch (ptype) {

      case swift_type_gas:
        if (with_hydro) {
          Nparticles = *Ngas;
          hydro_read_particles(*parts, list, &num_fields);
          num_fields += chemistry_read_particles(*parts, list + num_fields);
        }
        break;

      case swift_type_dark_matter:
        if (with_gravity) {
          Nparticles = Ndm;
          darkmatter_read_particles(*gparts, list, &num_fields);
        }
        break;

      case swift_type_dark_matter_background:
        if (with_gravity) {
          Nparticles = Ndm_background;
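          /* Background DM follows the regular DM in the gparts array, hence
           * the offset of Ndm below. */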
          darkmatter_read_particles(*gparts + Ndm, list, &num_fields);
        }
        break;

      case swift_type_sink:
        if (with_sink) {
          Nparticles = *Nsinks;
          sink_read_particles(*sinks, list, &num_fields);
        }
        break;

      case swift_type_stars:
        if (with_stars) {
          Nparticles = *Nstars;
          stars_read_particles(*sparts, list, &num_fields);
        }
        break;

      case swift_type_black_hole:
        if (with_black_holes) {
          Nparticles = *Nblackholes;
          black_holes_read_particles(*bparts, list, &num_fields);
        }
        break;

      default:
        message("Particle Type %d not yet supported. Particles ignored", ptype);
    }

    /* Read everything */
    if (!dry_run)
      for (int i = 0; i < num_fields; ++i) {
        /* If we are remapping ParticleIDs later, don't need to read them. */
        if (remap_ids && strcmp(list[i].name, "ParticleIDs") == 0) continue;

        /* Read array. */
        read_array_single(h_grp, list[i], Nparticles, internal_units, ic_units,
                          cleanup_h, cleanup_sqrt_a, h, a);
      }

    /* Close particle group */
    H5Gclose(h_grp);
  }

  /* If we are remapping ParticleIDs later, start by setting them to 1. */
  if (remap_ids) set_ids_to_one(*gparts, *Ngparts);

  /* Duplicate the parts for gravity */
  if (!dry_run && with_gravity) {

    /* Let's initialise a bit of thread parallelism here */
    struct threadpool tp;
    threadpool_init(&tp, n_threads);
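    /* The gparts array is laid out as [DM | background DM | gas | sinks |
     * stars | black holes]; the offsets used below follow that ordering. */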

    /* Prepare the DM particles */
    io_prepare_dm_gparts(&tp, *gparts, Ndm);

    /* Prepare the DM background particles */
    io_prepare_dm_background_gparts(&tp, *gparts + Ndm, Ndm_background);

    /* Duplicate the hydro particles into gparts */
    if (with_hydro)
      io_duplicate_hydro_gparts(&tp, *parts, *gparts, *Ngas,
                                Ndm + Ndm_background);

    /* Duplicate the sink particles into gparts */
    if (with_sink)
      io_duplicate_sinks_gparts(&tp, *sinks, *gparts, *Nsinks,
                                Ndm + Ndm_background + *Ngas);

    /* Duplicate the star particles into gparts */
    if (with_stars)
      io_duplicate_stars_gparts(&tp, *sparts, *gparts, *Nstars,
                                Ndm + Ndm_background + *Ngas + *Nsinks);

    /* Duplicate the black hole particles into gparts */
    if (with_black_holes)
      io_duplicate_black_holes_gparts(
          &tp, *bparts, *gparts, *Nblackholes,
          Ndm + Ndm_background + *Ngas + *Nsinks + *Nstars);

    threadpool_clean(&tp);
  }

  /* message("Done Reading particles..."); */

  /* Clean up */
  free(ic_units);

  /* Close file */
  H5Fclose(h_file);
}

/**
 * @brief Writes an HDF5 output file (GADGET-3 type) with its XMF descriptor
 *
 * @param e The engine containing all the system.
 * @param internal_units The #unit_system used internally
 * @param snapshot_units The #unit_system used in the snapshots
 *
 * Creates an HDF5 output file and writes the particles contained
 * in the engine. If such a file already exists, it is erased and replaced
 * by the new one.
 * The companion XMF file is also updated accordingly.
 *
 * Calls #error() if an error occurs.
 *
 */
void write_output_single(struct engine* e,
                         const struct unit_system* internal_units,
                         const struct unit_system* snapshot_units) {

  hid_t h_file = 0, h_grp = 0;
  int numFiles = 1;
  const struct part* parts = e->s->parts;
  const struct xpart* xparts = e->s->xparts;
  const struct gpart* gparts = e->s->gparts;
  const struct spart* sparts = e->s->sparts;
  const struct bpart* bparts = e->s->bparts;
  const struct sink* sinks = e->s->sinks;
  struct output_options* output_options = e->output_options;
  struct output_list* output_list = e->output_list_snapshots;
  const int with_cosmology = e->policy & engine_policy_cosmology;
  const int with_cooling = e->policy & engine_policy_cooling;
  const int with_temperature = e->policy & engine_policy_temperature;
  const int with_fof = e->policy & engine_policy_fof;
  const int with_DM_background = e->s->with_DM_background;
#ifdef HAVE_VELOCIRAPTOR
  const int with_stf = (e->policy & engine_policy_structure_finding) &&
                       (e->s->gpart_group_data != NULL);
#else
  const int with_stf = 0;
#endif

  /* Number of particles currently in the arrays */
  const size_t Ntot = e->s->nr_gparts;
  const size_t Ngas = e->s->nr_parts;
  const size_t Nstars = e->s->nr_sparts;
  const size_t Nsinks = e->s->nr_sinks;
  const size_t Nblackholes = e->s->nr_bparts;
  // const size_t Nbaryons = Ngas + Nstars;
  // const size_t Ndm = Ntot > 0 ? Ntot - Nbaryons : 0;

  size_t Ndm_background = 0;
  if (with_DM_background) {
    Ndm_background = io_count_dm_background_gparts(gparts, Ntot);
  }

  /* Number of particles that we will write
   * Recall that background particles are never inhibited and have no extras */
  const size_t Ntot_written =
      e->s->nr_gparts - e->s->nr_inhibited_gparts - e->s->nr_extra_gparts;
  const size_t Ngas_written =
      e->s->nr_parts - e->s->nr_inhibited_parts - e->s->nr_extra_parts;
  const size_t Nstars_written =
      e->s->nr_sparts - e->s->nr_inhibited_sparts - e->s->nr_extra_sparts;
  const size_t Nsinks_written =
      e->s->nr_sinks - e->s->nr_inhibited_sinks - e->s->nr_extra_sinks;
  const size_t Nblackholes_written =
      e->s->nr_bparts - e->s->nr_inhibited_bparts - e->s->nr_extra_bparts;
  const size_t Nbaryons_written =
      Ngas_written + Nstars_written + Nblackholes_written + Nsinks_written;
  const size_t Ndm_written =
      Ntot_written > 0 ? Ntot_written - Nbaryons_written - Ndm_background : 0;

  /* Format things in a Gadget-friendly array */
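  /* The entries follow the GADGET PartType0-5 ordering: gas, DM, background
   * DM, sinks, stars, black holes. */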
  long long N_total[swift_type_count] = {
      (long long)Ngas_written,   (long long)Ndm_written,
      (long long)Ndm_background, (long long)Nsinks_written,
      (long long)Nstars_written, (long long)Nblackholes_written};

  /* File name */
  char fileName[FILENAME_BUFFER_SIZE];
  char xmfFileName[FILENAME_BUFFER_SIZE];
  io_get_snapshot_filename(fileName, xmfFileName, e->snapshot_int_time_label_on,
                           e->snapshot_invoke_stf, e->time, e->stf_output_count,
                           e->snapshot_output_count, e->snapshot_subdir,
                           e->snapshot_base_name);

  /* First time, we need to create the XMF file */
  if (e->snapshot_output_count == 0) xmf_create_file(xmfFileName);

  /* Prepare the XMF file for the new entry */
  FILE* xmfFile = 0;
  xmfFile = xmf_prepare_file(xmfFileName);

  /* Write the part corresponding to this specific output */
  xmf_write_outputheader(xmfFile, fileName, e->time);

  /* Open file */
  /* message("Opening file '%s'.", fileName); */
  h_file = H5Fcreate(fileName, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
  if (h_file < 0) error("Error while opening file '%s'.", fileName);

  /* Open header to write simulation properties */
  /* message("Writing file header..."); */
  h_grp = H5Gcreate(h_file, "/Header", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
  if (h_grp < 0) error("Error while creating file header\n");

  /* Convert basic output information to snapshot units */
  const double factor_time =
      units_conversion_factor(internal_units, snapshot_units, UNIT_CONV_TIME);
  const double factor_length =
      units_conversion_factor(internal_units, snapshot_units, UNIT_CONV_LENGTH);
  const double dblTime = e->time * factor_time;
  const double dim[3] = {e->s->dim[0] * factor_length,
                         e->s->dim[1] * factor_length,
                         e->s->dim[2] * factor_length};

  /* Determine if we are writing a reduced snapshot, and if so which
   * output selection type to use */
  char current_selection_name[FIELD_BUFFER_SIZE] =
      select_output_header_default_name;
  if (output_list) {
    /* Users could have specified a different Select Output scheme for each
     * snapshot. */
    output_list_get_current_select_output(output_list, current_selection_name);
  }

  /* Print the relevant information and print status */
  io_write_attribute(h_grp, "BoxSize", DOUBLE, dim, 3);
  io_write_attribute(h_grp, "Time", DOUBLE, &dblTime, 1);
  const int dimension = (int)hydro_dimension;
  io_write_attribute(h_grp, "Dimension", INT, &dimension, 1);
  io_write_attribute(h_grp, "Redshift", DOUBLE, &e->cosmology->z, 1);
  io_write_attribute(h_grp, "Scale-factor", DOUBLE, &e->cosmology->a, 1);
  io_write_attribute_s(h_grp, "Code", "SWIFT");
  io_write_attribute_s(h_grp, "RunName", e->run_name);

  /* Store the time at which the snapshot was written */
  time_t tm = time(NULL);
  struct tm* timeinfo = localtime(&tm);
  char snapshot_date[64];
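  /* %T = HH:MM:SS, %F = YYYY-MM-DD, %Z = time-zone name */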
  strftime(snapshot_date, 64, "%T %F %Z", timeinfo);
  io_write_attribute_s(h_grp, "Snapshot date", snapshot_date);

  /* GADGET-2 legacy values: number of particles of each type */
  unsigned int numParticles[swift_type_count] = {0};
  unsigned int numParticlesHighWord[swift_type_count] = {0};

  /* Total number of fields to write per ptype */
  int numFields[swift_type_count] = {0};

  for (int ptype = 0; ptype < swift_type_count; ++ptype) {
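    /* GADGET headers store 64-bit totals as two 32-bit words. */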
    numParticles[ptype] = (unsigned int)N_total[ptype];
    numParticlesHighWord[ptype] = (unsigned int)(N_total[ptype] >> 32);

    numFields[ptype] = output_options_get_num_fields_to_write(
        output_options, current_selection_name, ptype);
  }

  io_write_attribute(h_grp, "NumPart_ThisFile", LONGLONG, N_total,
                     swift_type_count);
  io_write_attribute(h_grp, "NumPart_Total", UINT, numParticles,
                     swift_type_count);
  io_write_attribute(h_grp, "NumPart_Total_HighWord", UINT,
                     numParticlesHighWord, swift_type_count);
  double MassTable[swift_type_count] = {0};
  io_write_attribute(h_grp, "MassTable", DOUBLE, MassTable, swift_type_count);
  unsigned int flagEntropy[swift_type_count] = {0};
  flagEntropy[0] = writeEntropyFlag();
  io_write_attribute(h_grp, "Flag_Entropy_ICs", UINT, flagEntropy,
                     swift_type_count);
  io_write_attribute(h_grp, "NumFilesPerSnapshot", INT, &numFiles, 1);
  io_write_attribute_i(h_grp, "ThisFile", 0);