/*******************************************************************************
 * This file is part of SWIFT.
 * Copyright (c) 2012 Pedro Gonnet (pedro.gonnet@durham.ac.uk),
 *                    Matthieu Schaller (matthieu.schaller@durham.ac.uk).
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 *
 ******************************************************************************/

/* Config parameters. */
#include "../config.h"

#if defined(HAVE_HDF5) && !defined(WITH_MPI)

/* Some standard headers. */
#include <float.h>
#include <hdf5.h>
#include <math.h>
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>

/* This object's header. */
#include "single_io.h"

/* Local includes. */
#include "black_holes_io.h"
#include "chemistry_io.h"
#include "common_io.h"
#include "cooling_io.h"
#include "dimension.h"
#include "engine.h"
#include "error.h"
#include "fof_io.h"
#include "gravity_io.h"
#include "gravity_properties.h"
#include "hydro_io.h"
#include "hydro_properties.h"
#include "io_properties.h"
#include "memuse.h"
#include "output_list.h"
#include "output_options.h"
#include "part.h"
#include "part_type.h"
#include "star_formation_io.h"
#include "stars_io.h"
#include "tracers_io.h"
#include "units.h"
#include "velociraptor_io.h"
#include "xmf.h"

/**
 * @brief Reads a data array from a given HDF5 group.
 *
 * @param h_grp The group from which to read.
 * @param props The #io_props of the field to read.
 * @param N The number of particles.
 * @param internal_units The #unit_system used internally.
 * @param ic_units The #unit_system used in the ICs.
 * @param cleanup_h Are we removing h-factors from the ICs?
 * @param cleanup_sqrt_a Are we cleaning-up the sqrt(a) factors in the Gadget
 * IC velocities?
 * @param h The value of the reduced Hubble constant.
 * @param a The current value of the scale-factor.
 *
 * @todo A better version using HDF5 hyper-slabs to read the file directly into
 * the part array will be written once the structures have been stabilized.
 */
void read_array_single(hid_t h_grp, const struct io_props props, size_t N,
                       const struct unit_system* internal_units,
                       const struct unit_system* ic_units, int cleanup_h,
                       int cleanup_sqrt_a, double h, double a) {
  const size_t typeSize = io_sizeof_type(props.type);
  const size_t copySize = typeSize * props.dimension;
  const size_t num_elements = N * props.dimension;
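  /* The field is read into a flat scratch buffer of num_elements values and
   * later scattered into the array-of-structs particle data, copying copySize
   * bytes per particle at a stride of props.partSize. */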
  /* Check whether the dataset exists or not */
  const htri_t exist = H5Lexists(h_grp, props.name, 0);
  if (exist < 0) {
    error("Error while checking the existence of data set '%s'.", props.name);
  } else if (exist == 0) {
    if (props.importance == COMPULSORY) {
      error("Compulsory data set '%s' not present in the file.", props.name);
    } else {
      /* message("Optional data set '%s' not present. Zeroing this particle
       * props...", name); */

      for (size_t i = 0; i < N; ++i)
        memset(props.field + i * props.partSize, 0, copySize);

      return;
    }
  }

  /* message("Reading %s '%s' array...", */
  /*         props.importance == COMPULSORY ? "compulsory" : "optional  ", */
  /*         props.name); */

  /* Open the dataset */
  const hid_t h_data = H5Dopen(h_grp, props.name, H5P_DEFAULT);
  if (h_data < 0) error("Error while opening data space '%s'.", props.name);
  /* Allocate temporary buffer */
  void* temp = malloc(num_elements * typeSize);
  if (temp == NULL) error("Unable to allocate memory for temporary buffer");

  /* Read the HDF5 dataset into the temporary buffer */
  /* Dirty version that happens to work for vectors but should be improved */
  /* Using HDF5 dataspaces would be better */
  const hid_t h_err = H5Dread(h_data, io_hdf5_type(props.type), H5S_ALL,
                              H5S_ALL, H5P_DEFAULT, temp);
  if (h_err < 0) error("Error while reading data array '%s'.", props.name);
  /* Unit conversion if necessary */
  const double unit_factor =
      units_conversion_factor(ic_units, internal_units, props.units);
  if (unit_factor != 1. && exist != 0) {

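    /* The factor is the ratio of the IC and internal unit systems for this
     * quantity. For example, positions given in Mpc in the ICs but kpc
     * internally would be multiplied by 1000 here (illustrative values). */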
    /* message("Converting ! factor=%e", factor); */

    if (io_is_double_precision(props.type)) {
      double* temp_d = (double*)temp;
      for (size_t i = 0; i < num_elements; ++i) temp_d[i] *= unit_factor;

    } else {
      float* temp_f = (float*)temp;

#ifdef SWIFT_DEBUG_CHECKS
      float maximum = 0.f;
      float minimum = FLT_MAX;
#endif

      /* Loop that converts the Units */
      for (size_t i = 0; i < num_elements; ++i) {

#ifdef SWIFT_DEBUG_CHECKS
        /* Find the absolute minimum and maximum values */
        const float abstemp_f = fabsf(temp_f[i]);
        if (abstemp_f != 0.f) {
          maximum = max(maximum, abstemp_f);
          minimum = min(minimum, abstemp_f);
        }
#endif

        /* Convert the float units */
        temp_f[i] *= unit_factor;
      }

#ifdef SWIFT_DEBUG_CHECKS
      /* The two possible errors: larger than float or smaller
       * than float precision. */
      if (unit_factor * maximum > FLT_MAX) {
        error("Unit conversion results in numbers larger than floats");
      } else if (unit_factor * minimum < FLT_MIN) {
        error("Numbers smaller than float precision");
      }
#endif
    }
  }

  /* Clean-up h if necessary */
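  /* If the ICs are h-full (e.g. lengths in Mpc/h), multiply by the power of
   * the reduced Hubble constant returned by units_h_factor() so that the
   * quantity becomes h-free internally. */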
  const float h_factor_exp = units_h_factor(internal_units, props.units);
  if (cleanup_h && h_factor_exp != 0.f && exist != 0) {

    /* message("Multiplying '%s' by h^%f=%f", props.name, h_factor_exp,
     * h_factor); */

    if (io_is_double_precision(props.type)) {
      double* temp_d = (double*)temp;
      const double h_factor = pow(h, h_factor_exp);
      for (size_t i = 0; i < num_elements; ++i) temp_d[i] *= h_factor;
    } else {
      float* temp_f = (float*)temp;
      const float h_factor = pow(h, h_factor_exp);
      for (size_t i = 0; i < num_elements; ++i) temp_f[i] *= h_factor;
    }
  }

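  /* Gadget-style ICs conventionally store velocities divided by sqrt(a); when
   * requested, the factor is re-applied here (to the "Velocities" field only)
   * to recover the convention used internally. */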
  /* Clean-up a if necessary */
  if (cleanup_sqrt_a && a != 1. && (strcmp(props.name, "Velocities") == 0)) {

    if (io_is_double_precision(props.type)) {
      double* temp_d = (double*)temp;
      const double vel_factor = sqrt(a);
      for (size_t i = 0; i < num_elements; ++i) temp_d[i] *= vel_factor;
    } else {
      float* temp_f = (float*)temp;
      const float vel_factor = sqrt(a);
      for (size_t i = 0; i < num_elements; ++i) temp_f[i] *= vel_factor;
    }
  }

  /* Copy temporary buffer to particle data */
  char* temp_c = (char*)temp;
  for (size_t i = 0; i < N; ++i)
    memcpy(props.field + i * props.partSize, &temp_c[i * copySize], copySize);

  /* Free and close everything */
  free(temp);
  H5Dclose(h_data);
}

/**
 * @brief Writes a data array in given HDF5 group.
 *
 * @param e The #engine we are writing from.
 * @param grp The group in which to write.
 * @param fileName The name of the file in which the data is written.
 * @param xmfFile The FILE used to write the XMF description.
 * @param partTypeGroupName The name of the group containing the particles in
 * the HDF5 file.
 * @param props The #io_props of the field to write.
 * @param N The number of particles to write.
 * @param internal_units The #unit_system used internally.
 * @param snapshot_units The #unit_system used in the snapshots.
 *
 * @todo A better version using HDF5 hyper-slabs to write the file directly from
 * the part array will be written once the structures have been stabilized.
 */
void write_array_single(const struct engine* e, hid_t grp, char* fileName,
                        FILE* xmfFile, char* partTypeGroupName,
                        const struct io_props props, size_t N,
                        const struct unit_system* internal_units,
                        const struct unit_system* snapshot_units) {
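  /* Outline: copy the field into an aligned scratch buffer, create a chunked
   * dataset protected by a Fletcher-32 checksum (optionally shuffled and
   * gzip-compressed), write it, then attach the XMF entry and the unit and
   * cosmology metadata as attributes. */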
  const size_t typeSize = io_sizeof_type(props.type);
  const size_t num_elements = N * props.dimension;

  /* message("Writing '%s' array...", props.name); */

  /* Allocate temporary buffer */
  void* temp = NULL;
  if (swift_memalign("writebuff", (void**)&temp, IO_BUFFER_ALIGNMENT,
                     num_elements * typeSize) != 0)
    error("Unable to allocate temporary i/o buffer");

  /* Copy the particle data to the temporary buffer */
  io_copy_temp_buffer(temp, e, props, N, internal_units, snapshot_units);
  /* Create data space */
  const hid_t h_space = H5Screate(H5S_SIMPLE);
  if (h_space < 0)
    error("Error while creating data space for field '%s'.", props.name);

  int rank;
  hsize_t shape[2];
  hsize_t chunk_shape[2];

  if (props.dimension > 1) {
    rank = 2;
    shape[0] = N;
    shape[1] = props.dimension;
    chunk_shape[0] = 1 << 20; /* Just a guess...*/
    chunk_shape[1] = props.dimension;
  } else {
    rank = 1;
    shape[0] = N;
    shape[1] = 0;
    chunk_shape[0] = 1 << 20; /* Just a guess...*/
    chunk_shape[1] = 0;
  }

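  /* Chunking is required by the HDF5 filters used below (checksum, shuffle,
   * deflate); 2^20 particles per chunk is only a heuristic and is capped to
   * the dataset size just below. */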
  /* Make sure the chunks are not larger than the dataset */
  if (chunk_shape[0] > N) chunk_shape[0] = N;

  /* Change shape of data space */
  hid_t h_err = H5Sset_extent_simple(h_space, rank, shape, shape);
  if (h_err < 0)
    error("Error while changing data space shape for field '%s'.", props.name);

  /* Dataset properties */
  const hid_t h_prop = H5Pcreate(H5P_DATASET_CREATE);

  /* Set chunk size */
  h_err = H5Pset_chunk(h_prop, rank, chunk_shape);
  if (h_err < 0)
    error("Error while setting chunk size (%llu, %llu) for field '%s'.",
          chunk_shape[0], chunk_shape[1], props.name);

  /* Impose check-sum to verify data corruption */
  h_err = H5Pset_fletcher32(h_prop);
  if (h_err < 0)
    error("Error while setting checksum options for field '%s'.", props.name);
  /* Impose data compression */
  if (e->snapshot_compression > 0) {
    h_err = H5Pset_shuffle(h_prop);
    if (h_err < 0)
      error("Error while setting shuffling options for field '%s'.",
            props.name);

    h_err = H5Pset_deflate(h_prop, e->snapshot_compression);
    if (h_err < 0)
      error("Error while setting compression options for field '%s'.",
            props.name);
  }

  /* Create dataset */
  const hid_t h_data = H5Dcreate(grp, props.name, io_hdf5_type(props.type),
                                 h_space, H5P_DEFAULT, h_prop, H5P_DEFAULT);
  if (h_data < 0) error("Error while creating dataspace '%s'.", props.name);

  /* Write temporary buffer to HDF5 dataspace */
  h_err = H5Dwrite(h_data, io_hdf5_type(props.type), h_space, H5S_ALL,
                   H5P_DEFAULT, temp);
  if (h_err < 0) error("Error while writing data array '%s'.", props.name);

  /* Write XMF description for this data set */
  if (xmfFile != NULL)
    xmf_write_line(xmfFile, fileName, partTypeGroupName, props.name, N,
                   props.dimension, props.type);

  /* Write unit conversion factors for this data set */
  char buffer[FIELD_BUFFER_SIZE] = {0};
  units_cgs_conversion_string(buffer, snapshot_units, props.units,
                              props.scale_factor_exponent);
  float baseUnitsExp[5];
  units_get_base_unit_exponents_array(baseUnitsExp, props.units);
  io_write_attribute_f(h_data, "U_M exponent", baseUnitsExp[UNIT_MASS]);
  io_write_attribute_f(h_data, "U_L exponent", baseUnitsExp[UNIT_LENGTH]);
  io_write_attribute_f(h_data, "U_t exponent", baseUnitsExp[UNIT_TIME]);
  io_write_attribute_f(h_data, "U_I exponent", baseUnitsExp[UNIT_CURRENT]);
  io_write_attribute_f(h_data, "U_T exponent", baseUnitsExp[UNIT_TEMPERATURE]);
  io_write_attribute_f(h_data, "h-scale exponent", 0.f);
  io_write_attribute_f(h_data, "a-scale exponent", props.scale_factor_exponent);
  io_write_attribute_s(h_data, "Expression for physical CGS units", buffer);

  /* Write the actual number this conversion factor corresponds to */
  const double factor =
      units_cgs_conversion_factor(snapshot_units, props.units);
  io_write_attribute_d(
      h_data,
      "Conversion factor to CGS (not including cosmological corrections)",
      factor);
  io_write_attribute_d(
      h_data,
      "Conversion factor to physical CGS (including cosmological corrections)",
      factor * pow(e->cosmology->a, props.scale_factor_exponent));
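  /* A reader can thus recover physical CGS values as
   *   value * factor * a^(a-scale exponent),
   * with no h-factors involved since SWIFT outputs are h-free. */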

#ifdef SWIFT_DEBUG_CHECKS
  if (strlen(props.description) == 0)
    error("Invalid (empty) description of the field '%s'", props.name);
#endif

  /* Write the full description */
  io_write_attribute_s(h_data, "Description", props.description);

  /* Free and close everything */
  swift_free("writebuff", temp);
  H5Pclose(h_prop);
  H5Dclose(h_data);
  H5Sclose(h_space);
}

/**
 * @brief Reads an HDF5 initial condition file (GADGET-3 type)
 *
 * @param fileName The file to read.
 * @param internal_units The system units used internally
 * @param dim (output) The dimension of the volume.
 * @param parts (output) Array of #part particles.
 * @param gparts (output) Array of #gpart particles.
 * @param sparts (output) Array of #spart particles.
 * @param bparts (output) Array of #bpart particles.
 * @param Ngas (output) number of Gas particles read.
 * @param Ngparts (output) The number of #gpart read.
 * @param Ngparts_background (output) The number of background #gpart read.
 * @param Nstars (output) The number of #spart read.
 * @param Nblackholes (output) The number of #bpart read.
 * @param flag_entropy (output) 1 if the ICs contained Entropy in the
 * InternalEnergy field
 * @param with_hydro Are we reading gas particles?
 * @param with_gravity Are we reading/creating #gpart arrays?
 * @param with_stars Are we reading star particles?
 * @param with_black_holes Are we reading black hole particles?
 * @param with_cosmology Are we running with cosmology?
 * @param cleanup_h Are we cleaning-up h-factors from the quantities we read?
 * @param cleanup_sqrt_a Are we cleaning-up the sqrt(a) factors in the Gadget
 * IC velocities?
 * @param h The value of the reduced Hubble constant to use for correction.
 * @param a The current value of the scale-factor.
 * @param n_threads The number of threads to use for the temporary threadpool.
 * @param dry_run If 1, don't read the particles. Only allocates the arrays.
 *
 * Opens the HDF5 file fileName and reads the particles contained
 * in the parts array. N is the returned number of particles found
 * in the file.
 *
 * @warning Cannot read a snapshot distributed over more than one file!
 * @todo Read snapshots distributed in more than one file.
 */
void read_ic_single(const char* fileName,
                    const struct unit_system* internal_units, double dim[3],
                    struct part** parts, struct gpart** gparts,
                    struct spart** sparts, struct bpart** bparts, size_t* Ngas,
                    size_t* Ngparts, size_t* Ngparts_background, size_t* Nstars,
                    size_t* Nblackholes, int* flag_entropy, int with_hydro,
                    int with_gravity, int with_stars, int with_black_holes,
                    int with_cosmology, int cleanup_h, int cleanup_sqrt_a,
                    double h, double a, int n_threads, int dry_run) {

  hid_t h_file = 0, h_grp = 0;
  /* GADGET has only cubic boxes (in cosmological mode) */
  double boxSize[3] = {0.0, -1.0, -1.0};
  /* GADGET has 6 particle types. We only keep the type 0 & 1 for now...*/
  long long numParticles[swift_type_count] = {0};
  long long numParticles_highWord[swift_type_count] = {0};
  size_t N[swift_type_count] = {0};
  int dimension = 3; /* Assume 3D if nothing is specified */
  size_t Ndm = 0;
  size_t Ndm_background = 0;

  /* Initialise counters */
  *Ngas = 0, *Ngparts = 0, *Ngparts_background = 0, *Nstars = 0,
  *Nblackholes = 0;

  /* Open file */
  /* message("Opening file '%s' as IC.", fileName); */
  h_file = H5Fopen(fileName, H5F_ACC_RDONLY, H5P_DEFAULT);
  if (h_file < 0) error("Error while opening file '%s'.", fileName);

  /* Open header to read simulation properties */
  /* message("Reading file header..."); */
  h_grp = H5Gopen(h_file, "/Header", H5P_DEFAULT);
  if (h_grp < 0) error("Error while opening file header\n");

  /* Check the dimensionality of the ICs (if the info exists) */
  const hid_t hid_dim = H5Aexists(h_grp, "Dimension");
  if (hid_dim < 0)
    error("Error while testing existance of 'Dimension' attribute");
  if (hid_dim > 0) io_read_attribute(h_grp, "Dimension", INT, &dimension);
  if (dimension != hydro_dimension)
    error("ICs dimensionality (%dD) does not match code dimensionality (%dD)",
          dimension, (int)hydro_dimension);

  /* Check whether the number of files is specified (if the info exists) */
  const hid_t hid_files = H5Aexists(h_grp, "NumFilesPerSnapshot");
  int num_files = 1;
  if (hid_files < 0)
    error(
        "Error while testing the existence of 'NumFilesPerSnapshot' attribute");
  if (hid_files > 0)
    io_read_attribute(h_grp, "NumFilesPerSnapshot", INT, &num_files);
  if (num_files != 1)
    error(
        "ICs are split over multiple files (%d). SWIFT cannot handle this "
        "case. The script /tools/combine_ics.py is available in the repository "
        "to combine files into a valid input file.",
        num_files);

  /* Read the relevant information and print status */
  int flag_entropy_temp[6];
  io_read_attribute(h_grp, "Flag_Entropy_ICs", INT, flag_entropy_temp);
  *flag_entropy = flag_entropy_temp[0];
  io_read_attribute(h_grp, "BoxSize", DOUBLE, boxSize);
  io_read_attribute(h_grp, "NumPart_Total", LONGLONG, numParticles);
  io_read_attribute(h_grp, "NumPart_Total_HighWord", LONGLONG,
                    numParticles_highWord);

  /* Check that the user is not doing something silly when they e.g. restart
   * from a snapshot by asserting that the current scale-factor (from
   * parameter file) and the redshift in the header are consistent */
  if (with_cosmology) {
    io_assert_valid_header_cosmology(h_grp, a);
  }

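  /* Gadget headers split 64-bit particle counts into two 32-bit words;
   * recombine them here: total = low_word + (high_word << 32). */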
  for (int ptype = 0; ptype < swift_type_count; ++ptype)
    N[ptype] = (numParticles[ptype]) + (numParticles_highWord[ptype] << 32);

  /* Get the box size if not cubic */
  dim[0] = boxSize[0];
  dim[1] = (boxSize[1] < 0) ? boxSize[0] : boxSize[1];
  dim[2] = (boxSize[2] < 0) ? boxSize[0] : boxSize[2];

  /* Change box size in the 1D and 2D case */
  if (hydro_dimension == 2)
    dim[2] = min(dim[0], dim[1]);
  else if (hydro_dimension == 1)
    dim[2] = dim[1] = dim[0];

  /* Convert the box size if we want to clean-up h-factors */
  if (cleanup_h) {
    dim[0] /= h;
    dim[1] /= h;
    dim[2] /= h;
  }

  /* message("Found %d particles in a %speriodic box of size [%f %f %f].",  */
  /*         *N, (periodic ? "": "non-"), dim[0], dim[1], dim[2]);  */

  /* Close header */
  H5Gclose(h_grp);

  /* Read the unit system used in the ICs */
  struct unit_system* ic_units =
      (struct unit_system*)malloc(sizeof(struct unit_system));
  if (ic_units == NULL) error("Unable to allocate memory for IC unit system");
  io_read_unit_system(h_file, ic_units, internal_units, 0);

  /* Tell the user if a conversion will be needed */
  if (units_are_equal(ic_units, internal_units)) {

    message("IC and internal units match. No conversion needed.");

  } else {

    message("Conversion needed from:");
    message("(ICs) Unit system: U_M =      %e g.", ic_units->UnitMass_in_cgs);
    message("(ICs) Unit system: U_L =      %e cm.",
            ic_units->UnitLength_in_cgs);
    message("(ICs) Unit system: U_t =      %e s.", ic_units->UnitTime_in_cgs);
    message("(ICs) Unit system: U_I =      %e A.",
            ic_units->UnitCurrent_in_cgs);
    message("(ICs) Unit system: U_T =      %e K.",
            ic_units->UnitTemperature_in_cgs);
    message("to:");
    message("(internal) Unit system: U_M = %e g.",
            internal_units->UnitMass_in_cgs);
    message("(internal) Unit system: U_L = %e cm.",
            internal_units->UnitLength_in_cgs);
    message("(internal) Unit system: U_t = %e s.",
            internal_units->UnitTime_in_cgs);
    message("(internal) Unit system: U_I = %e A.",
            internal_units->UnitCurrent_in_cgs);
    message("(internal) Unit system: U_T = %e K.",
            internal_units->UnitTemperature_in_cgs);
  }

  /* Convert the dimensions of the box */
  for (int j = 0; j < 3; j++)
    dim[j] *=
        units_conversion_factor(ic_units, internal_units, UNIT_CONV_LENGTH);
  /* Allocate memory to store SPH particles */
  if (with_hydro) {
    *Ngas = N[swift_type_gas];
    if (swift_memalign("parts", (void**)parts, part_align,
                       *Ngas * sizeof(struct part)) != 0)
      error("Error while allocating memory for SPH particles");
    bzero(*parts, *Ngas * sizeof(struct part));
  }
  /* Allocate memory to store star particles */
  if (with_stars) {
    *Nstars = N[swift_type_stars];
    if (swift_memalign("sparts", (void**)sparts, spart_align,
                       *Nstars * sizeof(struct spart)) != 0)
      error("Error while allocating memory for star particles");
    bzero(*sparts, *Nstars * sizeof(struct spart));
  }

  /* Allocate memory to store black hole particles */
  if (with_black_holes) {
    *Nblackholes = N[swift_type_black_hole];
    if (swift_memalign("bparts", (void**)bparts, bpart_align,
                       *Nblackholes * sizeof(struct bpart)) != 0)
      error("Error while allocating memory for black hole particles");
    bzero(*bparts, *Nblackholes * sizeof(struct bpart));
  }

  /* Allocate memory to store all gravity particles */
  if (with_gravity) {
    Ndm = N[swift_type_dark_matter];
    Ndm_background = N[swift_type_dark_matter_background];
    *Ngparts = (with_hydro ? N[swift_type_gas] : 0) +
               N[swift_type_dark_matter] +
               N[swift_type_dark_matter_background] +
               (with_stars ? N[swift_type_stars] : 0) +
               (with_black_holes ? N[swift_type_black_hole] : 0);
    *Ngparts_background = Ndm_background;
    if (swift_memalign("gparts", (void**)gparts, gpart_align,
                       *Ngparts * sizeof(struct gpart)) != 0)
      error("Error while allocating memory for gravity particles");
    bzero(*gparts, *Ngparts * sizeof(struct gpart));
  }

  /* message("Allocated %8.2f MB for particles.", *N * sizeof(struct part) /
   * (1024.*1024.)); */

  /* message("BoxSize = %lf", dim[0]); */
  /* message("NumPart = [%zd, %zd] Total = %zd", *Ngas, Ndm, *Ngparts); */
  /* Loop over all particle types */
  for (int ptype = 0; ptype < swift_type_count; ptype++) {

    /* Don't do anything if no particle of this kind */
    if (N[ptype] == 0) continue;

    /* Open the particle group in the file */
    char partTypeGroupName[PARTICLE_GROUP_BUFFER_SIZE];
    snprintf(partTypeGroupName, PARTICLE_GROUP_BUFFER_SIZE, "/PartType%d",
             ptype);
    h_grp = H5Gopen(h_file, partTypeGroupName, H5P_DEFAULT);
    if (h_grp < 0)
      error("Error while opening particle group %s.", partTypeGroupName);

    int num_fields = 0;
    struct io_props list[100];
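    /* Each case below fills 'list' with the #io_props descriptors of the
     * fields to read for this particle type and sets Nparticles accordingly;
     * unsupported types are reported and skipped. */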
    size_t Nparticles = 0;

    /* Read particle fields into the structure */
    switch (ptype) {

      case swift_type_gas:
        if (with_hydro) {
          Nparticles = *Ngas;
          hydro_read_particles(*parts, list, &num_fields);
          num_fields += chemistry_read_particles(*parts, list + num_fields);
        }
        break;

      case swift_type_dark_matter:
        if (with_gravity) {
          Nparticles = Ndm;
          darkmatter_read_particles(*gparts, list, &num_fields);
        }
        break;

      case swift_type_dark_matter_background:
        if (with_gravity) {
          Nparticles = Ndm_background;
          darkmatter_read_particles(*gparts + Ndm, list, &num_fields);
        }
        break;

      case swift_type_stars:
        if (with_stars) {
          Nparticles = *Nstars;
          stars_read_particles(*sparts, list, &num_fields);
        }
        break;

      case swift_type_black_hole:
        if (with_black_holes) {
          Nparticles = *Nblackholes;
          black_holes_read_particles(*bparts, list, &num_fields);
        }
        break;

      default:
        message("Particle Type %d not yet supported. Particles ignored", ptype);
    }

    /* Read everything */
    if (!dry_run)
      for (int i = 0; i < num_fields; ++i)
        read_array_single(h_grp, list[i], Nparticles, internal_units, ic_units,
                          cleanup_h, cleanup_sqrt_a, h, a);

    /* Close particle group */
    H5Gclose(h_grp);
  }

  /* Duplicate the parts for gravity */
  if (!dry_run && with_gravity) {
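    /* The gpart array is laid out as [DM | background DM | gas | stars | BHs];
     * each io_duplicate_* call below creates the gravity counterparts at the
     * matching offset and links them back to their parent particles. */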
    /* Let's initialise a bit of thread parallelism here */
    struct threadpool tp;
    threadpool_init(&tp, n_threads);
    /* Prepare the DM particles */
    io_prepare_dm_gparts(&tp, *gparts, Ndm);

    /* Prepare the DM background particles */
    io_prepare_dm_background_gparts(&tp, *gparts + Ndm, Ndm_background);

    /* Duplicate the hydro particles into gparts */
    if (with_hydro)
      io_duplicate_hydro_gparts(&tp, *parts, *gparts, *Ngas,
                                Ndm + Ndm_background);

    /* Duplicate the star particles into gparts */
    if (with_stars)
      io_duplicate_stars_gparts(&tp, *sparts, *gparts, *Nstars,
                                Ndm + Ndm_background + *Ngas);
    /* Duplicate the black hole particles into gparts */
    if (with_black_holes)
      io_duplicate_black_holes_gparts(&tp, *bparts, *gparts, *Nblackholes,
                                      Ndm + Ndm_background + *Ngas + *Nstars);

    threadpool_clean(&tp);
  }
  /* message("Done Reading particles..."); */

  /* Clean up */
  free(ic_units);

  /* Close file */
  H5Fclose(h_file);
}

/**
 * @brief Writes an HDF5 output file (GADGET-3 type) with its XMF descriptor
 *
 * @param e The engine containing all the system.
 * @param internal_units The #unit_system used internally
 * @param snapshot_units The #unit_system used in the snapshots
 *
 * Creates an HDF5 output file and writes the particles contained
 * in the engine. If such a file already exists, it is erased and replaced
 * by the new one.
 * The companion XMF file is also updated accordingly.
 *
 * Calls #error() if an error occurs.
 *
 */
void write_output_single(struct engine* e,
                         const struct unit_system* internal_units,
                         const struct unit_system* snapshot_units) {

  hid_t h_file = 0, h_grp = 0;
  int numFiles = 1;
  const struct part* parts = e->s->parts;
  const struct xpart* xparts = e->s->xparts;
  const struct gpart* gparts = e->s->gparts;
  const struct spart* sparts = e->s->sparts;
  const struct bpart* bparts = e->s->bparts;
  struct output_options* output_options = e->output_options;
  struct output_list* output_list = e->output_list_snapshots;
  const int with_cosmology = e->policy & engine_policy_cosmology;
  const int with_cooling = e->policy & engine_policy_cooling;
  const int with_temperature = e->policy & engine_policy_temperature;
  const int with_fof = e->policy & engine_policy_fof;
  const int with_DM_background = e->s->with_DM_background;

#ifdef HAVE_VELOCIRAPTOR
  const int with_stf = (e->policy & engine_policy_structure_finding) &&
                       (e->s->gpart_group_data != NULL);
#else
  const int with_stf = 0;
#endif
  /* Number of particles currently in the arrays */
  const size_t Ntot = e->s->nr_gparts;
  const size_t Ngas = e->s->nr_parts;
  const size_t Nstars = e->s->nr_sparts;
  const size_t Nblackholes = e->s->nr_bparts;
  // const size_t Nbaryons = Ngas + Nstars;
  // const size_t Ndm = Ntot > 0 ? Ntot - Nbaryons : 0;

  size_t Ndm_background = 0;
  if (with_DM_background) {
    Ndm_background = io_count_dm_background_gparts(gparts, Ntot);
  }

  /* Number of particles that we will write
   * Recall that background particles are never inhibited and have no extras */
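  /* 'Inhibited' particles have been flagged for removal and 'extra' slots are
   * pre-allocated buffer entries; neither is written to the snapshot. */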
  const size_t Ntot_written =
      e->s->nr_gparts - e->s->nr_inhibited_gparts - e->s->nr_extra_gparts;
  const size_t Ngas_written =
      e->s->nr_parts - e->s->nr_inhibited_parts - e->s->nr_extra_parts;
  const size_t Nstars_written =
      e->s->nr_sparts - e->s->nr_inhibited_sparts - e->s->nr_extra_sparts;
  const size_t Nblackholes_written =
      e->s->nr_bparts - e->s->nr_inhibited_bparts - e->s->nr_extra_bparts;
  const size_t Nbaryons_written =
      Ngas_written + Nstars_written + Nblackholes_written;
  const size_t Ndm_written =
      Ntot_written > 0 ? Ntot_written - Nbaryons_written - Ndm_background : 0;

  /* Format things in a Gadget-friendly array */
  long long N_total[swift_type_count] = {
      (long long)Ngas_written,   (long long)Ndm_written,
      (long long)Ndm_background, 0,
      (long long)Nstars_written, (long long)Nblackholes_written};

  /* File name */
  char fileName[FILENAME_BUFFER_SIZE];
  char xmfFileName[FILENAME_BUFFER_SIZE];
  io_get_snapshot_filename(fileName, xmfFileName, e->snapshot_int_time_label_on,
                           e->snapshot_invoke_stf, e->time, e->stf_output_count,
                           e->snapshot_output_count, e->snapshot_subdir,
                           e->snapshot_base_name);
  /* First time, we need to create the XMF file */
  if (e->snapshot_output_count == 0) xmf_create_file(xmfFileName);

  /* Prepare the XMF file for the new entry */
  FILE* xmfFile = 0;
  xmfFile = xmf_prepare_file(xmfFileName);

  /* Write the part corresponding to this specific output */
  xmf_write_outputheader(xmfFile, fileName, e->time);

  /* Open file */
  /* message("Opening file '%s'.", fileName); */
  h_file = H5Fcreate(fileName, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
  if (h_file < 0) error("Error while opening file '%s'.", fileName);

  /* Open header to write simulation properties */
  /* message("Writing file header..."); */
  h_grp = H5Gcreate(h_file, "/Header", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
  if (h_grp < 0) error("Error while creating file header\n");

  /* Convert basic output information to snapshot units */
  const double factor_time =
      units_conversion_factor(internal_units, snapshot_units, UNIT_CONV_TIME);
  const double factor_length =
      units_conversion_factor(internal_units, snapshot_units, UNIT_CONV_LENGTH);
  const double dblTime = e->time * factor_time;
  const double dim[3] = {e->s->dim[0] * factor_length,
                         e->s->dim[1] * factor_length,
                         e->s->dim[2] * factor_length};
  /* Determine if we are writing a reduced snapshot, and if so which
   * output selection type to use */
  char current_selection_name[FIELD_BUFFER_SIZE] =
      select_output_header_default_name;
  if (output_list) {
    /* Users could have specified a different Select Output scheme for each
     * snapshot. */
    output_list_get_current_select_output(output_list, current_selection_name);
  }

  /* Print the relevant information and print status */
  io_write_attribute(h_grp, "BoxSize", DOUBLE, dim, 3);
  io_write_attribute(h_grp, "Time", DOUBLE, &dblTime, 1);
  const int dimension = (int)hydro_dimension;
  io_write_attribute(h_grp, "Dimension", INT, &dimension, 1);
  io_write_attribute(h_grp, "Redshift", DOUBLE, &e->cosmology->z, 1);
  io_write_attribute(h_grp, "Scale-factor", DOUBLE, &e->cosmology->a, 1);
  io_write_attribute_s(h_grp, "Code", "SWIFT");
  io_write_attribute_s(h_grp, "RunName", e->run_name);

  /* Store the time at which the snapshot was written */
  time_t tm = time(NULL);
  struct tm* timeinfo = localtime(&tm);
  char snapshot_date[64];
  strftime(snapshot_date, 64, "%T %F %Z", timeinfo);
  io_write_attribute_s(h_grp, "Snapshot date", snapshot_date);

  /* GADGET-2 legacy values */
  /* Number of particles of each type */
  unsigned int numParticles[swift_type_count] = {0};
  unsigned int numParticlesHighWord[swift_type_count] = {0};
  for (int ptype = 0; ptype < swift_type_count; ++ptype) {
    numParticles[ptype] = (unsigned int)N_total[ptype];
    numParticlesHighWord[ptype] = (unsigned int)(N_total[ptype] >> 32);
  }
  io_write_attribute(h_grp, "NumPart_ThisFile", LONGLONG, N_total,