/*******************************************************************************
 * This file is part of SWIFT.
 * Copyright (c) 2012 Pedro Gonnet (pedro.gonnet@durham.ac.uk),
 *                    Matthieu Schaller (matthieu.schaller@durham.ac.uk).
 * 
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 * 
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 * 
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 * 
 ******************************************************************************/

/* Config parameters. */
#include "../config.h"

#if defined(HAVE_HDF5) && !defined(WITH_MPI)


/* Some standard headers. */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stddef.h>
#include <hdf5.h>
#include <math.h>

#include "const.h"
#include "cycle.h"
#include "lock.h"
#include "task.h"
#include "part.h"
#include "space.h"
#include "scheduler.h"
#include "engine.h"
#include "error.h"
#include "kernel.h"
#include "common_io.h"



/*-----------------------------------------------------------------------------
 * Routines reading an IC file
 *-----------------------------------------------------------------------------*/

/**
 * @brief Reads a data array from a given HDF5 group.
 *
 * @param grp The group from which to read.
 * @param name The name of the array to read.
 * @param type The #DATA_TYPE of the array to read.
 * @param N The number of particles.
 * @param dim The dimension of the data (1 for scalar, 3 for vector)
 * @param part_c A (char*) pointer to the first occurrence of the field of interest in the parts array
 * @param importance If COMPULSORY, the data must be present in the IC file. If OPTIONAL, the array will be zeroed when the data is not present.
 *
 * @todo A better version using HDF5 hyperslabs to read the file directly into the part array
 * will be written once the structures have been stabilized.
 *  
 * Calls #error() if an error occurs.
 */
void readArrayBackEnd(hid_t grp, char* name, enum DATA_TYPE type, int N, int dim, char* part_c, enum DATA_IMPORTANCE importance)
{
  hid_t h_data=0, h_err=0, h_type=0;
  htri_t exist=0;
  void* temp;
  int i=0;
  const size_t typeSize = sizeOfType(type);
  const size_t copySize = typeSize * dim;
  const size_t partSize = sizeof(struct part);
  char* temp_c = 0;

  /* Check whether the dataspace exists or not */
  exist = H5Lexists(grp, name, 0);
  if(exist < 0)
    {
      error( "Error while checking the existence of data set '%s'." , name );
    }
  else if(exist == 0)
    {
      if(importance == COMPULSORY)
	{
          error( "Compulsory data set '%s' not present in the file." , name );
	}
      else
	{
          /* message("Optional data set '%s' not present. Zeroing this particle field...", name); */
	  
	  for(i=0; i<N; ++i)
	    memset(part_c+i*partSize, 0, copySize);
	  
	  return;
	}
    }

  /* message( "Reading %s '%s' array...", importance == COMPULSORY ? "compulsory": "optional  ", name); */

  /* Open data space */
  h_data = H5Dopen1(grp, name);
  if(h_data < 0)
    {
      error( "Error while opening data space '%s'." , name );
    }

  /* Check data type */
  h_type = H5Dget_type(h_data);
  if(h_type < 0)
    error("Unable to retrieve data type from the file");
  if(!H5Tequal(h_type, hdf5Type(type)))
    error("Non-matching types between the code and the file");
  
  /* Allocate temporary buffer */
  temp = malloc(N * dim * sizeOfType(type));
  if(temp == NULL)
    error("Unable to allocate memory for temporary buffer");

  /* Read HDF5 dataspace in temporary buffer */
  /* Dirty version that happens to work for vectors but should be improved */
  /* Using HDF5 dataspaces would be better */
  h_err = H5Dread(h_data, hdf5Type(type), H5S_ALL, H5S_ALL, H5P_DEFAULT, temp);
  if(h_err < 0)
    {
      error( "Error while reading data array '%s'." , name );
    }

  /* Copy temporary buffer to particle data */
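  /* The field of interest sits at a fixed offset inside each struct part, so
   * successive occurrences are partSize bytes apart, while the temporary
   * buffer read from HDF5 is packed contiguously (copySize bytes per particle). */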
  temp_c = temp;
  for(i=0; i<N; ++i)
    memcpy(part_c+i*partSize, &temp_c[i*copySize], copySize);
  
  /* Free and close everything */
  free(temp);
  H5Tclose(h_type);
  H5Dclose(h_data);
}

/**
 * @brief A helper macro to call the readArrayBackEnd function more easily.
 *
 * @param grp The group from which to read.
 * @param name The name of the array to read.
 * @param type The #DATA_TYPE of the array to read.
 * @param N The number of particles.
 * @param dim The dimension of the data (1 for scalar, 3 for vector)
 * @param part The array of particles to fill
 * @param field The name of the field (C code name as defined in part.h) to fill
 * @param importance Is the data compulsory or not
 *
 */
#define readArray(grp, name, type, N, dim, part, field, importance) readArrayBackEnd(grp, name, type, N, dim, (char*)(&(part[0]).field), importance)
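
/* Example usage (an illustrative sketch only, mirroring the calls made in
 * read_ic() below; h_grp is assumed to be an open "/PartType0" group, N the
 * particle count and parts the particle array):
 *
 *   readArray(h_grp, "Coordinates", DOUBLE, N, 3, parts, x, COMPULSORY);
 *   readArray(h_grp, "Density", FLOAT, N, 1, parts, rho, OPTIONAL);
 */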

/**
 * @brief Reads an HDF5 initial condition file (GADGET-3 type)
 *
 * @param fileName The file to read.
 * @param dim (output) The dimension of the volume read from the file.
 * @param parts (output) The array of #part read from the file.
 * @param N (output) The number of particles read from the file.
 * @param periodic (output) 1 if the volume is periodic, 0 if not.
 *
 * Opens the HDF5 file fileName and reads the particles it contains
 * into the parts array. N is set to the number of particles found
 * in the file.
 *
 * @warning Cannot read snapshots distributed over more than one file.
 * @todo Read snapshots distributed over more than one file.
 *
 * Calls #error() if an error occurs.
 *
 */
void read_ic ( char* fileName, double dim[3], struct part **parts,  int* N, int* periodic)
{
  hid_t h_file=0, h_grp=0;
  double boxSize[3]={0.0,-1.0,-1.0};         /* GADGET has only cubic boxes (in cosmological mode) */
  int numParticles[6]={0};   /* GADGET has 6 particle types. We only keep the type 0. */

  /* Open file */
  /* message("Opening file '%s' as IC.", fileName); */
  h_file = H5Fopen(fileName, H5F_ACC_RDONLY, H5P_DEFAULT);
  if(h_file < 0)
    {
      error( "Error while opening file '%s'." , fileName );
    }

  /* Open header to read simulation properties */
  /* message("Reading runtime parameters..."); */
  h_grp = H5Gopen1(h_file, "/RuntimePars");
  if(h_grp < 0)
    error("Error while opening runtime parameters\n");

  /* Read the relevant information */
  readAttribute(h_grp, "PeriodicBoundariesOn", INT, periodic);

  /* Close runtime parameters */
  H5Gclose(h_grp);
  
  /* Open header to read simulation properties */
  /* message("Reading file header..."); */
  h_grp = H5Gopen1(h_file, "/Header");
  if(h_grp < 0)
    error("Error while opening file header\n");
    
  /* Read the relevant information and print status */
  readAttribute(h_grp, "BoxSize", DOUBLE, boxSize);
  readAttribute(h_grp, "NumPart_Total", UINT, numParticles);

  *N = numParticles[0];
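
  /* GADGET ICs provide a single BoxSize value. If the file does not set the
   * other two components, the -1 sentinels above are left untouched and the
   * box is assumed to be cubic. */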
  dim[0] = boxSize[0];
  dim[1] = ( boxSize[1] < 0 ) ? boxSize[0] : boxSize[1];
  dim[2] = ( boxSize[2] < 0 ) ? boxSize[0] : boxSize[2];

  /* message("Found %d particles in a %speriodic box of size [%f %f %f].",  */
  /*     *N, (periodic ? "": "non-"), dim[0], dim[1], dim[2]); */

  /* Close header */
  H5Gclose(h_grp);

  /* Allocate memory to store particles */
  if(posix_memalign( (void*)parts , part_align , *N * sizeof(struct part)) != 0)
    error("Error while allocating memory for particles");
  bzero( *parts , *N * sizeof(struct part) );

  /* message("Allocated %8.2f MB for particles.", *N * sizeof(struct part) / (1024.*1024.)); */
		  
  /* Open SPH particles group */
  /* message("Reading particle arrays..."); */
  h_grp = H5Gopen1(h_file, "/PartType0");
  if(h_grp < 0)
    error( "Error while opening particle group.\n");

  /* Read arrays */
  readArray(h_grp, "Coordinates", DOUBLE, *N, 3, *parts, x, COMPULSORY);
  readArray(h_grp, "Velocities", FLOAT, *N, 3, *parts, v, COMPULSORY);
  readArray(h_grp, "Masses", FLOAT, *N, 1, *parts, mass, COMPULSORY);
  readArray(h_grp, "SmoothingLength", FLOAT, *N, 1, *parts, h, COMPULSORY);
  readArray(h_grp, "InternalEnergy", FLOAT, *N, 1, *parts, u, COMPULSORY);
  readArray(h_grp, "ParticleIDs", ULONGLONG, *N, 1, *parts, id, COMPULSORY);
  readArray(h_grp, "TimeStep", FLOAT, *N, 1, *parts, dt, OPTIONAL);
  readArray(h_grp, "Acceleration", FLOAT, *N, 3, *parts, a, OPTIONAL);
  readArray(h_grp, "Density", FLOAT, *N, 1, *parts, rho, OPTIONAL );

  /* Close particle group */
  H5Gclose(h_grp);

  /* message("Done reading particles..."); */

  /* Close file */
  H5Fclose(h_file);
}
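
/* Example usage (an illustrative sketch only; the file name is hypothetical):
 *
 *   double dim[3];
 *   struct part *parts = NULL;
 *   int N = 0, periodic = 0;
 *   read_ic("initial_conditions.hdf5", dim, &parts, &N, &periodic);
 */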


/*-----------------------------------------------------------------------------
 * Routines writing an output file
 *-----------------------------------------------------------------------------*/

/**
 * @brief Writes a data array in a given HDF5 group.
 *
 * @param grp The group in which to write.
 * @param fileName The name of the file in which the data is written
 * @param xmfFile The FILE used to write the XMF description
 * @param name The name of the array to write.
 * @param type The #DATA_TYPE of the array.
 * @param N The number of particles to write.
 * @param dim The dimension of the data (1 for scalar, 3 for vector)
 * @param part_c A (char*) pointer to the first occurrence of the field of interest in the parts array
 * @param us The UnitSystem currently in use
 * @param convFactor The UnitConversionFactor for this array
 *
 * @todo A better version using HDF5 hyperslabs to write the file directly from the part array
 * will be written once the structures have been stabilized.
 *
 * Calls #error() if an error occurs.
 */
void writeArrayBackEnd(hid_t grp, char* fileName, FILE* xmfFile, char* name, enum DATA_TYPE type, int N, int dim, char* part_c, struct UnitSystem* us, enum UnitConversionFactor convFactor)
{
  hid_t h_data=0, h_err=0, h_space=0;
  void* temp = 0;
  int i=0, rank=0;
  const size_t typeSize = sizeOfType(type);
  const size_t copySize = typeSize * dim;
  const size_t partSize = sizeof(struct part);
  char* temp_c = 0;
  hsize_t shape[2];
  char buffer[150];

  /* message("Writing '%s' array...", name); */

  /* Allocate temporary buffer */
  temp = malloc(N * dim * sizeOfType(type));
  if(temp == NULL)
    error("Unable to allocate memory for temporary buffer");

  /* Copy particle data to temporary buffer */
  temp_c = temp;
  for(i=0; i<N; ++i)
    memcpy(&temp_c[i*copySize], part_c+i*partSize, copySize);

  /* Create data space */
  h_space = H5Screate(H5S_SIMPLE);
  if(h_space < 0)
    {
      error( "Error while creating data space for field '%s'." , name );
    }
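
  /* Vector quantities are written as an N x dim matrix, scalar ones as a flat
   * array of N values; rank and shape[] are chosen accordingly below. */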
  
  if(dim > 1)
    {
      rank = 2;
      shape[0] = N; shape[1] = dim;
    }
  else
    {
      rank = 1;
      shape[0] = N; shape[1] = 0;
    }
  
  /* Change shape of data space */
  h_err = H5Sset_extent_simple(h_space, rank, shape, NULL);
  if(h_err < 0)
    {
      error( "Error while changing data space shape for field '%s'." , name );
    }
  
  /* Create dataset */
  h_data = H5Dcreate1(grp, name, hdf5Type(type), h_space, H5P_DEFAULT);
  if(h_data < 0)
    {
      error( "Error while creating dataset '%s'." , name );
    }
  
  /* Write temporary buffer to HDF5 dataspace */
  h_err = H5Dwrite(h_data, hdf5Type(type), h_space, H5S_ALL, H5P_DEFAULT, temp);
  if(h_err < 0)
    {
      error( "Error while writing data array '%s'." , name );
    }

  /* Write XMF description for this data set */
  writeXMFline(xmfFile, fileName, name, N, dim, type);

  /* Write unit conversion factors for this data set */
  conversionString( buffer, us, convFactor );
  writeAttribute_d( h_data, "CGS conversion factor", conversionFactor( us, convFactor ) );
  writeAttribute_f( h_data, "h-scale exponant", hFactor( us, convFactor ) );
  writeAttribute_f( h_data, "a-scale exponant", aFactor( us, convFactor ) );
  writeAttribute_s( h_data, "Conversion factor", buffer );
  
  /* Free and close everything */
  free(temp);
  H5Dclose(h_data);
  H5Sclose(h_space);
}

/**
 * @brief A helper macro to call the writeArrayBackEnd function more easily.
 *
 * @param grp The group in which to write.
 * @param fileName The name of the file in which the data is written
 * @param xmfFile The FILE used to write the XMF description
 * @param name The name of the array to write.
 * @param type The #DATA_TYPE of the array.
 * @param N The number of particles to write.
 * @param dim The dimension of the data (1 for scalar, 3 for vector)
 * @param part The array of particles to write
 * @param field The name of the field (C code name as defined in part.h) to write
 * @param us The UnitSystem currently in use
 * @param convFactor The UnitConversionFactor for this array
 *
 */
#define writeArray(grp, fileName, xmfFile, name, type, N, dim, part, field, us, convFactor) writeArrayBackEnd(grp, fileName, xmfFile, name, type, N, dim, (char*)(&(part[0]).field), us, convFactor)
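
/* Example usage (an illustrative sketch only, mirroring the calls made in
 * write_output() below; h_grp is assumed to be an open "/PartType0" group):
 *
 *   writeArray(h_grp, fileName, xmfFile, "Coordinates", DOUBLE, N, 3, parts, x,
 *              us, UNIT_CONV_LENGTH);
 */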

/**
 * @brief Writes an HDF5 output file (GADGET-3 type) with its XMF descriptor
 *
 * @param e The engine containing the system to be written out.
 * @param us The UnitSystem used for the conversion of units in the output
 *
 * Creates an HDF5 output file and writes the particles contained
 * in the engine. If such a file already exists, it is erased and replaced
 * by the new one.
 * The companion XMF file is also updated accordingly.
 *
 * Calls #error() if an error occurs.
 *
 */
void write_output (struct engine *e, struct UnitSystem* us)
{

  hid_t h_file=0, h_grp=0;
  int N = e->s->nr_parts;
  int periodic = e->s->periodic;
  int numParticles[6]={N,0};
  int numParticlesHighWord[6]={0};
  int numFiles = 1;
  struct part* parts = e->s->parts;
  FILE* xmfFile = 0;
  static int outputCount = 0;
  
  /* File name */
  char fileName[200];
  sprintf(fileName, "output_%03i.hdf5", outputCount);

  /* First time, we need to create the XMF file */
  if(outputCount == 0)
    createXMFfile();
  
  /* Prepare the XMF file for the new entry */
  xmfFile = prepareXMFfile();

  /* Write the part corresponding to this specific output */
  writeXMFheader(xmfFile, N, fileName, e->time);

  /* Open file */
  /* message("Opening file '%s'.", fileName); */
  h_file = H5Fcreate(fileName, H5F_ACC_TRUNC, H5P_DEFAULT,H5P_DEFAULT);
  if(h_file < 0)
    {
      error( "Error while opening file '%s'." , fileName );
    }

  /* Open header to write simulation properties */
  /* message("Writing runtime parameters..."); */
  h_grp = H5Gcreate1(h_file, "/RuntimePars", 0);
  if(h_grp < 0)
    error("Error while creating runtime parameters group\n");

  /* Write the relevant information */
  writeAttribute(h_grp, "PeriodicBoundariesOn", INT, &periodic, 1);

  /* Close runtime parameters */
  H5Gclose(h_grp);
  
  /* Open header to write simulation properties */
  /* message("Writing file header..."); */
  h_grp = H5Gcreate1(h_file, "/Header", 0);
  if(h_grp < 0)
    error("Error while creating file header\n");
    
  /* Write the relevant information and print status */
  writeAttribute(h_grp, "BoxSize", DOUBLE, e->s->dim, 3);
  writeAttribute(h_grp, "NumPart_ThisFile", UINT, numParticles, 6);
  writeAttribute(h_grp, "Time", DOUBLE, &e->time, 1);

  /* GADGET-2 legacy values */
  writeAttribute(h_grp, "NumPart_Total", UINT, numParticles, 6);
  writeAttribute(h_grp, "NumPart_Total_HighWord", UINT, numParticlesHighWord, 6);
  double MassTable[6] = {0., 0., 0., 0., 0., 0.};
  writeAttribute(h_grp, "MassTable", DOUBLE, MassTable, 6);
  writeAttribute(h_grp, "Flag_Entropy_ICs", UINT, numParticlesHighWord, 6);
  writeAttribute(h_grp, "NumFilesPerSnapshot", INT, &numFiles, 1);

  /* Close header */
  H5Gclose(h_grp);

  /* Print the SPH parameters */
  writeSPHflavour(h_file);

  /* Print the system of Units */
  writeUnitSystem(h_file, us);

  /* Create SPH particles group */
  /* message("Writing particle arrays..."); */
  h_grp = H5Gcreate1(h_file, "/PartType0", 0);
  if(h_grp < 0)
    error( "Error while creating particle group.\n");

  /* Write arrays */
  writeArray(h_grp, fileName, xmfFile, "Coordinates", DOUBLE, N, 3, parts, x, us, UNIT_CONV_LENGTH);
  writeArray(h_grp, fileName, xmfFile, "Velocities", FLOAT, N, 3, parts, v, us, UNIT_CONV_SPEED);
  writeArray(h_grp, fileName, xmfFile, "Masses", FLOAT, N, 1, parts, mass, us, UNIT_CONV_MASS);
  writeArray(h_grp, fileName, xmfFile, "SmoothingLength", FLOAT, N, 1, parts, h, us, UNIT_CONV_LENGTH);
  writeArray(h_grp, fileName, xmfFile, "InternalEnergy", FLOAT, N, 1, parts, u, us, UNIT_CONV_ENERGY_PER_UNIT_MASS);
  writeArray(h_grp, fileName, xmfFile, "ParticleIDs", ULONGLONG, N, 1, parts, id, us, UNIT_CONV_NO_UNITS);
  writeArray(h_grp, fileName, xmfFile, "TimeStep", FLOAT, N, 1, parts, dt, us, UNIT_CONV_TIME);
  writeArray(h_grp, fileName, xmfFile, "Acceleration", FLOAT, N, 3, parts, a, us, UNIT_CONV_ACCELERATION);
  writeArray(h_grp, fileName, xmfFile, "Density", FLOAT, N, 1, parts, rho, us, UNIT_CONV_DENSITY);

  /* Close particle group */
  H5Gclose(h_grp);

  /* Write XMF file descriptor */
  writeXMFfooter(xmfFile);

  /* message("Done writing particles..."); */

  /* Close file */
  H5Fclose(h_file);

  ++outputCount;
}
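
/* Example usage (an illustrative sketch only; e and us are assumed to be a
 * fully initialised engine and the output UnitSystem):
 *
 *   write_output(e, us);
 */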

#endif  /* HAVE_HDF5 && !WITH_MPI */