Commit 761fd975 authored by Matthieu Schaller's avatar Matthieu Schaller
Browse files

Simplify the construction of the i/o properties

parent bbdac8f8
......@@ -9,7 +9,7 @@ InternalUnitSystem:
# Parameters governing the time integration
TimeIntegration:
time_begin: 0. # The starting time of the simulation (in internal units).
time_end: 0.01 # The end time of the simulation (in internal units).
dt_min: 1e-6 # The minimal time-step size of the simulation (in internal units).
dt_max: 1e-3 # The maximal time-step size of the simulation (in internal units).
......
......@@ -38,3 +38,11 @@ SPH:
# Parameters related to the initial conditions
InitialConditions:
file_name: ./uniformBox.hdf5 # The file to read
# External potential parameters
PointMass:
position_x: 50. # location of external point mass in internal units
position_y: 50.
position_z: 50.
mass: 1e10 # mass of external point mass in internal units
......@@ -652,4 +652,25 @@ void collect_dm_gparts(const struct gpart* const gparts, size_t Ntot,
count, Ndm);
}
/**
* @brief Construct an #io_props from its parameters
*/
struct io_props io_make_input_field_(char name[FIELD_BUFFER_SIZE],
enum DATA_TYPE type, int dimension,
enum DATA_IMPORTANCE importance,
enum UnitConversionFactor units,
char* field, size_t partSize) {
struct io_props r;
strcpy(r.name, name);
r.type = type;
r.dimension = dimension;
r.importance = importance;
r.units = units;
r.field = field;
r.partSize = partSize;
return r;
}
#endif
......@@ -69,6 +69,7 @@ enum PARTICLE_TYPE {
extern const char* particle_type_names[];
#define FILENAME_BUFFER_SIZE 150
#define FIELD_BUFFER_SIZE 50
#define PARTICLE_GROUP_BUFFER_SIZE 20
hid_t hdf5Type(enum DATA_TYPE type);
......@@ -109,6 +110,46 @@ void readUnitSystem(hid_t h_file, struct UnitSystem* us);
void writeUnitSystem(hid_t h_grp, const struct UnitSystem* us,
const char* groupName);
/**
 * @brief The properties of a given dataset for i/o
 *
 * Bundles everything needed to read (or write) one particle field: the
 * HDF5 dataset name, its type and dimension, whether it is compulsory on
 * input, its unit conversion factor, and where the member lives inside
 * the particle array (base pointer of the first particle's member plus
 * the per-particle stride).
 */
struct io_props {

/* Name of the dataset in the HDF5 file */
char name[FIELD_BUFFER_SIZE];

/* Type of the field */
enum DATA_TYPE type;

/* Dimension (1D, 3D, ...) */
int dimension;

/* Is it compulsory ? (input only) */
enum DATA_IMPORTANCE importance;

/* Units of the quantity */
enum UnitConversionFactor units;

/* Pointer to the field of the first particle in the array */
char* field;

/* Size in bytes of one particle, i.e. the stride between two
 * consecutive occurrences of the field in the array */
size_t partSize;
};
/**
 * @brief Constructs an #io_props from its attributes
 *
 * Convenience wrapper around io_make_input_field_(): given the particle
 * array `part` and the member name `field`, it derives the address of the
 * member in the first particle and the per-particle stride
 * (sizeof(part[0])) automatically.
 */
#define io_make_input_field(name, type, dim, importance, units, part, field) \
io_make_input_field_(name, type, dim, importance, units, \
(char*)(&(part[0]).field), sizeof(part[0]))

/* Worker function behind the io_make_input_field() macro; prefer the
 * macro, which computes the field pointer and stride for you. */
struct io_props io_make_input_field_(char name[FIELD_BUFFER_SIZE],
enum DATA_TYPE type, int dimension,
enum DATA_IMPORTANCE importance,
enum UnitConversionFactor units,
char* field, size_t partSize);
#endif /* defined HDF5 */
#endif /* SWIFT_COMMON_IO_H */
......@@ -35,15 +35,35 @@ __attribute__((always_inline)) INLINE static void darkmatter_read_particles(
struct gpart* gparts, const struct UnitSystem* internal_units,
struct UnitSystem* ic_units) {
const int num_fields = 4;
struct io_props list[num_fields];
/* List what we want to read */
list[0] = io_make_input_field("Coordinates", DOUBLE, 3, COMPULSORY,
UNIT_CONV_LENGTH, gparts, x);
list[1] = io_make_input_field("Velocities", FLOAT, 3, COMPULSORY,
UNIT_CONV_SPEED, gparts, v_full);
list[2] = io_make_input_field("Masses", FLOAT, 1, COMPULSORY, UNIT_CONV_MASS,
gparts, mass);
list[3] = io_make_input_field("ParticleIDs", ULONGLONG, 1, COMPULSORY,
UNIT_CONV_NO_UNITS, gparts, id);
/* Read arrays */
readArray(h_grp, "Coordinates", DOUBLE, N, 3, gparts, N_total, offset, x,
COMPULSORY, internal_units, ic_units, UNIT_CONV_LENGTH);
readArray(h_grp, "Masses", FLOAT, N, 1, gparts, N_total, offset, mass,
COMPULSORY, internal_units, ic_units, UNIT_CONV_MASS);
readArray(h_grp, "Velocities", FLOAT, N, 3, gparts, N_total, offset, v_full,
COMPULSORY, internal_units, ic_units, UNIT_CONV_SPEED);
readArray(h_grp, "ParticleIDs", ULONGLONG, N, 1, gparts, N_total, offset, id,
COMPULSORY, internal_units, ic_units, UNIT_CONV_NO_UNITS);
/* readArray(h_grp, "Coordinates", DOUBLE, N, 3, gparts, N_total, offset, x,
*/
/* COMPULSORY, internal_units, ic_units, UNIT_CONV_LENGTH); */
/* readArray(h_grp, "Masses", FLOAT, N, 1, gparts, N_total, offset, mass, */
/* COMPULSORY, internal_units, ic_units, UNIT_CONV_MASS); */
/* readArray(h_grp, "Velocities", FLOAT, N, 3, gparts, N_total, offset,
* v_full, */
/* COMPULSORY, internal_units, ic_units, UNIT_CONV_SPEED); */
/* readArray(h_grp, "ParticleIDs", ULONGLONG, N, 1, gparts, N_total, offset,
* id, */
/* COMPULSORY, internal_units, ic_units, UNIT_CONV_NO_UNITS); */
/* And read everything */
for (int i = 0; i < num_fields; ++i)
readArray(h_grp, list[i], N, N_total, offset, internal_units, ic_units);
}
/**
......
......@@ -34,23 +34,52 @@ __attribute__((always_inline)) INLINE static void hydro_read_particles(
hid_t h_grp, int N, long long N_total, long long offset, struct part* parts,
const struct UnitSystem* internal_units, struct UnitSystem* ic_units) {
const int num_fields = 8;
struct io_props list[num_fields];
/* List what we want to read */
list[0] = io_make_input_field("Coordinates", DOUBLE, 3, COMPULSORY,
UNIT_CONV_LENGTH, parts, x);
list[1] = io_make_input_field("Velocities", FLOAT, 3, COMPULSORY,
UNIT_CONV_SPEED, parts, v);
list[2] = io_make_input_field("Masses", FLOAT, 1, COMPULSORY, UNIT_CONV_MASS,
parts, mass);
list[3] = io_make_input_field("SmoothingLength", FLOAT, 1, COMPULSORY,
UNIT_CONV_LENGTH, parts, h);
list[4] = io_make_input_field("InternalEnergy", FLOAT, 1, COMPULSORY,
UNIT_CONV_ENERGY, parts, entropy);
list[5] = io_make_input_field("ParticleIDs", ULONGLONG, 1, COMPULSORY,
UNIT_CONV_NO_UNITS, parts, id);
list[6] = io_make_input_field("Accelerations", FLOAT, 3, OPTIONAL,
UNIT_CONV_ACCELERATION, parts, a_hydro);
list[7] = io_make_input_field("Density", FLOAT, 1, OPTIONAL,
UNIT_CONV_DENSITY, parts, rho);
/* Read arrays */
readArray(h_grp, "Coordinates", DOUBLE, N, 3, parts, N_total, offset, x,
COMPULSORY, internal_units, ic_units, UNIT_CONV_LENGTH);
readArray(h_grp, "Velocities", FLOAT, N, 3, parts, N_total, offset, v,
COMPULSORY, internal_units, ic_units, UNIT_CONV_SPEED);
readArray(h_grp, "Masses", FLOAT, N, 1, parts, N_total, offset, mass,
COMPULSORY, internal_units, ic_units, UNIT_CONV_MASS);
readArray(h_grp, "SmoothingLength", FLOAT, N, 1, parts, N_total, offset, h,
COMPULSORY, internal_units, ic_units, UNIT_CONV_LENGTH);
readArray(h_grp, "InternalEnergy", FLOAT, N, 1, parts, N_total, offset,
entropy, COMPULSORY, internal_units, ic_units, UNIT_CONV_ENERGY);
readArray(h_grp, "ParticleIDs", ULONGLONG, N, 1, parts, N_total, offset, id,
COMPULSORY, internal_units, ic_units, UNIT_CONV_NO_UNITS);
readArray(h_grp, "Acceleration", FLOAT, N, 3, parts, N_total, offset, a_hydro,
OPTIONAL, internal_units, ic_units, UNIT_CONV_ACCELERATION);
readArray(h_grp, "Density", FLOAT, N, 1, parts, N_total, offset, rho,
OPTIONAL, internal_units, ic_units, UNIT_CONV_DENSITY);
/* readArray(h_grp, "Coordinates", DOUBLE, N, 3, parts, N_total, offset, x, */
/* COMPULSORY, internal_units, ic_units, UNIT_CONV_LENGTH); */
/* readArray(h_grp, "Velocities", FLOAT, N, 3, parts, N_total, offset, v, */
/* COMPULSORY, internal_units, ic_units, UNIT_CONV_SPEED); */
/* readArray(h_grp, "Masses", FLOAT, N, 1, parts, N_total, offset, mass, */
/* COMPULSORY, internal_units, ic_units, UNIT_CONV_MASS); */
/* readArray(h_grp, "SmoothingLength", FLOAT, N, 1, parts, N_total, offset, h,
*/
/* COMPULSORY, internal_units, ic_units, UNIT_CONV_LENGTH); */
/* readArray(h_grp, "InternalEnergy", FLOAT, N, 1, parts, N_total, offset, */
/* entropy, COMPULSORY, internal_units, ic_units, UNIT_CONV_ENERGY);
*/
/* readArray(h_grp, "ParticleIDs", ULONGLONG, N, 1, parts, N_total, offset,
* id, */
/* COMPULSORY, internal_units, ic_units, UNIT_CONV_NO_UNITS); */
/* readArray(h_grp, "Acceleration", FLOAT, N, 3, parts, N_total, offset,
* a_hydro, */
/* OPTIONAL, internal_units, ic_units, UNIT_CONV_ACCELERATION); */
/* readArray(h_grp, "Density", FLOAT, N, 1, parts, N_total, offset, rho, */
/* OPTIONAL, internal_units, ic_units, UNIT_CONV_DENSITY); */
/* And read everything */
for (int i = 0; i < num_fields; ++i)
readArray(h_grp, list[i], N, N_total, offset, internal_units, ic_units);
}
/**
......@@ -78,30 +107,39 @@ __attribute__((always_inline)) INLINE static void hydro_write_particles(
const struct UnitSystem* snapshot_units) {

/* Writes the hydro particle arrays to the HDF5 group, converting each
 * quantity from internal units to the snapshot unit system.
 * NOTE(review): this writer still uses the old per-field writeArray()
 * macro while the reader side was converted to io_props lists —
 * presumably this function is converted next; confirm.
 * NOTE(review): the dataset is named "Acceleration" here but
 * "Accelerations" in the reader — confirm which is intended. */

/* Write arrays */
writeArray(h_grp, fileName, xmfFile, partTypeGroupName, "Coordinates", DOUBLE,
N, 3, parts, N_total, mpi_rank, offset, x, internal_units,
snapshot_units, UNIT_CONV_LENGTH);
writeArray(h_grp, fileName, xmfFile, partTypeGroupName, "Velocities", FLOAT,
N, 3, parts, N_total, mpi_rank, offset, v, internal_units,
snapshot_units, UNIT_CONV_SPEED);
writeArray(h_grp, fileName, xmfFile, partTypeGroupName, "Masses", FLOAT, N, 1,
parts, N_total, mpi_rank, offset, mass, internal_units,
snapshot_units, UNIT_CONV_MASS);
writeArray(h_grp, fileName, xmfFile, partTypeGroupName, "SmoothingLength",
FLOAT, N, 1, parts, N_total, mpi_rank, offset, h, internal_units,
snapshot_units, UNIT_CONV_LENGTH);
writeArray(h_grp, fileName, xmfFile, partTypeGroupName, "Entropy", FLOAT, N,
1, parts, N_total, mpi_rank, offset, entropy, internal_units,
snapshot_units, UNIT_CONV_ENTROPY_PER_UNIT_MASS);
writeArray(h_grp, fileName, xmfFile, partTypeGroupName, "ParticleIDs",
ULONGLONG, N, 1, parts, N_total, mpi_rank, offset, id,
internal_units, snapshot_units, UNIT_CONV_NO_UNITS);
writeArray(h_grp, fileName, xmfFile, partTypeGroupName, "Acceleration", FLOAT,
N, 3, parts, N_total, mpi_rank, offset, a_hydro, internal_units,
snapshot_units, UNIT_CONV_ACCELERATION);
writeArray(h_grp, fileName, xmfFile, partTypeGroupName, "Density", FLOAT, N,
1, parts, N_total, mpi_rank, offset, rho, internal_units,
snapshot_units, UNIT_CONV_DENSITY);
}
/**
......
......@@ -67,41 +67,48 @@
*the part array
* will be written once the structures have been stabilized.
*/
void readArrayBackEnd(hid_t grp, char* name, enum DATA_TYPE type, int N,
int dim, char* part_c, size_t partSize,
enum DATA_IMPORTANCE importance,
const struct UnitSystem* internal_units,
const struct UnitSystem* ic_units,
enum UnitConversionFactor convFactor) {
const size_t typeSize = sizeOfType(type);
const size_t copySize = typeSize * dim;
const size_t num_elements = N * dim;
/**
 * @brief Reads a data array described by an #io_props from an HDF5 group
 * into the particle array.
 *
 * If the dataset is absent and the field is #OPTIONAL, the particle field
 * is zeroed; if it is #COMPULSORY, an error is raised. Values are
 * converted from the IC unit system to the internal one.
 *
 * @param h_grp The HDF5 group containing the dataset.
 * @param prop The #io_props describing the field to read.
 * @param N Number of particles to read on this node.
 * @param N_total Total number of particles (apparently unused in this
 *        variant — presumably kept for a common signature with the
 *        MPI versions; confirm).
 * @param offset Offset of this node's particles (see note above).
 * @param internal_units The internal unit system.
 * @param ic_units The unit system of the ICs file.
 */
void readArray(hid_t h_grp, const struct io_props prop, int N,
               long long N_total, long long offset,
               const struct UnitSystem* internal_units,
               const struct UnitSystem* ic_units) {

  const size_t typeSize = sizeOfType(prop.type);
  const size_t copySize = typeSize * prop.dimension;
  const size_t num_elements = N * prop.dimension;

  /* Check whether the dataspace exists or not */
  const htri_t exist = H5Lexists(h_grp, prop.name, 0);

  if (exist < 0) {
    error("Error while checking the existence of data set '%s'.", prop.name);
  } else if (exist == 0) {
    if (prop.importance == COMPULSORY) {
      error("Compulsory data set '%s' not present in the file.", prop.name);
    } else {
      /* Optional field absent from the ICs: zero it for every particle,
       * stepping through the array with the particle stride. */
      for (int i = 0; i < N; ++i)
        memset(prop.field + i * prop.partSize, 0, copySize);
      return;
    }
  }

  message("Reading %s '%s' array...",
          prop.importance == COMPULSORY ? "compulsory" : "optional ",
          prop.name);

  /* Open data space */
  const hid_t h_data = H5Dopen(h_grp, prop.name, H5P_DEFAULT);
  if (h_data < 0) {
    error("Error while opening data space '%s'.", prop.name);
  }
/* Check data type */
......@@ -118,19 +125,19 @@ void readArrayBackEnd(hid_t grp, char* name, enum DATA_TYPE type, int N,
/* Dirty version that happens to work for vectors but should be improved */
/* Using HDF5 dataspaces would be better */
const hid_t h_err =
H5Dread(h_data, hdf5Type(type), H5S_ALL, H5S_ALL, H5P_DEFAULT, temp);
H5Dread(h_data, hdf5Type(prop.type), H5S_ALL, H5S_ALL, H5P_DEFAULT, temp);
if (h_err < 0) {
error("Error while reading data array '%s'.", name);
error("Error while reading data array '%s'.", prop.name);
}
/* Unit conversion if necessary */
const double factor =
units_conversion_factor(ic_units, internal_units, convFactor);
units_conversion_factor(ic_units, internal_units, prop.units);
if (factor != 1. && exist != 0) {
message("aaa");
if (isDoublePrecision(type)) {
if (isDoublePrecision(prop.type)) {
double* temp_d = temp;
for (int i = 0; i < num_elements; ++i) temp_d[i] *= factor;
} else {
......@@ -142,7 +149,7 @@ void readArrayBackEnd(hid_t grp, char* name, enum DATA_TYPE type, int N,
/* Copy temporary buffer to particle data, advancing by the particle
 * stride so each particle receives its own copy of the field. */
char* temp_c = temp;
for (int i = 0; i < N; ++i)
  memcpy(prop.field + i * prop.partSize, &temp_c[i * copySize], copySize);

/* Free and close everything */
free(temp);
......@@ -280,7 +287,7 @@ void writeArrayBackEnd(hid_t grp, char* fileName, FILE* xmfFile,
writeXMFline(xmfFile, fileName, partTypeGroupName, name, N, dim, type);

/* Write unit conversion factors for this data set */
/* NOTE(review): buffer was shrunk from FILENAME_BUFFER_SIZE (150) to
 * FIELD_BUFFER_SIZE (50) — confirm units_cgs_conversion_string() never
 * produces a longer string. */
char buffer[FIELD_BUFFER_SIZE];
units_cgs_conversion_string(buffer, snapshot_units, convFactor);
writeAttribute_d(h_data, "CGS conversion factor",
                 units_cgs_conversion_factor(snapshot_units, convFactor));
......@@ -315,11 +322,15 @@ void writeArrayBackEnd(hid_t grp, char* fileName, FILE* xmfFile,
* @param convFactor The UnitConversionFactor for this array
*
*/
/* The old 14-argument readArray() helper macro has been removed: it would
 * collide with (and shadow calls to) the new readArray() function, which
 * takes an #io_props describing the field instead of the unpacked
 * name/type/dim/field/importance arguments. */
/**
* @brief A helper macro to call the readArrayBackEnd function more easily.
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment