diff --git a/doc/RTD/source/FriendsOfFriends/on_the_fly_fof.rst b/doc/RTD/source/FriendsOfFriends/on_the_fly_fof.rst
index ea0965d33b53c1bce974709cf1213e9301012780..9961cca88366e398bb05dace29633a7293a6ee77 100644
--- a/doc/RTD/source/FriendsOfFriends/on_the_fly_fof.rst
+++ b/doc/RTD/source/FriendsOfFriends/on_the_fly_fof.rst
@@ -13,6 +13,9 @@ based on physical considerations.
 **In this mode, no group catalogue is written to the disk. The resulting list
 of haloes is only used internally by SWIFT.**
 
+Note that a catalogue can nevertheless be written after every seeding call by
+setting the optional parameter ``dump_catalogue_when_seeding`` to 1, as shown
+in the example below.
+
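+A minimal sketch of the relevant ``FOF`` parameter section (the values shown
+are illustrative; only ``dump_catalogue_when_seeding`` is new here)::
+
+   FOF:
+     min_group_size:                  256
+     linking_length_ratio:            0.2
+     seed_black_holes_enabled:        1
+     black_hole_seed_halo_mass_Msun:  1.5e10
+     dump_catalogue_when_seeding:     1     # Also dump a catalogue at each seeding call
+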
 Once the haloes have been identified by the FOF code, SWIFT will iterate
 over the list of groups and will check whether each halo obeys the
 following criteria:
diff --git a/doc/RTD/source/FriendsOfFriends/parameter_description.rst b/doc/RTD/source/FriendsOfFriends/parameter_description.rst
index e3a6bf2a0803b4688ec29748ebc1e82807b9b801..934d0c57c31f245f16e11f7e67ff1e6b76d74414 100644
--- a/doc/RTD/source/FriendsOfFriends/parameter_description.rst
+++ b/doc/RTD/source/FriendsOfFriends/parameter_description.rst
@@ -57,9 +57,16 @@ halo (group) mass considered for seeding black holes. This is specified by
 the parameter ``black_hole_seed_halo_mass_Msun`` which is expressed in
 solar masses.
 
-Currently the only way to invoke FOF on the fly for purposes other than
-black hole seeding is to set the ``invoke_fof`` parameter in the ``Snapshots``
-section of the parameter file.
+There are two ways to invoke FOF on the fly for purposes other than black hole
+seeding:
+
+The first option is to switch on the ``invoke_fof`` parameter in the
+``Snapshots`` section of the parameter file. This will produce a catalogue
+every time the code writes a snapshot.
+
+The second option is to set ``dump_catalogue_when_seeding`` in the ``FOF``
+section. This will force the code to write a catalogue every time the black
+hole seeding code is run.
 
 ------------------------
 
@@ -93,6 +100,7 @@ A full FOF section of the YAML parameter file looks like:
        min_group_size:                  256         # The minimum no. of particles required for a group.
        linking_length_ratio:            0.2         # Linking length in units of the main inter-particle separation.
        black_hole_seed_halo_mass_Msun:  1.5e10      # Minimal halo mass in which to seed a black hole (in solar masses).
+       dump_catalogue_when_seeding:     0           # (Optional) Write a FOF catalogue when seeding black holes. Defaults to 0 if unspecified.
        absolute_linking_length:         -1.         # (Optional) Absolute linking length (in internal units).
        group_id_default:                2147483647  # (Optional) Sets the group ID of particles in groups below the minimum size.
        group_id_offset:                 1           # (Optional) Sets the offset of group ID labelling. Defaults to 1 if unspecified.
diff --git a/examples/main.c b/examples/main.c
index ebd9d420e7d14b82db30284f1debb782a53ce01d..b0ffbc50951d57e8b19b723be94c93bf1e3d590f 100644
--- a/examples/main.c
+++ b/examples/main.c
@@ -1507,7 +1507,8 @@ int main(int argc, char *argv[]) {
 
       /* Run FoF first, if we're adding FoF info to the snapshot */
       if (with_fof && e.snapshot_invoke_fof) {
-        engine_fof(&e, /*dump_results=*/0, /*seed_black_holes=*/0);
+        engine_fof(&e, /*dump_results=*/1, /*dump_debug=*/0,
+                   /*seed_black_holes=*/0);
       }
 
       engine_dump_snapshot(&e);
@@ -1696,7 +1697,7 @@ int main(int argc, char *argv[]) {
   }
 
   /* Write final output. */
-  if (!force_stop && nsteps == -2) {
+  if (!force_stop) {
 
     /* Move forward in time */
     e.ti_old = e.ti_current;
@@ -1737,7 +1738,8 @@ int main(int argc, char *argv[]) {
         !e.output_list_snapshots) {
 
       if (with_fof && e.snapshot_invoke_fof) {
-        engine_fof(&e, /*dump_results=*/0, /*seed_black_holes=*/0);
+        engine_fof(&e, /*dump_results=*/1, /*dump_debug=*/0,
+                   /*seed_black_holes=*/0);
       }
 
 #ifdef HAVE_VELOCIRAPTOR
diff --git a/examples/main_fof.c b/examples/main_fof.c
index 919b5868e5b81c9777d66a4953eb6a55d0f5d9cc..9002dde186f24be76100535c189c1f019e36c8ee 100644
--- a/examples/main_fof.c
+++ b/examples/main_fof.c
@@ -695,7 +695,7 @@ int main(int argc, char *argv[]) {
 #endif
 
   /* Perform the FOF search */
-  engine_fof(&e, /*dump_results=*/1, /*seed_black_holes=*/0);
+  engine_fof(&e, /*dump_results=*/1, /*dump_debug=*/0, /*seed_black_holes=*/0);
 
   /* Write output. */
   engine_dump_snapshot(&e);
diff --git a/examples/parameter_example.yml b/examples/parameter_example.yml
index e04380288659dda7ae174efc14b2242cd0c61b44..2f50e067f9501b07f63de4559837dad267f021ff 100644
--- a/examples/parameter_example.yml
+++ b/examples/parameter_example.yml
@@ -108,6 +108,7 @@ FOF:
   min_group_size:                  256         # The minimum no. of particles required for a group.
   linking_length_ratio:            0.2         # Linking length in units of the main inter-particle separation.
   seed_black_holes_enabled:        1           # Enable (1) or disable (0) seeding of black holes in FoF groups
+  dump_catalogue_when_seeding:     1           # Enable (1) or disable (0) dumping a group catalogue when running FOF for seeding purposes.
   black_hole_seed_halo_mass_Msun:  1.5e10      # Minimal halo mass in which to seed a black hole (in solar masses).
   absolute_linking_length:         -1.         # (Optional) Absolute linking length (in internal units). When not set to -1, this will overwrite the linking length computed from 'linking_length_ratio'.
   group_id_default:                2147483647  # (Optional) Sets the group ID of particles in groups below the minimum size. Defaults to 2^31 - 1 if unspecified. Has to be positive.
diff --git a/src/Makefile.am b/src/Makefile.am
index 2b477b203feab542b69f52dd202089687a043722..7f127064436d052924c452447ecde44dcd80821f 100644
--- a/src/Makefile.am
+++ b/src/Makefile.am
@@ -52,7 +52,8 @@ include_HEADERS += dump.h logger.h active.h timeline.h xmf.h gravity_properties.
 include_HEADERS += gravity_softened_derivatives.h vector_power.h collectgroup.h hydro_space.h sort_part.h 
 include_HEADERS += chemistry.h chemistry_io.h chemistry_struct.h cosmology.h restart.h space_getsid.h utilities.h 
 include_HEADERS += mesh_gravity.h cbrt.h exp10.h velociraptor_interface.h swift_velociraptor_part.h output_list.h 
-include_HEADERS += logger_io.h tracers_io.h tracers.h tracers_struct.h star_formation_io.h fof.h fof_struct.h fof_io.h 
+include_HEADERS += logger_io.h tracers_io.h tracers.h tracers_struct.h star_formation_io.h
+include_HEADERS += fof.h fof_struct.h fof_io.h fof_catalogue_io.h
 include_HEADERS += multipole.h multipole_accept.h multipole_struct.h binomial.h integer_power.h sincos.h 
 include_HEADERS += star_formation_struct.h star_formation.h star_formation_iact.h 
 include_HEADERS += star_formation_logger.h star_formation_logger_struct.h 
@@ -173,7 +174,8 @@ AM_SOURCES += statistics.c profiler.c dump.c logger.c part_type.c
 AM_SOURCES += gravity_properties.c gravity.c multipole.c 
 AM_SOURCES += collectgroup.c hydro_space.c equation_of_state.c io_compression.c 
 AM_SOURCES += chemistry.c cosmology.c mesh_gravity.c velociraptor_interface.c 
-AM_SOURCES += output_list.c velociraptor_dummy.c logger_io.c memuse.c mpiuse.c memuse_rnodes.c fof.c 
+AM_SOURCES += output_list.c velociraptor_dummy.c logger_io.c memuse.c mpiuse.c memuse_rnodes.c
+AM_SOURCES += fof.c fof_catalogue_io.c
 AM_SOURCES += hashmap.c pressure_floor.c logger_history.c
 AM_SOURCES += $(QLA_COOLING_SOURCES) 
 AM_SOURCES += $(EAGLE_COOLING_SOURCES) $(EAGLE_FEEDBACK_SOURCES) 
diff --git a/src/common_io.c b/src/common_io.c
index e65584df067caef5423645ae1aa76c9ddefec48d..222b2860387817bef2c464e3615ce45dab5ca918 100644
--- a/src/common_io.c
+++ b/src/common_io.c
@@ -477,6 +477,16 @@ void io_write_attribute_l(hid_t grp, const char* name, long data) {
   io_write_attribute(grp, name, LONG, &data, 1);
 }
 
+/**
+ * @brief Writes a long long value as an attribute
+ * @param grp The group in which to write
+ * @param name The name of the attribute
+ * @param data The value to write
+ */
+void io_write_attribute_ll(hid_t grp, const char* name, long long data) {
+  io_write_attribute(grp, name, LONGLONG, &data, 1);
+}
+
 /**
  * @brief Writes a string value as an attribute
  * @param grp The group in which to write
diff --git a/src/common_io.h b/src/common_io.h
index 3233df006128e765bc262fdfbfbc330f21c27736..05889edc2c4ae213c2d00bd793683f98eeb4c5c5 100644
--- a/src/common_io.h
+++ b/src/common_io.h
@@ -95,6 +95,7 @@ void io_write_attribute_d(hid_t grp, const char* name, double data);
 void io_write_attribute_f(hid_t grp, const char* name, float data);
 void io_write_attribute_i(hid_t grp, const char* name, int data);
 void io_write_attribute_l(hid_t grp, const char* name, long data);
+void io_write_attribute_ll(hid_t grp, const char* name, long long data);
 void io_write_attribute_s(hid_t grp, const char* name, const char* str);
 
 void io_write_meta_data(hid_t h_file, const struct engine* e,
diff --git a/src/engine.c b/src/engine.c
index 4d098bad9a1ee5e377278d6d1dd28101e7277190..f59f328e33f4d0b63156be3139ea006f8c44a653 100644
--- a/src/engine.c
+++ b/src/engine.c
@@ -1375,7 +1375,10 @@ int engine_prepare(struct engine *e) {
     engine_drift_all(e, /*drift_mpole=*/0);
     drifted_all = 1;
 
-    engine_fof(e, /*dump_results=*/0, /*seed_black_holes=*/1);
+    engine_fof(e, e->dump_catalogue_when_seeding, /*dump_debug=*/0,
+               /*seed_black_holes=*/1);
+
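+    /* If a catalogue was written, move on to the next output number. */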
+    if (e->dump_catalogue_when_seeding) e->snapshot_output_count++;
   }
 
   /* Perform particle splitting. Only if there is a rebuild coming and no
@@ -2838,6 +2841,8 @@ void engine_init(
       parser_get_opt_param_int(params, "Snapshots:invoke_stf", 0);
   e->snapshot_invoke_fof =
       parser_get_opt_param_int(params, "Snapshots:invoke_fof", 0);
+  e->dump_catalogue_when_seeding =
+      parser_get_opt_param_int(params, "FOF:dump_catalogue_when_seeding", 0);
   e->snapshot_units = (struct unit_system *)malloc(sizeof(struct unit_system));
   units_init_default(e->snapshot_units, params, "Snapshots", internal_units);
   e->snapshot_output_count = 0;
diff --git a/src/engine.h b/src/engine.h
index f264f6fa86164d3d9186bf504765399fad5c27f9..d0afe37fb209d9926a2d8cc7575434042c4092bf 100644
--- a/src/engine.h
+++ b/src/engine.h
@@ -350,6 +350,7 @@ struct engine {
 
   /* FOF information */
   int run_fof;
+  int dump_catalogue_when_seeding;
 
   /* Statistics information */
   double a_first_statistics;
@@ -625,7 +626,7 @@ void engine_clean(struct engine *e, const int fof, const int restart);
 int engine_estimate_nr_tasks(const struct engine *e);
 void engine_print_task_counts(const struct engine *e);
 void engine_fof(struct engine *e, const int dump_results,
-                const int seed_black_holes);
+                const int dump_debug_results, const int seed_black_holes);
 void engine_activate_gpart_comms(struct engine *e);
 
 /* Function prototypes, engine_maketasks.c. */
diff --git a/src/engine_fof.c b/src/engine_fof.c
index 1f3a55595907aaa8e9c4d058f8d90e7067262751..effe972261de59ffd29cafe23c2599833ee56187 100644
--- a/src/engine_fof.c
+++ b/src/engine_fof.c
@@ -102,10 +102,12 @@ void engine_activate_fof_tasks(struct engine *e) {
  *
  * @param e the engine
  * @param dump_results Are we writing group catalogues to output files?
+ * @param dump_debug_results Are we writing a txt-file debug catalogue
+ * (including BH seed info)?
  * @param seed_black_holes Are we seeding black holes?
  */
 void engine_fof(struct engine *e, const int dump_results,
-                const int seed_black_holes) {
+                const int dump_debug_results, const int seed_black_holes) {
 
 #ifdef WITH_FOF
 
@@ -137,7 +139,7 @@ void engine_fof(struct engine *e, const int dump_results,
    * find groups which require black hole seeding.  */
   fof_search_tree(e->fof_properties, e->black_holes_properties,
                   e->physical_constants, e->cosmology, e->s, dump_results,
-                  seed_black_holes);
+                  dump_debug_results, seed_black_holes);
 
   /* Reset flag. */
   e->run_fof = 0;
diff --git a/src/engine_io.c b/src/engine_io.c
index d927f9eff835f876a6af37e1f9c126244e6a06b0..794a9c1d490234fca5d31dc36de02c983a908a55 100644
--- a/src/engine_io.c
+++ b/src/engine_io.c
@@ -384,7 +384,8 @@ void engine_check_for_dumps(struct engine *e) {
 
         /* Do we want FoF group IDs in the snapshot? */
         if (with_fof && e->snapshot_invoke_fof) {
-          engine_fof(e, /*dump_results=*/0, /*seed_black_holes=*/0);
+          engine_fof(e, /*dump_results=*/1, /*dump_debug=*/0,
+                     /*seed_black_holes=*/0);
         }
 
         /* Free the foreign particles to get more breathing space.
diff --git a/src/fof.c b/src/fof.c
index 33c6d8219276de087e0e278b7dd2c3a577af95fe..cff01b0f1091d003391c8b9d3d9ffaecde0fe5d7 100644
--- a/src/fof.c
+++ b/src/fof.c
@@ -39,6 +39,7 @@
 #include "black_holes.h"
 #include "common_io.h"
 #include "engine.h"
+#include "fof_catalogue_io.h"
 #include "hashmap.h"
 #include "memuse.h"
 #include "proxy.h"
@@ -1420,22 +1421,32 @@ void fof_unpack_group_mass_mapper(hashmap_key_t key, hashmap_value_t *value,
 
   /* Store elements from hash table in array. */
   mass_send[*nsend].global_root = key;
-  mass_send[(*nsend)].group_mass = value->value_dbl;
-  mass_send[(*nsend)].max_part_density_index = value->value_st;
-  mass_send[(*nsend)++].max_part_density = value->value_flt;
+  mass_send[*nsend].group_mass = value->value_dbl;
+  mass_send[*nsend].first_position[0] = value->value_array2_dbl[0];
+  mass_send[*nsend].first_position[1] = value->value_array2_dbl[1];
+  mass_send[*nsend].first_position[2] = value->value_array2_dbl[2];
+  mass_send[*nsend].centre_of_mass[0] = value->value_array_dbl[0];
+  mass_send[*nsend].centre_of_mass[1] = value->value_array_dbl[1];
+  mass_send[*nsend].centre_of_mass[2] = value->value_array_dbl[2];
+  mass_send[*nsend].max_part_density_index = value->value_st;
+  mass_send[*nsend].max_part_density = value->value_flt;
+
+  (*nsend)++;
 }
 
 #endif /* WITH_MPI */
 
 /**
- * @brief Calculates the total mass of each group above min_group_size and finds
- * the densest particle for black hole seeding.
+ * @brief Calculates the total mass and CoM of each group above min_group_size
+ * and finds the densest particle for black hole seeding.
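+ *
+ * @param props The properties of the FOF scheme.
+ * @param s The #space containing the particles.
+ * @param seed_black_holes Are we seeding black holes?
+ * @param num_groups_local The number of groups on the local MPI rank.
+ * @param num_groups_prev The sum of the number of groups on all the lower
+ * MPI ranks.
+ * @param num_on_node The number of particles on each MPI rank.
+ * @param first_on_node The global index of the first particle on each rank.
+ * @param group_mass Array in which to accumulate the group masses.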
  */
 void fof_calc_group_mass(struct fof_props *props, const struct space *s,
+                         const int seed_black_holes,
                          const size_t num_groups_local,
                          const size_t num_groups_prev,
                          size_t *restrict num_on_node,
-                         size_t *restrict first_on_node, double *group_mass) {
+                         size_t *restrict first_on_node,
+                         double *restrict group_mass) {
 
   const size_t nr_gparts = s->nr_gparts;
   struct gpart *gparts = s->gparts;
@@ -1443,141 +1454,110 @@ void fof_calc_group_mass(struct fof_props *props, const struct space *s,
   const size_t group_id_offset = props->group_id_offset;
   const size_t group_id_default = props->group_id_default;
   const double seed_halo_mass = props->seed_halo_mass;
+  const int periodic = s->periodic;
+  const double dim[3] = {s->dim[0], s->dim[1], s->dim[2]};
 
 #ifdef WITH_MPI
   size_t *group_index = props->group_index;
   const int nr_nodes = s->e->nr_nodes;
 
-  /* Allocate and initialise the densest particle array. */
-  if (swift_memalign("max_part_density_index",
-                     (void **)&props->max_part_density_index, 32,
-                     num_groups_local * sizeof(long long)) != 0)
-    error(
-        "Failed to allocate list of max group density indices for FOF search.");
-
-  if (swift_memalign("max_part_density", (void **)&props->max_part_density, 32,
-                     num_groups_local * sizeof(float)) != 0)
-    error("Failed to allocate list of max group densities for FOF search.");
-
   /* Direct pointers to the arrays */
   long long *max_part_density_index = props->max_part_density_index;
   float *max_part_density = props->max_part_density;
-
-  /* No densest particle found so far */
-  bzero(max_part_density, num_groups_local * sizeof(float));
-
-  /* Start by assuming that the haloes have no gas */
-  for (size_t i = 0; i < num_groups_local; i++) {
-    max_part_density_index[i] = fof_halo_has_no_gas;
-  }
+  double *centre_of_mass = props->group_centre_of_mass;
+  double *first_position = props->group_first_position;
 
   /* Start the hash map */
   hashmap_t map;
   hashmap_init(&map);
 
-  /* JSW TODO: Parallelise with threadpool*/
+  /* Collect information about the local particles and update the local AND
+   * foreign group fragments */
   for (size_t i = 0; i < nr_gparts; i++) {
 
+    if (gparts[i].time_bin == time_bin_inhibited) continue;
+
     /* Check if the particle is in a group above the threshold. */
     if (gparts[i].fof_data.group_id != group_id_default) {
 
       const size_t root = fof_find_global(i, group_index, nr_gparts);
 
-      /* Increment the mass of groups that are local */
       if (is_local(root, nr_gparts)) {
 
+        /* The root is local */
+
         const size_t index =
             gparts[i].fof_data.group_id - group_id_offset - num_groups_prev;
 
         /* Update group mass */
         group_mass[index] += gparts[i].mass;
 
-      }
-      /* Add mass fragments of groups that have a foreign root to a hash table.
-       */
-      else {
-
-        hashmap_value_t *data = hashmap_get(&map, (hashmap_key_t)root);
-
-        if (data != NULL) {
-          data->value_dbl += gparts[i].mass;
-
-          /* Find the densest gas particle.
-           * Account for groups that already have a black hole and groups that
-           * contain no gas. */
-          if (gparts[i].type == swift_type_gas &&
-              data->value_st != fof_halo_has_black_hole) {
+      } else {
 
-            const size_t gas_index = -gparts[i].id_or_neg_offset;
-            const float rho_com = hydro_get_comoving_density(&parts[gas_index]);
+        /* The root is *not* local */
 
-            /* Update index if a denser gas particle is found. */
-            if (rho_com > data->value_flt) {
-              data->value_flt = rho_com;
-              data->value_st = gas_index;
-            }
-          }
-          /* If there is already a black hole in the group we don't need to
-             create a new one. */
-          else if (gparts[i].type == swift_type_black_hole) {
-            data->value_st = fof_halo_has_black_hole;
-            data->value_flt = 0.f;
-          }
-        } else
+        /* Get the root in the foreign hashmap (create if necessary) */
+        hashmap_value_t *const data = hashmap_get(&map, (hashmap_key_t)root);
+        if (data == NULL)
           error("Couldn't find key (%zu) or create new one.", root);
-      }
-    }
-  }
 
-  /* Loop over particles and find the densest particle in each group. */
-  /* JSW TODO: Parallelise with threadpool*/
-  for (size_t i = 0; i < nr_gparts; i++) {
-
-    /* Only check groups above the minimum size and mass threshold. */
-    if (gparts[i].fof_data.group_id != group_id_default) {
+        /* Compute the centre of mass */
+        const double mass = gparts[i].mass;
+        double x[3] = {gparts[i].x[0], gparts[i].x[1], gparts[i].x[2]};
 
-      size_t root = fof_find_global(i, group_index, nr_gparts);
+        /* Add mass fragments of groups */
+        data->value_dbl += mass;
 
-      /* Increment the mass of groups that are local */
-      if (is_local(root, nr_gparts)) {
+        /* Record the first particle of this fragment that we encounter so
+         * that we can use it as the reference frame for the centre-of-mass
+         * calculation */
+        if (data->value_array2_dbl[0] == (double)(-FLT_MAX)) {
+          data->value_array2_dbl[0] = gparts[i].x[0];
+          data->value_array2_dbl[1] = gparts[i].x[1];
+          data->value_array2_dbl[2] = gparts[i].x[2];
+        }
 
-        const size_t index =
-            gparts[i].fof_data.group_id - group_id_offset - num_groups_prev;
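+        /* Work with the offset from the fragment's first particle, wrapped
+         * to the nearest periodic image. */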
+        if (periodic) {
+          x[0] = nearest(x[0] - data->value_array2_dbl[0], dim[0]);
+          x[1] = nearest(x[1] - data->value_array2_dbl[1], dim[1]);
+          x[2] = nearest(x[2] - data->value_array2_dbl[2], dim[2]);
+        }
 
-        /* Only seed groups above the mass threshold. */
-        if (group_mass[index] > seed_halo_mass) {
+        data->value_array_dbl[0] += mass * x[0];
+        data->value_array_dbl[1] += mass * x[1];
+        data->value_array_dbl[2] += mass * x[2];
 
-          /* Find the densest gas particle.
-           * Account for groups that already have a black hole and groups that
-           * contain no gas. */
-          if (gparts[i].type == swift_type_gas &&
-              max_part_density_index[index] != fof_halo_has_black_hole) {
+        /* Also accumulate the densest gas particle and its index */
+        if (gparts[i].type == swift_type_gas &&
+            data->value_st != fof_halo_has_black_hole) {
 
-            const size_t gas_index = -gparts[i].id_or_neg_offset;
-            const float rho_com = hydro_get_comoving_density(&parts[gas_index]);
+          const size_t gas_index = -gparts[i].id_or_neg_offset;
+          const float rho_com = hydro_get_comoving_density(&parts[gas_index]);
 
-            /* Update index if a denser gas particle is found. */
-            if (rho_com > max_part_density[index]) {
-              max_part_density_index[index] = gas_index;
-              max_part_density[index] = rho_com;
-            }
-          }
-          /* If there is already a black hole in the group we don't need to
-             create a new one. */
-          else if (gparts[i].type == swift_type_black_hole) {
-            max_part_density_index[index] = fof_halo_has_black_hole;
+          /* Update index if a denser gas particle is found. */
+          if (rho_com > data->value_flt) {
+            data->value_flt = rho_com;
+            data->value_st = gas_index;
           }
+
+        } else if (gparts[i].type == swift_type_black_hole) {
+
+          /* If there is already a black hole in the fragment we don't need to
+           create a new one. */
+          data->value_st = fof_halo_has_black_hole;
+          data->value_flt = 0.f;
         }
-      }
-    }
-  }
+
+      } /* Foreign root */
+    }   /* Particle is in a group */
+  }     /* Loop over particles */
 
   size_t nsend = map.size;
   struct fof_mass_send_hashmap hashmap_mass_send = {NULL, 0};
 
   /* Allocate and initialise a mass array. */
   if (posix_memalign((void **)&hashmap_mass_send.mass_send, 32,
-                     nsend * sizeof(struct fof_mass_send_hashmap)) != 0)
+                     nsend * sizeof(struct fof_final_mass)) != 0)
     error("Failed to allocate list of group masses for FOF search.");
 
   hashmap_mass_send.nsend = 0;
@@ -1590,8 +1570,10 @@ void fof_calc_group_mass(struct fof_props *props, const struct space *s,
 
   nsend = hashmap_mass_send.nsend;
 
+#ifdef SWIFT_DEBUG_CHECKS
   if (nsend != map.size)
     error("No. of mass fragments to send != elements in hash table.");
+#endif
 
   hashmap_free(&map);
 
@@ -1601,8 +1583,7 @@ void fof_calc_group_mass(struct fof_props *props, const struct space *s,
         compare_fof_final_mass_global_root);
 
   /* Determine how many entries go to each node */
-  int *sendcount = (int *)malloc(nr_nodes * sizeof(int));
-  for (int i = 0; i < nr_nodes; i += 1) sendcount[i] = 0;
+  int *sendcount = (int *)calloc(nr_nodes, sizeof(int));
   int dest = 0;
   for (size_t i = 0; i < nsend; i += 1) {
     while ((fof_mass_send[i].global_root >=
@@ -1629,36 +1610,131 @@ void fof_calc_group_mass(struct fof_props *props, const struct space *s,
 
   /* For each received global root, look up the group ID we assigned and
    * increment the group mass */
-  for (size_t i = 0; i < nrecv; i += 1) {
+  for (size_t i = 0; i < nrecv; i++) {
+#ifdef SWIFT_DEBUG_CHECKS
     if ((fof_mass_recv[i].global_root < node_offset) ||
         (fof_mass_recv[i].global_root >= node_offset + nr_gparts)) {
       error("Received global root index out of range!");
     }
-    group_mass[gparts[fof_mass_recv[i].global_root - node_offset]
-                   .fof_data.group_id -
-               group_id_offset - num_groups_prev] +=
-        fof_mass_recv[i].group_mass;
+#endif
+    const size_t local_root_index = fof_mass_recv[i].global_root - node_offset;
+    const size_t local_group_offset = group_id_offset + num_groups_prev;
+    const size_t index =
+        gparts[local_root_index].fof_data.group_id - local_group_offset;
+    group_mass[index] += fof_mass_recv[i].group_mass;
+  }
+
+  /* Loop over particles: compute the CoM contribution and find the densest
+   * particle in each *local* group. We can do the latter now that we have
+   * the total group masses. */
+  for (size_t i = 0; i < nr_gparts; i++) {
+
+    if (gparts[i].time_bin == time_bin_inhibited) continue;
+
+    /* Only check groups above the minimum mass threshold. */
+    if (gparts[i].fof_data.group_id != group_id_default) {
+
+      const size_t root = fof_find_global(i, group_index, nr_gparts);
+
+      if (is_local(root, nr_gparts)) {
+
+        const size_t index =
+            gparts[i].fof_data.group_id - group_id_offset - num_groups_prev;
+
+        /* Compute the centre of mass */
+        const double mass = gparts[i].mass;
+        double x[3] = {gparts[i].x[0], gparts[i].x[1], gparts[i].x[2]};
+
+        /* Record the first particle of this group that we encounter so we
+         * can use it as the reference frame for the centre-of-mass
+         * calculation */
+        if (first_position[index * 3 + 0] == (double)(-FLT_MAX)) {
+          first_position[index * 3 + 0] = x[0];
+          first_position[index * 3 + 1] = x[1];
+          first_position[index * 3 + 2] = x[2];
+        }
+
+        if (periodic) {
+          x[0] = nearest(x[0] - first_position[index * 3 + 0], dim[0]);
+          x[1] = nearest(x[1] - first_position[index * 3 + 1], dim[1]);
+          x[2] = nearest(x[2] - first_position[index * 3 + 2], dim[2]);
+        }
+
+        centre_of_mass[index * 3 + 0] += mass * x[0];
+        centre_of_mass[index * 3 + 1] += mass * x[1];
+        centre_of_mass[index * 3 + 2] += mass * x[2];
+
+        /* Check haloes above the seeding threshold */
+        if (group_mass[index] > seed_halo_mass) {
+
+          /* Find the densest gas particle.
+           * Account for groups that already have a black hole and groups that
+           * contain no gas. */
+          if (gparts[i].type == swift_type_gas &&
+              max_part_density_index[index] != fof_halo_has_black_hole) {
+
+            const size_t gas_index = -gparts[i].id_or_neg_offset;
+            const float rho_com = hydro_get_comoving_density(&parts[gas_index]);
+
+            /* Update index if a denser gas particle is found. */
+            if (rho_com > max_part_density[index]) {
+              max_part_density_index[index] = gas_index;
+              max_part_density[index] = rho_com;
+            }
+          }
+          /* If there is already a black hole in the group we don't need to
+             create a new one. */
+          else if (gparts[i].type == swift_type_black_hole) {
+            max_part_density_index[index] = fof_halo_has_black_hole;
+          }
+
+        } else {
+          max_part_density_index[index] = fof_halo_has_too_low_mass;
+        }
+      }
+    }
   }
 
   /* For each received global root, look up the group ID we assigned and find
    * the global maximum gas density */
   for (size_t i = 0; i < nrecv; i++) {
 
-    const int offset =
-        gparts[fof_mass_recv[i].global_root - node_offset].fof_data.group_id -
-        group_id_offset - num_groups_prev;
+    const size_t local_root_index = fof_mass_recv[i].global_root - node_offset;
+    const size_t local_group_offset = group_id_offset + num_groups_prev;
+    const size_t index =
+        gparts[local_root_index].fof_data.group_id - local_group_offset;
+
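+    /* Reconstruct the fragment's absolute centre of mass (its mass-weighted
+     * sum is stored relative to the fragment's first particle) and fold it
+     * into this group's accumulation, re-referenced to the group's own first
+     * position. */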
+    double fragment_mass = fof_mass_recv[i].group_mass;
+    double fragment_centre_of_mass[3] = {
+        fof_mass_recv[i].centre_of_mass[0] / fof_mass_recv[i].group_mass,
+        fof_mass_recv[i].centre_of_mass[1] / fof_mass_recv[i].group_mass,
+        fof_mass_recv[i].centre_of_mass[2] / fof_mass_recv[i].group_mass};
+    fragment_centre_of_mass[0] += fof_mass_recv[i].first_position[0];
+    fragment_centre_of_mass[1] += fof_mass_recv[i].first_position[1];
+    fragment_centre_of_mass[2] += fof_mass_recv[i].first_position[2];
+
+    if (periodic) {
+      fragment_centre_of_mass[0] = nearest(
+          fragment_centre_of_mass[0] - first_position[3 * index + 0], dim[0]);
+      fragment_centre_of_mass[1] = nearest(
+          fragment_centre_of_mass[1] - first_position[3 * index + 1], dim[1]);
+      fragment_centre_of_mass[2] = nearest(
+          fragment_centre_of_mass[2] - first_position[3 * index + 2], dim[2]);
+    }
+
+    centre_of_mass[index * 3 + 0] += fragment_mass * fragment_centre_of_mass[0];
+    centre_of_mass[index * 3 + 1] += fragment_mass * fragment_centre_of_mass[1];
+    centre_of_mass[index * 3 + 2] += fragment_mass * fragment_centre_of_mass[2];
 
     /* Only seed groups above the mass threshold. */
-    if (group_mass[offset] > seed_halo_mass) {
+    if (group_mass[index] > seed_halo_mass) {
 
       /* Only check groups that don't already contain a black hole. */
-      if (max_part_density_index[offset] != fof_halo_has_black_hole) {
+      if (max_part_density_index[index] != fof_halo_has_black_hole) {
 
         /* Find the densest particle in each group using the densest particle
          * from each group fragment. */
-        if (fof_mass_recv[i].max_part_density > max_part_density[offset]) {
-          max_part_density[offset] = fof_mass_recv[i].max_part_density;
-          max_part_density_index[offset] =
+        if (fof_mass_recv[i].max_part_density > max_part_density[index]) {
+          max_part_density[index] = fof_mass_recv[i].max_part_density;
+          max_part_density_index[index] =
               fof_mass_recv[i].max_part_density_index;
         }
       }
@@ -1666,36 +1742,38 @@ void fof_calc_group_mass(struct fof_props *props, const struct space *s,
          new one. */
       else if (fof_mass_recv[i].max_part_density_index ==
                fof_halo_has_black_hole) {
-        max_part_density_index[offset] = fof_halo_has_black_hole;
+        max_part_density_index[index] = fof_halo_has_black_hole;
       }
     } else {
-      max_part_density_index[offset] = fof_halo_has_no_gas;
+      max_part_density_index[index] = fof_halo_has_too_low_mass;
     }
   }
 
   /* For each received global root, look up the group ID we assigned and send
    * the global maximum gas density index back */
   for (size_t i = 0; i < nrecv; i++) {
+
+#ifdef SWIFT_DEBUG_CHECKS
     if ((fof_mass_recv[i].global_root < node_offset) ||
         (fof_mass_recv[i].global_root >= node_offset + nr_gparts)) {
       error("Received global root index out of range!");
     }
-
-    const int offset =
-        gparts[fof_mass_recv[i].global_root - node_offset].fof_data.group_id -
-        group_id_offset - num_groups_prev;
+#endif
+    const size_t local_root_index = fof_mass_recv[i].global_root - node_offset;
+    const size_t local_group_offset = group_id_offset + num_groups_prev;
+    const size_t index =
+        gparts[local_root_index].fof_data.group_id - local_group_offset;
 
     /* If the densest particle found locally is not the global max, make sure we
      * don't seed two black holes. */
-    /* If the local index has been set to a foreign index then we don't need to
-     * seed a black hole locally. */
-    if (max_part_density_index[offset] ==
+    if (max_part_density_index[index] ==
         fof_mass_recv[i].max_part_density_index) {
-      max_part_density_index[offset] = fof_halo_has_black_hole;
-    }
-    /* The densest particle is on the same node as the global root so we don't
+      /* If the local index has been set to a foreign index then we don't need
+       * to seed a black hole locally. */
+      max_part_density_index[index] = fof_halo_has_black_hole;
+    } else {
+      /* The densest particle is on the same node as the global root so we don't
        need to seed a black hole on the other node. */
-    else {
       fof_mass_recv[i].max_part_density_index = fof_halo_has_black_hole;
     }
   }
@@ -1749,45 +1827,55 @@ void fof_calc_group_mass(struct fof_props *props, const struct space *s,
   free(recvoffset);
   free(fof_mass_send);
   free(fof_mass_recv);
-#else
-
-  /* Allocate and initialise the densest particle array. */
-  if (swift_memalign("max_part_density_index",
-                     (void **)&props->max_part_density_index, 32,
-                     num_groups_local * sizeof(long long)) != 0)
-    error(
-        "Failed to allocate list of max group density indices for FOF search.");
-
-  if (swift_memalign("max_part_density", (void **)&props->max_part_density, 32,
-                     num_groups_local * sizeof(float)) != 0)
-    error("Failed to allocate list of max group densities for FOF search.");
-
-  /* Direct pointers to the arrays */
-  long long *max_part_density_index = props->max_part_density_index;
-  float *max_part_density = props->max_part_density;
 
-  /* No densest particle found so far */
-  bzero(max_part_density, num_groups_local * sizeof(float));
-
-  /* Start by assuming that the haloes have no gas */
-  for (size_t i = 0; i < num_groups_local; i++) {
-    max_part_density_index[i] = fof_halo_has_no_gas;
-  }
+#else
 
   /* Increment the group mass for groups above min_group_size. */
   threadpool_map(&s->e->threadpool, fof_calc_group_mass_mapper, gparts,
                  nr_gparts, sizeof(struct gpart), threadpool_auto_chunk_size,
                  (struct space *)s);
 
-  /* Loop over particles and find the densest particle in each group. */
+  /* Direct pointers to the arrays */
+  long long *max_part_density_index = props->max_part_density_index;
+  float *max_part_density = props->max_part_density;
+  double *centre_of_mass = props->group_centre_of_mass;
+  double *first_position = props->group_first_position;
+
+  /* Loop over particles, compute CoM and find the densest particle in each
+   * group. */
   /* JSW TODO: Parallelise with threadpool*/
   for (size_t i = 0; i < nr_gparts; i++) {
 
+    if (gparts[i].time_bin == time_bin_inhibited) continue;
+
     const size_t index = gparts[i].fof_data.group_id - group_id_offset;
 
     /* Only check groups above the minimum mass threshold. */
     if (gparts[i].fof_data.group_id != group_id_default) {
 
+      /* Compute the centre of mass */
+      const double mass = gparts[i].mass;
+      double x[3] = {gparts[i].x[0], gparts[i].x[1], gparts[i].x[2]};
+
+      /* Record the first particle of this group that we encounter so we
+       * can use it as the reference frame for the centre-of-mass
+       * calculation */
+      if (first_position[index * 3 + 0] == (double)(-FLT_MAX)) {
+        first_position[index * 3 + 0] = x[0];
+        first_position[index * 3 + 1] = x[1];
+        first_position[index * 3 + 2] = x[2];
+      }
+
+      if (periodic) {
+        x[0] = nearest(x[0] - first_position[index * 3 + 0], dim[0]);
+        x[1] = nearest(x[1] - first_position[index * 3 + 1], dim[1]);
+        x[2] = nearest(x[2] - first_position[index * 3 + 2], dim[2]);
+      }
+
+      centre_of_mass[index * 3 + 0] += mass * x[0];
+      centre_of_mass[index * 3 + 1] += mass * x[1];
+      centre_of_mass[index * 3 + 2] += mass * x[2];
+
+      /* Check haloes above the seeding threshold */
       if (group_mass[index] > seed_halo_mass) {
 
         /* Find the densest gas particle.
@@ -1805,8 +1893,8 @@ void fof_calc_group_mass(struct fof_props *props, const struct space *s,
             max_part_density[index] = rho_com;
           }
         }
-        /* If there is already a black hole in the group we don't need to create
-           a new one. */
+        /* If there is already a black hole in the group we don't need to
+           create a new one. */
         else if (gparts[i].type == swift_type_black_hole) {
           max_part_density_index[index] = fof_halo_has_black_hole;
         }
@@ -1933,6 +2021,56 @@ void fof_find_foreign_links_mapper(void *map_data, int num_elements,
 #endif
 }
 
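+/**
+ * @brief Finalise the group data for the catalogue: convert the accumulated
+ * mass-weighted positions into proper centres of mass (including box
+ * wrapping) and gather the group IDs and sizes into compact arrays.
+ *
+ * @param props The properties of the FOF scheme.
+ * @param group_sizes List of groups above the minimum size.
+ * @param gparts The array of #gpart.
+ * @param periodic Are we using periodic boundary conditions?
+ * @param dim The dimensions of the simulation volume.
+ * @param num_groups The number of groups above the minimum size.
+ */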
+void fof_finalise_group_data(struct fof_props *props,
+                             const struct group_length *group_sizes,
+                             const struct gpart *gparts, const int periodic,
+                             const double dim[3], const int num_groups) {
+
+  size_t *group_size = (size_t *)malloc(num_groups * sizeof(size_t));
+  size_t *group_index = (size_t *)malloc(num_groups * sizeof(size_t));
+  double *group_centre_of_mass =
+      (double *)malloc(3 * num_groups * sizeof(double));
+  if (group_size == NULL || group_index == NULL ||
+      group_centre_of_mass == NULL)
+    error("Failed to allocate the final FOF group data arrays.");
+
+  for (int i = 0; i < num_groups; i++) {
+
+    const size_t group_offset = group_sizes[i].index;
+
+    /* Centre of mass, including possible box wrapping */
+    double CoM[3] = {
+        props->group_centre_of_mass[i * 3 + 0] / props->group_mass[i],
+        props->group_centre_of_mass[i * 3 + 1] / props->group_mass[i],
+        props->group_centre_of_mass[i * 3 + 2] / props->group_mass[i]};
+    if (periodic) {
+      CoM[0] =
+          box_wrap(CoM[0] + props->group_first_position[i * 3 + 0], 0., dim[0]);
+      CoM[1] =
+          box_wrap(CoM[1] + props->group_first_position[i * 3 + 1], 0., dim[1]);
+      CoM[2] =
+          box_wrap(CoM[2] + props->group_first_position[i * 3 + 2], 0., dim[2]);
+    }
+
+#ifdef WITH_MPI
+    group_index[i] = gparts[group_offset - node_offset].fof_data.group_id;
+    group_size[i] = props->group_size[group_offset - node_offset];
+#else
+    group_index[i] = gparts[group_offset].fof_data.group_id;
+    group_size[i] = props->group_size[group_offset];
+#endif
+
+    group_centre_of_mass[i * 3 + 0] = CoM[0];
+    group_centre_of_mass[i * 3 + 1] = CoM[1];
+    group_centre_of_mass[i * 3 + 2] = CoM[2];
+  }
+
+  swift_free("fof_group_centre_of_mass", props->group_centre_of_mass);
+  swift_free("fof_group_size", props->group_size);
+  swift_free("fof_group_index", props->group_index);
+
+  props->group_centre_of_mass = group_centre_of_mass;
+  props->group_size = group_size;
+  props->group_index = group_index;
+}
+
 /**
  * @brief Seed black holes from gas particles in the haloes on the local MPI
  * rank that passed the criteria.
@@ -1949,8 +2087,7 @@ void fof_seed_black_holes(const struct fof_props *props,
                           const struct black_holes_props *bh_props,
                           const struct phys_const *constants,
                           const struct cosmology *cosmo, struct space *s,
-                          const int num_groups_local,
-                          struct group_length *group_sizes) {
+                          const int num_groups_local) {
 
   const long long *max_part_density_index = props->max_part_density_index;
 
@@ -2061,55 +2198,70 @@ void fof_seed_black_holes(const struct fof_props *props,
 }
 
 /* Dump FOF group data. */
-void fof_dump_group_data(const struct fof_props *props,
-                         const char *out_file_name, struct space *s,
-                         int num_groups, struct group_length *group_sizes) {
+void fof_dump_group_data(const struct fof_props *props, const int my_rank,
+                         const int nr_nodes, const char *out_file_name,
+                         struct space *s, const int num_groups) {
 
-  FILE *file = fopen(out_file_name, "w");
+  FILE *file = NULL;
 
-  struct gpart *gparts = s->gparts;
   struct part *parts = s->parts;
   size_t *group_size = props->group_size;
+  size_t *group_index = props->group_index;
   double *group_mass = props->group_mass;
+  double *group_centre_of_mass = props->group_centre_of_mass;
   const long long *max_part_density_index = props->max_part_density_index;
   const float *max_part_density = props->max_part_density;
 
-  fprintf(file, "# %8s %12s %12s %12s %18s %18s %12s\n", "Group ID",
-          "Group Size", "Group Mass", "Max Density", "Max Density Index",
-          "Particle ID", "Particle Density");
-  fprintf(file,
-          "#-------------------------------------------------------------------"
-          "-------------\n");
-
-  for (int i = 0; i < num_groups; i++) {
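+  /* The ranks take it in turns to append their local groups to the shared
+   * text file, with barriers enforcing the ordering. */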
+  for (int rank = 0; rank < nr_nodes; ++rank) {
 
-    const size_t group_offset = group_sizes[i].index;
-    const long long part_id = max_part_density_index[i] >= 0
-                                  ? parts[max_part_density_index[i]].id
-                                  : -1;
 #ifdef WITH_MPI
-    fprintf(file, "  %8zu %12zu %12e %12e %18lld %18lld\n",
-            (size_t)gparts[group_offset - node_offset].fof_data.group_id,
-            group_size[group_offset - node_offset], group_mass[i],
-            max_part_density[i], max_part_density_index[i], part_id);
-#else
-    fprintf(file, "  %8zu %12zu %12e %12e %18lld %18lld\n",
-            (size_t)gparts[group_offset].fof_data.group_id,
-            group_size[group_offset], group_mass[i], max_part_density[i],
-            max_part_density_index[i], part_id);
+    MPI_Barrier(MPI_COMM_WORLD);
 #endif
-  }
 
-  /* Dump the extra black hole seeds. */
-  for (int i = num_groups; i < num_groups + props->extra_bh_seed_count; i++) {
-    const long long part_id = max_part_density_index[i] >= 0
-                                  ? parts[max_part_density_index[i]].id
-                                  : -1;
-    fprintf(file, "  %8zu %12zu %12e %12e %18lld %18lld\n", 0UL, 0UL, 0., 0.,
-            0LL, part_id);
-  }
+    if (rank == my_rank) {
 
-  fclose(file);
+      if (my_rank == 0)
+        file = fopen(out_file_name, "w");
+      else
+        file = fopen(out_file_name, "a");
+
+      if (my_rank == 0) {
+        fprintf(file, "# %8s %12s %12s %12s %12s %12s %12s %24s %24s \n",
+                "Group ID", "Group Size", "Group Mass", "CoM_x", "CoM_y",
+                "CoM_z", "Max Density", "Max Density Local Index",
+                "Particle ID");
+        fprintf(file,
+                "#-------------------------------------------------------------"
+                "------"
+                "------------------------------\n");
+      }
+
+      for (int i = 0; i < num_groups; i++) {
+
+        const long long part_id = max_part_density_index[i] >= 0
+                                      ? parts[max_part_density_index[i]].id
+                                      : -1;
+        fprintf(file, "  %8zu %12zu %12e %12e %12e %12e %12e %24lld %24lld\n",
+                group_index[i], group_size[i], group_mass[i],
+                group_centre_of_mass[i * 3 + 0],
+                group_centre_of_mass[i * 3 + 1],
+                group_centre_of_mass[i * 3 + 2], max_part_density[i],
+                max_part_density_index[i], part_id);
+      }
+
+      /* Dump the extra black hole seeds. */
+      for (int i = num_groups; i < num_groups + props->extra_bh_seed_count;
+           i++) {
+        const long long part_id = max_part_density_index[i] >= 0
+                                      ? parts[max_part_density_index[i]].id
+                                      : -1;
+        fprintf(file, "  %8zu %12zu %12e %12e %12e %12e %12e %24lld %24lld\n",
+                0UL, 0UL, 0., 0., 0., 0., 0., 0LL, part_id);
+      }
+
+      fclose(file);
+    }
+  }
 }
 
 struct mapper_data {
@@ -2419,6 +2571,7 @@ void fof_search_foreign_cells(struct fof_props *props, const struct space *s) {
                  MPI_COMM_WORLD);
 
   /* Clean up memory. */
+  free(group_link_counts);
   free(displ);
   swift_free("fof_group_links", props->group_links);
   props->group_links = NULL;
@@ -2600,14 +2753,17 @@ void fof_search_foreign_cells(struct fof_props *props, const struct space *s) {
  * @param constants The physical constants in internal units.
  * @param cosmo The current cosmological model.
  * @param s The #space containing the particles.
- * @param dump_results Do we want to write the group catalogue to a file?
+ * @param dump_results Do we want to write the group catalogue to an HDF5 file?
+ * @param dump_debug_results Are we writing txt-file debug catalogues including
+ * BH-seeding info?
  * @param seed_black_holes Do we want to seed black holes in haloes?
  */
 void fof_search_tree(struct fof_props *props,
                      const struct black_holes_props *bh_props,
                      const struct phys_const *constants,
                      const struct cosmology *cosmo, struct space *s,
-                     const int dump_results, const int seed_black_holes) {
+                     const int dump_results, const int dump_debug_results,
+                     const int seed_black_holes) {
 
   const size_t nr_gparts = s->nr_gparts;
   const size_t min_group_size = props->min_group_size;
@@ -2654,12 +2810,6 @@ void fof_search_tree(struct fof_props *props,
             clocks_from_ticks(getticks() - comms_tic), clocks_getunit());
 
   node_offset = nr_gparts_cumulative - nr_gparts_local;
-
-  snprintf(output_file_name + strlen(output_file_name), FILENAME_BUFFER_SIZE,
-           "_mpi_rank_%d.dat", engine_rank);
-#else
-  snprintf(output_file_name + strlen(output_file_name), FILENAME_BUFFER_SIZE,
-           ".dat");
 #endif
 
   /* Local copy of the arrays */
@@ -2953,55 +3103,111 @@ void fof_search_tree(struct fof_props *props,
     message("Group sorting took: %.3f %s.", clocks_from_ticks(getticks() - tic),
             clocks_getunit());
 
-  /* Allocate and initialise a group mass array. */
-  if (swift_memalign("group_mass", (void **)&props->group_mass, 32,
+  /* Allocate and initialise a group mass and centre of mass array. */
+  if (swift_memalign("fof_group_mass", (void **)&props->group_mass, 32,
                      num_groups_local * sizeof(double)) != 0)
     error("Failed to allocate list of group masses for FOF search.");
+  if (swift_memalign("fof_group_centre_of_mass",
+                     (void **)&props->group_centre_of_mass, 32,
+                     num_groups_local * 3 * sizeof(double)) != 0)
+    error("Failed to allocate list of group CoM for FOF search.");
+  if (swift_memalign("fof_group_first_position",
+                     (void **)&props->group_first_position, 32,
+                     num_groups_local * 3 * sizeof(double)) != 0)
+    error("Failed to allocate list of group first positions for FOF search.");
 
   bzero(props->group_mass, num_groups_local * sizeof(double));
+  bzero(props->group_centre_of_mass, num_groups_local * 3 * sizeof(double));
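+
+  /* -FLT_MAX is used as a sentinel meaning "no particle recorded yet" when
+   * establishing the reference frame for each group's centre of mass. */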
+  for (size_t i = 0; i < 3 * num_groups_local; i++) {
+    props->group_first_position[i] = -FLT_MAX;
+  }
+
+  /* Allocate and initialise arrays to identify the densest gas particle. */
+  if (swift_memalign("fof_max_part_density_index",
+                     (void **)&props->max_part_density_index, 32,
+                     num_groups_local * sizeof(long long)) != 0)
+    error(
+        "Failed to allocate list of max group density indices for FOF "
+        "search.");
+
+  if (swift_memalign("fof_max_part_density", (void **)&props->max_part_density,
+                     32, num_groups_local * sizeof(float)) != 0)
+    error("Failed to allocate list of max group densities for FOF search.");
+
+  /* No densest particle found so far */
+  bzero(props->max_part_density, num_groups_local * sizeof(float));
+
+  /* Start by assuming that the haloes have no gas */
+  for (size_t i = 0; i < num_groups_local; i++) {
+    props->max_part_density_index[i] = fof_halo_has_no_gas;
+  }
 
   const ticks tic_seeding = getticks();
 
-  double *group_mass = props->group_mass;
 #ifdef WITH_MPI
-  fof_calc_group_mass(props, s, num_groups_local, num_groups_prev, num_on_node,
-                      first_on_node, group_mass);
+  fof_calc_group_mass(props, s, seed_black_holes, num_groups_local,
+                      num_groups_prev, num_on_node, first_on_node,
+                      props->group_mass);
   free(num_on_node);
   free(first_on_node);
 #else
-  fof_calc_group_mass(props, s, num_groups_local, 0, NULL, NULL, group_mass);
+  fof_calc_group_mass(props, s, seed_black_holes, num_groups_local, 0, NULL,
+                      NULL, props->group_mass);
 #endif
 
+  /* Finalise the group data before dump */
+  fof_finalise_group_data(props, high_group_sizes, s->gparts, s->periodic,
+                          s->dim, num_groups_local);
+
   if (verbose)
     message("Computing group properties took: %.3f %s.",
             clocks_from_ticks(getticks() - tic_seeding), clocks_getunit());
 
   /* Dump group data. */
   if (dump_results) {
-    fof_dump_group_data(props, output_file_name, s, num_groups_local,
-                        high_group_sizes);
+#ifdef HAVE_HDF5
+    write_fof_hdf5_catalogue(props, num_groups_local, s->e);
+#else
+    error("Can't dump hdf5 catalogues with hdf5 switched off!");
+#endif
+  }
+
+  if (dump_debug_results) {
+#ifdef WITH_MPI
+    snprintf(output_file_name + strlen(output_file_name),
+             FILENAME_BUFFER_SIZE - strlen(output_file_name), "_mpi.dat");
+#else
+    snprintf(output_file_name + strlen(output_file_name),
+             FILENAME_BUFFER_SIZE - strlen(output_file_name), ".dat");
+#endif
+    fof_dump_group_data(props, s->e->nodeID, s->e->nr_nodes, output_file_name,
+                        s, num_groups_local);
   }
 
   /* Seed black holes */
   if (seed_black_holes) {
-    fof_seed_black_holes(props, bh_props, constants, cosmo, s, num_groups_local,
-                         high_group_sizes);
+    fof_seed_black_holes(props, bh_props, constants, cosmo, s,
+                         num_groups_local);
   }
 
   /* Free the left-overs */
   swift_free("fof_high_group_sizes", high_group_sizes);
-#endif /* #ifndef WITHOUT_GROUP_PROPS */
-  swift_free("fof_group_index", props->group_index);
-  swift_free("fof_group_size", props->group_size);
   swift_free("fof_group_mass", props->group_mass);
+  swift_free("fof_group_centre_of_mass", props->group_centre_of_mass);
+  swift_free("fof_group_first_position", props->group_first_position);
   swift_free("fof_max_part_density_index", props->max_part_density_index);
   swift_free("fof_max_part_density", props->max_part_density);
-  props->group_index = NULL;
-  props->group_size = NULL;
   props->group_mass = NULL;
+  props->group_centre_of_mass = NULL;
   props->max_part_density_index = NULL;
   props->max_part_density = NULL;
 
+#endif /* #ifndef WITHOUT_GROUP_PROPS */
+  swift_free("fof_group_index", props->group_index);
+  swift_free("fof_group_size", props->group_size);
+  props->group_index = NULL;
+  props->group_size = NULL;
+
   if (engine_rank == 0) {
     message(
         "No. of groups: %lld. No. of particles in groups: %lld. No. of "
@@ -3029,6 +3235,7 @@ void fof_struct_dump(const struct fof_props *props, FILE *stream) {
   temp.group_index = NULL;
   temp.group_size = NULL;
   temp.group_mass = NULL;
+  temp.group_centre_of_mass = NULL;
   temp.max_part_density_index = NULL;
   temp.max_part_density = NULL;
   temp.group_links = NULL;
diff --git a/src/fof.h b/src/fof.h
index 54c873af1d3d88743360688bcf604fcb3f059993..2a7a886bf651afed2defbaae1c581469927a3108 100644
--- a/src/fof.h
+++ b/src/fof.h
@@ -27,6 +27,7 @@
 #include "parser.h"
 
 /* Avoid cyclic inclusions */
+struct cell;
 struct gpart;
 struct space;
 struct engine;
@@ -35,23 +36,6 @@ struct phys_const;
 struct black_holes_props;
 struct cosmology;
 
-/* MPI message required for FOF. */
-struct fof_mpi {
-
-  /* The local particle's root ID.*/
-  size_t group_i;
-
-  /* The local group's size.*/
-  size_t group_i_size;
-
-  /* The foreign particle's root ID.*/
-  size_t group_j;
-
-  /* The local group's size.*/
-  size_t group_j_size;
-
-} SWIFT_STRUCT_ALIGN;
-
 struct fof_props {
 
   /*! Whether we're doing periodic FoF calls to seed black holes. */
@@ -102,6 +86,12 @@ struct fof_props {
   /*! Mass of the group a given gpart belongs to. */
   double *group_mass;
 
+  /*! Centre of mass of the group a given gpart belongs to. */
+  double *group_centre_of_mass;
+
+  /*! Position of the first particle of a given group. */
+  double *group_first_position;
+
   /*! Index of the part with the maximal density of each group. */
   long long *max_part_density_index;
 
@@ -120,8 +110,7 @@ struct fof_props {
   /*! The links between pairs of particles on this node and a foreign
    * node */
   struct fof_mpi *group_links;
-
-} SWIFT_STRUCT_ALIGN;
+};
 
 /* Store group size and offset into array. */
 struct group_length {
@@ -131,34 +120,51 @@ struct group_length {
 } SWIFT_STRUCT_ALIGN;
 
 #ifdef WITH_MPI
+
+/* MPI message required for FOF. */
+struct fof_mpi {
+
+  /* The local particle's root ID.*/
+  size_t group_i;
+
+  /* The local group's size.*/
+  size_t group_i_size;
+
+  /* The foreign particle's root ID.*/
+  size_t group_j;
+
+  /* The local group's size.*/
+  size_t group_j_size;
+};
+
 /* Struct used to find final group ID when using MPI */
 struct fof_final_index {
   size_t local_root;
   size_t global_root;
-} SWIFT_STRUCT_ALIGN;
+};
 
 /* Struct used to find the total mass of a group when using MPI */
 struct fof_final_mass {
   size_t global_root;
   double group_mass;
+  /* Position of the first particle in the group fragment (the reference
+   * frame for the centre-of-mass sum). */
+  double first_position[3];
+  /* Mass-weighted sum of positions, relative to first_position. */
+  double centre_of_mass[3];
   long long max_part_density_index;
   float max_part_density;
-} SWIFT_STRUCT_ALIGN;
+};
 
 /* Struct used to iterate over the hash table and unpack the mass fragments of a
  * group when using MPI */
 struct fof_mass_send_hashmap {
   struct fof_final_mass *mass_send;
   size_t nsend;
-} SWIFT_STRUCT_ALIGN;
-#endif
+};
 
 /* Store local and foreign cell indices that touch. */
 struct cell_pair_indices {
-
   struct cell *local, *foreign;
-
-} SWIFT_STRUCT_ALIGN;
+};
+#endif
 
 /* Function prototypes. */
 void fof_init(struct fof_props *props, struct swift_params *params,
@@ -171,7 +177,8 @@ void fof_search_tree(struct fof_props *props,
                      const struct black_holes_props *bh_props,
                      const struct phys_const *constants,
                      const struct cosmology *cosmo, struct space *s,
-                     const int dump_results, const int seed_black_holes);
+                     const int dump_results, const int dump_debug_results,
+                     const int seed_black_holes);
 void rec_fof_search_self(const struct fof_props *props, const double dim[3],
                          const double search_r2, const int periodic,
                          const struct gpart *const space_gparts,
@@ -182,6 +189,7 @@ void rec_fof_search_pair(const struct fof_props *props, const double dim[3],
                          struct cell *restrict ci, struct cell *restrict cj);
 void fof_struct_dump(const struct fof_props *props, FILE *stream);
 void fof_struct_restore(struct fof_props *props, FILE *stream);
+
 #ifdef WITH_MPI
 /* MPI data type for the particle transfers */
 extern MPI_Datatype fof_mpi_type;
diff --git a/src/fof_catalogue_io.c b/src/fof_catalogue_io.c
new file mode 100644
index 0000000000000000000000000000000000000000..d0d065b0bdbe767dfc66b2ddadfb82206b41fd0c
--- /dev/null
+++ b/src/fof_catalogue_io.c
@@ -0,0 +1,328 @@
+/*******************************************************************************
+ * This file is part of SWIFT.
+ * Copyright (c) 2020  Matthieu Schaller (matthieu.schaller@durham.ac.uk)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as published
+ * by the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program.  If not, see <http://www.gnu.org/licenses/>.
+ *
+ ******************************************************************************/
+
+/* Config parameters. */
+#include "../config.h"
+
+/* Local headers */
+#include "engine.h"
+#include "fof.h"
+#include "hydro_io.h"
+
+#ifdef HAVE_HDF5
+
+void write_fof_hdf5_header(hid_t h_file, const struct engine* e,
+                           const long long num_groups_total,
+                           const long long num_groups_this_file,
+                           const struct fof_props* props) {
+
+  /* Open header to write simulation properties */
+  hid_t h_grp =
+      H5Gcreate(h_file, "/Header", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+  if (h_grp < 0) error("Error while creating file header\n");
+
+  /* Convert basic output information to snapshot units */
+  const double factor_time = units_conversion_factor(
+      e->internal_units, e->snapshot_units, UNIT_CONV_TIME);
+  const double factor_length = units_conversion_factor(
+      e->internal_units, e->snapshot_units, UNIT_CONV_LENGTH);
+  const double dblTime = e->time * factor_time;
+  const double dim[3] = {e->s->dim[0] * factor_length,
+                         e->s->dim[1] * factor_length,
+                         e->s->dim[2] * factor_length};
+
+  io_write_attribute(h_grp, "BoxSize", DOUBLE, dim, 3);
+  io_write_attribute_d(h_grp, "Time", dblTime);
+  io_write_attribute_d(h_grp, "Dimension", (int)hydro_dimension);
+  io_write_attribute_d(h_grp, "Redshift", e->cosmology->z);
+  io_write_attribute_d(h_grp, "Scale-factor", e->cosmology->a);
+  io_write_attribute_s(h_grp, "Code", "SWIFT");
+  io_write_attribute_s(h_grp, "RunName", e->run_name);
+
+  /* Write out the particle types */
+  io_write_part_type_names(h_grp);
+
+  /* Write out the time-base */
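+  /* (in a cosmological run the base is d(log a); otherwise it is dt) */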
+  if (e->policy | engine_policy_cosmology) {
+    io_write_attribute_d(h_grp, "TimeBase_dloga", e->time_base);
+    const double delta_t = cosmology_get_timebase(e->cosmology, e->ti_current);
+    io_write_attribute_d(h_grp, "TimeBase_dt", delta_t);
+  } else {
+    io_write_attribute_d(h_grp, "TimeBase_dloga", 0);
+    io_write_attribute_d(h_grp, "TimeBase_dt", e->time_base);
+  }
+
+  /* Store the time at which the snapshot was written */
+  time_t tm = time(NULL);
+  struct tm* timeinfo = localtime(&tm);
+  char snapshot_date[64];
+  strftime(snapshot_date, 64, "%T %F %Z", timeinfo);
+  io_write_attribute_s(h_grp, "Snapshot date", snapshot_date);
+
+  /* GADGET-2 legacy values */
+  /* Number of particles of each type (all zero here: a FOF catalogue
+   * contains no particle arrays) */
+  long long N_total[swift_type_count] = {0};
+  unsigned int numParticles[swift_type_count] = {0};
+  unsigned int numParticlesHighWord[swift_type_count] = {0};
+  for (int ptype = 0; ptype < swift_type_count; ++ptype) {
+    numParticles[ptype] = (unsigned int)N_total[ptype];
+    numParticlesHighWord[ptype] = (unsigned int)(N_total[ptype] >> 32);
+  }
+  io_write_attribute(h_grp, "NumPart_ThisFile", LONGLONG, N_total,
+                     swift_type_count);
+  io_write_attribute(h_grp, "NumPart_Total", UINT, numParticles,
+                     swift_type_count);
+  io_write_attribute(h_grp, "NumPart_Total_HighWord", UINT,
+                     numParticlesHighWord, swift_type_count);
+  double MassTable[swift_type_count] = {0};
+  io_write_attribute(h_grp, "MassTable", DOUBLE, MassTable, swift_type_count);
+  io_write_attribute(h_grp, "InitialMassTable", DOUBLE,
+                     e->s->initial_mean_mass_particles, swift_type_count);
+  unsigned int flagEntropy[swift_type_count] = {0};
+  flagEntropy[0] = writeEntropyFlag();
+  io_write_attribute(h_grp, "Flag_Entropy_ICs", UINT, flagEntropy,
+                     swift_type_count);
+  io_write_attribute_i(h_grp, "NumFilesPerSnapshot", e->nr_nodes);
+  io_write_attribute_i(h_grp, "ThisFile", e->nodeID);
+  io_write_attribute_s(h_grp, "OutputType", "FOF");
+  io_write_attribute_ll(h_grp, "NumGroups_Total", num_groups_total);
+  io_write_attribute_ll(h_grp, "NumGroups_ThisFile", num_groups_this_file);
+
+  /* Close group */
+  H5Gclose(h_grp);
+
+  io_write_meta_data(h_file, e, e->internal_units, e->snapshot_units);
+}
+
+void write_fof_hdf5_array(
+    const struct engine* e, hid_t grp, const char* fileName,
+    const char* partTypeGroupName, const struct io_props props, const size_t N,
+    const enum lossy_compression_schemes lossy_compression,
+    const struct unit_system* internal_units,
+    const struct unit_system* snapshot_units) {
+
+  const size_t typeSize = io_sizeof_type(props.type);
+  const size_t num_elements = N * props.dimension;
+
+  /* message("Writing '%s' array...", props.name); */
+
+  /* Allocate temporary buffer */
+  void* temp = NULL;
+  if (swift_memalign("writebuff", (void**)&temp, IO_BUFFER_ALIGNMENT,
+                     num_elements * typeSize) != 0)
+    error("Unable to allocate temporary i/o buffer");
+  /* Copy the group data to the temporary buffer */
+  io_copy_temp_buffer(temp, e, props, N, internal_units, snapshot_units);
+
+  /* Create data space */
+  hid_t h_space;
+  if (N > 0)
+    h_space = H5Screate(H5S_SIMPLE);
+  else
+    h_space = H5Screate(H5S_NULL);
+
+  if (h_space < 0)
+    error("Error while creating data space for field '%s'.", props.name);
+
+  /* Use a fixed chunk size of 2^20 elements (capped to the dataset size
+   * below) */
+  int log2_chunk_size = 20;
+
+  int rank;
+  hsize_t shape[2];
+  hsize_t chunk_shape[2];
+
+  if (props.dimension > 1) {
+    rank = 2;
+    shape[0] = N;
+    shape[1] = props.dimension;
+    chunk_shape[0] = 1 << log2_chunk_size;
+    chunk_shape[1] = props.dimension;
+  } else {
+    rank = 1;
+    shape[0] = N;
+    shape[1] = 0;
+    chunk_shape[0] = 1 << log2_chunk_size;
+    chunk_shape[1] = 0;
+  }
+
+  /* Make sure the chunks are not larger than the dataset */
+  if (chunk_shape[0] > N) chunk_shape[0] = N;
+
+  /* Change shape of data space */
+  hid_t h_err = H5Sset_extent_simple(h_space, rank, shape, shape);
+  if (h_err < 0)
+    error("Error while changing data space shape for field '%s'.", props.name);
+
+  /* Dataset type */
+  hid_t h_type = H5Tcopy(io_hdf5_type(props.type));
+
+  /* Dataset properties */
+  hid_t h_prop = H5Pcreate(H5P_DATASET_CREATE);
+
+  /* Create filters and set compression level if we have something to write */
+  if (N > 0) {
+
+    /* Set chunk size */
+    h_err = H5Pset_chunk(h_prop, rank, chunk_shape);
+    if (h_err < 0)
+      error("Error while setting chunk size (%llu, %llu) for field '%s'.",
+            chunk_shape[0], chunk_shape[1], props.name);
+
+    /* Are we imposing some form of lossy compression filter? */
+    if (lossy_compression != compression_write_lossless)
+      set_hdf5_lossy_compression(&h_prop, &h_type, lossy_compression,
+                                 props.name);
+
+    /* Impose GZIP data compression */
+    if (e->snapshot_compression > 0) {
+      h_err = H5Pset_shuffle(h_prop);
+      if (h_err < 0)
+        error("Error while setting shuffling options for field '%s'.",
+              props.name);
+
+      h_err = H5Pset_deflate(h_prop, e->snapshot_compression);
+      if (h_err < 0)
+        error("Error while setting compression options for field '%s'.",
+              props.name);
+    }
+
+    /* Impose check-sum to verify data corruption */
+    h_err = H5Pset_fletcher32(h_prop);
+    if (h_err < 0)
+      error("Error while setting checksum options for field '%s'.", props.name);
+  }
+
+  /* Create dataset */
+  const hid_t h_data = H5Dcreate(grp, props.name, h_type, h_space, H5P_DEFAULT,
+                                 h_prop, H5P_DEFAULT);
+  if (h_data < 0) error("Error while creating dataspace '%s'.", props.name);
+
+  /* Write temporary buffer to HDF5 dataspace */
+  h_err = H5Dwrite(h_data, io_hdf5_type(props.type), h_space, H5S_ALL,
+                   H5P_DEFAULT, temp);
+  if (h_err < 0) error("Error while writing data array '%s'.", props.name);
+
+  /* Write unit conversion factors for this data set */
+  char buffer[FIELD_BUFFER_SIZE] = {0};
+  units_cgs_conversion_string(buffer, snapshot_units, props.units,
+                              props.scale_factor_exponent);
+  float baseUnitsExp[5];
+  units_get_base_unit_exponents_array(baseUnitsExp, props.units);
+  io_write_attribute_f(h_data, "U_M exponent", baseUnitsExp[UNIT_MASS]);
+  io_write_attribute_f(h_data, "U_L exponent", baseUnitsExp[UNIT_LENGTH]);
+  io_write_attribute_f(h_data, "U_t exponent", baseUnitsExp[UNIT_TIME]);
+  io_write_attribute_f(h_data, "U_I exponent", baseUnitsExp[UNIT_CURRENT]);
+  io_write_attribute_f(h_data, "U_T exponent", baseUnitsExp[UNIT_TEMPERATURE]);
+  io_write_attribute_f(h_data, "h-scale exponent", 0.f);
+  io_write_attribute_f(h_data, "a-scale exponent", props.scale_factor_exponent);
+  io_write_attribute_s(h_data, "Expression for physical CGS units", buffer);
+
+  /* Write the actual number this conversion factor corresponds to */
+  const double factor =
+      units_cgs_conversion_factor(snapshot_units, props.units);
+  io_write_attribute_d(
+      h_data,
+      "Conversion factor to CGS (not including cosmological corrections)",
+      factor);
+  io_write_attribute_d(
+      h_data,
+      "Conversion factor to physical CGS (including cosmological corrections)",
+      factor * pow(e->cosmology->a, props.scale_factor_exponent));
+
+#ifdef SWIFT_DEBUG_CHECKS
+  if (strlen(props.description) == 0)
+    error("Invalid (empty) description of the field '%s'", props.name);
+#endif
+
+  /* Write the full description */
+  io_write_attribute_s(h_data, "Description", props.description);
+
+  /* Free and close everything */
+  swift_free("writebuff", temp);
+  H5Tclose(h_type);
+  H5Pclose(h_prop);
+  H5Dclose(h_data);
+  H5Sclose(h_space);
+}
+
+void write_fof_hdf5_catalogue(const struct fof_props* props,
+                              const size_t num_groups, const struct engine* e) {
+
+  char fileName[512];
+#ifdef WITH_MPI
+  sprintf(fileName, "%s_%04i.%d.hdf5", props->base_name,
+          e->snapshot_output_count, e->nodeID);
+#else
+  sprintf(fileName, "%s_%04i.hdf5", props->base_name, e->snapshot_output_count);
+#endif
+
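+  /* With MPI, each rank writes a separate file holding only its local
+   * groups. */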
+  hid_t h_file = H5Fcreate(fileName, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+  if (h_file < 0) error("Error while opening file '%s'.", fileName);
+
+  /* Compute the number of groups */
+  long long num_groups_local = num_groups;
+  long long num_groups_total = num_groups;
+#ifdef WITH_MPI
+  MPI_Allreduce(&num_groups, &num_groups_total, 1, MPI_LONG_LONG, MPI_SUM,
+                MPI_COMM_WORLD);
+#endif
+
+  /* Start by writing the header */
+  write_fof_hdf5_header(h_file, e, num_groups_total, num_groups_local, props);
+
+  hid_t h_grp =
+      H5Gcreate(h_file, "/Groups", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+  if (h_grp < 0) error("Error while creating groups group.\n");
+
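+  /* Write the per-group arrays: total mass, centre of mass, group ID and
+   * group size. */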
+  struct io_props output_prop;
+  output_prop =
+      io_make_output_field_("Masses", DOUBLE, 1, UNIT_CONV_MASS, 0.f,
+                            (char*)props->group_mass, sizeof(double),
+                            "Total mass of each FOF group");
+  write_fof_hdf5_array(e, h_grp, fileName, "Groups", output_prop,
+                       num_groups_local, compression_write_lossless,
+                       e->internal_units, e->snapshot_units);
+  output_prop = io_make_output_field_("Centres", DOUBLE, 3, UNIT_CONV_LENGTH,
+                                      1.f, (char*)props->group_centre_of_mass,
+                                      3 * sizeof(double), "aaa");
+  write_fof_hdf5_array(e, h_grp, fileName, "Groups", output_prop,
+                       num_groups_local, compression_write_lossless,
+                       e->internal_units, e->snapshot_units);
+  output_prop =
+      io_make_output_field_("GroupIDs", LONGLONG, 1, UNIT_CONV_NO_UNITS, 0.f,
+                            (char*)props->group_index, sizeof(size_t),
+                            "ID of each FOF group");
+  write_fof_hdf5_array(e, h_grp, fileName, "Groups", output_prop,
+                       num_groups_local, compression_write_lossless,
+                       e->internal_units, e->snapshot_units);
+  output_prop =
+      io_make_output_field_("Sizes", LONGLONG, 1, UNIT_CONV_NO_UNITS, 0.f,
+                            (char*)props->group_size, sizeof(size_t),
+                            "Number of particles in each FOF group");
+  write_fof_hdf5_array(e, h_grp, fileName, "Groups", output_prop,
+                       num_groups_local, compression_write_lossless,
+                       e->internal_units, e->snapshot_units);
+
+  /* Close everything */
+  H5Gclose(h_grp);
+  H5Fclose(h_file);
+
+#ifdef WITH_MPI
+  MPI_Barrier(MPI_COMM_WORLD);
+#endif
+}
+
+#endif /* HAVE_HDF5 */
diff --git a/src/fof_catalogue_io.h b/src/fof_catalogue_io.h
new file mode 100644
index 0000000000000000000000000000000000000000..345c03ae4eca661c60550a6101780d423f2d3045
--- /dev/null
+++ b/src/fof_catalogue_io.h
@@ -0,0 +1,32 @@
+/*******************************************************************************
+ * This file is part of SWIFT.
+ * Copyright (c) 2020 Matthieu Schaller (schaller@strw.leidenuniv.nl)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as published
+ * by the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program.  If not, see <http://www.gnu.org/licenses/>.
+ *
+ ******************************************************************************/
+#ifndef SWIFT_FOF_CATALOGUE_IO_H
+#define SWIFT_FOF_CATALOGUE_IO_H
+
+/* Config parameters. */
+#include "../config.h"
+
+#ifdef WITH_FOF
+
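+/* Write the FOF group catalogue (masses, centres of mass, IDs and sizes)
+ * to an HDF5 file; when running with MPI, each rank writes one file. */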
+void write_fof_hdf5_catalogue(const struct fof_props *props,
+                              const size_t num_groups, const struct engine *e);
+
+#endif /* WITH_FOF */
+
+#endif /* SWIFT_FOF_CATALOGUE_IO_H */
diff --git a/src/hashmap.c b/src/hashmap.c
index a9632c77a65b7d351fef7b0ee5e72f3f76b597d7..6edd22b6ffbe58572bdcfc22b083b043413dd65f 100644
--- a/src/hashmap.c
+++ b/src/hashmap.c
@@ -31,6 +31,7 @@
 #include "error.h"
 #include "memuse.h"
 
+#include <float.h>
 #include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
@@ -222,6 +223,9 @@ hashmap_element_t *hashmap_find(hashmap_t *m, hashmap_key_t key, int create_new,
 
       /* Set the key. */
       chunk->data[offset_in_chunk].key = key;
+      chunk->data[offset_in_chunk].value.value_array2_dbl[0] = -FLT_MAX;
+      chunk->data[offset_in_chunk].value.value_array2_dbl[1] = -FLT_MAX;
+      chunk->data[offset_in_chunk].value.value_array2_dbl[2] = -FLT_MAX;
 
       /* Return a pointer to the new element. */
       return &chunk->data[offset_in_chunk];
diff --git a/src/hashmap.h b/src/hashmap.h
index a4b89ebaf9434ce6f753ba124c0c59969b807b56..c6d798dc00c62ed7ad253709c5ecdae9a05c920f 100644
--- a/src/hashmap.h
+++ b/src/hashmap.h
@@ -52,6 +52,8 @@ typedef struct _hashmap_struct {
   long long value_st;
   float value_flt;
   double value_dbl;
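+  /* Fixed-size double arrays; used by the FOF code to accumulate group
+   * centres of mass and reference positions. */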
+  double value_array_dbl[3];
+  double value_array2_dbl[3];
 } hashmap_struct_t;
 #define hashmap_value_t hashmap_struct_t
 #endif
diff --git a/src/line_of_sight.c b/src/line_of_sight.c
index 6af74ade6255a44a1f2cfc3e68aeabf733a8e289..f35db76910ba72ed8d0c055517847d8b01cbfd49 100644
--- a/src/line_of_sight.c
+++ b/src/line_of_sight.c
@@ -487,6 +487,16 @@ void write_hdf5_header(hid_t h_file, const struct engine *e,
   /* Write out the particle types */
   io_write_part_type_names(h_grp);
 
+  /* Write out the time-base */
+  if (e->policy | engine_policy_cosmology) {
+    io_write_attribute_d(h_grp, "TimeBase_dloga", e->time_base);
+    const double delta_t = cosmology_get_timebase(e->cosmology, e->ti_current);
+    io_write_attribute_d(h_grp, "TimeBase_dt", delta_t);
+  } else {
+    io_write_attribute_d(h_grp, "TimeBase_dloga", 0);
+    io_write_attribute_d(h_grp, "TimeBase_dt", e->time_base);
+  }
+
   /* Store the time at which the snapshot was written */
   time_t tm = time(NULL);
   struct tm *timeinfo = localtime(&tm);