diff --git a/examples/nIFTyCluster/Baryonic/README b/examples/nIFTyCluster/Baryonic/README
new file mode 100644
index 0000000000000000000000000000000000000000..8235ef1f05915450b5473ade5800ff3af85d0058
--- /dev/null
+++ b/examples/nIFTyCluster/Baryonic/README
@@ -0,0 +1,30 @@
+nIFTy Cluster
+=============
+
+
+Initial conditions
+------------------
+
+These initial conditions are reproduced, with permission, from
+the original authors. The initial conditions here contain
+baryons, and are the ones that were used in
+https://ui.adsabs.harvard.edu/abs/2016MNRAS.457.4063S/abstract.
+
+We have modified these initial conditions (initially Gadget binary)
+to work with SWIFT, which only accepts HDF5. There are scripts
+in this folder that download the required python modules and
+create the initial conditions from the Gadget-style ones, should
+you have them available. Alternatively, you can download the
+initial conditions from our webstorage with the getIC.sh script.
+
+
+About the simulation
+--------------------
+
+This is a very deep zoom, with 4 levels of hierarchy. You should
+run SWIFT in multi-softening mode to complete this in reasonable time.
+Usually we would expect this to run in about a day on a single Skylake
+HPC node.
+
+Some boilerplate for running on a SLURM batch system is included in
+the `run.sh` script.
diff --git a/examples/nIFTyCluster/Baryonic/clean.sh b/examples/nIFTyCluster/Baryonic/clean.sh
new file mode 100644
index 0000000000000000000000000000000000000000..cdeeac40ae3c412cc52ee8358d2c0dd93716b4e6
--- /dev/null
+++ b/examples/nIFTyCluster/Baryonic/clean.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+# Remove the temporary pygadgetreader checkout and the virtual
+# environment created by setup.sh.
+rm -rf pygadgetreader
+rm -rf pygadenv
diff --git a/examples/nIFTyCluster/Baryonic/convert_to_swift.py b/examples/nIFTyCluster/Baryonic/convert_to_swift.py
new file mode 100644
index 0000000000000000000000000000000000000000..1de42ac8021e5fa05c4bfe78df8a303669db4a6e
--- /dev/null
+++ b/examples/nIFTyCluster/Baryonic/convert_to_swift.py
@@ -0,0 +1,83 @@
+"""
+Script to convert the NIFTY ICs to those that are compatible with SWIFT.
+Note that this leaves h-factors as-is to be fixed in-place by SWIFT.
+
+You will need:
+
+    + pygadgetreader from https://bitbucket.org/rthompson/pygadgetreader/overview
+      (with 2to3 ran on all files to convert them to python3)
+    + swiftsimio
+"""
+
+from pygadgetreader import *
+from swiftsimio import Writer
+import numpy as np
+import unyt
+
+filename = "/cosma7/data/dp004/jlvc76/nIFTy/IC_CLUSTER_00019"
+
+length = unyt.kpc
+mass = 1e10 * unyt.msun
+time = unyt.s * unyt.kpc / unyt.km
+velocity = length / time
+energy_per_unit_mass = (length / time) ** 2
+
+
+nifty_units = unyt.UnitSystem("nifty", length, mass, time)
+
+writer = Writer(
+    unit_system=nifty_units,
+    box_size=readheader(filename, "boxsize") * length,
+    dimension=3,
+    compress=True,
+    extra_header={
+        "Redshift": readheader(filename, "redshift"),
+        "Omega0": readheader(filename, "O0"),
+        "OmegaLambda": readheader(filename, "Ol"),
+        "HubbleParam": readheader(filename, "h"),
+    },
+)
+
+writer.gas.coordinates = unyt.unyt_array(readsnap(filename, "pos", 0), length)
+
+writer.gas.velocities = unyt.unyt_array(readsnap(filename, "vel", 0), velocity)
+
+writer.gas.masses = unyt.unyt_array(readsnap(filename, "mass", 0), mass)
+
+writer.gas.internal_energy = unyt.unyt_array(
+    readsnap(filename, "u", 0), energy_per_unit_mass
+)
+
+# We must roll our own smoothing lengths.
+n_part = len(writer.gas.masses)
+x_range = writer.gas.coordinates.max() - writer.gas.coordinates.min()
+mean_interparticle_sep = x_range / n_part ** (1 / 3)
+
+writer.gas.smoothing_length = np.ones(n_part, dtype=float) * mean_interparticle_sep
+
+writer.gas.particle_ids = unyt.unyt_array(readsnap(filename, "pid", 0), None)
+
+
+def read_dm_quantity(name, unit, parttype):
+    """
+    The DM particles are in three sets because of their different masses.
+    In SWIFT we have to combine these.
+    """
+    out = np.concatenate(
+        [readsnap(filename, name, p) for p in parttype]
+    ) * unit
+    return out
+
+
+for name, parttype in {"dark_matter": [1], "boundary": [2, 3, 5]}.items():
+    writer_value = getattr(writer, name)
+
+    writer_value.coordinates = read_dm_quantity("pos", length, parttype)
+
+    writer_value.velocities = read_dm_quantity("vel", velocity, parttype)
+
+    writer_value.masses = read_dm_quantity("mass", mass, parttype)
+
+    writer_value.particle_ids = read_dm_quantity("pid", 1, parttype)
+
+writer.write("nifty.hdf5")
diff --git a/examples/nIFTyCluster/Baryonic/create_ics.sh b/examples/nIFTyCluster/Baryonic/create_ics.sh
new file mode 100644
index 0000000000000000000000000000000000000000..dd2ba3faeed8c2b7496dafc351cf9c7ebd314df6
--- /dev/null
+++ b/examples/nIFTyCluster/Baryonic/create_ics.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+# Abort immediately if any stage fails, rather than running the
+# converter against a half-built environment.
+set -e
+
+bash setup.sh
+
+source pygadenv/bin/activate
+
+python3 convert_to_swift.py
+
+deactivate
+
+bash clean.sh
diff --git a/examples/nIFTyCluster/Baryonic/getIC.sh b/examples/nIFTyCluster/Baryonic/getIC.sh
new file mode 100644
index 0000000000000000000000000000000000000000..60d8ed20651f9ce04455850fd1390a4f012717b8
--- /dev/null
+++ b/examples/nIFTyCluster/Baryonic/getIC.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
+wget http://virgodb.cosma.dur.ac.uk/swift-webstorage/ICs/nIFTyCluster/nifty.hdf5
diff --git a/examples/nIFTyCluster/Baryonic/nifty.yml b/examples/nIFTyCluster/Baryonic/nifty.yml
new file mode 100644
index 0000000000000000000000000000000000000000..4656a642ef77543c827f98fe0b55cb1187786fe4
--- /dev/null
+++ b/examples/nIFTyCluster/Baryonic/nifty.yml
@@ -0,0 +1,64 @@
+# Define the system of units to use internally.
+InternalUnitSystem:
+  UnitMass_in_cgs:     1.98848e43    # 10^10 M_sun in grams
+  UnitLength_in_cgs:   3.08567758e21 # kpc in centimeters
+  UnitVelocity_in_cgs: 1e5           # km/s in centimeters per second
+  UnitCurrent_in_cgs:  1             # Amperes
+  UnitTemp_in_cgs:     1             # Kelvin
+
+# Cosmological parameters
+Cosmology:
+  a_begin:        0.015151515 # z_ini = 65
+  a_end:          1.0
+  Omega_m:        0.27
+  Omega_lambda:   0.73
+  Omega_b:        0.0469
+  h:              0.7
+  
+# Parameters governing the time integration
+TimeIntegration:
+  time_begin: 0.    # The starting time of the simulation (in internal units).
+  time_end:   1e-2  # The end time of the simulation (in internal units).
+  dt_min:     1e-10 # The minimal time-step size of the simulation (in internal units).
+  dt_max:     1e-2  # The maximal time-step size of the simulation (in internal units).
+
+# Parameters governing the snapshots
+Snapshots:
+  basename:            nifty # Common part of the name of output files
+  scale_factor_first:  0.05  # Scale-factor of the first snapshot (cosmological run)
+  time_first:          0.01  # Time of the first output (non-cosmological run) (in internal units)
+  delta_time:          1.01  # Time difference between consecutive outputs (in internal units)
+
+# Parameters governing the conserved quantities statistics
+Statistics:
+  scale_factor_first:  0.05 # Scale-factor of the first stat dump (cosmological run)
+  time_first:          0.01 # Time of the first stat dump (non-cosmological run) (in internal units)
+  delta_time:          1.01 # Time between statistics output
+
+# Parameters for the self-gravity scheme
+Gravity:
+  eta:                    0.025    # Constant dimensionless multiplier for time integration.
+  theta:                  0.7     # Opening angle (Multipole acceptance criterion)
+  comoving_softening:     20.0 # Comoving softening length (in internal units).
+  max_physical_softening: 5.0    # Physical softening length (in internal units).
+  mesh_side_length:       512
+  softening_ratio:        0.04   # 1/25
+  softening_ratio_background:        0.04   # 1/25
+
+# Parameters for the hydrodynamics scheme
+SPH:
+  resolution_eta:        1.48691 # ~100 neighbours with the Wendland-C2 kernel
+  h_min_ratio:           0.1      # Minimal smoothing in units of softening.
+  CFL_condition:         0.1      # Courant-Friedrichs-Lewy condition for time integration.
+  minimal_temperature:   100      # (internal units)
+
+Scheduler:
+  max_top_level_cells:  32        # (Optional) Maximal number of top-level cells in any dimension. The number of top-level cells will be the cube of this (this is the default value).
+
+# Parameters related to the initial conditions
+InitialConditions:
+  file_name:  ./nifty.hdf5    # The file to read
+  periodic:   1
+  cleanup_h_factors: 1               # Remove the h-factors inherited from Gadget
+  cleanup_velocity_factors: 1        # Remove the sqrt(a) factor in the velocities inherited from Gadget
+  cleanup_smoothing_lenghts: 1       # NOTE(review): key spelled "lenghts" -- confirm this matches the SWIFT parameter parser before renaming
diff --git a/examples/nIFTyCluster/Baryonic/run.sh b/examples/nIFTyCluster/Baryonic/run.sh
new file mode 100644
index 0000000000000000000000000000000000000000..1977ac4b76e41030562634acc6cc56ef37ede364
--- /dev/null
+++ b/examples/nIFTyCluster/Baryonic/run.sh
@@ -0,0 +1,17 @@
+#!/bin/bash -l
+
+#SBATCH -J nIFTyClusterSWIFT
+#SBATCH -N 1
+#SBATCH --tasks-per-node=2
+#SBATCH -o nifty_%j.out
+#SBATCH -e nifty_%j.err
+#SBATCH -p <CHANGEME>
+#SBATCH -A <CHANGEME>
+#SBATCH --exclusive
+
+#SBATCH -t 72:00:00
+
+# Two MPI ranks on the single node (matching --tasks-per-node above) at
+# 14 threads each -- presumably sized for a 28-core Skylake node; adjust
+# --threads to (cores per node) / (ranks per node) on other hardware.
+mpirun -np 2 ../../swift_mpi --cosmology --hydro --self-gravity -v 1 --pin --threads=14  nifty.yml
diff --git a/examples/nIFTyCluster/Baryonic/setup.sh b/examples/nIFTyCluster/Baryonic/setup.sh
new file mode 100644
index 0000000000000000000000000000000000000000..4c204dedf59fa441fb89199d727946625b49c013
--- /dev/null
+++ b/examples/nIFTyCluster/Baryonic/setup.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+hg clone https://bitbucket.org/rthompson/pygadgetreader
+
+python3 -m venv pygadenv
+source pygadenv/bin/activate
+pip3 install swiftsimio
+
+cd pygadgetreader
+
+2to3 -nw *.py
+2to3 -nw */*.py
+
+python3 setup.py install
+
+cd ..
+
+deactivate