diff --git a/examples/MHDTests/CoolingHaloMHD_feedback/1Dprofiles.py b/examples/MHDTests/CoolingHaloMHD_feedback/1Dprofiles.py
new file mode 100644
index 0000000000000000000000000000000000000000..f47a488a905f39ce0e38149d1d5e30241c618f70
--- /dev/null
+++ b/examples/MHDTests/CoolingHaloMHD_feedback/1Dprofiles.py
@@ -0,0 +1,366 @@
+import numpy as np
+import h5py
+import argparse
+import unyt
+import matplotlib.pyplot as plt
+
+from swiftsimio import load
+
+# Some constants cgs
+PARSEC_IN_CGS = 3.0856776e18
+KM_PER_SEC_IN_CGS = 1.0e5
+CONST_G_CGS = 6.672e-8
+MSOL_IN_CGS = 1.9891e33  # Solar mass
+kb_cgs = 1.38e-16  # boltzmann constant
+m_H_cgs = 1.68e-24  # atomic hydrogen mass
+GYR_IN_CGS = 3.1536e16  # gigayear
+# First set the unit velocity; the unit mass and length are set from the halo properties below
+const_unit_velocity_in_cgs = 1.0e5  # 1 km/s in cm s^-1
+
+h_dim = 0.704 #0.681  # 0.67777  # hubble parameter
+H_0_cgs = 100.0 * h_dim * KM_PER_SEC_IN_CGS / (1.0e6 * PARSEC_IN_CGS)
+
+# From this we can find the virial radius, the radius within which the average density of the halo is
+# 200. * the mean matter density
+
+# Set M200 and get R200 and V200
+f_b = 0.17  # baryon fraction
+c_200 = 7.2  # halo concentration
+spin_lambda = 0.05  # spin parameter
+nH_max_cgs = 1e2  # maximum hydrogen number density used in the density cut
+M_200_cgs = 1e12 * MSOL_IN_CGS
+gamma = 5.0 / 3.0  # adiabatic index of the gas (used by u_vs_r and T_vs_r below)
+rhoc_cgs = 3 * H_0_cgs ** 2 / (8 * np.pi * CONST_G_CGS)
+r_200_cgs = (3 * M_200_cgs / (4 * np.pi * rhoc_cgs * 200)) ** (1 / 3)
+v_200_cgs = np.sqrt(CONST_G_CGS * M_200_cgs / r_200_cgs)
+v_200 = v_200_cgs / const_unit_velocity_in_cgs
+T_200_cgs = m_H_cgs * v_200_cgs ** 2 / (2 * kb_cgs)
+
+const_unit_mass_in_cgs = M_200_cgs
+const_unit_length_in_cgs = r_200_cgs
+# Derived quantities
+const_unit_time_in_cgs = const_unit_length_in_cgs / const_unit_velocity_in_cgs
+const_G = (
+    CONST_G_CGS
+    * const_unit_mass_in_cgs
+    * const_unit_time_in_cgs
+    * const_unit_time_in_cgs
+    / (const_unit_length_in_cgs * const_unit_length_in_cgs * const_unit_length_in_cgs)
+)
+
+
+# Parse command line arguments
+argparser = argparse.ArgumentParser()
+argparser.add_argument("input")
+argparser.add_argument("output")
+args = argparser.parse_args()
+
+# Load snapshot
+filename = args.input
+data = load(filename)
+
+# Retrieve some information about the simulation run
+artDiffusion = data.metadata.hydro_scheme["Artificial Diffusion Constant"]
+dedHyp = data.metadata.hydro_scheme["Dedner Hyperbolic Constant"]
+dedHypDivv = data.metadata.hydro_scheme["Dedner Hyperbolic div(v) Constant"]
+dedPar = data.metadata.hydro_scheme["Dedner Parabolic Constant"]
+eta = data.metadata.hydro_scheme["Resistive Eta"]
+git = data.metadata.code["Git Revision"]
+gitBranch = data.metadata.code["Git Branch"]
+hydroScheme = data.metadata.hydro_scheme["Scheme"]
+kernel = data.metadata.hydro_scheme["Kernel function"]
+neighbours = data.metadata.hydro_scheme["Kernel target N_ngb"]
+n_gas = data.metadata.n_gas
+
+# Retrieve particle attributes of interest
+rho = data.gas.densities
+m = data.gas.masses
+coords = data.gas.coordinates
+coords_center = coords - data.metadata.boxsize / 2
+radius = np.linalg.norm(coords_center, axis=1)
+velocities = data.gas.velocities
+
+# calculate specific angular momentum j(r)
+rotation_axis = np.array([0, 0, 1])
+e_phi = np.cross(rotation_axis[None, :], coords_center)
+e_phi = e_phi / np.linalg.norm(e_phi, axis=1)[:, None]
+v_phi = (
+    velocities[:, 0] * e_phi[:, 0]
+    + velocities[:, 1] * e_phi[:, 1]
+    + velocities[:, 2] * e_phi[:, 2]
+)
+e_r = np.cross(e_phi, rotation_axis[None, :])
+axis_dist = (
+    coords_center[:, 0] * e_r[:, 0]
+    + coords_center[:, 1] * e_r[:, 1]
+    + coords_center[:, 2] * e_r[:, 2]
+)
+j = v_phi * radius ** 2 / axis_dist
+omega = v_phi / axis_dist
+Jtot = np.sum(omega * axis_dist ** 2 * m)
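+# In the block above e_phi is the azimuthal unit vector about the z-axis, axis_dist is
+# the cylindrical radius R, and j = v_phi * r^2 / R = omega * r^2 is the same specific
+# angular momentum convention used to build the ICs in makeIC.py.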
+
+P = data.gas.pressures
+T = data.gas.temperatures
+B = data.gas.magnetic_flux_densities
+Bx, By = B[:, 0], B[:, 1]
+normB = np.sqrt(B[:, 0] ** 2 + B[:, 1] ** 2 + B[:, 2] ** 2)
+divB = data.gas.magnetic_divergences
+h = data.gas.smoothing_lengths
+errB = np.log10(h * abs(divB) / normB)
+
+
+rho_units = unyt.g * unyt.cm ** (-3)
+r_units = 1e3 * PARSEC_IN_CGS * unyt.cm
+P_units = (
+    MSOL_IN_CGS / PARSEC_IN_CGS / GYR_IN_CGS ** 2 * (unyt.g / (unyt.cm * unyt.s ** 2))
+)
+j_units = (1e3 * PARSEC_IN_CGS) / GYR_IN_CGS * (unyt.cm ** 2 / unyt.s)
+nH_units = unyt.cm ** (-3)
+
+rho.convert_to_units(rho_units)
+P.convert_to_units(P_units)
+j.convert_to_units(j_units)
+n_H = rho / (m_H_cgs * unyt.g)
+
+radius = np.linalg.norm(coords_center,axis=1)
+
+# Plot maps
+plt.rcParams.update({"font.size": 16})
+
+nx = 1
+ny = 4
+fig, axs = plt.subplots(ny, nx, figsize=((10 * nx, 5 * ny)))
+fig.subplots_adjust(hspace=0.1)
+
+# plot density
+axs[0].scatter(radius.to(r_units), n_H.to(nH_units), s=0.1, color='black')
+axs[0].set_yscale("log")
+axs[0].set_yticks(np.logspace(-7, 2, 10))
+axs[0].set_ylim(1e-7, 1e2)
+
+# plot Pressure
+axs[1].scatter(radius.to(r_units), P.to(P_units), s=0.1, color='black')
+axs[1].set_yscale("log")
+
+# plot specific angular momentum
+axs[2].scatter(radius.to(r_units), j.to(j_units), s=0.1, color='black')
+axs[2].set_yscale("log")
+
+# NFW-like gas density profile
+def rho_r(r_value, f_b, M_200_cgs, r_200_cgs, c_200):
+    rho_0 = (
+        M_200_cgs
+        / (np.log(1 + c_200) - c_200 / (1 + c_200))
+        / (4 * np.pi * r_200_cgs ** 3 / c_200 ** 3)
+    )
+    result_cgs = rho_0 * f_b / (c_200 * r_value * (1 + c_200 * r_value) ** 2)
+    # Apply density cut
+    rho_max_cgs = nH_max_cgs * m_H_cgs
+    result_cgs = np.array(result_cgs)
+    result_cgs[result_cgs > rho_max_cgs] = rho_max_cgs
+    return result_cgs
+
+
+# NFW-like gas mass inside a sphere with radius R
+def Mgas_r(r_value, f_b, M_200_cgs, r_200_cgs, c_200):
+    M_0 = M_200_cgs / (np.log(1 + c_200) - c_200 / (1 + c_200))
+    return (
+        M_0
+        * f_b
+        * (np.log(1 + c_200 * r_value) - c_200 * r_value / (1 + c_200 * r_value))
+    )
+
+
+# NFW Gravitational acceleration
+def a_NFW(r_value, M_200_cgs, r_200_cgs, c_200):
+    a_pref = (
+        CONST_G_CGS
+        * M_200_cgs
+        / (np.log(1 + c_200) - c_200 / (1 + c_200))
+        / r_200_cgs ** 2
+    )
+    return (
+        a_pref
+        * ((r_value / (r_value + 1 / c_200)) - np.log(1 + c_200 * r_value))
+        / r_value ** 2
+    )
+
+
+# Integrate rho_gas*a_NFW
+def integrate(r_min, r_max, f_b, M_200_cgs, r_200_cgs, c_200, Nsteps=10000):
+    # Perform the integration
+    r_range = np.linspace(r_min, r_max, Nsteps)
+    dr = np.abs((r_max - r_min) / Nsteps)
+    integrands = rho_r(r_range, f_b, M_200_cgs, r_200_cgs, c_200) * a_NFW(
+        r_range, M_200_cgs, r_200_cgs, c_200
+    )
+    result_cgs = np.sum(integrands * dr) * r_200_cgs
+    return result_cgs
+
+
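+# The profiles below assume hydrostatic equilibrium, dP/dr = rho(r) * a_NFW(r)
+# (a_NFW is negative, i.e. points inwards), integrated from r_max with the boundary
+# condition P(r_max) = P0_cgs:
+#     P(r) = P0_cgs - int_r^r_max rho(r') a_NFW(r') dr'
+# u(r) and T(r) then follow from P = (gamma - 1) rho u and P = rho k_B T / m_H.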
+# NFW-like gas hydrostatic equilibrium internal energy profile
+def u_vs_r(P0_cgs, r_value, r_max, f_b, M_200_cgs, r_200_cgs, c_200):
+    result_cgs = (
+        (P0_cgs - integrate(r_value, r_max, f_b, M_200_cgs, r_200_cgs, c_200))
+        / (gamma - 1)
+        / rho_r(r_value, f_b, M_200_cgs, r_200_cgs, c_200)
+    )
+    return result_cgs
+
+
+# NFW-like gas hydrostatic equilibrium pressure profile
+def P_vs_r(P0_cgs, r_value, r_max, f_b, M_200_cgs, r_200_cgs, c_200):
+    result_cgs = P0_cgs - integrate(r_value, r_max, f_b, M_200_cgs, r_200_cgs, c_200)
+    return result_cgs
+
+
+# NFW-like gas hydrostatic equilibrium temperature profile
+def T_vs_r(P0_cgs, r_value, r_max, f_b, M_200_cgs, r_200_cgs, c_200):
+    result_cgs = (
+        u_vs_r(P0_cgs, r_value, r_max, f_b, M_200_cgs, r_200_cgs, c_200)
+        * (gamma - 1)
+        * m_H_cgs
+        / kb_cgs
+    )
+    return result_cgs
+
+# specific angular momentum profile j(r) (note: this re-uses and shadows the particle array j plotted above)
+def j(r_value, j_max, s, f_b, M_200_cgs, r_200_cgs, c_200):
+    return (
+        j_max
+        * (Mgas_r(r_value, f_b, M_200_cgs, r_200_cgs, c_200) / (M_200_cgs * f_b)) ** s
+    )
+def jsimple(r_value, j_max, s, f_b, M_200_cgs, r_200_cgs, c_200):
+    return (
+        j_max
+        * r_value**s
+    )
+
+
+rmax = (r_200_cgs * unyt.cm).to(radius.units)  # alternatively: np.max(radius)
+rmin = np.min(radius)
+r_analytic = (
+    np.logspace(np.log10(rmin.value), np.log10(rmax.value), 1000) * radius.units
+)
+# r_analytic = np.linspace(rmin.value, rmax.value, 10000) * radius.units
+rho_analytic = rho_r(
+    (r_analytic / (r_200_cgs * unyt.cm)).value, f_b, M_200_cgs, r_200_cgs, c_200
+)
+Mgas_analytic = Mgas_r(
+    (r_analytic / (r_200_cgs * unyt.cm)).value, f_b, M_200_cgs, r_200_cgs, c_200
+)
+nH_analytic = rho_analytic / m_H_cgs * nH_units
+
+
+T0_cgs = T_200_cgs  # gas temperature at the edge of the box (set manually, e.g. 1e5, if desired)
+rho0_cgs = rho_r(
+    (rmax / (r_200_cgs * unyt.cm)).value, f_b, M_200_cgs, r_200_cgs, c_200
+)  # gas density at the edge of the box
+
+P0_cgs = rho0_cgs * kb_cgs * T0_cgs / m_H_cgs  # gas pressure on the edge of the box
+P_analytic = np.array([
+    P_vs_r(
+        P0_cgs,
+        (r_analytic[i]/(r_200_cgs*unyt.cm)).value,
+        (rmax/(r_200_cgs*unyt.cm)).value,
+        f_b,
+        M_200_cgs,
+        r_200_cgs,
+        c_200,
+    )
+    for i in range(len(r_analytic))
+]) * (unyt.g / (unyt.cm * unyt.s ** 2))  # analytic hydrostatic-equilibrium pressure profile
+
+# Compare the measured specific angular momentum inside R_200 with the Bullock expectation
+mask_r200 = radius <= (r_200_cgs * unyt.cm)
+Jtot = np.sum(omega[mask_r200] * axis_dist[mask_r200] ** 2 * m[mask_r200])
+jsp = Jtot / np.sum(m[mask_r200])
+
+jsp_B = np.sqrt(2) * v_200_cgs * (unyt.cm / unyt.s) * r_200_cgs * unyt.cm * spin_lambda
+print("Total spin ratio (expected / measured): ", jsp_B.to(j_units).value / jsp.to(j_units).value)
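+# Bullock et al. (2001) define the spin parameter as lambda' = J / (sqrt(2) M V_200 R_200),
+# i.e. lambda' = j_sp / (sqrt(2) V_200 R_200) in terms of the specific angular momentum,
+# so a ratio close to unity means the snapshot carries the spin the ICs were built with.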
+
+############
+
+j_1_analytic = j((r_analytic/(r_200_cgs*unyt.cm)).value, 1, 1, f_b, M_200_cgs, r_200_cgs, c_200)
+j_1_analytic_simple = jsimple((r_analytic/(r_200_cgs*unyt.cm)).value, 1, 2, f_b, M_200_cgs, r_200_cgs, c_200)
+
+int_dOmega = 8 * np.pi / 3  # angular integral of the squared axial distance: int sin^2(theta) dphi dcos(theta) = 8 pi / 3
+
+shell_weight = rho_analytic[:-1] * (unyt.g / unyt.cm ** 3) * r_analytic[:-1] ** 2 * np.diff(r_analytic)
+jsp_1_analytic = np.sum(j_1_analytic[:-1] * int_dOmega * shell_weight) / np.sum(4 * np.pi * shell_weight)
+jsp_1_analytic_simple = np.sum(j_1_analytic_simple[:-1] * int_dOmega * shell_weight) / np.sum(4 * np.pi * shell_weight)
+
+j_analytic = j_1_analytic / jsp_1_analytic * jsp_B
+
+j_analytic_simple = j_1_analytic_simple / jsp_1_analytic_simple * jsp_B
+
+
+
+#
+# plot density
+axs[0].plot(r_analytic.to(r_units), nH_analytic.to(nH_units), color='red',label = r'$\rho_{\rm NFW}^{gas}(r)$')
+axs[0].set_yscale("log")
+axs[0].set_xscale("log")
+axs[0].set_xlim([rmin,rmax])
+axs[0].legend()
+
+# plot Pressure
+axs[1].plot(r_analytic.to(r_units), P_analytic.to(P_units), color='red',label = r'$P_{hyd. eq.}(r)$')
+axs[1].set_yscale("log")
+axs[1].set_xscale("log")
+axs[1].set_xlim([rmin,rmax])
+axs[1].legend()
+
+# plot j(r)
+axs[2].plot(r_analytic.to(r_units), j_analytic.to(j_units), color='red',label = r'$\rm j(r)\sim M(r)$')
+axs[2].plot(r_analytic.to(r_units), j_analytic_simple.to(j_units), color='red', linestyle='dashed',label=r'$\rm j(r)\sim r^2$')
+axs[2].set_xscale("log")
+axs[2].set_xlim([rmin,rmax])
+axs[2].legend()
+
+# limits
+axs[0].set_ylim(1e-7, 1e2)
+axs[0].set_yticks(np.logspace(-7, 2, 10))
+axs[1].set_ylim(1e1, 1e9)
+axs[1].set_yticks(np.logspace(1, 9, 9))
+axs[2].set_ylim(1e18, 1e27)
+axs[2].set_yticks(np.logspace(18, 27, 10))
+
+axs[0].set_xlim(1e-2, 1e3)
+axs[1].set_xlim(1e-2, 1e3)
+axs[2].set_xlim(1e-2, 1e3)
+
+# add labels
+axs[0].set_ylabel(r"$n_H(r)$ $[cm^{-3}]$")
+axs[1].set_ylabel(r"$P(r)$ $[\rm M_{sol} \cdot pc^{-1} \cdot Gyr^{-2}]$")
+axs[2].set_ylabel(r"$j(r)$ $[\rm kpc^{2} \cdot Gyr^{-1} ]$")
+
+# vertical lines
+axs[0].axvline(x=(r_200_cgs*unyt.cm).to(r_units),color='gray',label=r'$R_{200}$')
+axs[1].axvline(x=(r_200_cgs*unyt.cm).to(r_units),color='gray',label=r'$R_{200}$')
+axs[2].axvline(x=(r_200_cgs*unyt.cm).to(r_units),color='gray',label=r'$R_{200}$')
+
+Ninfo = 3
+text_common_args = dict(
+    fontsize=10, ha="center", va="center", transform=axs[Ninfo].transAxes
+)
+
+axs[Ninfo].text(
+    0.5,
+    0.8,
+    r"Cooling Halo with Spin, IC quality check.",
+    **text_common_args,
+)
+axs[Ninfo].text(0.5, 0.7, r"SWIFT %s" % git.decode("utf-8"), **text_common_args)
+axs[Ninfo].text(0.5, 0.6, r"Branch %s" % gitBranch.decode("utf-8"), **text_common_args)
+axs[Ninfo].text(0.5, 0.5, hydroScheme.decode("utf-8"), **text_common_args)
+axs[Ninfo].text(
+    0.5,
+    0.4,
+    kernel.decode("utf-8") + r" with $%.2f$ neighbours" % (neighbours),
+    **text_common_args,
+)
+axs[Ninfo].text(
+    0.5, 0.3, r"Dimensionless Hubble constant: $%.3f$" % h_dim, **text_common_args
+)
+axs[Ninfo].text(
+    0.5, 0.0, r"Number of particles $N_p$: $%.0f$" % (n_gas), **text_common_args
+)
+
+axs[Ninfo].axis("off")
+
+
+for ax in axs:
+    ax.minorticks_on()
+    ax.grid()
+
+fig.tight_layout()
+plt.savefig(args.output)
diff --git a/examples/MHDTests/CoolingHaloMHD_feedback/README b/examples/MHDTests/CoolingHaloMHD_feedback/README
new file mode 100644
index 0000000000000000000000000000000000000000..bfd8b2cf57c3c627f93fbb6fff28ec906c98f025
--- /dev/null
+++ b/examples/MHDTests/CoolingHaloMHD_feedback/README
@@ -0,0 +1,10 @@
+Compilation
+-----------
+For runs with the EAGLE cooling, chemistry, entropy floor, star formation, feedback and tracers models, an external NFW potential and the direct-induction SPMHD scheme:
+./configure --with-cooling=EAGLE --with-chemistry=EAGLE --with-entropy-floor=EAGLE --with-ext-potential=nfw --with-stars=EAGLE --with-star-formation=EAGLE --with-feedback=EAGLE --with-tracers=EAGLE --with-kernel=quintic-spline --with-spmhd=direct-induction --disable-hand-vec --disable-compiler-warnings --disable-doxygen-doc
+
+
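+Initial conditions and tables
+-----------------------------
+A typical setup sequence (the particle number passed to makeIC.py is only an
+example; adjust it to the desired resolution):
+
+./getGlass.sh                   # glass files used as the initial particle arrangement
+./getEagleCoolingTable.sh       # ./coolingtables/ needed by EAGLECooling
+./getYieldTable.sh              # ./yieldtables/ needed by EAGLEFeedback
+./getEaglePhotometryTable.sh    # ./photometry needed by the Stars module
+python3 makeIC.py 100000        # writes CoolingHalo.hdf5, read by cooling_halo.yml
+
+getPS2020CoolingTables.sh is only needed when configuring with the PS2020
+cooling model instead of EAGLE.
+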
+Running
+-------
+
+../../../swift --threads=16 --feedback --external-gravity --self-gravity --stars --star-formation --cooling --hydro --limiter --sync cooling_halo.yml 2>&1 | tee output.log
diff --git a/examples/MHDTests/CoolingHaloMHD_feedback/SFH.py b/examples/MHDTests/CoolingHaloMHD_feedback/SFH.py
new file mode 100644
index 0000000000000000000000000000000000000000..a223c6cf8aae26cd60e6ddc78d3ac51a82d8f885
--- /dev/null
+++ b/examples/MHDTests/CoolingHaloMHD_feedback/SFH.py
@@ -0,0 +1,100 @@
+"""
+Makes a movie using sphviewer and ffmpeg.
+
+Edit low_frac and up_frac to focus on a certain view of the box.
+The colour map can also be changed via colour_map.
+
+Usage: python3 makeMovie.py CoolingHalo_
+
+Written by James Willis (james.s.willis@durham.ac.uk)
+"""
+
+import glob
+import h5py as h5
+import numpy as np
+import matplotlib.pyplot as plt
+
+from tqdm import tqdm
+
+
+def getSFH(filename):
+
+    # Read the data
+    with h5.File(filename, "r") as f:
+        box_size = f["/Header"].attrs["BoxSize"][0]
+        coordinates = f["/PartType4/Coordinates"][:, :]
+        mass = f["/PartType4/Masses"][:]
+        # flag = f["/PartType4/NewStarFlag"][:]
+        birth_time = f["/PartType4/BirthTimes"][:]
+
+    absmaxz = 2  # kpc
+    absmaxxy = 10  # kpc
+
+    part_mask = (
+        ((coordinates[:, 0] - box_size / 2.0) > -absmaxxy)
+        & ((coordinates[:, 0] - box_size / 2.0) < absmaxxy)
+        & ((coordinates[:, 1] - box_size / 2.0) > -absmaxxy)
+        & ((coordinates[:, 1] - box_size / 2.0) < absmaxxy)
+        & ((coordinates[:, 2] - box_size / 2.0) > -absmaxz)
+        & ((coordinates[:, 2] - box_size / 2.0) < absmaxz)
+    )  # & (flag==1)
+
+    birth_time = birth_time[part_mask]
+    mass = mass[part_mask]
+
+    histogram = np.histogram(birth_time, bins=200, weights=mass * 2e4, range=[0, 0.1])
+    values = histogram[0]
+    xvalues = (histogram[1][:-1] + histogram[1][1:]) / 2.0
+    return xvalues, values
+
+
+def getsfrsnapwide():
+
+    time = np.arange(1, 101, 1)
+    SFR_gparticles = np.zeros(100)
+    for i in tqdm(range(1, 100)):
+        # Read the data
+        filename = "output_%04d.hdf5" % i
+        with h5.File(filename, "r") as f:
+            box_size = f["/Header"].attrs["BoxSize"][0]
+            coordinates = f["/PartType0/Coordinates"][:, :]
+            SFR = f["/PartType0/StarFormationRates"][:]
+
+        absmaxz = 2  # kpc
+        absmaxxy = 10  # kpc
+
+        part_mask = (
+            ((coordinates[:, 0] - box_size / 2.0) > -absmaxxy)
+            & ((coordinates[:, 0] - box_size / 2.0) < absmaxxy)
+            & ((coordinates[:, 1] - box_size / 2.0) > -absmaxxy)
+            & ((coordinates[:, 1] - box_size / 2.0) < absmaxxy)
+            & ((coordinates[:, 2] - box_size / 2.0) > -absmaxz)
+            & ((coordinates[:, 2] - box_size / 2.0) < absmaxz)
+            & (SFR > 0)
+        )
+
+        SFR = SFR[part_mask]
+
+        total_SFR = np.sum(SFR)
+        SFR_gparticles[i] = total_SFR * 10
+
+    return time[:-1], SFR_gparticles[1:]
+
+
+if __name__ == "__main__":
+
+    time, SFR1 = getsfrsnapwide()  # , SFR2, SFR_error = getsfrsnapwide()
+    time2, SFR3 = getSFH("output_%04d.hdf5" % 100)
+    plt.plot(time2[1:] * 1e3, SFR3[1:], label="Using birth_time of star particles")
+    plt.plot(time, SFR1, label="Using SFR of gas particles", color="g")
+    plt.xlabel("Time (Myr)")
+    plt.ylabel(r"SFR ($\rm M_\odot \, yr^{-1}$)")
+    plt.ylim(0, 20)
+    plt.legend()
+    plt.savefig("SFH.png")
diff --git a/examples/MHDTests/CoolingHaloMHD_feedback/cooling_halo.yml b/examples/MHDTests/CoolingHaloMHD_feedback/cooling_halo.yml
new file mode 100644
index 0000000000000000000000000000000000000000..7b68031d04cb4a5a8c621c92616bd08065329b5c
--- /dev/null
+++ b/examples/MHDTests/CoolingHaloMHD_feedback/cooling_halo.yml
@@ -0,0 +1,193 @@
+# Define some meta-data about the simulation
+MetaData:
+  run_name:   CoolingHalo-MHD-EAGLE
+
+# Define the system of units to use internally.
+InternalUnitSystem:
+  UnitMass_in_cgs:     1.98848e43    # 10^10 M_sun in grams
+  UnitLength_in_cgs:   3.08567758e21 # 1 kpc in cm
+  UnitVelocity_in_cgs: 1e5           # 1 km/s in cm/s
+  UnitCurrent_in_cgs:  2.088431e13   # Amperes
+  UnitTemp_in_cgs:     1             # Kelvin
+
+# Parameters for the self-gravity scheme
+Gravity:
+  eta:          0.025                 # Constant dimensionless multiplier for time integration.
+  MAC:          geometric
+  theta_cr:     0.7                   # Opening angle (Multipole acceptance criterion).
+  use_tree_below_softening:  1
+  max_physical_DM_softening:     1.0 # Physical softening length (in internal units).
+  max_physical_baryon_softening: 1.0 # Physical softening length (in internal units).
+
+# External potential parameters
+NFWPotential:
+    useabspos:       0              # 0 -> positions based on centre, 1 -> absolute positions
+    position:        [0.,0.,0.]     # Location of centre of the NFW potential with respect to centre of the box (if 0) otherwise absolute (if 1) (internal units)
+    M_200:           100            # M_200 of the halo in internal units of 10^10 M_sun (i.e. 10^12 M_sun, matching makeIC.py)
+    h:               0.704          # reduced Hubble constant (must be consistent with the value used in makeIC.py)
+    concentration:   7.2            # concentration of the Halo
+    timestep_mult:   0.01           # Dimensionless pre-factor for the time-step condition, basically determines the fraction of the orbital time we use to do the time integration
+    epsilon:         1.0            # Softening size (internal units)
+
+# Parameters governing the time integration (Set dt_min and dt_max to the same value for a fixed time-step run.)
+TimeIntegration:
+  time_begin:        0.    # The starting time of the simulation (in internal units).
+  time_end:          4.0   # The end time of the simulation (in internal units).
+  dt_min:            1e-9  # The minimal time-step size of the simulation (in internal units).
+  dt_max:            1e-2  # The maximal time-step size of the simulation (in internal units).
+
+# Parameters governing the snapshots
+Snapshots:
+  basename:    output      # Common part of the name of output files
+  time_first:  0.          # Time of the first output if non-cosmological time-integration (in internal units)
+  delta_time:  0.01023     # Time difference between consecutive outputs (in internal units); 0.001023 TU = 1 Myr, i.e. ~10 Myr between snapshots
+  compression: 4           # Compress the snapshots
+  recording_triggers_part: [-1, -1]   # Not recording as we have many snapshots
+  recording_triggers_bpart: [-1, -1]  # Not recording as we have many snapshots 
+  
+# Parameters governing the conserved quantities statistics
+Statistics:
+  delta_time:           1e-3     # Time between statistics output
+  time_first:              0     # (Optional) Time of the first stats output if non-cosmological time-integration (in internal units)
+
+# Parameters related to the initial conditions
+InitialConditions:
+  file_name:               CoolingHalo.hdf5 # The file to read
+  periodic:                0                # Are we running with periodic ICs?
+  #stars_smoothing_length:  0.5
+  
+# Parameters for the hydrodynamics scheme
+SPH:
+  resolution_eta:        1.2348 # 1.595 # Target smoothing length in units of the mean inter-particle separation (1.2348 == 48Ngbs with the cubic spline kernel).
+  CFL_condition:         0.1 # 0.05 # Courant-Friedrichs-Lewy condition for time integration.
+  h_min_ratio:           0.1        # Minimal smoothing in units of softening.
+  h_max:                 10.
+  minimal_temperature:   100.
+  h_tolerance: 1e-6                 # smoothing length accuracy
+
+# Parameters for MHD schemes
+MHD:
+  monopole_subtraction:   1.0
+  artificial_diffusion:   0.0
+  hyperbolic_dedner:      1.0 
+  hyperbolic_dedner_divv: 0.0
+  parabolic_dedner:       1.0
+  resistive_eta:          0.0
+
+# Parameters for the stars neighbour search
+Stars:
+  resolution_eta:        1.1642  # Target smoothing length in units of the mean inter-particle separation
+  h_tolerance:           7e-3
+  overwrite_birth_time:    1     # Make sure the stars in the ICs do not do any feedback
+  birth_time:             -1.    # by setting all of their birth times to -1  
+  timestep_age_threshold_Myr: 10.   # Age at which stars switch from young to old (in Mega-years).
+  max_timestep_young_Myr:     0.1   # Maximal time-step length of young stars (in Mega-years).
+  max_timestep_old_Myr:       1.0   # Maximal time-step length of old stars (in Mega-years).
+  luminosity_filename:   ./photometry
+
+
+# Standard EAGLE cooling options
+EAGLECooling:
+  dir_name:                ./coolingtables/  # Location of the Wiersma+09 cooling tables
+  H_reion_z:               7.5               # Redshift of Hydrogen re-ionization
+  H_reion_eV_p_H:          2.0               # Energy injected by Hydrogen re-ionization in electron-volts per Hydrogen atom
+  He_reion_z_centre:       3.5               # Redshift of the centre of the Helium re-ionization Gaussian
+  He_reion_z_sigma:        0.5               # Spread in redshift of the Helium re-ionization Gaussian
+  He_reion_eV_p_H:         2.0               # Energy injected by Helium re-ionization in electron-volts per Hydrogen atom
+
+# PS2020 cooling parameters
+PS2020Cooling:
+  dir_name:                ./UV_dust1_CR1_G1_shield1.hdf5 # Location of the Ploeckinger+20 cooling tables
+  H_reion_z:               7.5               # Redshift of Hydrogen re-ionization (Planck 2018)
+  H_reion_eV_p_H:          2.0
+  He_reion_z_centre:       3.5               # Redshift of the centre of the Helium re-ionization Gaussian
+  He_reion_z_sigma:        0.5               # Spread in redshift of the Helium re-ionization Gaussian
+  He_reion_eV_p_H:         2.0               # Energy injected by Helium re-ionization in electron-volts per Hydrogen atom
+  delta_logTEOS_subgrid_properties: 0.3      # delta log T above the EOS below which the subgrid properties use Teq assumption
+  rapid_cooling_threshold:          0.333333 # Switch to rapid cooling regime for dt / t_cool above this threshold.
+
+# Use solar abundances
+EAGLEChemistry:
+  init_abundance_metal:     0.0129   
+  init_abundance_Hydrogen:  0.7065   
+  init_abundance_Helium:    0.2806   
+  init_abundance_Carbon:    0.00207  
+  init_abundance_Nitrogen:  0.000836 
+  init_abundance_Oxygen:    0.00549  
+  init_abundance_Neon:      0.00141  
+  init_abundance_Magnesium: 0.000591 
+  init_abundance_Silicon:   0.000683 
+  init_abundance_Iron:      0.0011   
+
+# EAGLE star formation parameters
+EAGLEStarFormation:
+  SF_threshold:                      Zdep         # Zdep (Schaye 2004) or Subgrid
+  SF_model:                          PressureLaw  # PressureLaw (Schaye et al. 2008) or SchmidtLaw
+  KS_normalisation:                  1.515e-4     # The normalization of the Kennicutt-Schmidt law in Msun / kpc^2 / yr.
+  KS_exponent:                       1.4          # The exponent of the Kennicutt-Schmidt law.
+  min_over_density:                  100.0        # The over-density above which star-formation is allowed.
+  KS_high_density_threshold_H_p_cm3: 1e3          # Hydrogen number density above which the Kennicutt-Schmidt law changes slope in Hydrogen atoms per cm^3.
+  KS_high_density_exponent:          2.0          # Slope of the Kennicutt-Schmidt law above the high-density threshold.
+  EOS_entropy_margin_dex:            0.3          # When using Z-based SF threshold, logarithm base 10 of the maximal entropy above the EOS at which stars can form.
+  threshold_norm_H_p_cm3:            0.1          # When using Z-based SF threshold, normalisation of the metal-dependant density threshold for star formation in Hydrogen atoms per cm^3.
+  threshold_Z0:                      0.002        # When using Z-based SF threshold, reference metallicity (metal mass fraction) for the metal-dependant threshold for star formation.
+  threshold_slope:                   -0.64        # When using Z-based SF threshold, slope of the metal-dependant star formation threshold
+  threshold_max_density_H_p_cm3:     10.0         # When using Z-based SF threshold, maximal density of the metal-dependant density threshold for star formation in Hydrogen atoms per cm^3.
+  threshold_temperature1_K:          1000         # When using subgrid-based SF threshold, subgrid temperature below which gas is star-forming.
+  threshold_temperature2_K:          31622        # When using subgrid-based SF threshold, subgrid temperature below which gas is star-forming if also above the density limit.
+  threshold_number_density_H_p_cm3:  10           # When using subgrid-based SF threshold, subgrid number density above which gas is star-forming if also below the second temperature limit.
+  
+# Parameters for the EAGLE "equation of state"
+EAGLEEntropyFloor:
+  Jeans_density_threshold_H_p_cm3: 1e-4      # Physical density above which the EAGLE Jeans limiter entropy floor kicks in expressed in Hydrogen atoms per cm^3.
+  Jeans_over_density_threshold:    10.       # Overdensity above which the EAGLE Jeans limiter entropy floor can kick in.
+  Jeans_temperature_norm_K:        800       # Temperature of the EAGLE Jeans limiter entropy floor at the density threshold expressed in Kelvin.
+  Jeans_gamma_effective:           1.3333333 # Slope of the EAGLE Jeans limiter entropy floor
+  Cool_density_threshold_H_p_cm3: 1e-5       # Physical density above which the EAGLE Cool limiter entropy floor kicks in expressed in Hydrogen atoms per cm^3.
+  Cool_over_density_threshold:    10.        # Overdensity above which the EAGLE Cool limiter entropy floor can kick in.
+  Cool_temperature_norm_K:        10.        # Temperature of the EAGLE Cool limiter entropy floor at the density threshold expressed in Kelvin. (NOTE: This is below the min T and hence this floor does nothing)
+  Cool_gamma_effective:           1.         # Slope of the EAGLE Cool limiter entropy floor
+
+# EAGLE feedback model with constant feedback energy fraction
+EAGLEFeedback:
+  use_SNII_feedback:                    1               # Global switch for SNII thermal (stochastic) feedback.
+  use_SNIa_feedback:                    1               # Global switch for SNIa thermal (continuous) feedback.
+  use_AGB_enrichment:                   1               # Global switch for enrichment from AGB stars.
+  use_SNII_enrichment:                  1               # Global switch for enrichment from SNII stars.
+  use_SNIa_enrichment:                  1               # Global switch for enrichment from SNIa stars.
+  filename:                             ./yieldtables/  # Path to the directory containing the EAGLE yield tables.
+  IMF_min_mass_Msun:                    0.1             # Minimal stellar mass considered for the Chabrier IMF in solar masses.
+  IMF_max_mass_Msun:                  100.0             # Maximal stellar mass considered for the Chabrier IMF in solar masses.
+  SNII_min_mass_Msun:                   8.0             # Minimal mass considered for SNII stars in solar masses.
+  SNII_max_mass_Msun:                 100.0             # Maximal mass considered for SNII stars in solar masses.
+  SNII_feedback_model:                  MinimumDistance # Feedback modes: Random, Isotropic, MinimumDistance, MinimumDensity
+  SNII_sampled_delay:                   1               # Sample the SNII lifetimes to do feedback.
+  SNII_delta_T_K:                       3.16228e7       # Change in temperature to apply to the gas particle in a SNII thermal feedback event in Kelvin.
+  SNII_delta_v_km_p_s:                  200             # Velocity kick applied by the stars when doing SNII feedback (in km/s).
+  SNII_energy_erg:                      1.0e51          # Energy of one SNII explosion in ergs.
+  SNII_energy_fraction_function:        EAGLE           # Type of functional form to use for scaling the energy fraction with density and metallicity ('EAGLE', 'Separable', or 'Independent').
+  SNII_energy_fraction_min:             1.0             # Minimal fraction of energy applied in a SNII feedback event.
+  SNII_energy_fraction_max:             1.0             # Maximal fraction of energy applied in a SNII feedback event.
+  SNII_energy_fraction_Z_0:             0.0012663729    # Pivot point for the metallicity dependence of the SNII energy fraction (metal mass fraction).
+  SNII_energy_fraction_n_0_H_p_cm3:     1.4588          # Pivot point for the birth density dependence of the SNII energy fraction in cm^-3.
+  SNII_energy_fraction_n_Z:             0.8686          # Power-law for the metallicity dependence of the SNII energy fraction.
+  SNII_energy_fraction_n_n:             0.8686          # Power-law for the birth density dependence of the SNII energy fraction.
+  SNII_energy_fraction_use_birth_density: 0             # Are we using the density at birth to compute f_E or at feedback time?
+  SNII_energy_fraction_use_birth_metallicity: 0         # Are we using the metallicity at birth to compute f_E or at feedback time?
+  SNIa_DTD:                             Exponential     # Functional form of the SNIa delay time distribution.
+  SNIa_DTD_delay_Gyr:                   0.04            # Stellar age after which SNIa start in Gyr (40 Myr corresponds to stars ~ 8 Msun).
+  SNIa_DTD_exp_timescale_Gyr:           2.0             # Time-scale of the exponential decay of the SNIa rates in Gyr.
+  SNIa_DTD_exp_norm_p_Msun:             0.002           # Normalisation of the SNIa rates in inverse solar masses.
+  SNIa_energy_erg:                     1.0e51           # Energy of one SNIa explosion in ergs.
+  AGB_ejecta_velocity_km_p_s:          10.0             # Velocity of the AGB ejecta in km/s.
+  stellar_evolution_age_cut_Gyr:        0.1             # Age in Gyr above which the enrichment is downsampled.
+  stellar_evolution_sampling_rate:       10             # Number of time-steps in-between two enrichment events for a star above the age threshold.
+  SNII_yield_factor_Hydrogen:           1.0             # (Optional) Correction factor to apply to the Hydrogen yield from the SNII channel.
+  SNII_yield_factor_Helium:             1.0             # (Optional) Correction factor to apply to the Helium yield from the SNII channel.
+  SNII_yield_factor_Carbon:             0.5             # (Optional) Correction factor to apply to the Carbon yield from the SNII channel.
+  SNII_yield_factor_Nitrogen:           1.0             # (Optional) Correction factor to apply to the Nitrogen yield from the SNII channel.
+  SNII_yield_factor_Oxygen:             1.0             # (Optional) Correction factor to apply to the Oxygen yield from the SNII channel.
+  SNII_yield_factor_Neon:               1.0             # (Optional) Correction factor to apply to the Neon yield from the SNII channel.
+  SNII_yield_factor_Magnesium:          2.0             # (Optional) Correction factor to apply to the Magnesium yield from the SNII channel.
+  SNII_yield_factor_Silicon:            1.0             # (Optional) Correction factor to apply to the Silicon yield from the SNII channel.
+  SNII_yield_factor_Iron:               0.5             # (Optional) Correction factor to apply to the Iron yield from the SNII channel.
diff --git a/examples/MHDTests/CoolingHaloMHD_feedback/getEagleCoolingTable.sh b/examples/MHDTests/CoolingHaloMHD_feedback/getEagleCoolingTable.sh
new file mode 100755
index 0000000000000000000000000000000000000000..5cfd93ef0f4603e40b7675f3f2c254b2250f699f
--- /dev/null
+++ b/examples/MHDTests/CoolingHaloMHD_feedback/getEagleCoolingTable.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+wget http://virgodb.cosma.dur.ac.uk/swift-webstorage/CoolingTables/EAGLE/coolingtables.tar.gz
+tar -xf coolingtables.tar.gz 
diff --git a/examples/MHDTests/CoolingHaloMHD_feedback/getEaglePhotometryTable.sh b/examples/MHDTests/CoolingHaloMHD_feedback/getEaglePhotometryTable.sh
new file mode 100755
index 0000000000000000000000000000000000000000..ee9c3b422f19518612416da0913b162fd4a120ff
--- /dev/null
+++ b/examples/MHDTests/CoolingHaloMHD_feedback/getEaglePhotometryTable.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+wget http://virgodb.cosma.dur.ac.uk/swift-webstorage/YieldTables/EAGLE/photometry.tar.gz
+tar -xf photometry.tar.gz
diff --git a/examples/MHDTests/CoolingHaloMHD_feedback/getGlass.sh b/examples/MHDTests/CoolingHaloMHD_feedback/getGlass.sh
new file mode 100755
index 0000000000000000000000000000000000000000..9e8e2a8f6a2fe49becbb9a339d85aaae333734a6
--- /dev/null
+++ b/examples/MHDTests/CoolingHaloMHD_feedback/getGlass.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+wget http://virgodb.cosma.dur.ac.uk/swift-webstorage/ICs/glassCube_128.hdf5
+wget http://virgodb.cosma.dur.ac.uk/swift-webstorage/ICs/glassCube_64.hdf5
+wget http://virgodb.cosma.dur.ac.uk/swift-webstorage/ICs/glassCube_32.hdf5
+wget http://virgodb.cosma.dur.ac.uk/swift-webstorage/ICs/glassCube_16.hdf5
+wget http://virgodb.cosma.dur.ac.uk/swift-webstorage/ICs/glassCube_8.hdf5
+
diff --git a/examples/MHDTests/CoolingHaloMHD_feedback/getPS2020CoolingTables.sh b/examples/MHDTests/CoolingHaloMHD_feedback/getPS2020CoolingTables.sh
new file mode 100755
index 0000000000000000000000000000000000000000..0945dc565a4c8b1723b21e685b03369e782cd3be
--- /dev/null
+++ b/examples/MHDTests/CoolingHaloMHD_feedback/getPS2020CoolingTables.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
+wget http://virgodb.cosma.dur.ac.uk/swift-webstorage/CoolingTables/COLIBRE/UV_dust1_CR1_G1_shield1.hdf5
diff --git a/examples/MHDTests/CoolingHaloMHD_feedback/getYieldTable.sh b/examples/MHDTests/CoolingHaloMHD_feedback/getYieldTable.sh
new file mode 100755
index 0000000000000000000000000000000000000000..26eef020cab82acee2c80e88089df1790b281eab
--- /dev/null
+++ b/examples/MHDTests/CoolingHaloMHD_feedback/getYieldTable.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+wget http://virgodb.cosma.dur.ac.uk/swift-webstorage/YieldTables/EAGLE/yieldtables.tar.gz
+tar -xf yieldtables.tar.gz
diff --git a/examples/MHDTests/CoolingHaloMHD_feedback/makeIC.py b/examples/MHDTests/CoolingHaloMHD_feedback/makeIC.py
new file mode 100644
index 0000000000000000000000000000000000000000..23c1e330ffc54abf6cd4d10428291794155671e5
--- /dev/null
+++ b/examples/MHDTests/CoolingHaloMHD_feedback/makeIC.py
@@ -0,0 +1,531 @@
+###############################################################################
+# This file is part of SWIFT.
+# Copyright (c) 2016 Stefan Arridge (stefan.arridge@durham.ac.uk)
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+#
+##############################################################################
+
+# Halo parameters similar to R. Pakmor, V. Springel, 1212.1452
+
+import h5py
+import sys
+import numpy as np
+import math
+import random
+
+# Generates N particles in a spherically symmetric distribution following the NFW-like
+# gas density profile defined below (with a central density cap)
+# usage: python3 makeIC.py 1000: generate 1000 particles
+
+# Some constants cgs
+PARSEC_IN_CGS = 3.0856776e18
+KM_PER_SEC_IN_CGS = 1.0e5
+CONST_G_CGS = 6.672e-8
+CONST_MU0_CGS = 4 * np.pi * 1e-2
+MSOL_IN_CGS = 1.9891e33  # Solar mass
+kb_cgs = 1.38e-16  # boltzmann constant
+m_H_cgs = 1.68e-24  # atomic hydrogen mass
+# First set the unit velocity; the unit mass and length are set from the halo properties below
+const_unit_velocity_in_cgs = 1.0e5  # 1 km/s in cm s^-1
+
+# Cosmological parameters
+OMEGA = 0.3  # Cosmological matter fraction at z = 0
+h = 0.704  # 0.67777  # hubble parameter
+# Find H_0, the inverse Hubble time, in cgs
+H_0_cgs = 100.0 * h * KM_PER_SEC_IN_CGS / (1.0e6 * PARSEC_IN_CGS)
+
+# DM halo parameters
+spin_lambda = 0.05  # spin parameter
+spin_lambda_choice = "Bullock"  # which definition of spin parameter to use (Peebles or Bullock)
+f_b = 0.17  # baryon fraction
+c_200 = 7.2  # concentration parameter
+
+# Set the magnitude of the uniform seed magnetic field
+B0_Gaussian_Units = 1e-9  # seed field strength in Gauss (1 nG here; use 1e-6 for 1 microGauss)
+B0_cgs = np.sqrt(CONST_MU0_CGS / (4.0 * np.pi)) * B0_Gaussian_Units
+
+# SPH
+eta = 1.3663  # kernel smoothing
+
+# Additional options
+save_to_vtk = False
+plot_v_distribution = False
+
+# From this we can find the virial radius, the radius within which the average density of the halo is
+# 200. * the mean matter density
+
+# Set M200 and get R200 and V200
+M_200_cgs = 1e12 * MSOL_IN_CGS
+rhoc_cgs = 3 * H_0_cgs ** 2 / (8 * np.pi * CONST_G_CGS)
+r_200_cgs = (3 * M_200_cgs / (4 * np.pi * rhoc_cgs * 200)) ** (1 / 3)
+v_200_cgs = np.sqrt(CONST_G_CGS * M_200_cgs / r_200_cgs)
+v_200 = v_200_cgs / const_unit_velocity_in_cgs
+T_200_cgs = m_H_cgs * v_200_cgs ** 2 / (2 * kb_cgs)
+
+# Gas parameters
+gamma = 5.0 / 3.0
+T0_cgs = (
+    T_200_cgs
+)  # 1e5 # gas temperature on the edge of the box (if we want to set this manually)
+nH_max_cgs = 1e0  # maximum hydrogen number density
+print("T_200 = %E" % T_200_cgs)
+
+# Now set the unit length and mass
+const_unit_mass_in_cgs = M_200_cgs
+const_unit_length_in_cgs = r_200_cgs
+print("UnitMass_in_cgs:     ", const_unit_mass_in_cgs)
+print("UnitLength_in_cgs:   ", const_unit_length_in_cgs)
+print("UnitVelocity_in_cgs: ", const_unit_velocity_in_cgs)
+
+# Now set the magnetic field unit
+const_unit_magnetic_field_in_cgs = 1e-7  # 1muG
+
+# Derived quantities
+const_unit_time_in_cgs = const_unit_length_in_cgs / const_unit_velocity_in_cgs
+const_unit_current_in_cgs = const_unit_mass_in_cgs / (
+    const_unit_magnetic_field_in_cgs * const_unit_time_in_cgs ** 2
+)
+print("UnitTime_in_cgs:     ", const_unit_time_in_cgs)
+print("UnitCurrent_in_cgs:  ", const_unit_current_in_cgs)
+const_G = (
+    CONST_G_CGS
+    * const_unit_mass_in_cgs
+    * const_unit_time_in_cgs
+    * const_unit_time_in_cgs
+    / (const_unit_length_in_cgs * const_unit_length_in_cgs * const_unit_length_in_cgs)
+)
+print("G=", const_G)
+
+# Parameters
+periodic = 1  # 1 For periodic box
+boxSize = 2.0 # 4.0
+G = const_G
+N = int(sys.argv[1])  # Number of particles
+IAsource = "IAfile"
+
+# Create the file
+filename = "CoolingHalo.hdf5"
+file = h5py.File(filename, "w")
+
+# Units
+grp = file.create_group("/Units")
+grp.attrs["Unit length in cgs (U_L)"] = const_unit_length_in_cgs
+grp.attrs["Unit mass in cgs (U_M)"] = const_unit_mass_in_cgs
+grp.attrs["Unit time in cgs (U_t)"] = (
+    const_unit_length_in_cgs / const_unit_velocity_in_cgs
+)
+grp.attrs["Unit current in cgs (U_I)"] = const_unit_current_in_cgs
+grp.attrs["Unit temperature in cgs (U_T)"] = 1.0
+
+
+# Helper to load a pre-computed glass file used as the initial particle arrangement
+def open_IAfile(path_to_file):
+    IAfile = h5py.File(path_to_file, "r")
+    pos = IAfile["/PartType0/Coordinates"][:, :]
+    h = IAfile["/PartType0/SmoothingLength"][:]
+    return pos, h
+
+
+if IAsource == "IAfile":
+    # Loading initial arrangement file
+    IAfile = "glassCube_128.hdf5"
+    coords, _ = open_IAfile(IAfile)
+    lcut = (N / len(coords)) ** (1 / 3)
+    coords -= 0.5
+    mask = (
+        (np.abs(coords[:, 0]) <= lcut / 2)
+        & (np.abs(coords[:, 1]) <= lcut / 2)
+        & (np.abs(coords[:, 2]) <= lcut / 2)
+    )
+    coords = coords[mask]
+    coords /= lcut
+    coords += 0.5
+elif IAsource == "grid":
+    Nside = int(N ** (1 / 3))
+    grid_1d = np.linspace(0.0, 1.0, Nside)  # Cube side is 1, centered at (0.5,0.5,0.5)
+    # Create a 3D grid of coordinates
+    x, y, z = np.meshgrid(grid_1d, grid_1d, grid_1d, indexing="ij")
+    # Reshape into a list of points
+    coords = np.vstack([x.ravel(), y.ravel(), z.ravel()]).T
+    coords += 0.5 / Nside
+
+# functions for coordinate transformation
+import scipy.optimize as opt
+
+
+def nfw_cdf(r, r_s):
+    """Computes the cumulative mass fraction of the NFW profile."""
+    return np.log(1 + r / r_s) - (r / r_s) / (1 + r / r_s)
+
+
+def invert_nfw_cdf(F_uni, r_s, R_max, Rmin=1e-6):
+    """Find r_NFW by solving F_uni = F_NFW using a numerical solver."""
+    F_NFW_max = nfw_cdf(R_max, r_s)  # Normalization factor
+
+    def objective(r_nfw):
+        return nfw_cdf(r_nfw, r_s) / F_NFW_max - F_uni  # Find r where CDFs match
+
+    return opt.root_scalar(objective, bracket=[Rmin, R_max], method="bisect").root
+
+
+# make center at zero
+coords -= 0.5
+
+# cut a sphere
+r_uni = np.linalg.norm(coords, axis=1)
+coords = coords[r_uni <= 0.5]
+coords *= boxSize * np.sqrt(3)
+
+# calculate max distance from the center (units of r_200)
+R_max = np.sqrt(3) * (boxSize / 2)
+r_s = 1 / c_200
+# calculate distances to the center
+r_uni = np.linalg.norm(coords, axis=1)
+# Compute uniform CDF
+F_uni = r_uni ** 3 / R_max ** 3
+# Compute corresponding NFW radii
+r_nfw = np.array([invert_nfw_cdf(F, r_s, R_max) for F in F_uni])
+# Rescale positions
+scaling_factors = r_nfw / r_uni
+coords *= scaling_factors[:, np.newaxis]
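+# The remapping above effectively performs inverse-transform sampling: each particle
+# keeps its enclosed-count fraction F_uni = (r_uni / R_max)^3 and is moved to the
+# radius where the normalised NFW enclosed-mass fraction takes the same value, so the
+# glass arrangement is stretched into the target NFW-like profile.
+# Quick sanity check (optional, not needed to build the ICs): the particle fraction
+# enclosed within R_max / 2 should be close to the corresponding NFW mass fraction.
+frac_sampled = np.mean(np.linalg.norm(coords, axis=1) <= 0.5 * R_max)
+frac_expected = nfw_cdf(0.5 * R_max, r_s) / nfw_cdf(R_max, r_s)
+print("Enclosed fraction at R_max/2: sampled %.3f, NFW %.3f" % (frac_sampled, frac_expected))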
+
+# NFW-like gas density profile
+def rho_r(r_value, f_b, M_200_cgs, r_200_cgs, c_200):
+    rho_0 = (
+        M_200_cgs
+        / (np.log(1 + c_200) - c_200 / (1 + c_200))
+        / (4 * np.pi * r_200_cgs ** 3 / c_200 ** 3)
+    )
+    result_cgs = rho_0 * f_b / (c_200 * r_value * (1 + c_200 * r_value) ** 2)
+    # Apply density cut
+    rho_max_cgs = nH_max_cgs * m_H_cgs
+    result_cgs = np.array(result_cgs)
+    result_cgs[result_cgs > rho_max_cgs] = rho_max_cgs
+    return result_cgs
+
+
+# NFW-like gas mass inside a sphere with radius R
+def Mgas_r(r_value, f_b, M_200_cgs, r_200_cgs, c_200):
+    M_0 = M_200_cgs / (np.log(1 + c_200) - c_200 / (1 + c_200))
+    return (
+        M_0
+        * f_b
+        * (np.log(1 + c_200 * r_value) - c_200 * r_value / (1 + c_200 * r_value))
+    )
+
+
+# NFW Gravitational acceleration
+def a_NFW(r_value, M_200_cgs, r_200_cgs, c_200):
+    a_pref = (
+        CONST_G_CGS
+        * M_200_cgs
+        / (np.log(1 + c_200) - c_200 / (1 + c_200))
+        / r_200_cgs ** 2
+    )
+    return (
+        a_pref
+        * ((r_value / (r_value + 1 / c_200)) - np.log(1 + c_200 * r_value))
+        / r_value ** 2
+    )
+
+
+# NFW Gravitational potential
+def phi_NFW(r_value, M_200_cgs, r_200_cgs, c_200):
+    phi_pref = (
+        CONST_G_CGS * M_200_cgs / (np.log(1 + c_200) - c_200 / (1 + c_200)) / r_200_cgs
+    )
+    return -phi_pref * np.log(1 + c_200 * r_value) / r_value
+
+
+# Integrate rho_gas*a_NFW
+def integrate(r_min, r_max, f_b, M_200_cgs, r_200_cgs, c_200, Nsteps=10000):
+    # Perform the integration
+    r_range = np.linspace(r_min, r_max, Nsteps)
+    dr = np.abs((r_max - r_min) / Nsteps)
+    integrands = rho_r(r_range, f_b, M_200_cgs, r_200_cgs, c_200) * a_NFW(
+        r_range, M_200_cgs, r_200_cgs, c_200
+    )
+    result_cgs = np.sum(integrands * dr) * r_200_cgs
+    return result_cgs
+
+
+# NFW-like gas hydrostatic equilibrium internal energy profile
+def u_vs_r(P0_cgs, r_value, r_max, f_b, M_200_cgs, r_200_cgs, c_200):
+    result_cgs = (
+        (P0_cgs - integrate(r_value, r_max, f_b, M_200_cgs, r_200_cgs, c_200))
+        / (gamma - 1)
+        / rho_r(r_value, f_b, M_200_cgs, r_200_cgs, c_200)
+    )
+    return result_cgs
+
+
+# NFW-like gas hydrostatic equilibrium temperature profile
+def T_vs_r(P0_cgs, r_value, r_max, f_b, M_200_cgs, r_200_cgs, c_200):
+    result_cgs = (
+        u_vs_r(P0_cgs, r_value, r_max, f_b, M_200_cgs, r_200_cgs, c_200)
+        * (gamma - 1)
+        * m_H_cgs
+        / kb_cgs
+    )
+    return result_cgs
+
+
+N = len(coords)
+
+gas_mass = (
+    Mgas_r(R_max, f_b, M_200_cgs, r_200_cgs, c_200) / M_200_cgs
+)  # get total gas mass within a sphere of radius sqrt(3)/2*Lbox
+gas_particle_mass = gas_mass / float(N)
+print("Gas particle mass is %E " % (gas_particle_mass * M_200_cgs / MSOL_IN_CGS))
+
+# Unnormalized mass shell distribution
+r = np.logspace(
+    np.log10(1e-6 * boxSize),
+    np.log10(boxSize * np.sqrt(3.0) / 2.0),
+    round(10 * N ** (1 / 3)),
+)
+
+# shift to centre of box
+coords += np.full(coords.shape, boxSize / 2.0)
+print("x range = (%f,%f)" % (np.min(coords[:, 0]), np.max(coords[:, 0])))
+print("y range = (%f,%f)" % (np.min(coords[:, 1]), np.max(coords[:, 1])))
+print("z range = (%f,%f)" % (np.min(coords[:, 2]), np.max(coords[:, 2])))
+
+print(np.mean(coords[:, 0]))
+print(np.mean(coords[:, 1]))
+print(np.mean(coords[:, 2]))
+
+# now keep only the particles which are within the box (0 < x, y, z < boxSize)
+
+mask_box = np.all((coords > 0.0) & (coords < boxSize), axis=1)
+coords = coords[mask_box]
+
+# count number of particles
+
+N = len(coords)
+
+print("Number of particles in the box = ", N)
+
+
+# Save particle arrangement to .vtk file to open with paraview
+if save_to_vtk:
+    import vtk
+
+    vtk_points = vtk.vtkPoints()
+    for x, y, z in coords - boxSize / 2:
+        vtk_points.InsertNextPoint(x, y, z)
+    poly_data = vtk.vtkPolyData()
+    poly_data.SetPoints(vtk_points)
+    writer = vtk.vtkPolyDataWriter()
+    writer.SetFileName("output.vtk")
+    writer.SetInputData(poly_data)
+    writer.Write()  # one can use paraview to watch how particles are arranged
+
+radius = np.sqrt(
+    (coords[:, 0] - boxSize / 2.0) ** 2
+    + (coords[:, 1] - boxSize / 2.0) ** 2
+    + (coords[:, 2] - boxSize / 2.0) ** 2
+)
+axis_distance = np.sqrt(
+    (coords[:, 0] - boxSize / 2.0) ** 2 + (coords[:, 1] - boxSize / 2.0) ** 2
+)
+
+# now give particle's velocities and magnetic fields
+v = np.zeros((N, 3))
+B = np.zeros((N, 3))
+
+# first work out total angular momentum of the halo within the virial radius
+# we work in units where r_vir = 1 and M_vir = 1
+Total_E = v_200 ** 2 / 2.0
+# J = spin_lambda * const_G / np.sqrt(Total_E)
+j_sp = spin_lambda * np.sqrt(2) * v_200 * 1
+print("j_sp =", j_sp)
+# all particles within the virial radius have omega parallel to the z-axis, magnitude
+# is proportional to j/r^2
+
+# define specific angular momentum distribution
+def j(r_value, j_max, s, f_b, M_200_cgs, r_200_cgs, c_200, angular_momentum_profile="M(r)"):
+    if angular_momentum_profile == "M(r)":
+        return (
+            j_max
+            * (Mgas_r(r_value, f_b, M_200_cgs, r_200_cgs, c_200) / (M_200_cgs * f_b)) ** s
+        )
+    elif angular_momentum_profile=="r^s": # for rigid body rotation use r^2
+        return (
+            j_max
+            * r_value**s
+        )
+
+
+omega = np.zeros((N, 3))
+omega_ort = np.zeros((N, 3))
+radius_vect = np.zeros((N, 3))
+l = np.zeros((N, 3))
+for i in range(N):
+    radius_vect[i, :] = coords[i, :] - boxSize / 2.0
+    omega[i, 2] = j(radius[i], 1, 1, f_b, M_200_cgs, r_200_cgs, c_200) / radius[i] ** 2
+    v[i, :] = np.cross(omega[i, :], radius_vect[i, :])
+    B[i, 0] = B0_cgs / const_unit_magnetic_field_in_cgs
+    l[i, :] = gas_particle_mass * np.cross(radius_vect[i, :], v[i, :])
+
+# select particles inside R_200
+mask_r_200 = radius <= 1
+Lp_tot = np.sum(l[mask_r_200], axis=0)
+Lp_tot_abs = np.linalg.norm(Lp_tot)
+jsp = Lp_tot_abs/f_b
+normV = np.linalg.norm(v, axis=1)
+
+if spin_lambda_choice == "Peebles":
+    # Normalize to Peebles spin parameter
+    Ep_kin = gas_particle_mass * normV ** 2 / 2
+    Ep_pot = (
+        gas_particle_mass
+        * phi_NFW(radius, M_200_cgs, r_200_cgs, c_200)
+        / (const_unit_velocity_in_cgs ** 2)
+    )
+    Ep_tot = np.sum(Ep_kin[mask_r_200] + Ep_pot[mask_r_200])
+    calc_spin_par_P = (
+        Lp_tot_abs * np.sqrt(np.abs(Ep_tot)) / const_G / (f_b * 1) ** (5 / 2)
+    )
+    v *= spin_lambda / calc_spin_par_P
+    l *= spin_lambda / calc_spin_par_P
+elif spin_lambda_choice == "Bullock":
+    # Normalize to Bullock
+    jsp_B = np.sqrt(2) * v_200 * spin_lambda
+    v *= jsp_B / jsp
+
+   # jp_sp = Lp_tot_abs / (gas_particle_mass * np.sum(mask_r_200))
+   # calc_spin_par_B = jp_sp / (np.sqrt(2) * 1 * v_200)
+   # v *= spin_lambda / calc_spin_par_B
+   # l *= spin_lambda / calc_spin_par_B
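+# Quick consistency check (optional): recompute the gas specific angular momentum inside
+# R_200 from the rescaled velocities and express it as a Bullock spin parameter; for the
+# default "Bullock" choice above this should reproduce spin_lambda by construction.
+if spin_lambda_choice == "Bullock":
+    l_check = gas_particle_mass * np.cross(radius_vect, v)
+    jsp_check = np.linalg.norm(np.sum(l_check[mask_r_200], axis=0)) / f_b
+    print(
+        "Recovered Bullock spin parameter: %E (target %E)"
+        % (jsp_check / (np.sqrt(2) * v_200), spin_lambda)
+    )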
+
+# Draw velocity distribution
+if plot_v_distribution:
+    import matplotlib.pyplot as plt
+
+    normV = np.linalg.norm(v, axis=1)
+    fig, ax = plt.subplots(figsize=(5, 5))
+    ax.scatter(radius * r_200_cgs / (1e3 * PARSEC_IN_CGS), normV)
+    ax.set_xlim(0, 40)
+    ax.set_ylim(0, 100)
+    plt.savefig("v_distribution.png")
+
+# Header
+grp = file.create_group("/Header")
+grp.attrs["BoxSize"] = boxSize
+grp.attrs["NumPart_Total"] = [N, 0, 0, 0, 0, 0]
+grp.attrs["NumPart_Total_HighWord"] = [0, 0, 0, 0, 0, 0]
+grp.attrs["NumPart_ThisFile"] = [N, 0, 0, 0, 0, 0]
+grp.attrs["Time"] = 0.0
+grp.attrs["NumFilesPerSnapshot"] = 1
+grp.attrs["MassTable"] = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
+grp.attrs["Flag_Entropy_ICs"] = [0, 0, 0, 0, 0, 0]
+grp.attrs["Dimension"] = 3
+
+# Particle group
+grp = file.create_group("/PartType0")
+
+ds = grp.create_dataset("Coordinates", (N, 3), "d")
+ds[()] = coords
+coords = np.zeros(1)
+
+ds = grp.create_dataset("Velocities", (N, 3), "f")
+ds[()] = v
+v = np.zeros(1)
+
+# Magnetic field
+ds = grp.create_dataset("MagneticFluxDensities", (N, 3), "f")
+ds[()] = B
+B = np.zeros(1)
+
+# All particles of equal mass
+m = np.full((N,), gas_particle_mass)
+ds = grp.create_dataset("Masses", (N,), "f")
+ds[()] = m
+m = np.zeros(1)
+
+# Smoothing lengths
+l = (4.0 * np.pi * radius ** 2 / N) ** (
+    1.0 / 3.0
+)  # rough local mean inter-particle separation (initial guess only; SWIFT recomputes h)
+h = np.full((N,), eta * l)
+ds = grp.create_dataset("SmoothingLength", (N,), "f")
+ds[()] = h
+h = np.zeros(1)
+
+# Internal energies in hydrostatic equilibrium
+rho0_cgs = rho_r(
+    boxSize * np.sqrt(3) / 2, f_b, M_200_cgs, r_200_cgs, c_200
+)  # gas density on the edge
+P0_cgs = rho0_cgs * kb_cgs * T0_cgs / m_H_cgs  # gas pressure on the edge of the box
+u = [
+    u_vs_r(
+        P0_cgs, radius[i], boxSize * np.sqrt(3) / 2, f_b, M_200_cgs, r_200_cgs, c_200
+    )
+    / const_unit_velocity_in_cgs ** 2
+    for i in range(N)
+]  # gas particle internal energies
+u = np.full((N,), u)
+ds = grp.create_dataset("InternalEnergy", (N,), "f")
+ds[()] = u
+u = np.zeros(1)
+
+# Particle IDs
+ids = 1 + np.linspace(0, N, N, endpoint=False)
+ds = grp.create_dataset("ParticleIDs", (N,), "L")
+ds[()] = ids
+
+file.close()
diff --git a/examples/MHDTests/CoolingHaloMHD_feedback/particle_tracker.py b/examples/MHDTests/CoolingHaloMHD_feedback/particle_tracker.py
new file mode 100644
index 0000000000000000000000000000000000000000..20ed3487ee4a41bfe6aafa75b869be18f5ea8583
--- /dev/null
+++ b/examples/MHDTests/CoolingHaloMHD_feedback/particle_tracker.py
@@ -0,0 +1,789 @@
+from glob import glob
+from swiftsimio import load
+import unyt as u
+import numpy as np
+import matplotlib.pyplot as plt
+
+
+from matplotlib import rc
+
+# Set the global font to be DejaVu Sans, size 10 (or any other sans-serif font of your choice!)
+rc("font", **{"family": "sans-serif", "sans-serif": ["DejaVu Sans"], "size": 10})
+# Set the font used for MathJax - more on this later
+rc("mathtext", **{"default": "regular"})
+
+
+# setting units here
+units_dict = {
+    "length": 3.08e21 * u.cm,
+    "density": 1.98e33 * u.g / (3.08e21 * u.cm) ** 3,
+    "velocity": u.km / u.s,
+    "momentum": 1.98e33 * u.g * u.km / u.s,
+    "pressure": 1e10 * (1.98e33 * u.g) / ((3.08e21 * u.cm) * (3.15e16 * u.s) ** 2),
+    "magneticfield": 1e-7 * u.g / (u.s ** 2 * u.statA),
+    "magneticfieldpertime": 1e-7 * u.g / (u.s ** 2 * u.statA * 3.156e7 * u.s),
+    "magneticfieldgradient": 1e-7 * u.g / (u.s ** 2 * u.statA * u.cm),
+    "time": 3.15e16 * u.s,
+    "mass": 1.98e33 * u.g,
+    "acceleration": u.cm / u.s ** 2,
+}
+# parsec - 3.086e18*u.cm
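+# The entries above set the units used for plotting/output: lengths in kpc (3.08e21 cm),
+# masses in solar masses (1.98e33 g), times of order a Gyr (3.15e16 s), and magnetic
+# fields in units of 1e-7 g s^-2 statA^-1.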
+
+
+# free fall time estimate
+# Constants
+G = 6.67430e-11 * u.m ** 3 / u.kg / u.s ** 2  # SI value
+G = G.to(u.cm ** 3 / u.g / u.s ** 2)  # converted to cgs (6.6743e-8)
+# Parameters (taken from Hopkins 2016)
+Rcloud = 4.628516371e16 * u.cm
+M = 1.99e33 * u.g  # total mass of the sphere
+rhocloud0 = M / (4 / 3 * np.pi * Rcloud ** 3)
+rhouniform = rhocloud0 / 360
+t_ff_lit = np.sqrt(3 / (2 * np.pi * G * rhocloud0))
+t_ff_wiki = np.sqrt(3 * np.pi / (32 * G * rhocloud0))
+t_ff_script = 3e4 * 3.156e7 * u.s
+
+
+def abs_vec(vec):
+    res = np.sqrt(vec[:, 0] ** 2 + vec[:, 1] ** 2 + vec[:, 2] ** 2)
+    return res
+
+
+def dot_vec(vec1, vec2):
+    res = vec1[:, 0] * vec2[:, 0] + vec1[:, 1] * vec2[:, 1] + vec1[:, 2] * vec2[:, 2]
+    return res
+
+
+def get_projection(vec, direction_vec):
+    res = dot_vec(vec, direction_vec) / abs_vec(direction_vec)
+    return res
+
+
+def cross_vec(vec1, vec2):
+    res_vec = np.zeros((len(vec1), 3))
+    res_vec[:, 0] = vec1[:, 1] * vec2[:, 2] - vec1[:, 2] * vec2[:, 1]
+    res_vec[:, 1] = vec1[:, 2] * vec2[:, 0] - vec1[:, 0] * vec2[:, 2]
+    res_vec[:, 2] = vec1[:, 0] * vec2[:, 1] - vec1[:, 1] * vec2[:, 0]
+    return res_vec
+
+
+def extract_variables_of_interest(data):
+
+    # set constant
+    mu0 = 1.25663706127e-1 * u.g * u.cm / (u.s ** 2 * u.statA ** 2)
+
+    # Get snapshot parameters
+    time = data.metadata.time
+    z = data.metadata.z
+    a = data.metadata.a
+
+    pids = data.gas.particle_ids
+
+    # Get physical quantities
+    coordinates = data.gas.coordinates.to_physical().astype("float64")
+    densities = data.gas.densities.to_physical().astype("float64")
+    smoothing_lengths = data.gas.smoothing_lengths.to_physical().astype("float64")
+    velocities = data.gas.velocities.to_physical().astype("float64")
+    pressures = data.gas.pressures.to_physical().astype("float64")
+    masses = data.gas.masses.to_physical().astype("float64")
+
+    momentums = velocities * masses[:, np.newaxis]
+
+    magnetic_flux_densities = data.gas.magnetic_flux_densities.to_physical().astype(
+        "float64"
+    )
+    magnetic_flux_densitiesdt = data.gas.magnetic_flux_densitiesdt.to_physical().astype(
+        "float64"
+    )
+    magnetic_divergences = data.gas.magnetic_divergences.to_physical().astype("float64")
+
+    magnetic_pressures = dot_vec(magnetic_flux_densities, magnetic_flux_densities) / (
+        2 * mu0
+    )
+    dynamic_pressures = densities * dot_vec(velocities, velocities)
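+    # magnetic pressure is B^2 / (2 mu0); the "dynamic pressure" is taken as rho * v^2
+    # (no 1/2 factor), matching the "rho v^2" label used in the plots below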
+
+    # Center of mass position tracking
+    # (use a copy so the original coordinates are left untouched, and subtract each
+    #  centre-of-mass component only from its own axis)
+    CM_frame_coordinates = coordinates.copy()
+    for k in range(3):
+        CM_k = (
+            np.sum(coordinates[:, k].value * masses.value)
+            / np.sum(masses.value)
+            * coordinates.units
+        )
+        CM_frame_coordinates[:, k] -= CM_k
+
+    # working with units:
+    coordinates = coordinates.to(units_dict["length"])
+    CM_frame_coordinates = CM_frame_coordinates.to(units_dict["length"])
+    densities = densities.to(units_dict["density"])
+    smoothing_lengths = smoothing_lengths.to(units_dict["length"])
+    velocities = velocities.to(units_dict["velocity"])
+    momentums = momentums.to(units_dict["momentum"])
+    pressures = pressures.to(units_dict["pressure"])
+    magnetic_flux_densitiesdt = magnetic_flux_densitiesdt.to(
+        units_dict["magneticfieldpertime"]
+    )
+    magnetic_divergences = magnetic_divergences.to(units_dict["magneticfieldgradient"])
+    masses = masses.to(units_dict["mass"])
+    magnetic_pressures = magnetic_pressures.to(units_dict["pressure"])
+    dynamic_pressures = dynamic_pressures.to(units_dict["pressure"])
+
+    magnetic_flux_densities_norm = abs_vec(magnetic_flux_densities)
+
+    time = time.to(units_dict["time"])
+
+    r0_no_cut = (
+        magnetic_divergences
+        * smoothing_lengths
+        / (abs_vec(magnetic_flux_densities.value) * magnetic_flux_densities.units)
+    )
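+    # dimensionless divergence error per particle: r0 = h * |div B| / |B| (no floor applied)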
+
+    CM_frame_z = np.abs(CM_frame_coordinates[:, 2])
+    data_dict = {
+        "pids": pids,
+        "coordinates": coordinates,
+        "masses": masses,
+        "densities": densities,
+        "smoothing_lengths": smoothing_lengths,
+        "velocities": velocities,
+        "momentums": momentums,
+        "pressures": pressures,
+        "magnetic_flux_densities": magnetic_flux_densities,
+        "CM_frame_coordinates": CM_frame_coordinates,
+        "CM_frame_z": CM_frame_z,
+        "magnetic_flux_densities_norm": magnetic_flux_densities_norm,
+        "magnetic_pressures": magnetic_pressures,
+        "dynamic_pressures": dynamic_pressures,
+    }
+
+    # strip units so the per-particle arrays can be stored as plain numpy values
+    values = {key: data_dict[key].value for key in data_dict}
+
+    snapshot_parameters = {"time": time.value, "z": z, "a": a}
+
+    # Retrieve some information about the simulation run
+    artDiffusion = data.metadata.hydro_scheme["Artificial Diffusion Constant"]
+    dedHyp = data.metadata.hydro_scheme["Dedner Hyperbolic Constant"]
+    dedHypDivv = data.metadata.hydro_scheme["Dedner Hyperbolic div(v) Constant"]
+    dedPar = data.metadata.hydro_scheme["Dedner Parabolic Constant"]
+    eta = data.metadata.hydro_scheme["Resistive Eta"]
+    git = data.metadata.code["Git Revision"]
+    gitBranch = data.metadata.code["Git Branch"]
+    hydroScheme = data.metadata.hydro_scheme["Scheme"]
+    kernel = data.metadata.hydro_scheme["Kernel function"]
+    neighbours = data.metadata.hydro_scheme["Kernel target N_ngb"]
+
+    metadata = {
+        "artDiffusion": artDiffusion,
+        "dedHyp": dedHyp,
+        "dedHypDivv": dedHypDivv,
+        "dedPar": dedPar,
+        "eta": eta,
+        "git": git,
+        "gitBranch": gitBranch,
+        "hydroScheme": hydroScheme,
+        "kernel": kernel,
+        "neighbours": neighbours,
+    }
+
+    return {"values": values, "parameters": snapshot_parameters, "metadata": metadata}
+
+
+def upload_snapshots_data_from_folder(folderaddr):
+    addr_book = glob(folderaddr + "/**/*.hdf5", recursive=True)
+    addr_book = sorted(addr_book)
+    names = [addr.split("/")[-1].split(".hdf5")[0] for addr in addr_book]
+    # print(names)
+    snapshots_data = []
+    for addr in addr_book:
+        snapshot = load(addr)
+        snapshots_data += [extract_variables_of_interest(snapshot)]
+    return snapshots_data, names
+
+
+def upload_particle_history(all_data, pid):
+    particle_data = []
+    for snapshot in all_data:
+        particle_snapshot_dict = snapshot["parameters"]
+        indx = np.argwhere(snapshot["values"]["pids"] == pid)[0][0]
+        for key in snapshot["values"].keys():
+            particle_snapshot_dict = particle_snapshot_dict | {
+                key: snapshot["values"][key][indx]
+            }
+        particle_data += [particle_snapshot_dict]
+
+    # transform list of dicts to single
+    # Dynamically extract keys from the first dictionary
+    keys = particle_data[0].keys()
+
+    # Transform the list of dictionaries to a single dictionary
+    result = {key: np.array([entry[key] for entry in particle_data]) for key in keys}
+    result = result | {"pid": pid}
+    return result
+
+
+def identify_largest_quantity_particles(
+    all_snapshots, quantity, isvec=False, region_cut=True
+):
+    largest_quantity_particles = []
+    for snap in all_snapshots:
+        quantity_values = snap["values"][quantity]
+        zcoords = np.abs(snap["values"]["CM_frame_coordinates"][:, 2])
+        rcoords = np.sqrt(
+            snap["values"]["CM_frame_coordinates"][:, 0] ** 2
+            + snap["values"]["CM_frame_coordinates"][:, 1] ** 2
+        )
+        velocities = snap["values"]["velocities"]
+        vabs = np.sqrt(
+            velocities[:, 0] ** 2 + velocities[:, 1] ** 2 + velocities[:, 2] ** 2
+        )
+        mask = np.array([True for i in range(len(quantity_values))])
+        if region_cut:
+            zcut = (zcoords > 0.015 * 0.015) & (
+                zcoords < 0.15 * 0.015
+            )  # z>0.015 Rcloud
+            rcut = rcoords < 0.15 * 0.015
+
+            mask = (zcut) & (rcut)
+
+        # quantity_values = quantity_values[zcut]
+        if isvec:
+            quantity_values = np.sqrt(
+                quantity_values[:, 0] ** 2
+                + quantity_values[:, 1] ** 2
+                + quantity_values[:, 2] ** 2
+            )
+        pids = snap["values"]["pids"]
+        quantity_values_cut = quantity_values[mask]
+        # print('snapshot at t=',snap['parameters']['time'])
+        indx = np.argmax(quantity_values_cut)
+        # print('particle #',pids[mask][indx])
+        # print('quantity value = ',quantity_values[pids[mask][indx]])
+        largest_quantity_particles.append(pids[mask][indx])
+    return largest_quantity_particles
+
+
+def plot_quantities_for_particle_vs_time(
+    particle_history,
+    quantity_list,
+    outputfileaddr="./quantities.png",
+    time_var="time",
+    timeinterval=[0, 3],
+    title="quantities vs time",
+    customtimeinterval=False,
+    customquantityinterval=False,
+    quantityinterval=[1e-1, 1e1],
+    logscale=True,
+):
+
+    fig, ax = plt.subplots(1, len(quantity_list), figsize=(6.5 * len(quantity_list), 6))
+    for i in range(len(quantity_list)):
+        quantity = particle_history[quantity_list[i]]
+        times = particle_history[time_var]
+        if len(quantity.shape) > 1:
+            quantity = np.sqrt(
+                quantity[:, 0] ** 2 + quantity[:, 1] ** 2 + quantity[:, 2] ** 2
+            )
+
+        mask_nonzero = quantity != 0.0
+        # quantity=quantity[mask_nonzero]
+        # times = times[mask_nonzero]
+        mask_in_range = (times > timeinterval[0]) & (times < timeinterval[-1])
+        # print(len(times[mask_in_range]))
+        # if len(quantity[mask_in_range])!=0:
+        # ax[i].axvline(x=1.378579,ymin = 0.1*np.min(quantity[mask_in_range]),ymax = 10*np.max(quantity[mask_in_range]),color='red',linestyle='dashed')
+        # ax[i].axvline(x=1.378579,ymin = 1e-30,ymax = 1e30,color='red',linestyle='dashed')
+
+        # handle only positive and positive-negative values:
+        if logscale:
+            mask = quantity < 0
+            if np.sum(mask) > 0:
+                # Separate positive and negative values
+                positive = np.where(
+                    quantity > 0, quantity, np.nan
+                )  # Use NaN for negative values
+                negative = np.where(
+                    quantity < 0, quantity, np.nan
+                )  # Use NaN for positive values
+                ax[i].plot(times, positive, color="red", marker=".")
+                ax[i].plot(times, -negative, color="blue", marker=".")
+            else:
+                ax[i].plot(times, quantity, color="black", marker=".")
+        else:
+            ax[i].plot(times, quantity, color="black", marker=".")
+        ax[i].set_ylabel(quantity_list[i], fontsize=20)
+        ax[i].set_xlabel("$t/t_{ff}$", fontsize=20)
+        if logscale:
+            ax[i].set_yscale("log")
+        ax[i].tick_params(axis="both", labelsize=20)
+        if customtimeinterval:
+            ax[i].set_xlim(timeinterval)
+        if customquantityinterval:
+            ax[i].set_ylim(quantityinterval)
+        ax[i].grid()
+    fig.suptitle("Particle #" + str(particle_history["pid"]), fontsize=20)
+    fig.tight_layout()
+    plt.savefig(outputfileaddr)
+    plt.close(fig)
+
+    return 0
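+
+
+# A minimal (hypothetical) usage sketch for the single-particle tracking helpers above,
+# assuming `snapshots_history` has already been loaded as done at the bottom of this
+# script; kept commented out so the script's default behaviour is unchanged:
+#
+# hot_pids = identify_largest_quantity_particles(
+#     snapshots_history, "magnetic_flux_densities", isvec=True, region_cut=False
+# )
+# history = upload_particle_history(snapshots_history, hot_pids[-1])
+# plot_quantities_for_particle_vs_time(
+#     history, ["densities", "magnetic_flux_densities"], outputfileaddr="./particle.png"
+# )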
+
+
+Lcut = 15  # Lcut in kpc.
+
+
+def plot_quantity_vs_time(
+    all_snapshots, quantity, outputfileaddr, isvec=False, region_cut=True, label=""
+):
+    quantity_array = []
+    rmsq_array = []
+    mzq_array = []
+    times_array = []
+    npart_array = []
+    for snap in all_snapshots:
+        quantity_values = snap["values"][quantity]
+        Np = len(quantity_values)
+        # total_quantity = np.sum(quantity_values,axis=0)
+        # rms_quantity = np.sqrt(np.mean(quantity_values[:,0]**2+quantity_values[:,1]**2+quantity_values[:,2]**2))
+        print(len(quantity_values))
+        if region_cut:
+            z = snap["values"]["CM_frame_coordinates"][:, 2]
+            y = snap["values"]["CM_frame_coordinates"][:, 1]
+            x = snap["values"]["CM_frame_coordinates"][:, 0]
+            mask = (np.abs(z) <= Lcut) & (np.abs(y) <= Lcut) & (np.abs(x) <= Lcut)
+            quantity_values = quantity_values[mask]
+
+        total_quantity = np.sum(quantity_values, axis=0)
+        rms_quantity = np.sqrt(
+            np.mean(
+                quantity_values[:, 0] ** 2
+                + quantity_values[:, 1] ** 2
+                + quantity_values[:, 2] ** 2
+            )
+        )
+        mz_quantity = np.mean(np.abs(quantity_values[:, 2]))
+        time = snap["parameters"]["time"]
+        quantity_array.append(total_quantity)
+        rmsq_array.append(rms_quantity)
+        mzq_array.append(mz_quantity)
+        times_array.append(time)
+        npart_array.append(len(quantity_values))
+
+        # Retrieve some information about the simulation run
+        artDiffusion = snap["metadata"]["artDiffusion"]
+        dedHyp = snap["metadata"]["dedHyp"]
+        dedHypDivv = snap["metadata"]["dedHypDivv"]
+        dedPar = snap["metadata"]["dedPar"]
+        eta = snap["metadata"]["eta"]
+        git = snap["metadata"]["git"]
+        gitBranch = snap["metadata"]["gitBranch"]
+        hydroScheme = snap["metadata"]["hydroScheme"]
+        kernel = snap["metadata"]["kernel"]
+        neighbours = snap["metadata"]["neighbours"]
+
+    quantity_array = np.array(quantity_array)
+    rmsq_array = np.array(rmsq_array)
+    mzq_array = np.array(mzq_array)
+    times_array = np.array(times_array)
+    npart_array = np.array(npart_array)
+
+    fig, ax = plt.subplots(1, 3, figsize=(5.5 * 3, 5))
+    if isvec:
+        quantity_abs = np.sqrt(
+            quantity_array[:, 0] ** 2
+            + quantity_array[:, 1] ** 2
+            + quantity_array[:, 2] ** 2
+        )
+    else:
+        quantity_abs = np.abs(quantity_array)
+
+    # ax[0].plot(times_array,quantity_array[:,2],color='black',marker='.', label='$p_z$')
+    # ax[0].plot(times_array,quantity_array[:,0],color='blue' ,marker='.', label='$p_x$')
+    # ax[0].plot(times_array,quantity_array[:,1],color='red' ,marker='.', label='$p_y$')
+    # ax[0].plot(times_array,np.abs(quantity_array[:,2]),color='black',marker='.', label='$p_z$')
+    # pq = quantity_array[:,2]#/npart_array
+    # mask = pq<0
+    # lpq = '$mean p_z$'
+    # if np.sum(mask)>0:
+    #    # Separate positive and negative values
+    #    positive = np.where(pq > 0, pq, np.nan)  # Use NaN for negative values
+    #    negative = np.where(pq < 0, pq, np.nan)  # Use NaN for positive values
+    #    ax[0].plot(times_array, positive,color='red', marker='.',label = 'mean $p_z>0$')
+    #    ax[0].plot(times_array,-negative,color='blue',marker='.',label = 'mean $p_z<0$')
+    # else:
+    # ax[0].plot(times_array,quantity_abs,color='black',marker='.',label = 'B')
+
+    ax[0].plot(times_array, rmsq_array, color="green", marker=".", label="$B_{rms}$")
+    # ax[0].plot(times_array, mzq_array, color='black', marker='.',label='mean $|p_z|$')
+
+    # ax[0].plot(times_array,np.abs(quantity_array[:,0]),color='blue' ,marker='.', label='$p_x$')
+    # ax[0].plot(times_array,np.abs(quantity_array[:,1]),color='red'  ,marker='.', label='$p_y$')
+
+    ax[0].legend()
+    ax[0].grid()
+    # ax[0].set_yscale('log')
+    ax[0].set_ylabel("$|B|$, [$\mu G$]", fontsize=20)
+    ax[0].set_xlabel("$time [Gyr]$", fontsize=20)
+    ax[0].set_ylim([0, 10.0])
+    ax[0].set_xlim([0, 3.0])
+    # ax[0].set_ylim([1e-8,1e-4])
+
+    ax[1].plot(times_array, npart_array, color="black", marker=".")
+    ax[1].grid()
+    ax[1].set_yscale("log")
+    ax[1].set_ylabel("$N_p$" + label, fontsize=20)
+    ax[1].set_xlabel("$time [Gyr]$", fontsize=20)
+    ax[1].set_ylim([1e1, 1e5])
+    ax[1].set_xlim([0, 3.0])
+
+    # add panel with information about the run
+    text_common_args = dict(
+        fontsize=10, ha="center", va="center", transform=ax[2].transAxes
+    )
+
+    ax[2].text(0.5, 0.8, "Cooling halo with spin, $Np=%.0f$ " % Np, **text_common_args)
+    ax[2].text(0.5, 0.7, "swift %s" % git.decode("utf-8"), **text_common_args)
+    ax[2].text(0.5, 0.6, "Branch %s" % gitBranch.decode("utf-8"), **text_common_args)
+    ax[2].text(0.5, 0.5, hydroScheme.decode("utf-8"), **text_common_args)
+    ax[2].text(
+        0.5,
+        0.4,
+        kernel.decode("utf-8") + " with $%.2f$ neighbours" % (neighbours),
+        **text_common_args,
+    )
+    ax[2].text(
+        0.5, 0.3, "Artificial diffusion: $%.2f$ " % (artDiffusion), **text_common_args
+    )
+    ax[2].text(
+        0.5,
+        0.2,
+        "Dedner Hyp, Hyp_div(v), Par: $%.2f,%.2f,%.2f$ " % (dedHyp, dedHypDivv, dedPar),
+        **text_common_args,
+    )
+    ax[2].text(
+        0.5, 0.1, "Physical resistivity $\eta$: $%.2f$ " % (eta), **text_common_args
+    )
+    ax[2].axis("off")
+
+    # fig.suptitle(quantity,fontsize=20)
+    fig.tight_layout()
+    plt.savefig(outputfileaddr)
+    plt.close(fig)
+    return
+
+
+def plot_pressure_vs_time(
+    all_snapshots,
+    outputfileaddr,
+    isvec=False,
+    region_cut=True,
+    label="",
+    rcut=15,
+    zcut=0.5,
+):
+    pressures_array = []
+    magnetic_pressures_array = []
+    dynamic_pressures_array = []
+    times_array = []
+    npart_array = []
+    rcut *= units_dict["length"]
+    zcut *= units_dict["length"]
+    for snap in all_snapshots:
+        pressures_values = snap["values"]["pressures"] * units_dict["pressure"]
+        magnetic_pressures_values = (
+            snap["values"]["magnetic_pressures"] * units_dict["pressure"]
+        )
+        dynamic_pressures_values = (
+            snap["values"]["dynamic_pressures"] * units_dict["pressure"]
+        )
+        densities_values = snap["values"]["densities"] * units_dict["density"]
+        masses_values = snap["values"]["masses"] * units_dict["mass"]
+        Np = len(pressures_values)
+
+        if region_cut:
+            z = snap["values"]["CM_frame_coordinates"][:, 2] * units_dict["length"]
+            y = snap["values"]["CM_frame_coordinates"][:, 1] * units_dict["length"]
+            x = snap["values"]["CM_frame_coordinates"][:, 0] * units_dict["length"]
+            r = np.sqrt(x ** 2 + y ** 2)
+            mask = (r <= rcut) & (np.abs(z) <= zcut)
+            pressures_values = pressures_values[mask]
+            magnetic_pressures_values = magnetic_pressures_values[mask]
+            dynamic_pressures_values = dynamic_pressures_values[mask]
+            densities_values = densities_values[mask]
+            masses_values = masses_values[mask]
+
+        # calculate volume weighted averages:
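+        # particle volume V_i = m_i / rho_i; volume-weighted mean <Q> = sum(Q_i V_i) / sum(V_i)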
+        volume = np.sum(masses_values / densities_values)
+        pressures_vv = (
+            np.sum(pressures_values * masses_values / densities_values) / volume
+        )
+        magnetic_pressures_vv = (
+            np.sum(magnetic_pressures_values * masses_values / densities_values)
+            / volume
+        )
+        dynamic_pressures_vv = (
+            np.sum(dynamic_pressures_values * masses_values / densities_values) / volume
+        )
+
+        pressures_vv = pressures_vv.to(units_dict["pressure"]).value
+        magnetic_pressures_vv = magnetic_pressures_vv.to(units_dict["pressure"]).value
+        dynamic_pressures_vv = dynamic_pressures_vv.to(units_dict["pressure"]).value
+
+        pressures_array.append(pressures_vv)
+        magnetic_pressures_array.append(magnetic_pressures_vv)
+        dynamic_pressures_array.append(dynamic_pressures_vv)
+
+        time = snap["parameters"]["time"]
+        times_array.append(time)
+        npart_array.append(len(pressures_values))
+
+        # Retrieve some information about the simulation run
+        artDiffusion = snap["metadata"]["artDiffusion"]
+        dedHyp = snap["metadata"]["dedHyp"]
+        dedHypDivv = snap["metadata"]["dedHypDivv"]
+        dedPar = snap["metadata"]["dedPar"]
+        eta = snap["metadata"]["eta"]
+        git = snap["metadata"]["git"]
+        gitBranch = snap["metadata"]["gitBranch"]
+        hydroScheme = snap["metadata"]["hydroScheme"]
+        kernel = snap["metadata"]["kernel"]
+        neighbours = snap["metadata"]["neighbours"]
+
+    pressures_array = np.array(pressures_array)
+    magnetic_pressures_array = np.array(magnetic_pressures_array)
+    dynamic_pressures_array = np.array(dynamic_pressures_array)
+    times_array = np.array(times_array)
+    npart_array = np.array(npart_array)
+
+    fig, ax = plt.subplots(1, 3, figsize=(5.5 * 3, 5))
+    ax[0].plot(
+        times_array, pressures_array, color="red", marker=".", label=r"$P_{thermal}$"
+    )
+    ax[0].plot(
+        times_array,
+        magnetic_pressures_array,
+        color="blue",
+        marker=".",
+        label=r"$P_{mag}$",
+    )
+    ax[0].plot(
+        times_array,
+        dynamic_pressures_array,
+        color="green",
+        marker=".",
+        label=r"$\rho v^2$",
+    )
+
+    ax[0].legend()
+    ax[0].grid()
+    ax[0].set_yscale("log")
+    ax[0].set_ylabel("$P$, [$ 10^{10}$ $M_{sol}$ $kpc^{-1}$ $Gyr^{-2}$]", fontsize=20)
+    ax[0].set_xlabel("time $[Gyr]$", fontsize=20)
+    ax[0].set_ylim([1e-4, 1e2])
+    ax[0].set_xlim([0, 4.0])
+    # ax[0].set_ylim([1e-8,1e-4])
+
+    ax[1].plot(times_array, npart_array, color="black", marker=".")
+    ax[1].grid()
+    ax[1].set_yscale("log")
+    ax[1].set_ylabel(r"$N_p^{cut}$", fontsize=20)  # , Lcut = {Lcut} A.U.',fontsize=20)
+    ax[1].set_xlabel("time $[Gyr]$", fontsize=20)
+    ax[1].set_ylim([1e1, 1e5])
+    ax[1].set_xlim([0, 4.0])
+
+    # add panel with information about the run
+    text_common_args = dict(
+        fontsize=10, ha="center", va="center", transform=ax[2].transAxes
+    )
+
+    ax[2].text(0.5, 0.8, "Cooling halo with spin, $Np=%.0f$ " % Np, **text_common_args)
+    ax[2].text(0.5, 0.7, "swift %s" % git.decode("utf-8"), **text_common_args)
+    ax[2].text(0.5, 0.6, "Branch %s" % gitBranch.decode("utf-8"), **text_common_args)
+    ax[2].text(0.5, 0.5, hydroScheme.decode("utf-8"), **text_common_args)
+    ax[2].text(
+        0.5,
+        0.4,
+        kernel.decode("utf-8") + " with $%.2f$ neighbours" % (neighbours),
+        **text_common_args,
+    )
+    ax[2].text(
+        0.5, 0.3, "Artificial diffusion: $%.2f$ " % (artDiffusion), **text_common_args
+    )
+    ax[2].text(
+        0.5,
+        0.2,
+        "Dedner Hyp, Hyp_div(v), Par: $%.2f,%.2f,%.2f$ " % (dedHyp, dedHypDivv, dedPar),
+        **text_common_args,
+    )
+    ax[2].text(
+        0.5, 0.1, "Physical resistivity $\eta$: $%.2f$ " % (eta), **text_common_args
+    )
+    ax[2].axis("off")
+
+    # fig.suptitle(quantity,fontsize=20)
+    fig.tight_layout()
+    plt.savefig(outputfileaddr)
+    plt.close(fig)
+    return
+
+
+def plot_B_vs_time(
+    all_snapshots, outputfileaddr, isvec=False, region_cut=True, label="", rcut=15
+):
+    magnetic_flux_densities_array = []
+    times_array = []
+    npart_array = []
+    total_mass_array = []
+    rcut *= units_dict["length"]
+    for snap in all_snapshots:
+        magnetic_flux_densities_values = (
+            snap["values"]["magnetic_flux_densities"] * units_dict["magneticfield"]
+        )
+        magnetic_flux_densities_values_sq = dot_vec(
+            magnetic_flux_densities_values, magnetic_flux_densities_values
+        )
+        densities_values = snap["values"]["densities"] * units_dict["density"]
+        masses_values = snap["values"]["masses"] * units_dict["mass"]
+        Np = len(masses_values)
+
+        if region_cut:
+            z = snap["values"]["CM_frame_coordinates"][:, 2] * units_dict["length"]
+            y = snap["values"]["CM_frame_coordinates"][:, 1] * units_dict["length"]
+            x = snap["values"]["CM_frame_coordinates"][:, 0] * units_dict["length"]
+            r = np.sqrt(x ** 2 + y ** 2 + z ** 2)
+            mask = r <= rcut
+            magnetic_flux_densities_values_sq = magnetic_flux_densities_values_sq[mask]
+            densities_values = densities_values[mask]
+            masses_values = masses_values[mask]
+
+        # calculate volume weighted averages:
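+        # volume-weighted rms field: B_rms = sqrt( sum(B_i^2 V_i) / sum(V_i) ) with V_i = m_i / rho_i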
+        volume = np.sum(masses_values / densities_values)
+        magnetic_flux_densities_sq_vv = (
+            np.sum(magnetic_flux_densities_values_sq * masses_values / densities_values)
+            / volume
+        )
+        magnetic_flux_densities_vv = np.sqrt(magnetic_flux_densities_sq_vv)
+
+        magnetic_flux_densities_vv = magnetic_flux_densities_vv.to(
+            units_dict["magneticfield"]
+        ).value
+
+        magnetic_flux_densities_array.append(magnetic_flux_densities_vv)
+
+        time = snap["parameters"]["time"]
+        times_array.append(time)
+        npart_array.append(len(masses_values))
+        total_mass_array.append(np.sum(masses_values.to(units_dict["mass"]).value))
+
+        # Retrieve some information about the simulation run
+        artDiffusion = snap["metadata"]["artDiffusion"]
+        dedHyp = snap["metadata"]["dedHyp"]
+        dedHypDivv = snap["metadata"]["dedHypDivv"]
+        dedPar = snap["metadata"]["dedPar"]
+        eta = snap["metadata"]["eta"]
+        git = snap["metadata"]["git"]
+        gitBranch = snap["metadata"]["gitBranch"]
+        hydroScheme = snap["metadata"]["hydroScheme"]
+        kernel = snap["metadata"]["kernel"]
+        neighbours = snap["metadata"]["neighbours"]
+
+    magnetic_flux_densities_array = np.array(magnetic_flux_densities_array)
+    times_array = np.array(times_array)
+    npart_array = np.array(npart_array)
+    total_mass_array = np.array(total_mass_array)
+
+    fig, ax = plt.subplots(1, 4, figsize=(5.5 * 4, 5))
+    ax[0].plot(times_array, magnetic_flux_densities_array, color="black", marker=".")
+
+    # ax[0].legend()
+    ax[0].grid()
+    # ax[0].set_yscale('log')
+    ax[0].set_ylabel("$B_{rms}$, [$[\mu G]$]", fontsize=20)
+    ax[0].set_xlabel("time $[Gyr]$", fontsize=20)
+    ax[0].set_ylim([0, 3.5])
+    ax[0].set_xlim([0, 3.0])
+    # ax[0].set_ylim([1e-8,1e-4])
+
+    ax[1].plot(times_array, npart_array, color="black", marker=".")
+    ax[1].grid()
+    ax[1].set_yscale("log")
+    ax[1].set_ylabel(r"$N_p^{cut}$", fontsize=20)  # , Lcut = {Lcut} A.U.',fontsize=20)
+    ax[1].set_xlabel("time $[Gyr]$", fontsize=20)
+    ax[1].set_ylim([1e1, 1e5])
+    ax[1].set_xlim([0, 3.0])
+
+    ax[2].plot(times_array, total_mass_array, color="black", marker=".")
+    ax[2].grid()
+    ax[2].set_yscale("log")
+    ax[2].set_ylabel(
+        r"$M_{tot} [M_{sol}]$", fontsize=20
+    )  # , Lcut = {Lcut} A.U.',fontsize=20)
+    ax[2].set_xlabel("time $[Gyr]$", fontsize=20)
+    ax[2].set_ylim([1e8, 1e11])
+    ax[2].set_xlim([0, 3.0])
+
+    # add panel with information about the run
+    text_common_args = dict(
+        fontsize=10, ha="center", va="center", transform=ax[3].transAxes
+    )
+
+    ax[3].text(0.5, 0.8, "Cooling halo with spin, $Np=%.0f$ " % Np, **text_common_args)
+    ax[3].text(0.5, 0.7, "swift %s" % git.decode("utf-8"), **text_common_args)
+    ax[3].text(0.5, 0.6, "Branch %s" % gitBranch.decode("utf-8"), **text_common_args)
+    ax[3].text(0.5, 0.5, hydroScheme.decode("utf-8"), **text_common_args)
+    ax[3].text(
+        0.5,
+        0.4,
+        kernel.decode("utf-8") + " with $%.2f$ neighbours" % (neighbours),
+        **text_common_args,
+    )
+    ax[3].text(
+        0.5, 0.3, "Artificial diffusion: $%.2f$ " % (artDiffusion), **text_common_args
+    )
+    ax[3].text(
+        0.5,
+        0.2,
+        "Dedner Hyp, Hyp_div(v), Par: $%.2f,%.2f,%.2f$ " % (dedHyp, dedHypDivv, dedPar),
+        **text_common_args,
+    )
+    ax[3].text(
+        0.5, 0.1, "Physical resistivity $\eta$: $%.2f$ " % (eta), **text_common_args
+    )
+    ax[3].axis("off")
+
+    # fig.suptitle(quantity,fontsize=20)
+    fig.tight_layout()
+    plt.savefig(outputfileaddr)
+    plt.close(fig)
+    return
+
+
+# 34013
+
+folder = "./gb099172f_1e6p_hyp=0.1_corr_mass_prof"
+
+snapshots_history, snapshot_names = upload_snapshots_data_from_folder(folder)
+
+# plot_quantity_vs_time(snapshots_history,'magnetic_flux_densities_norm',outputfileaddr=folder+'/momentums_noRegCut.png',isvec=True,region_cut=False, label = ' All')
+
+plot_pressure_vs_time(
+    snapshots_history,
+    outputfileaddr=folder + "/P_cut.png",
+    isvec=True,
+    region_cut=True,
+    rcut=15,
+    zcut=15.0,
+    label=f" $Lbox=${2*Lcut} kPc",
+)
+plot_B_vs_time(
+    snapshots_history,
+    outputfileaddr=folder + "/B_cut.png",
+    isvec=True,
+    region_cut=True,
+    rcut=15,
+    label=f" $Lbox=${2*Lcut} kPc",
+)
diff --git a/examples/MHDTests/CoolingHaloMHD_feedback/plotCorrelation.py b/examples/MHDTests/CoolingHaloMHD_feedback/plotCorrelation.py
new file mode 100644
index 0000000000000000000000000000000000000000..d991ef8a1805fb3be113006ca6c9fc66edeb3e67
--- /dev/null
+++ b/examples/MHDTests/CoolingHaloMHD_feedback/plotCorrelation.py
@@ -0,0 +1,259 @@
+from swiftsimio import load
+from swiftsimio.visualisation.slice import slice_gas
+from swiftsimio.visualisation.rotation import rotation_matrix_from_vector
+import numpy as np
+import sys
+import h5py
+from matplotlib.pyplot import imsave
+from matplotlib.colors import LogNorm
+import matplotlib.pyplot as plt
+from matplotlib import ticker
+from matplotlib.ticker import FormatStrFormatter
+from matplotlib import colors
+import unyt
+
+filename = sys.argv[1]
+
+# slice_height = sys.argv[3]
+
+# projection = sys.argv[4]
+
+preferred_color = "magma"
+
+cpts = 100
+
+
+to_plot = "correlation_hist"  # 'B' or 'A' or 'errors' or |B|_rho_|v|_R0
+
+data = load(filename)
+
+time = data.metadata.time
+time.convert_to_units(unyt.Gyr)
+print("Plotting at %2f Gyr" % time)
+
+# getting values from snapshots
+
+img_res = 512
+divreg = 1e-30
+reg_err = 0.01
+
+# see available fields in snapshot
+# print(data.metadata.gas_properties.field_names)
+# Get physical quantities
+x = data.gas.coordinates[:, 0].to_physical()
+y = data.gas.coordinates[:, 1].to_physical()
+z = data.gas.coordinates[:, 2].to_physical()
+
+rho = data.gas.densities
+nH = rho.to(unyt.g / unyt.cm ** 3) / (1.67e-24 * unyt.g)
+h = data.gas.smoothing_lengths
+v = data.gas.velocities
+P = data.gas.pressures
+B = data.gas.magnetic_flux_densities
+T = data.gas.temperatures
+
+R0 = data.gas.r0
+R1 = data.gas.r1
+R2 = data.gas.r2
+R3 = data.gas.r3
+
+normB = np.sqrt(B[:, 0] ** 2 + B[:, 1] ** 2 + B[:, 2] ** 2)
+
+# Retrieve some information about the simulation run
+artDiffusion = data.metadata.hydro_scheme["Artificial Diffusion Constant"]
+dedHyp = data.metadata.hydro_scheme["Dedner Hyperbolic Constant"]
+dedHypDivv = data.metadata.hydro_scheme["Dedner Hyperbolic div(v) Constant"]
+dedPar = data.metadata.hydro_scheme["Dedner Parabolic Constant"]
+eta = data.metadata.hydro_scheme["Resistive Eta"]
+git = data.metadata.code["Git Revision"]
+gitBranch = data.metadata.code["Git Branch"]
+hydroScheme = data.metadata.hydro_scheme["Scheme"]
+kernel = data.metadata.hydro_scheme["Kernel function"]
+neighbours = data.metadata.hydro_scheme["Kernel target N_ngb"]
+
+
+########################################## Make histogram
+if to_plot == "hist":
+    # min_metric = 0.1
+    quantity = (
+        nH  # T.to(unyt.K)#rho.to(unyt.g/unyt.cm**3)
+    ).value  # (rho.to_physical()/rho_mean).value#(abs_vec(v).to_physical()/vrms).value
+    qname = r"$n_H$ $[cm^{-3}]$"  # r"$\rho$ $[g/cm^3]$"  #'rho/<rho>' #'$|v|/v_{rms}$'
+    Nbins = 100
+    # bins = np.linspace(np.min(quantity),np.max(quantity),Nbins)
+    bins = np.logspace(
+        int(np.log10(np.min(quantity))) - 1, int(np.log10(np.max(quantity))) + 1, Nbins
+    )
+
+    nx = 2
+    ny = 1
+    fig, ax = plt.subplots(ny, nx, sharey=True, figsize=(5 * nx, 5 * ny))
+
+    ax[0].hist(quantity, bins=bins, color="blue", label=qname, alpha=0.5, density=False)
+    ax[0].set_xscale("log")
+    ax[0].set_yscale("log")
+    ax[0].legend()
+    ax[0].set_ylabel("$N_{particles}$/bin")
+    ax[0].set_xlabel(qname)
+    ax[0].set_ylim([1, 1e5])
+    # ax.set_title(f"z={z_to_display:}")
+    ax[0].grid()
+
+    # add panel with information about the run
+    Np = len(quantity)
+    text_common_args = dict(
+        fontsize=10, ha="center", va="center", transform=ax[1].transAxes
+    )
+
+    ax[1].text(
+        0.5,
+        0.8,
+        "Cooling halo with spin at time $t=%.2f$ Gyr" % data.metadata.time,
+        **text_common_args,
+    )
+    ax[1].text(0.5, 0.7, "swift %s" % git.decode("utf-8"), **text_common_args)
+    ax[1].text(0.5, 0.6, "Branch %s" % gitBranch.decode("utf-8"), **text_common_args)
+    ax[1].text(0.5, 0.5, hydroScheme.decode("utf-8"), **text_common_args)
+    ax[1].text(
+        0.5,
+        0.4,
+        kernel.decode("utf-8") + " with $%.2f$ neighbours" % (neighbours),
+        **text_common_args,
+    )
+    ax[1].text(
+        0.5, 0.3, "Artificial diffusion: $%.2f$ " % (artDiffusion), **text_common_args
+    )
+    ax[1].text(
+        0.5,
+        0.2,
+        "Dedner Hyp, Hyp_div(v), Par: $%.2f,%.2f,%.2f$ " % (dedHyp, dedHypDivv, dedPar),
+        **text_common_args,
+    )
+    ax[1].text(
+        0.5, 0.1, "Physical resistivity $\eta$: $%.2f$ " % (eta), **text_common_args
+    )
+    ax[1].text(
+        0.5, 0.0, "Number of particles $N_p$: $%.0f$ " % (Np), **text_common_args
+    )
+    ax[1].axis("off")
+
+    fig.tight_layout()
+
+    plt.savefig(sys.argv[2], dpi=100)
+    exit()
+########################################## Make correlation histograms
+
+
+def prepare_histogram_density_plot(data_x, data_y, Nbins):
+    bins_x = np.logspace(
+        int(np.log10(np.min(data_x))) - 1, int(np.log10(np.max(data_x))) + 1, Nbins
+    )
+    bins_y = np.logspace(
+        int(np.log10(np.min(data_y))) - 1, int(np.log10(np.max(data_y))) + 1, Nbins
+    )
+    hist, xedges, yedges = np.histogram2d(data_x, data_y, bins=[bins_x, bins_y])
+    # building uncorrelated histogram
+    # hist_x, _ = np.histogram(data_x, bins=bins_x)
+    # hist_y, _ = np.histogram(data_y, bins=bins_y)
+    # hist_uncorr = np.outer(hist_x, hist_y).astype("float64")
+
+    # normalise so that hist sums to one (a joint probability per bin)
+    min_level = 1 / np.sum(hist)
+    hist /= np.sum(hist)
+    # hist_uncorr /= np.sum(hist_uncorr)
+
+    # print(np.sum(hist),np.sum(hist_uncorr))
+    # regularize
+    # hist[hist < min_level] = min_level
+    # hist_uncorr[hist_uncorr < min_level] = min_level
+
+    # build correlation_funciton
+    # correlation_funcion = hist - hist_uncorr
+
+    X, Y = np.meshgrid(xedges, yedges)
+    return X, Y, hist  # hist_uncorr, correlation_funcion
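+
+
+# prepare_histogram_density_plot returns the log-spaced bin edges (as a meshgrid) together
+# with a 2D histogram normalised to a joint probability per bin; the commented-out lines
+# above sketch an "uncorrelated" baseline built from the outer product of the 1D histograms.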
+
+
+def plot_histogram_density_plot(quantity_x, quantity_y, qname_x, qname_y, Nbins=100):
+    quantity_x = quantity_x.value
+    quantity_y = quantity_y.value
+
+    X, Y, hist = prepare_histogram_density_plot(quantity_x, quantity_y, Nbins)
+    nx = 2
+    ny = 1
+    fig, ax = plt.subplots(ny, nx, sharey=True, figsize=(5 * nx, 5 * ny))
+
+    fig.suptitle(f"({qname_x},{qname_y}) space")
+
+    ax[0].set_title(r"$f_{12}(x,y)$")
+    ax[0].set_ylabel(qname_y)
+    ax[0].set_xlabel(qname_x)
+    ax[0].set_box_aspect(1)
+    # plt.imshow(hist.T, origin='lower', aspect='auto', cmap='plasma')
+
+    pax = ax[0].pcolormesh(X, Y, hist.T, cmap=preferred_color, norm=LogNorm())
+    ax[0].grid(True, linewidth=0.5, alpha=0.3)
+    ax[0].set_xscale("log")
+    ax[0].set_yscale("log")
+
+    x = np.logspace(np.log10(min(quantity_x)), np.log10(max(quantity_x)), 100)
+    y = min(quantity_y) / 10 * (x / min(quantity_x)) ** (2 / 3)
+
+    # ax[0].plot(x,y,color='red',alpha=0.5)
+
+    # add panel with information about the run
+    Np = len(quantity_x)
+    text_common_args = dict(
+        fontsize=10, ha="center", va="center", transform=ax[1].transAxes
+    )
+
+    ax[1].text(
+        0.5,
+        0.8,
+        "Cooling halo with spin at time $t=%.2f$ Gyr" % data.metadata.time,
+        **text_common_args,
+    )
+    ax[1].text(0.5, 0.7, "swift %s" % git.decode("utf-8"), **text_common_args)
+    ax[1].text(0.5, 0.6, "Branch %s" % gitBranch.decode("utf-8"), **text_common_args)
+    ax[1].text(0.5, 0.5, hydroScheme.decode("utf-8"), **text_common_args)
+    ax[1].text(
+        0.5,
+        0.4,
+        kernel.decode("utf-8") + " with $%.2f$ neighbours" % (neighbours),
+        **text_common_args,
+    )
+    ax[1].text(
+        0.5, 0.3, "Artificial diffusion: $%.2f$ " % (artDiffusion), **text_common_args
+    )
+    ax[1].text(
+        0.5,
+        0.2,
+        "Dedner Hyp, Hyp_div(v), Par: $%.2f,%.2f,%.2f$ " % (dedHyp, dedHypDivv, dedPar),
+        **text_common_args,
+    )
+    ax[1].text(
+        0.5, 0.1, "Physical resistivity $\eta$: $%.2f$ " % (eta), **text_common_args
+    )
+    ax[1].text(
+        0.5, 0.0, "Number of particles $N_p$: $%.0f$ " % (Np), **text_common_args
+    )
+    ax[1].axis("off")
+
+    # fig.colorbar(pax)
+    fig.tight_layout()
+    plt.savefig(sys.argv[2], dpi=200)
+
+
+if to_plot == "correlation_hist":
+    q_x = nH.to(1 / unyt.cm ** 3)
+    q_y = T.to(
+        unyt.K
+    )  # normB.to(1e-7*unyt.g / (unyt.statA * unyt.s * unyt.s)) #T.to(unyt.K)
+    q_x = q_x.to_physical()
+    q_y = q_y.to_physical()
+    plot_histogram_density_plot(
+        q_x, q_y, r"$\rho / m_H$ $[1/cm^3]$", "T $[K]$", Nbins=200
+    )
+    exit()
diff --git a/examples/MHDTests/CoolingHaloMHD_feedback/plotErrors.py b/examples/MHDTests/CoolingHaloMHD_feedback/plotErrors.py
new file mode 100644
index 0000000000000000000000000000000000000000..bbb6d9d639245c854eb0ab3aa0f4b7d22a32e729
--- /dev/null
+++ b/examples/MHDTests/CoolingHaloMHD_feedback/plotErrors.py
@@ -0,0 +1,302 @@
+import numpy as np
+import h5py
+import sys
+import unyt
+import matplotlib.pyplot as plt
+
+from swiftsimio import load
+from swiftsimio.visualisation.rotation import rotation_matrix_from_vector
+from swiftsimio.visualisation.slice import slice_gas
+
+filename = sys.argv[1]
+data = load(filename)
+center = 0.5 * data.metadata.boxsize
+
+time = data.metadata.time
+time.convert_to_units(unyt.Gyr)
+print("Plotting at %2f Gyr" % time)
+
+# Retrieve some information about the simulation run
+artDiffusion = data.metadata.hydro_scheme["Artificial Diffusion Constant"]
+dedHyp = data.metadata.hydro_scheme["Dedner Hyperbolic Constant"]
+dedHypDivv = data.metadata.hydro_scheme["Dedner Hyperbolic div(v) Constant"]
+dedPar = data.metadata.hydro_scheme["Dedner Parabolic Constant"]
+eta = data.metadata.hydro_scheme["Resistive Eta"]
+git = data.metadata.code["Git Revision"]
+gitBranch = data.metadata.code["Git Branch"]
+hydroScheme = data.metadata.hydro_scheme["Scheme"]
+kernel = data.metadata.hydro_scheme["Kernel function"]
+neighbours = data.metadata.hydro_scheme["Kernel target N_ngb"]
+
+
+# Constants
+G = 6.67430e-11 * unyt.m ** 3 / unyt.kg / unyt.s ** 2  # 6.67430e-8
+G = G.to(unyt.cm ** 3 / unyt.g / unyt.s ** 2)
+mu0 = 1.25663706127e-1 * unyt.g * unyt.cm / (unyt.s ** 2 * unyt.A ** 2)
+
+# First create a mass-weighted temperature dataset
+r = data.gas.coordinates - center
+v = data.gas.velocities
+norm_r = np.sqrt(r[:, 0] ** 2 + r[:, 1] ** 2 + r[:, 2] ** 2)
+vr = np.sum(v * r, axis=1) / norm_r
+normv = np.sqrt(v[:, 0] ** 2 + v[:, 1] ** 2 + v[:, 2] ** 2)
+B = data.gas.magnetic_flux_densities
+# B = v.value * 0.0 * 1e-7*unyt.g / (unyt.statA * unyt.s * unyt.s)
+# J = data.gas.magnetic_flux_curl
+normB = np.sqrt(B[:, 0] ** 2 + B[:, 1] ** 2 + B[:, 2] ** 2)
+# normJ = np.sqrt(J[:, 0] ** 2 + J[:, 1] ** 2 + J[:, 2] ** 2)
+divB = data.gas.magnetic_divergences
+# divB = norm_r.value * 0.0 * 1e-7*unyt.g / (unyt.statA * unyt.s * unyt.s* unyt.cm)
+h = data.gas.smoothing_lengths
+rho = data.gas.densities
+
+
+R0 = data.gas.r0
+R1 = data.gas.r1
+R2 = data.gas.r2
+OW = data.gas.owtriggers
+mean_OW = np.mean(OW)
+min_OW = np.min(OW)
+print("Mean OW is ", np.round(mean_OW, 1))
+print("Min OW is ", np.round(min_OW, 1))
+OW = OW / mean_OW
+
+# Floor the error metrics at 1e-2 so the logarithmic maps below stay bounded
+R0[R0 < 1e-2] = 1e-2
+R1[R1 < 1e-2] = 1e-2
+R2[R2 < 1e-2] = 1e-2
+OW[OW < 1e-2] = 1e-2
+
+
+Np = len(h)
+print("Number of particles %E" % len(data.gas.masses))
+print("Total mass %E Msol" % np.sum(data.gas.masses.to(unyt.Msun).value))
+print("Gas particle mass %E Msol" % np.mean(data.gas.masses.to(unyt.Msun).value))
+
+data.gas.mass_weighted_R0 = data.gas.masses * R0
+data.gas.mass_weighted_R1 = data.gas.masses * R1
+data.gas.mass_weighted_R2 = data.gas.masses * R2
+data.gas.mass_weighted_OW = data.gas.masses * OW
+
+
+"""
+rotation_matrix = [[1,0,0],
+                   [0,1,0],
+                   [0,0,1]]
+"""
+rotation_matrix = [[0, 0, 1], [0, 1, 0], [-1, 0, 0]]
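+# (a 90-degree rotation about the y-axis, assumed here to turn the default face-on xy slice
+#  into the edge-on view plotted in the left-hand column of panels)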
+
+# set plot area
+Lslice_kPc = 20.0  # 2*21.5
+Lslice = Lslice_kPc * 3.08e18 * 1e3 * unyt.cm
+
+visualise_region_xy = [
+    center[0] - Lslice,
+    center[0] + Lslice,
+    center[1] - Lslice,
+    center[1] + Lslice,
+]
+
+visualise_region = [
+    center[0] - Lslice,
+    center[0] + Lslice,
+    center[2] - Lslice,
+    center[2] + Lslice,
+]
+
+
+common_arguments_xy = dict(
+    data=data,
+    resolution=512,
+    parallel=True,
+    region=visualise_region_xy,
+    z_slice=center[2],  # 0.0 * unyt.cm,
+)
+
+common_arguments = dict(
+    data=data,
+    resolution=512,
+    parallel=True,
+    rotation_center=unyt.unyt_array(center),
+    rotation_matrix=rotation_matrix,
+    region=visualise_region,
+    z_slice=0.0 * unyt.cm,
+)
+
+
+# Map in msun / mpc^3
+
+mass_map_xy = slice_gas(**common_arguments_xy, project="masses")
+mass_map = slice_gas(**common_arguments, project="masses")
+
+mass_weighted_R0_map_xy = slice_gas(**common_arguments_xy, project="mass_weighted_R0")
+mass_weighted_R1_map_xy = slice_gas(**common_arguments_xy, project="mass_weighted_R1")
+mass_weighted_R2_map_xy = slice_gas(**common_arguments_xy, project="mass_weighted_R2")
+mass_weighted_OW_map_xy = slice_gas(**common_arguments_xy, project="mass_weighted_OW")
+mass_weighted_R0_map = slice_gas(**common_arguments, project="mass_weighted_R0")
+mass_weighted_R1_map = slice_gas(**common_arguments, project="mass_weighted_R1")
+mass_weighted_R2_map = slice_gas(**common_arguments, project="mass_weighted_R2")
+mass_weighted_OW_map = slice_gas(**common_arguments, project="mass_weighted_OW")
+
+# mass_weighted_J_map = slice_gas(**common_arguments, project="mass_weighted_J")
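+# dividing each mass-weighted slice by the mass slice recovers the mass-weighted average
+# of the corresponding error metric in every pixel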
+R0_map_xy = mass_weighted_R0_map_xy / mass_map_xy
+R1_map_xy = mass_weighted_R1_map_xy / mass_map_xy
+R2_map_xy = mass_weighted_R2_map_xy / mass_map_xy
+OW_map_xy = mass_weighted_OW_map_xy / mass_map_xy
+
+R0_map = mass_weighted_R0_map / mass_map
+R1_map = mass_weighted_R1_map / mass_map
+R2_map = mass_weighted_R2_map / mass_map
+OW_map = mass_weighted_OW_map / mass_map
+
+nx = 2
+ny = 5
+fig, ax = plt.subplots(ny, nx, sharey=True, figsize=(5 * nx, 5 * ny))
+
+# fig.suptitle("Plotting at %.3f free fall times" % toplot)
+
+a00 = ax[0, 0].contourf(
+    np.log10(R0_map.value),
+    cmap="plasma",  # "plasma",#"gist_stern",#"RdBu",
+    extend="both",
+    levels=np.linspace(-2, 0, 100),
+)
+
+a01 = ax[0, 1].contourf(
+    np.log10(R0_map_xy.value).T,
+    cmap="plasma",  # "plasma",#"gist_stern", #"RdBu",
+    extend="both",
+    levels=np.linspace(-2, 0, 100),
+)
+a10 = ax[1, 0].contourf(
+    np.log10(R1_map.value),
+    cmap="plasma",  # "viridis", #"nipy_spectral_r",
+    extend="both",
+    levels=np.linspace(-2, 0, 100),
+)
+a11 = ax[1, 1].contourf(
+    np.log10(R1_map_xy.value).T,
+    cmap="plasma",  # "viridis", #"nipy_spectral_r",
+    extend="both",
+    levels=np.linspace(-2, 0, 100),
+)
+
+a20 = ax[2, 0].contourf(
+    np.log10(R2_map.value),
+    cmap="plasma",  # "viridis", #"nipy_spectral_r",
+    extend="both",
+    levels=np.linspace(-2, 0, 100),
+)
+
+a21 = ax[2, 1].contourf(
+    np.log10(R2_map_xy.value).T,
+    cmap="plasma",  # "viridis", #"nipy_spectral_r",
+    extend="both",
+    levels=np.linspace(-2, 0, 100),
+)
+
+a30 = ax[3, 0].contourf(
+    np.log10(OW_map.value),
+    cmap="bwr",  # "viridis", #"nipy_spectral_r",
+    extend="both",
+    levels=np.linspace(-1, 1, 100),
+)
+
+a31 = ax[3, 1].contourf(
+    np.log10(OW_map_xy.value).T,
+    cmap="bwr",  # "viridis", #"nipy_spectral_r",
+    extend="both",
+    levels=np.linspace(-1, 1, 100),
+)
+
+locs = [512 / 4, 512 / 2, 3 * 512 / 4]
+labels = [-Lslice_kPc / 2, 0, Lslice_kPc / 2]
+
+for ii in range(ny):
+    ax[ii, 0].set_ylabel(r"$z$ [kpc]")
+    ax[ii, 0].set_yticks(locs, labels)
+
+for ii in range(ny):
+    for jj in range(nx):
+        ax[ii, jj].set_xlabel(r"$x$ [kpc]")
+        ax[ii, jj].set_xticks(locs, labels)
+        # ax[ii, jj].set_aspect("equal")
+        ax[ii, jj].set_aspect("equal", adjustable="box")
+        ax[ii, jj].set_xlim(0, 511)
+        ax[ii, jj].set_ylim(0, 511)
+
+
+ticks_error = [-2, -1, 0]
+cbar1 = plt.colorbar(a00, ax=ax[0, 0], fraction=0.046, pad=0.04, ticks=ticks_error)
+cbar1.set_label(r"$\mathrm{log}_{10} R_0 \; $")
+cbar2 = plt.colorbar(a01, ax=ax[0, 1], fraction=0.046, pad=0.04, ticks=ticks_error)
+ax[0, 1].set_ylabel(r"$y$ [kpc]")
+cbar2.set_label(r"$\mathrm{log}_{10} R_0 \; $")
+ax[1, 1].set_ylabel(r"$y$ [kpc]")
+cbar3 = plt.colorbar(a10, ax=ax[1, 0], fraction=0.046, pad=0.04, ticks=ticks_error)
+cbar3.set_label(r"$\mathrm{log}_{10} R_1 \;$")
+cbar4 = plt.colorbar(a11, ax=ax[1, 1], fraction=0.046, pad=0.04, ticks=ticks_error)
+cbar4.set_label(r"$\mathrm{log}_{10} R_1 \;$")
+
+ax[2, 1].set_ylabel(r"$y$ [kpc]")
+
+cbar5 = plt.colorbar(a20, ax=ax[2, 0], fraction=0.046, pad=0.04, ticks=ticks_error)
+cbar5.set_label(r"$\mathrm{log}_{10} R_2 \;$")
+
+cbar6 = plt.colorbar(a21, ax=ax[2, 1], fraction=0.046, pad=0.04, ticks=ticks_error)
+cbar6.set_label(r"$\mathrm{log}_{10} R_2 \;$")
+
+ticksOW = [-1, 0, 1]
+cbar7 = plt.colorbar(a30, ax=ax[3, 0], fraction=0.046, pad=0.04, ticks=ticksOW)
+cbar7.set_label(r"$\mathrm{log}_{10} OW/\langle OW \rangle \;$")
+
+cbar8 = plt.colorbar(a31, ax=ax[3, 1], fraction=0.046, pad=0.04, ticks=ticksOW)
+cbar8.set_label(r"$\mathrm{log}_{10} OW/\langle OW \rangle \;$")
+
+Ninfo = 4
+
+# add panel with information about the run
+text_common_args = dict(
+    fontsize=10, ha="center", va="center", transform=ax[Ninfo, 1].transAxes
+)
+
+ax[Ninfo, 1].text(
+    0.5,
+    0.8,
+    "Cooling halo with spin at time $t=%.2f$ Gyr" % data.metadata.time,
+    **text_common_args,
+)
+ax[Ninfo, 1].text(0.5, 0.7, "swift %s" % git.decode("utf-8"), **text_common_args)
+ax[Ninfo, 1].text(0.5, 0.6, "Branch %s" % gitBranch.decode("utf-8"), **text_common_args)
+ax[Ninfo, 1].text(0.5, 0.5, hydroScheme.decode("utf-8"), **text_common_args)
+ax[Ninfo, 1].text(
+    0.5,
+    0.4,
+    kernel.decode("utf-8") + " with $%.2f$ neighbours" % (neighbours),
+    **text_common_args,
+)
+ax[Ninfo, 1].text(
+    0.5, 0.3, "Artificial diffusion: $%.2f$ " % (artDiffusion), **text_common_args
+)
+ax[Ninfo, 1].text(
+    0.5,
+    0.2,
+    "Dedner Hyp, Hyp_div(v), Par: $%.2f,%.2f,%.2f$ " % (dedHyp, dedHypDivv, dedPar),
+    **text_common_args,
+)
+ax[Ninfo, 1].text(
+    0.5, 0.1, "Physical resistivity $\eta$: $%.2f$ " % (eta), **text_common_args
+)
+ax[Ninfo, 1].text(
+    0.5,
+    0.0,
+    r"Number of particles $N_p$: $%.0f$, $\langle OW \rangle$ = $%.0f$, $ OW_{min}$ = $%.0f$ "
+    % (Np, mean_OW, min_OW),
+    **text_common_args,
+)
+ax[Ninfo, 1].axis("off")
+
+fig.tight_layout()
+
+plt.savefig(sys.argv[2], dpi=220)
diff --git a/examples/MHDTests/CoolingHaloMHD_feedback/plotMovie.py b/examples/MHDTests/CoolingHaloMHD_feedback/plotMovie.py
new file mode 100644
index 0000000000000000000000000000000000000000..a364c6f574230fda3a8e9f10fdd925c0b4e9dc60
--- /dev/null
+++ b/examples/MHDTests/CoolingHaloMHD_feedback/plotMovie.py
@@ -0,0 +1,36 @@
+import subprocess
+from glob import glob
+import os
+
+
+def import_all_snapshot_addr(folderaddr):
+    addr_book = glob(folderaddr + "**/CoolingHalo_*.hdf5", recursive=True)
+    addr_book = sorted(addr_book)
+    image_names = [
+        "image_" + addr.split(".hdf5")[0].split("_")[-1] + ".png" for addr in addr_book
+    ]
+    return addr_book, image_names
+
+
+def plot_all_snapshots(folderaddr):
+    snap_addr, output_addr = import_all_snapshot_addr(folderaddr)
+    for saddr, iaddr in zip(snap_addr, output_addr):
+        command = "python3 plotCorrelation.py " + saddr + " " + iaddr
+        try:
+            subprocess.check_call(command, shell=True)
+        except subprocess.CalledProcessError as e:
+            print(
+                f"Encountered error number {e.returncode}, command output: {e.output}"
+            )
+
+
+def make_a_movie(framerate=1):
+    command = f"ffmpeg -framerate {framerate} -i 'image_%04d.png' -avoid_negative_ts make_zero -r {framerate} -pix_fmt yuv420p video.mp4"
+    try:
+        subprocess.check_call(command, shell=True)
+    except subprocess.CalledProcessError as e:
+        print(f"Encountered error number {e.returncode}, command output: {e.output}")
+
+
+plot_all_snapshots("./CH_gb099172f_hyp=0.1_par=1.0_divv=0.0_smallHalo_1e4_correcteps")
+# make_a_movie()
diff --git a/examples/MHDTests/CoolingHaloMHD_feedback/plotSolution.py b/examples/MHDTests/CoolingHaloMHD_feedback/plotSolution.py
new file mode 100644
index 0000000000000000000000000000000000000000..88682220714f1e3b32b54a383bfd99901cdbb2ad
--- /dev/null
+++ b/examples/MHDTests/CoolingHaloMHD_feedback/plotSolution.py
@@ -0,0 +1,228 @@
+import numpy as np
+import h5py
+import argparse
+import unyt
+import matplotlib.pyplot as plt
+
+from swiftsimio import load
+from swiftsimio.visualisation.projection import project_gas
+
+# Parse command line arguments
+argparser = argparse.ArgumentParser()
+argparser.add_argument("input")
+argparser.add_argument("output")
+args = argparser.parse_args()
+
+# Load snapshot
+filename = args.input
+data = load(filename)
+
+time = data.metadata.time
+time.convert_to_units(unyt.Gyr)
+print("Plotting at %2f Gyr" % time)
+
+# Retrieve some information about the simulation run
+artDiffusion = data.metadata.hydro_scheme["Artificial Diffusion Constant"]
+dedHyp = data.metadata.hydro_scheme["Dedner Hyperbolic Constant"]
+dedHypDivv = data.metadata.hydro_scheme["Dedner Hyperbolic div(v) Constant"]
+dedPar = data.metadata.hydro_scheme["Dedner Parabolic Constant"]
+eta = data.metadata.hydro_scheme["Resistive Eta"]
+git = data.metadata.code["Git Revision"]
+gitBranch = data.metadata.code["Git Branch"]
+hydroScheme = data.metadata.hydro_scheme["Scheme"]
+kernel = data.metadata.hydro_scheme["Kernel function"]
+neighbours = data.metadata.hydro_scheme["Kernel target N_ngb"]
+
+# Retrieve particle attributes of interest
+v = data.gas.velocities
+normv = np.sqrt(v[:, 0] ** 2 + v[:, 1] ** 2 + v[:, 2] ** 2)
+B = data.gas.magnetic_flux_densities
+normB = np.sqrt(B[:, 0] ** 2 + B[:, 1] ** 2 + B[:, 2] ** 2)
+divB = data.gas.magnetic_divergences
+h = data.gas.smoothing_lengths
+
+# Generate mass weighted maps of quantities of interest
+data.gas.mass_weighted_densities = data.gas.masses * data.gas.densities
+
+data.gas.mass_weighted_pressures = data.gas.masses * data.gas.pressures
+
+data.gas.mass_weighted_normv = data.gas.masses * normv
+
+data.gas.mass_weighted_normB = data.gas.masses * normB
+
+data.gas.mass_weighted_error = data.gas.masses * np.log10(
+    np.maximum(h * abs(divB) / (normB + 0.01 * np.max(normB)), 1e-6)
+)
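+# the error measure above is log10( h * |div B| / |B| ), floored at 1e-6, with a
+# 0.01 * max|B| regularisation in the denominator to avoid division by near-zero fields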
+
+boxSize = data.metadata.boxsize
+
+visfrac = 0.0025
+
+visualise_region = [
+    0.5 * boxSize[0] - visfrac * boxSize[0],
+    0.5 * boxSize[0] + visfrac * boxSize[0],
+    0.5 * boxSize[1] - visfrac * boxSize[1],
+    0.5 * boxSize[1] + visfrac * boxSize[1],
+]
+
+common_arguments = dict(
+    data=data, resolution=512, parallel=True, region=visualise_region
+)
+
+mass_map = project_gas(**common_arguments, project="masses")
+
+mass_weighted_density_map = project_gas(
+    **common_arguments, project="mass_weighted_densities"
+)
+
+mass_weighted_pressure_map = project_gas(
+    **common_arguments, project="mass_weighted_pressures"
+)
+
+mass_weighted_normv_map = project_gas(**common_arguments, project="mass_weighted_normv")
+
+mass_weighted_normB_map = project_gas(**common_arguments, project="mass_weighted_normB")
+
+mass_weighted_error_map = project_gas(**common_arguments, project="mass_weighted_error")
+
+# Take out mass dependence
+# density_map = mass_weighted_density_map / mass_map
+
+mass_map.convert_to_units(unyt.Msun * unyt.pc ** (-2))
+
+pressure_map = mass_weighted_pressure_map / mass_map
+# pressure_map.convert_to_units(unyt.kg)
+
+normv_map = mass_weighted_normv_map / mass_map
+normv_map.convert_to_units(unyt.km * unyt.s ** (-1))
+
+normB_map = mass_weighted_normB_map / mass_map
+normB_map.convert_to_units(1e-7 * unyt.g * unyt.statA ** (-1) * unyt.s ** (-2))
+
+error_map = mass_weighted_error_map / mass_map
+
+# Plot maps
+plt.rcParams.update({"font.size": 16})
+fig, ax = plt.subplots(3, 2, figsize=(12.25, 17))
+
+lvs = 100
+
+a00 = ax[0, 0].contourf(
+    np.log10(mass_map.value.T),
+    cmap="gist_heat",
+    levels=np.linspace(0.5, 2.5, 100),
+    extend="both",
+)
+a01 = ax[0, 1].contourf(
+    np.log10(pressure_map.value.T),
+    cmap="gist_heat",
+    extend="both",
+    levels=lvs,  # np.linspace(0.0, 1.0, 100)
+)
+a10 = ax[1, 0].contourf(
+    np.log10(normv_map.value.T),
+    cmap="gist_heat",
+    extend="both",
+    levels=lvs,  # np.linspace(0.0, 1.0, 100)
+)
+a11 = ax[1, 1].contourf(
+    np.log10(normB_map.value.T),
+    cmap="gist_heat",
+    extend="both",
+    levels=lvs,  # np.linspace(0.5, 1.75, 100)
+)
+a20 = ax[2, 0].contourf(
+    error_map.value.T, cmap="jet", extend="both", levels=np.linspace(-5.0, 0.0, 6)
+)
+
+# Add panel with information about the run
+text_common_args = dict(
+    fontsize=10, ha="center", va="center", transform=ax[2, 1].transAxes
+)
+
+ax[2, 1].text(
+    0.5,
+    0.8,
+    "Cooling Halo with Spin at time $t=%.2f$" % data.metadata.time,
+    **text_common_args,
+)
+ax[2, 1].text(0.5, 0.7, "SWIFT %s" % git.decode("utf-8"), **text_common_args)
+ax[2, 1].text(0.5, 0.6, "Branch %s" % gitBranch.decode("utf-8"), **text_common_args)
+ax[2, 1].text(0.5, 0.5, hydroScheme.decode("utf-8"), **text_common_args)
+ax[2, 1].text(
+    0.5,
+    0.4,
+    kernel.decode("utf-8") + " with $%.2f$ neighbours" % (neighbours),
+    **text_common_args,
+)
+ax[2, 1].text(
+    0.5, 0.3, "Artificial diffusion: $%.2f$ " % (artDiffusion), **text_common_args
+)
+ax[2, 1].text(
+    0.5,
+    0.2,
+    "Dedner Hyp, Hyp_div(v), Par: $%.2f,%.2f,%.2f$ " % (dedHyp, dedHypDivv, dedPar),
+    **text_common_args,
+)
+ax[2, 1].text(
+    0.5, 0.1, "Physical resistivity $\eta$: $%.2f$ " % (eta), **text_common_args
+)
+ax[2, 1].axis("off")
+
+for axi in ax:
+    for axii in axi:
+        axii.set_xticks([])
+        axii.set_yticks([])
+        axii.set_aspect("equal")
+
+# Set appropriate colourbars
+fig.colorbar(
+    a00,
+    ax=ax[0, 0],
+    label=r"$\mathrm{log}_{10} \; \Sigma_{gas} \; [M_{\odot} \; {pc}^{-2}]$",
+    fraction=0.042,
+    pad=0.04,
+    location="left",
+    # ticks=np.linspace(0.0, 2.5, 6),
+)
+
+fig.colorbar(
+    a01,
+    ax=ax[0, 1],
+    label=r"$\mathrm{log}_{10} \; P \; [M_{\odot} \; / kpc \; / {Gyr}^{-2}]$",
+    fraction=0.042,
+    pad=0.04,
+    # ticks=np.linspace(0.0, 1.0, 6),
+)
+
+fig.colorbar(
+    a10,
+    ax=ax[1, 0],
+    label=r"$\mathrm{log}_{10} \; |\mathbf{v}| \; [kms \; s^{-1}]$",
+    fraction=0.042,
+    pad=0.04,
+    location="left",
+    # ticks=np.linspace(0.0, 1.0, 6),
+)
+
+fig.colorbar(
+    a11,
+    ax=ax[1, 1],
+    label=r"$\mathrm{log}_{10} \; |\mathbf{B}| \; [\mu G]$",
+    fraction=0.042,
+    pad=0.04,
+    # ticks=np.linspace(0.5, 1.75, 6),
+)
+
+fig.colorbar(
+    a20,
+    ax=ax[2, 0],
+    label=r"$\mathrm{log}_{10} \frac{h \: \nabla \cdot B}{|B|}$",
+    fraction=0.042,
+    pad=0.04,
+    location="left",
+)
+
+plt.subplots_adjust(wspace=0, hspace=0)
+
+plt.savefig(args.output)
diff --git a/examples/MHDTests/CoolingHaloMHD_feedback/plotSolutionReference.py b/examples/MHDTests/CoolingHaloMHD_feedback/plotSolutionReference.py
new file mode 100644
index 0000000000000000000000000000000000000000..a563d432517d48f16a5a76f48af9729ac6138c01
--- /dev/null
+++ b/examples/MHDTests/CoolingHaloMHD_feedback/plotSolutionReference.py
@@ -0,0 +1,523 @@
+import numpy as np
+import h5py
+import sys
+import unyt
+import matplotlib.pyplot as plt
+
+from swiftsimio import load
+from swiftsimio.visualisation.rotation import rotation_matrix_from_vector
+from swiftsimio.visualisation.slice import slice_gas
+
+filename = sys.argv[1]
+data = load(filename)
+center = 0.5 * data.metadata.boxsize
+
+time = data.metadata.time
+time.convert_to_units(unyt.Gyr)
+print("Plotting at %2f Gyr" % time)
+
+# Retrieve some information about the simulation run
+artDiffusion = data.metadata.hydro_scheme["Artificial Diffusion Constant"]
+dedHyp = data.metadata.hydro_scheme["Dedner Hyperbolic Constant"]
+dedHypDivv = data.metadata.hydro_scheme["Dedner Hyperbolic div(v) Constant"]
+dedPar = data.metadata.hydro_scheme["Dedner Parabolic Constant"]
+eta = data.metadata.hydro_scheme["Resistive Eta"]
+git = data.metadata.code["Git Revision"]
+gitBranch = data.metadata.code["Git Branch"]
+hydroScheme = data.metadata.hydro_scheme["Scheme"]
+kernel = data.metadata.hydro_scheme["Kernel function"]
+neighbours = data.metadata.hydro_scheme["Kernel target N_ngb"]
+
+
+# Constants
+G = 6.67430e-11 * unyt.m ** 3 / unyt.kg / unyt.s ** 2  # 6.67430e-8
+G = G.to(unyt.cm ** 3 / unyt.g / unyt.s ** 2)
+mu0 = 1.25663706127e-1 * unyt.g * unyt.cm / (unyt.s ** 2 * unyt.statA ** 2)
+
+# First create a mass-weighted temperature dataset
+r = data.gas.coordinates - center
+v = data.gas.velocities
+norm_r = np.sqrt(r[:, 0] ** 2 + r[:, 1] ** 2 + r[:, 2] ** 2)
+vr = np.sum(v * r, axis=1) / norm_r
+normv = np.sqrt(v[:, 0] ** 2 + v[:, 1] ** 2 + v[:, 2] ** 2)
+B = data.gas.magnetic_flux_densities
+# B = v.value * 0.0 * 1e-7*unyt.g / (unyt.statA * unyt.s * unyt.s)
+# J = data.gas.magnetic_flux_curl
+normB = np.sqrt(B[:, 0] ** 2 + B[:, 1] ** 2 + B[:, 2] ** 2)
+# normJ = np.sqrt(J[:, 0] ** 2 + J[:, 1] ** 2 + J[:, 2] ** 2)
+divB = data.gas.magnetic_divergences
+# divB = norm_r.value * 0.0 * 1e-7*unyt.g / (unyt.statA * unyt.s * unyt.s* unyt.cm)
+h = data.gas.smoothing_lengths
+rho = data.gas.densities
+Np = len(h)
+print("Number of particles %E" % len(data.gas.masses))
+print("Total mass %E Msol" % np.sum(data.gas.masses.to(unyt.Msun).value))
+print("Gas particle mass %E Msol" % np.mean(data.gas.masses.to(unyt.Msun).value))
+
+plasmaBeta = data.gas.pressures / (normB ** 2 / (2 * mu0))
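+# plasma beta: ratio of thermal to magnetic pressure, P / (B^2 / (2 mu0))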
+
+data.gas.mass_weighted_densities = data.gas.masses * data.gas.densities
+data.gas.mass_weighted_error = data.gas.masses * np.log10(
+    np.maximum(h * abs(divB) / normB, 1e-6)
+)
+data.gas.mass_weighted_vr = data.gas.masses * vr
+data.gas.mass_weighted_Bz = data.gas.masses * abs(B[:, 2])
+data.gas.mass_weighted_Bx = data.gas.masses * abs(B[:, 0])
+data.gas.mass_weighted_By = data.gas.masses * abs(B[:, 1])
+# data.gas.mass_weighted_J = (
+#    data.gas.masses * np.sqrt(J[:, 0] ** 2 + J[:, 1] ** 2 + J[:, 2] ** 2) / mu0
+# )
+
+data.gas.mass_weighted_plasmaBeta = data.gas.masses * plasmaBeta
+data.gas.mass_weighted_normv = data.gas.masses * normv
+data.gas.mass_weighted_normB = data.gas.masses * normB
+
+"""
+rotation_matrix = [[1,0,0],
+                   [0,1,0],
+                   [0,0,1]]
+"""
+rotation_matrix = [[0, 0, 1], [0, 1, 0], [-1, 0, 0]]
+
+# set plot area
+Lslice_kPc = 20.0  # 2*21.5
+Lslice = Lslice_kPc * 3.08e18 * 1e3 * unyt.cm
+
+visualise_region_xy = [
+    center[0] - Lslice,
+    center[0] + Lslice,
+    center[1] - Lslice,
+    center[1] + Lslice,
+]
+
+visualise_region = [
+    center[0] - Lslice,
+    center[0] + Lslice,
+    center[2] - Lslice,
+    center[2] + Lslice,
+]
+
+
+common_arguments_xy = dict(
+    data=data,
+    resolution=512,
+    parallel=True,
+    region=visualise_region_xy,
+    z_slice=center[2],  # 0.0 * unyt.cm,
+)
+
+common_arguments = dict(
+    data=data,
+    resolution=512,
+    parallel=True,
+    rotation_center=unyt.unyt_array(center),
+    rotation_matrix=rotation_matrix,
+    region=visualise_region,
+    z_slice=0.0 * unyt.cm,
+)
+
+
+# Mass-weighted slices; each is normalised by the corresponding mass slice below
+
+mass_map_xy = slice_gas(**common_arguments_xy, project="masses")
+mass_weighted_density_map_xy = slice_gas(
+    **common_arguments_xy, project="mass_weighted_densities"
+)
+
+mass_map = slice_gas(**common_arguments, project="masses")
+mass_weighted_density_map = slice_gas(
+    **common_arguments, project="mass_weighted_densities"
+)
+mass_weighted_error_map = slice_gas(**common_arguments, project="mass_weighted_error")
+mass_weighted_error_map_xy = slice_gas(
+    **common_arguments_xy, project="mass_weighted_error"
+)
+mass_weighted_vr_map = slice_gas(**common_arguments, project="mass_weighted_vr")
+mass_weighted_Bz_map = slice_gas(**common_arguments, project="mass_weighted_Bz")
+mass_weighted_Bx_map = slice_gas(**common_arguments, project="mass_weighted_Bx")
+mass_weighted_By_map = slice_gas(**common_arguments, project="mass_weighted_By")
+# mass_weighted_J_map = slice_gas(**common_arguments, project="mass_weighted_J")
+
+mass_weighted_plasmaBeta_map = slice_gas(
+    **common_arguments, project="mass_weighted_plasmaBeta"
+)
+mass_weighted_normv_map = slice_gas(**common_arguments, project="mass_weighted_normv")
+mass_weighted_normv_map_xy = slice_gas(
+    **common_arguments_xy, project="mass_weighted_normv"
+)
+mass_weighted_normB_map = slice_gas(**common_arguments, project="mass_weighted_normB")
+mass_weighted_normB_map_xy = slice_gas(
+    **common_arguments_xy, project="mass_weighted_normB"
+)
+mass_weighted_Bz_map_xy = slice_gas(**common_arguments_xy, project="mass_weighted_Bz")
+mass_weighted_Bx_map_xy = slice_gas(**common_arguments_xy, project="mass_weighted_Bx")
+mass_weighted_By_map_xy = slice_gas(**common_arguments_xy, project="mass_weighted_By")
+
+density_map_xy = mass_weighted_density_map_xy / mass_map_xy
+density_map = mass_weighted_density_map / mass_map
+error_map = mass_weighted_error_map / mass_map
+error_map_xy = mass_weighted_error_map_xy / mass_map_xy
+vr_map = mass_weighted_vr_map / mass_map
+Bz_map = mass_weighted_Bz_map / mass_map
+Bx_map = mass_weighted_Bx_map / mass_map
+By_map = mass_weighted_By_map / mass_map
+Bz_map_xy = mass_weighted_Bz_map_xy / mass_map_xy
+Bx_map_xy = mass_weighted_Bx_map_xy / mass_map_xy
+By_map_xy = mass_weighted_By_map_xy / mass_map_xy
+# J_map = mass_weighted_J_map / mass_map
+plasmaBeta_map = mass_weighted_plasmaBeta_map / mass_map
+normv_map = mass_weighted_normv_map / mass_map
+normv_map_xy = mass_weighted_normv_map_xy / mass_map_xy
+normB_map = mass_weighted_normB_map / mass_map
+normB_map_xy = mass_weighted_normB_map_xy / mass_map_xy
+
+# density_map_xy.convert_to_units(unyt.g * unyt.cm ** (-3))
+# density_map.convert_to_units(unyt.g * unyt.cm ** (-3))
+
+density_map_xy.convert_to_units(1e10 * (1e33 * unyt.g) * (3.08e21 * unyt.cm) ** (-3))
+density_map.convert_to_units(1e10 * (1e33 * unyt.g) * (3.08e21 * unyt.cm) ** (-3))
+
+nH_map_xy = density_map_xy / (1.67e-24 * unyt.g)
+nH_map = density_map / (1.67e-24 * unyt.g)
+
+vr_map.convert_to_units(unyt.km / unyt.s)
+Bz_map.convert_to_units(1e-7 * unyt.g / (unyt.statA * unyt.s * unyt.s))
+Bx_map.convert_to_units(1e-7 * unyt.g / (unyt.statA * unyt.s * unyt.s))
+By_map.convert_to_units(1e-7 * unyt.g / (unyt.statA * unyt.s * unyt.s))
+Bz_map_xy.convert_to_units(1e-7 * unyt.g / (unyt.statA * unyt.s * unyt.s))
+Bx_map_xy.convert_to_units(1e-7 * unyt.g / (unyt.statA * unyt.s * unyt.s))
+By_map_xy.convert_to_units(1e-7 * unyt.g / (unyt.statA * unyt.s * unyt.s))
+# J_map.convert_to_units(unyt.statA / (unyt.m ** 2))
+normv_map.convert_to_units(unyt.km / unyt.s)
+normv_map_xy.convert_to_units(unyt.km / unyt.s)
+normB_map.convert_to_units(1e-7 * unyt.g / (unyt.statA * unyt.s * unyt.s))
+normB_map_xy.convert_to_units(1e-7 * unyt.g / (unyt.statA * unyt.s * unyt.s))
+# normB_map = np.sqrt(Bx_map**2+By_map**2+Bz_map**2)
+# normB_map_xy = np.sqrt(Bx_map_xy**2+By_map_xy**2+Bz_map_xy**2)
+
+nx = 2
+ny = 4
+fig, ax = plt.subplots(ny, nx, sharey=True, figsize=(5 * nx, 5 * ny))
+
+# fig.suptitle("Plotting at %.3f free fall times" % toplot)
+
+a00 = ax[0, 0].contourf(
+    np.log10(nH_map.value),
+    # np.log10(density_map.value),
+    cmap="jet",  # "plasma",#"gist_stern",#"RdBu",
+    extend="both",
+    levels=np.linspace(-4, 2, 100),
+    # levels=np.linspace(-7, -1, 100),
+)
+
+a01 = ax[0, 1].contourf(
+    np.log10(nH_map_xy.value).T,
+    # np.log10(density_map_xy.value).T,
+    cmap="jet",  # "plasma",#"gist_stern", #"RdBu",
+    extend="both",
+    levels=np.linspace(-4, 2, 100),
+    # levels=np.linspace(-7, -1, 100),
+)
+
+# a10 = ax[1, 0].contourf(
+#    np.log10(np.maximum(normv_map.value,-1)), cmap="cividis", extend="both", levels=np.linspace(1.0, 2.0, 100)
+# )
+# a11 = ax[1, 1].contourf(
+#    np.log10(np.maximum(normv_map_xy.value,-1)), cmap="cividis", extend="both", levels=np.linspace(1.0, 2.0, 100)
+# )
+# a11 = ax[1, 1].contourf(
+#    error_map.value, cmap="jet", extend="both", levels=np.arange(-6.0, 3.0, 1.0)
+# )
+# a20 = ax[2, 0].contourf(
+#    np.log10(np.maximum(plasmaBeta_map.value, -4)),
+#    cmap="gist_stern",
+#    extend="both",
+#    levels=np.linspace(-3, 4, 100),
+# )
+a10 = ax[1, 0].contourf(
+    np.maximum(np.log10(normB_map.value), -6),
+    cmap="jet",  # "viridis", #"nipy_spectral_r",
+    extend="both",
+    levels=np.linspace(-2, 2, 100),
+)
+a11 = ax[1, 1].contourf(
+    np.maximum(np.log10(normB_map_xy.value), -6).T,
+    cmap="jet",  # "viridis", #"nipy_spectral_r",
+    extend="both",
+    levels=np.linspace(-2, 2, 100),
+)
+
+a20 = ax[2, 0].contourf(
+    error_map.value, cmap="jet", extend="both", levels=np.arange(-6.0, 3.0, 1.0)
+)
+a21 = ax[2, 1].contourf(
+    (error_map_xy.value).T, cmap="jet", extend="both", levels=np.arange(-6.0, 3.0, 1.0)
+)
+
+a30 = ax[3, 0].contourf(
+    np.maximum(np.log10(plasmaBeta_map.value), -10),
+    cmap="bwr",
+    extend="both",
+    levels=np.linspace(0.0, 1.0, 100),
+)
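+# Tick positions in pixel coordinates (512x512 maps) and matching labels in kpc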
+locs = [512 / 4, 512 / 2, 3 * 512 / 4]
+labels = [-Lslice_kPc / 2, 0, Lslice_kPc / 2]
+
+for ii in range(ny):
+    ax[ii, 0].set_ylabel(r"$z$ [kpc]")
+    ax[ii, 0].set_yticks(locs, labels)
+
+for ii in range(ny):
+    for jj in range(nx):
+        ax[ii, jj].set_xlabel(r"$x$ [kpc]")
+        ax[ii, jj].set_xticks(locs, labels)
+        # ax[ii, jj].set_aspect("equal")
+        ax[ii, jj].set_aspect("equal", adjustable="box")
+        ax[ii, jj].set_xlim(0, 511)
+        ax[ii, jj].set_ylim(0, 511)
+
+
+ticks_rho = [-4, -3, -2, -1, 0, 1, 2]
+# ticks_rho = [-7,-6,-5,-4,-3,-2,-1]
+cbar1 = plt.colorbar(a00, ax=ax[0, 0], fraction=0.046, pad=0.04, ticks=ticks_rho)
+cbar1.set_label(r"$\mathrm{log}_{10} \rho / m_H \; [ {cm}^{-3}]$")
+# cbar1.set_label(r"$\mathrm{log}_{10} \rho \; [10^{10} {M}_{sol} {kpc}^{-3}]$")
+cbar2 = plt.colorbar(a01, ax=ax[0, 1], fraction=0.046, pad=0.04, ticks=ticks_rho)
+ax[0, 1].set_ylabel(r"$y$ [kpc]")
+cbar2.set_label(r"$\mathrm{log}_{10} \rho / m_H \; [ {cm}^{-3}]$")
+# cbar2.set_label(r"$\mathrm{log}_{10} \rho \; [10^{10} {M}_{sol} {kpc}^{-3}]$")
+
+# cbar3 = plt.colorbar(
+#    a10, ax=ax[1, 0], fraction=0.046, pad=0.04, ticks=np.linspace(1, 2, 2)
+# )
+# cbar3.set_label(r"$\mathrm{log}_{10} |v| \; [km s^{-1}]$")
+#
+# cbar4 = plt.colorbar(
+#    a11, ax=ax[1, 1], fraction=0.046, pad=0.04, ticks=np.linspace(1, 2, 2)
+# )
+# cbar4.set_label(r"$\mathrm{log}_{10} |v| \; [km s^{-1}]$")
+ax[1, 1].set_ylabel(r"$y$ [kpc]")
+# cbar4 = plt.colorbar(a11, ax=ax[1, 1], fraction=0.046, pad=0.04)
+# cbar4.set_label(r"$\mathrm{log}_{10} \frac{h \: \nabla \cdot B}{|B|}$")
+# ticks_Beta = [-3,-1.83,-0.667,0.500,1.67,2.83,4.0]
+# cbar5 = plt.colorbar(
+#    a20, ax=ax[2, 0], fraction=0.046, pad=0.04, ticks=ticks_Beta
+# )
+# cbar5.set_label(r"$\mathrm{log}_{10} \beta \;$")
+ticks_B = [-2, -1, 0, 1, 2]
+cbar3 = plt.colorbar(a10, ax=ax[1, 0], fraction=0.046, pad=0.04, ticks=ticks_B)
+cbar3.set_label(r"$\mathrm{log}_{10} B \; [\mu G]$")
+cbar4 = plt.colorbar(a11, ax=ax[1, 1], fraction=0.046, pad=0.04, ticks=ticks_B)
+cbar4.set_label(r"$\mathrm{log}_{10} B \; [\mu G]$")
+
+ax[2, 1].set_ylabel(r"$y$ [kpc]")
+
+cbar5 = plt.colorbar(a20, ax=ax[2, 0], fraction=0.046, pad=0.04)
+cbar5.set_label(r"$\mathrm{log}_{10} \frac{h \: \nabla \cdot B}{|B|}$")
+
+cbar6 = plt.colorbar(a21, ax=ax[2, 1], fraction=0.046, pad=0.04)
+cbar6.set_label(r"$\mathrm{log}_{10} \frac{h \: \nabla \cdot B}{|B|}$")
+
+ticksBeta = [0, 1]
+cbar7 = plt.colorbar(a30, ax=ax[3, 0], fraction=0.046, pad=0.04, ticks=ticksBeta)
+cbar7.set_label(r"$\mathrm{log}_{10} \beta $")
+
+# overlay in-plane velocity and magnetic-field direction arrows (quiver)
+
+data.gas.mass_weighted_vx = data.gas.masses * v[:, 0]
+data.gas.mass_weighted_vy = data.gas.masses * v[:, 1]
+mass_weighted_vx_map_xy = slice_gas(**common_arguments_xy, project="mass_weighted_vx")
+mass_weighted_vy_map_xy = slice_gas(**common_arguments_xy, project="mass_weighted_vy")
+vx_map_xy = mass_weighted_vx_map_xy / mass_map_xy
+vy_map_xy = mass_weighted_vy_map_xy / mass_map_xy
+
+vx_map_xy = vx_map_xy.value
+vy_map_xy = vy_map_xy.value
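+# Normalise the in-plane components to unit vectors so the arrows show direction only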
+vplanenorm_map_xy = np.sqrt(vx_map_xy ** 2 + vy_map_xy ** 2)
+vx_map_xy /= vplanenorm_map_xy
+vy_map_xy /= vplanenorm_map_xy
+
+dimx = 512
+dimy = 512
+new_x = np.linspace(0, dimx, dimx)
+new_y = np.linspace(0, dimy, dimy)
+
+step = 20
+ax[0, 1].quiver(
+    new_x[::step],
+    new_y[::step],
+    np.transpose(vx_map_xy.reshape((dimx, dimy)))[::step, ::step],
+    np.transpose(vy_map_xy.reshape((dimx, dimy)))[::step, ::step],
+    color="black",
+    scale=1.5 / step,
+    scale_units="xy",  # Fixes the arrow length in data coordinates
+    pivot="middle"
+    # density=1.0,
+    # linewidth=0.2,
+    # arrowsize=0.4,
+)
+
+
+data.gas.mass_weighted_vx = data.gas.masses * v[:, 0]
+data.gas.mass_weighted_vz = data.gas.masses * v[:, 2]
+mass_weighted_vx_map = slice_gas(**common_arguments, project="mass_weighted_vx")
+mass_weighted_vz_map = slice_gas(**common_arguments, project="mass_weighted_vz")
+vx_map = mass_weighted_vx_map / mass_map
+vz_map = mass_weighted_vz_map / mass_map
+
+vx_map = vx_map.value
+vz_map = vz_map.value
+vplanenorm_map = np.sqrt(vx_map ** 2 + vz_map ** 2)
+vx_map /= vplanenorm_map
+vz_map /= vplanenorm_map
+
+
+dimx = 512
+dimy = 512
+new_x = np.linspace(0, dimx, dimx)
+new_y = np.linspace(0, dimy, dimy)
+
+step = 20
+ax[0, 0].quiver(
+    new_x[::step],
+    new_y[::step],
+    np.transpose(vx_map.reshape((dimx, dimy)))[::step, ::step],
+    np.transpose(vz_map.reshape((dimx, dimy)))[::step, ::step],
+    color="black",
+    scale=1.5 / step,
+    scale_units="xy",  # Fixes the arrow length in data coordinates
+    pivot="middle"
+    # density=1.0,
+    # linewidth=0.2,
+    # arrowsize=0.4,
+)
+
+data.gas.mass_weighted_Bx = data.gas.masses * B[:, 0]
+data.gas.mass_weighted_By = data.gas.masses * B[:, 1]
+mass_weighted_Bx_map_xy = slice_gas(**common_arguments_xy, project="mass_weighted_Bx")
+mass_weighted_By_map_xy = slice_gas(**common_arguments_xy, project="mass_weighted_By")
+Bx_map_xy = mass_weighted_Bx_map_xy / mass_map_xy
+By_map_xy = mass_weighted_By_map_xy / mass_map_xy
+# Bx_map_xy.convert_to_units(1e-7*unyt.g / (unyt.statA * unyt.s * unyt.s)).values
+# By_map_xy.convert_to_units(1e-7*unyt.g / (unyt.statA * unyt.s * unyt.s)).values
+
+Bx_map_xy = Bx_map_xy.value
+By_map_xy = By_map_xy.value
+Bplanenorm_map_xy = np.sqrt(Bx_map_xy ** 2 + By_map_xy ** 2)
+Bx_map_xy /= Bplanenorm_map_xy
+By_map_xy /= Bplanenorm_map_xy
+
+dimx = 512
+dimy = 512
+new_x = np.linspace(0, dimx, dimx)
+new_y = np.linspace(0, dimy, dimy)
+
+step = 20
+ax[1, 1].quiver(
+    new_x[::step],
+    new_y[::step],
+    np.transpose(Bx_map_xy.reshape((dimx, dimy)))[::step, ::step],
+    np.transpose(By_map_xy.reshape((dimx, dimy)))[::step, ::step],
+    color="black",
+    scale=1.5 / step,
+    scale_units="xy",  # Fixes the arrow length in data coordinates
+    pivot="middle"
+    # density=1.0,
+    # linewidth=0.2,
+    # arrowsize=0.4,
+)
+
+data.gas.mass_weighted_Bx = data.gas.masses * B[:, 0]
+data.gas.mass_weighted_Bz = data.gas.masses * B[:, 2]
+mass_weighted_Bx_map = slice_gas(**common_arguments, project="mass_weighted_Bx")
+mass_weighted_Bz_map = slice_gas(**common_arguments, project="mass_weighted_Bz")
+Bx_map = mass_weighted_Bx_map / mass_map
+Bz_map = mass_weighted_Bz_map / mass_map
+
+Bx_map = Bx_map.value
+Bz_map = Bz_map.value
+Bplanenorm_map = np.sqrt(Bx_map ** 2 + Bz_map ** 2)
+Bx_map /= Bplanenorm_map
+Bz_map /= Bplanenorm_map
+
+
+dimx = 512
+dimy = 512
+new_x = np.linspace(0, dimx, dimx)
+new_y = np.linspace(0, dimy, dimy)
+
+step = 20
+ax[1, 0].quiver(
+    new_x[::step],
+    new_y[::step],
+    np.transpose(Bx_map.reshape((dimx, dimy)))[::step, ::step],
+    np.transpose(Bz_map.reshape((dimx, dimy)))[::step, ::step],
+    color="black",
+    scale=1.5 / step,
+    scale_units="xy",  # Fixes the arrow length in data coordinates
+    pivot="middle"
+    # density=1.0,
+    # linewidth=0.2,
+    # arrowsize=0.4,
+)
+
+
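+# Report the particle with the largest speed, together with its field strength,
+# divergence error and smoothing length (useful for spotting outliers).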
+index = np.argmax(normv)
+vpart = normv[index]
+rpart = r[index]
+Bpart = normB[index]
+Berr = np.maximum(h * abs(divB) / normB, 1e-6)
+Berrpart = Berr[index]
+hpart = h[index]
+vpart.convert_to_units(unyt.km / unyt.s)
+rpart.convert_to_units(3.08e18 * 1e3 * unyt.cm)
+hpart.convert_to_units(3.08e18 * 1e3 * unyt.cm)
+Bpart.convert_to_units(1e-7 * unyt.g / (unyt.statA * unyt.s * unyt.s))
+print(
+    f"Particle {index}, \nwith maximal velocity {vpart.value} km/s, \nmagnetic field {Bpart.value} muG, \nwith error level {Berrpart.value}, \nwith smoothing length {hpart.value} kPc, \nlocated at {rpart.value} kPc"
+)
+
+part_pixel_coords = 512 / 2 * (1 + rpart.value / Lslice_kPc)
+
+# ax[1,0].scatter(part_pixel_coords[0],part_pixel_coords[2],color='red',marker='x')
+
+
+# add a panel with information about the run
+text_common_args = dict(
+    fontsize=10, ha="center", va="center", transform=ax[3, 1].transAxes
+)
+
+ax[3, 1].text(
+    0.5,
+    0.8,
+    "Cooling halo with spin at time $t=%.2f$ Gyr" % data.metadata.time,
+    **text_common_args,
+)
+ax[3, 1].text(0.5, 0.7, "swift %s" % git.decode("utf-8"), **text_common_args)
+ax[3, 1].text(0.5, 0.6, "Branch %s" % gitBranch.decode("utf-8"), **text_common_args)
+ax[3, 1].text(0.5, 0.5, hydroScheme.decode("utf-8"), **text_common_args)
+ax[3, 1].text(
+    0.5,
+    0.4,
+    kernel.decode("utf-8") + " with $%.2f$ neighbours" % (neighbours),
+    **text_common_args,
+)
+ax[3, 1].text(
+    0.5, 0.3, "Artificial diffusion: $%.2f$ " % (artDiffusion), **text_common_args
+)
+ax[3, 1].text(
+    0.5,
+    0.2,
+    "Dedner Hyp, Hyp_div(v), Par: $%.2f,%.2f,%.2f$ " % (dedHyp, dedHypDivv, dedPar),
+    **text_common_args,
+)
+ax[3, 1].text(
+    0.5, 0.1, "Physical resistivity $\eta$: $%.2f$ " % (eta), **text_common_args
+)
+ax[3, 1].text(0.5, 0.0, "Number of particles $N_p$: $%.0f$ " % (Np), **text_common_args)
+ax[3, 1].axis("off")
+
+fig.tight_layout()
+
+plt.savefig(sys.argv[2], dpi=220)
diff --git a/examples/MHDTests/CoolingHaloMHD_feedback/plotSpectrum.py b/examples/MHDTests/CoolingHaloMHD_feedback/plotSpectrum.py
new file mode 100644
index 0000000000000000000000000000000000000000..576975125b4a3d7433f922896743553beb8e6197
--- /dev/null
+++ b/examples/MHDTests/CoolingHaloMHD_feedback/plotSpectrum.py
@@ -0,0 +1,339 @@
+import numpy as np
+import h5py
+import unyt
+import matplotlib.pyplot as plt
+import argparse
+
+from swiftsimio import load
+from swiftsimio.visualisation.volume_render import render_gas
+
+
+# Parse command line arguments
+argparser = argparse.ArgumentParser()
+argparser.add_argument("input")
+argparser.add_argument("output")
+#argparser.add_argument("resolution")
+args = argparser.parse_args()
+
+# Load snapshot
+filename = args.input
+data = load(filename)
+
+time = np.round(data.metadata.time,2)
+print("Plotting at ", time)
+
+Lbox = data.metadata.boxsize
+
+# Retrieve some information about the simulation run
+artDiffusion = data.metadata.hydro_scheme["Artificial Diffusion Constant"]
+dedHyp = data.metadata.hydro_scheme["Dedner Hyperbolic Constant"]
+dedHypDivv = data.metadata.hydro_scheme["Dedner Hyperbolic div(v) Constant"]
+dedPar = data.metadata.hydro_scheme["Dedner Parabolic Constant"]
+eta = data.metadata.hydro_scheme["Resistive Eta"]
+git = data.metadata.code["Git Revision"]
+gitBranch = data.metadata.code["Git Branch"]
+hydroScheme = data.metadata.hydro_scheme["Scheme"]
+kernel = data.metadata.hydro_scheme["Kernel function"]
+neighbours = data.metadata.hydro_scheme["Kernel target N_ngb"]
+
+# Retrieve particle attributes of interest
+v = data.gas.velocities
+rho = data.gas.densities
+B = data.gas.magnetic_flux_densities
+h = data.gas.smoothing_lengths
+normB = np.sqrt(B[:, 0] ** 2 + B[:, 1] ** 2 + B[:, 2] ** 2)
+
+divB = data.gas.magnetic_divergences
+minh = np.min(h.value)
+
+# Renormalize quantities
+mu0 = 1.25663706127e-6 * unyt.kg * unyt.m / (unyt.s ** 2 * unyt.statA ** 2)
+# note that older swiftsimio version is using statA even if swift has MF as A
+#mu0 = 1.25663706127e-1 * unyt.g * unyt.cm / (unyt.s ** 2 * unyt.statA ** 2)
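+# Rescale so that |v|^2 and |B|^2 are energy densities (rho v^2 / 2 and B^2 / (2 mu0));
+# the spectra computed below are then energy spectra in the same units for both fields.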
+v = v * (rho[:, None]/2)**0.5
+B = B / np.sqrt(2*mu0)
+
+#Npside = int(len(h)**(1/3))+1
+
+# Generate mass weighted maps of quantities of interest
+data.gas.mass_weighted_vx = data.gas.masses * v[:,0]
+data.gas.mass_weighted_vy = data.gas.masses * v[:,1]
+data.gas.mass_weighted_vz = data.gas.masses * v[:,2]
+
+data.gas.mass_weighted_Bx = data.gas.masses * B[:,0]
+data.gas.mass_weighted_By = data.gas.masses * B[:,1]
+data.gas.mass_weighted_Bz = data.gas.masses * B[:,2]
+
+data.gas.mass_weighted_error = data.gas.masses * np.maximum(
+    h * abs(divB) / (normB + 0.01 * np.max(normB)), 1e-6
+)
+
+#res = 128 #Npside #int(args.resolution)
+
+#Lslice = 0.5*Lbox
+Lslice_kPc = 20  # 2*21.5
+Lslice = Lslice_kPc * 3.08e18 * 1e3 * unyt.cm
+
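+# Wavenumbers bracketing the sampled range: the slice size sets the smallest k,
+# the smallest smoothing length the largest resolvable k.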
+k_slice = 2*np.pi/Lslice
+k_res = np.max(2*np.pi/h)
+
+res = int((2*Lslice/np.min(h)).value) #Npside #int(args.resolution)
+
+# resolution limiter
+resmax = 512
+print('Suggested maximal resolution: ', res)
+res = min(res, resmax)
+
+print('Spectral resolution: ', res)
+
+center = Lbox/2
+visualise_region = [
+    center[0] - Lslice,
+    center[0] + Lslice,
+    center[1] - Lslice,
+    center[1] + Lslice,
+    center[2] - Lslice,
+    center[2] + Lslice,
+]
+
+common_arguments = dict(
+    data=data, resolution=res, parallel=True, periodic=True, region=visualise_region
+)
+
+
+mass_cube = render_gas(**common_arguments, project="masses")
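+# Guard against empty voxels: give them a tiny mass so the normalisation below
+# does not divide by zero.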
+min_nonzero_m = np.min(mass_cube.flatten()[mass_cube.flatten().value != 0.0])
+mass_cube[mass_cube.value == 0] = 1e-2 * min_nonzero_m
+
+mass_weighted_vx_cube = render_gas(**common_arguments, project="mass_weighted_vx")
+mass_weighted_vy_cube = render_gas(**common_arguments, project="mass_weighted_vy")
+mass_weighted_vz_cube = render_gas(**common_arguments, project="mass_weighted_vz")
+
+mass_weighted_Bx_cube = render_gas(**common_arguments, project="mass_weighted_Bx")
+mass_weighted_By_cube = render_gas(**common_arguments, project="mass_weighted_By")
+mass_weighted_Bz_cube = render_gas(**common_arguments, project="mass_weighted_Bz")
+
+mass_weighted_error_cube = render_gas(**common_arguments, project="mass_weighted_error")
+
+
+vx_cube = mass_weighted_vx_cube/mass_cube
+vy_cube = mass_weighted_vy_cube/mass_cube
+vz_cube = mass_weighted_vz_cube/mass_cube
+
+Bx_cube = mass_weighted_Bx_cube/mass_cube
+By_cube = mass_weighted_By_cube/mass_cube
+Bz_cube = mass_weighted_Bz_cube/mass_cube
+
+error_cube = mass_weighted_error_cube/mass_cube 
+
+unit_energy = unyt.g * unyt.cm**2 / unyt.s**2
+unit_energy_density = unit_energy / unyt.cm**3
+unit_sqrt_energy_density = (unit_energy_density)**0.5
+unit_length = 1e3*unyt.pc
+
+vx_cube.convert_to_units(unit_sqrt_energy_density)
+vy_cube.convert_to_units(unit_sqrt_energy_density)
+vz_cube.convert_to_units(unit_sqrt_energy_density)
+
+Bx_cube.convert_to_units(unit_sqrt_energy_density)
+By_cube.convert_to_units(unit_sqrt_energy_density)
+Bz_cube.convert_to_units(unit_sqrt_energy_density)
+
+k_slice.convert_to_units(1/unit_length)
+k_res.convert_to_units(1/unit_length)
+
+def compute_power_spectrum_scal(Q, dx, nbins):
+    # Grid size
+    Nx, Ny, Nz = Q.shape
+
+    # Compute Fourier transforms
+    Q_k = np.fft.fftn(Q)
+
+    # Compute power spectrum (squared magnitude of Fourier components)
+    Q_power_k = np.abs(Q_k)**2
+
+    # Compute the corresponding wavenumbers
+    kx = np.fft.fftfreq(Nx, d=dx) * 2 * np.pi
+    ky = np.fft.fftfreq(Ny, d=dx) * 2 * np.pi
+    kz = np.fft.fftfreq(Nz, d=dx) * 2 * np.pi
+
+    # Create 3D arrays of wavevectors
+    KX, KY, KZ = np.meshgrid(kx, ky, kz, indexing='ij')
+    k_mag = np.sqrt(KX**2 + KY**2 + KZ**2)
+
+    # Flatten arrays for binning
+    k_mag_flat = k_mag.flatten()
+    Q_power_flat = Q_power_k.flatten()
+
+    # Define k bins (you can tweak bin size)
+    k_bins = np.linspace(0, np.max(2*np.pi/minh), num=nbins)
+
+    # exclude 0th mode:
+    k_bins = k_bins[1:]
+    k_mag_flat = k_mag_flat[1:]
+    Q_power_flat = Q_power_flat[1:]
+
+    k_bin_centers = 0.5 * (k_bins[1:] + k_bins[:-1])
+
+    # Bin the power spectrum
+    power_spectrum, _ = np.histogram(k_mag_flat, bins=k_bins, weights=Q_power_flat)
+    counts, _ = np.histogram(k_mag_flat, bins=k_bins)
+    
+    # Avoid division by zero
+    power_spectrum = np.where(counts > 0, power_spectrum / counts, 0)
+
+    return k_bin_centers, power_spectrum
+
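+# Shell-averaged energy spectra of a vector field: E(k) = 4 pi k^2 <|Q_k|^2>,
+# plus the longitudinal (divergence-carrying) part obtained by projecting Q_k
+# onto the unit wavevector.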
+def compute_power_spectrum_vec(Qx, Qy, Qz, dx,nbins):
+    # Ensure all arrays are unyt arrays with same units
+    dx = dx #.to(unyt.pc)
+    volume_element = dx**3  # single cell volume in cm^3
+
+    # Get shape
+    Nx, Ny, Nz = Qx.shape
+
+    # Fourier transform (keep units)
+    Qx_k = np.fft.fftn(Qx.value) * volume_element.value
+    Qy_k = np.fft.fftn(Qy.value) * volume_element.value
+    Qz_k = np.fft.fftn(Qz.value) * volume_element.value
+
+    # Reapply units
+    Qx_k = Qx_k * Qx.units * volume_element.units
+    Qy_k = Qy_k * Qy.units * volume_element.units
+    Qz_k = Qz_k * Qz.units * volume_element.units
+
+    # Power in k-space
+    Pk = (np.abs(Qx_k)**2 + np.abs(Qy_k)**2 + np.abs(Qz_k)**2)#.to(unyt.g/(unyt.s**2)*unyt.cm**5)  # or appropriate units
+
+    # Compute wavenumbers
+    kx = np.fft.fftfreq(Nx, d=dx.value) * 2 * np.pi / dx.units #unyt.cm**-1
+    ky = np.fft.fftfreq(Ny, d=dx.value) * 2 * np.pi / dx.units #unyt.cm**-1
+    kz = np.fft.fftfreq(Nz, d=dx.value) * 2 * np.pi / dx.units #unyt.cm**-1
+    KX, KY, KZ = np.meshgrid(kx, ky, kz, indexing='ij')
+    k_mag = np.sqrt(KX**2 + KY**2 + KZ**2)
+
+    # Calculate monopole (longitudinal) component: project Q_k onto the unit
+    # wavevector; the ill-defined k = 0 mode is dropped below
+    Q_dot_k = Qx_k * KX + Qy_k * KY + Qz_k * KZ
+    Qx_k_div = KX * Q_dot_k / k_mag**2
+    Qy_k_div = KY * Q_dot_k / k_mag**2
+    Qz_k_div = KZ * Q_dot_k / k_mag**2
+    Pk_div = (np.abs(Qx_k_div)**2 + np.abs(Qy_k_div)**2 + np.abs(Qz_k_div)**2)#.to(unyt.g/(unyt.s**2)*unyt.cm**5)  # or appropriate units
+
+    # Flatten and bin
+    k_flat = k_mag.flatten() #.to('1/cm')
+    Pk_flat = Pk.flatten() #.to(unyt.g/(unyt.s**2)*unyt.cm**5)  # this should be correct after unit reduction
+    Pk_div_flat = Pk_div.flatten()
+
+    # Bin in k-space
+    k_min_log = np.log10(np.min(k_flat.value[k_flat.value!=0]))
+    k_max_log = np.log10(np.max(k_flat.value))
+    k_bins = np.logspace(k_min_log,k_max_log, num=nbins) * k_flat.units  #*unyt.cm**-1
+
+    # exclude 0th mode:
+    k_bins = k_bins[1:]
+    k_flat = k_flat[1:]
+    Pk_flat = Pk_flat[1:]
+    Pk_div_flat = Pk_div_flat[1:]
+
+    # converting to Energy per unit wavevector
+    Ek_flat = 4*np.pi * k_flat**2 * Pk_flat
+    Ek_div_flat = 4*np.pi * k_flat**2 * Pk_div_flat
+
+    k_bin_centers = 0.5 * (k_bins[1:] + k_bins[:-1])
+
+    power_spectrum = np.zeros(len(k_bin_centers)) * Ek_flat.units
+    power_spectrum_div = np.zeros(len(k_bin_centers)) * Ek_div_flat.units
+    counts = np.zeros(len(k_bin_centers))
+   
+    for i in range(len(k_bin_centers)):
+        in_bin = (k_flat >= k_bins[i]) & (k_flat < k_bins[i+1])
+        power_spectrum[i] = Ek_flat[in_bin].sum()
+        power_spectrum_div[i] = Ek_div_flat[in_bin].sum()
+        counts[i] = in_bin.sum()
+
+    # Normalize per mode (optional)
+    power_spectrum = np.where(counts > 0, power_spectrum / counts, 0 * power_spectrum.units)
+    power_spectrum_div = np.where(counts > 0, power_spectrum_div / counts, 0 * power_spectrum_div.units)
+
+    # mask non-zero
+    mask_zeros = power_spectrum==0
+    k_bin_centers = k_bin_centers[~mask_zeros]
+    power_spectrum = power_spectrum[~mask_zeros]
+    power_spectrum_div = power_spectrum_div[~mask_zeros]
+
+    return k_bin_centers, power_spectrum, power_spectrum_div
+
+dx = 2*Lslice.to(unit_length)/(res)
+
+# plot magnetic field spectrum
+ks, Eb, Eb_div = compute_power_spectrum_vec(Bx_cube,By_cube,Bz_cube, dx = dx, nbins=res-1 )
+# plot velocity spectrum
+ks, Ev, _ = compute_power_spectrum_vec(vx_cube,vy_cube,vz_cube, dx = dx, nbins=res-1 )
+# plot divergence error spectrum
+#ks, Perr = compute_power_spectrum_scal(error_cube, dx = dx, nbins=res-1 )
+
+Eb.convert_to_units(unyt.erg*(1e3*unyt.pc))
+Eb_div.convert_to_units(unyt.erg*(1e3*unyt.pc))
+
+Ev.convert_to_units(unyt.erg*(1e3*unyt.pc))
+ks.convert_to_units(1e-3/unyt.pc)
+
+
+fig, ax = plt.subplots(figsize=(10, 6.2))
+
+ax.plot(ks,Ev.value,color='blue',label='$E_v(k)$')
+ax.plot(ks,Eb.value,color='red',linestyle='solid',label='$E_B(k)$')
+ax.plot(ks,Eb_div.value,color='purple',linestyle='solid',label='$E_{B_{mon}}(k)$')
+#ax.plot(ks,Perr,color='purple',label='$P_{R_{0}}(k)$')
+
+# resolution line
+ax.axvline(x=k_res, color='brown',linestyle='solid',label = r'$k_{\mathrm{res}}$')
+
+# self gravity softening line
+ksoft = 2*np.pi / (0.2 * 1e3 * unyt.pc)
+ksoft.convert_to_units(1e-3/unyt.pc)
+ax.axvline(x=ksoft, color='brown',linestyle='dashdot',label = r'$k_{\mathrm{soft}}$')
+
+# reference k^{-5/3} (Kolmogorov) slope
+ksmock = np.logspace(np.log10(ksoft.value)-1,np.log10(ksoft.value)-0.5,10)*ksoft.units
+p = -5/3
+Eright = 1e57 * unyt.erg*(1e3*unyt.pc)
+kright = ksoft
+kright.convert_to_units(1e-3/unyt.pc)
+Emock = Eright*(ksmock/kright)**(p)
+Emock.convert_to_units(unyt.erg*(1e3*unyt.pc))
+ax.plot(ksmock,Emock,color='black',linestyle='dashed')#,label = '$k^{-5/3}$')
+
+klabel = ksoft/10**0.45
+klabel.convert_to_units(1e-3/unyt.pc)
+Elabel = Eright*(klabel/kright)**(p)
+Elabel.convert_to_units(unyt.erg*(1e3*unyt.pc))
+ax.text(klabel,Elabel,r'$k^{-5/3}$')
+
+# reference k^{3/2} slope
+ksmock = np.logspace(np.log10(k_slice.value),np.log10(k_slice.value)+0.5,10)*k_slice.units
+p = 3/2
+Eleft = 1e57 * unyt.erg*(1e3*unyt.pc)
+kleft = k_slice
+kleft.convert_to_units(1e-3/unyt.pc)
+Emock = Eleft*(ksmock/kleft)**(p)
+Emock.convert_to_units(unyt.erg*(1e3*unyt.pc))
+ax.plot(ksmock,Emock,color='black',linestyle='dashed')#,label = '$k^{-5/3}$')
+
+klabel = k_slice/10**0.15
+klabel.convert_to_units(1e-3/unyt.pc)
+Elabel = Eleft*(klabel/kleft)**(p)
+Elabel.convert_to_units(unyt.erg*(1e3*unyt.pc))
+ax.text(klabel,Elabel,r'$k^{3/2}$')
+
+ax.set_yscale('log')
+ax.set_xscale('log')
+ax.set_xlabel(r'$\mathrm{k}$ $[\mathrm{kpc}^{-1}]$', fontsize=30)
+ax.set_ylabel(r'$\mathrm{E}(\mathrm{k})$ $[\mathrm{erg}\cdot\mathrm{kpc}]$', fontsize=30)
+ax.tick_params(axis='both', labelsize=20)
+#ax.grid()
+ax.legend()
+fig.tight_layout()
+plt.savefig(args.output)
+
diff --git a/examples/MHDTests/CoolingHaloMHD_feedback/plot_box_evolution.py b/examples/MHDTests/CoolingHaloMHD_feedback/plot_box_evolution.py
new file mode 100644
index 0000000000000000000000000000000000000000..05827463e9b6ab43d14e1f820316c41adbeb9d49
--- /dev/null
+++ b/examples/MHDTests/CoolingHaloMHD_feedback/plot_box_evolution.py
@@ -0,0 +1,206 @@
+###############################################################################
+# This file is part of SWIFT.
+# Copyright (c) 2015 Bert Vandenbroucke (bert.vandenbroucke@ugent.be)
+#                    Matthieu Schaller (schaller@strw.leidenuniv.nl)
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+#
+##############################################################################
+
+import matplotlib
+
+matplotlib.use("Agg")
+from pylab import *
+from scipy import stats
+import h5py
+import numpy as np
+import glob
+import os.path
+
+
+def find_indices(a, b):
+    result = np.zeros(len(b))
+    for i in range(len(b)):
+        result[i] = ((np.where(a == b[i]))[0])[0]
+
+    return result
+
+
+# Plot parameters
+params = {
+    "axes.labelsize": 10,
+    "axes.titlesize": 10,
+    "font.size": 12,
+    "legend.fontsize": 12,
+    "xtick.labelsize": 10,
+    "ytick.labelsize": 10,
+    "text.usetex": True,
+    "figure.figsize": (9.90, 6.45),
+    "figure.subplot.left": 0.1,
+    "figure.subplot.right": 0.99,
+    "figure.subplot.bottom": 0.1,
+    "figure.subplot.top": 0.95,
+    "figure.subplot.wspace": 0.2,
+    "figure.subplot.hspace": 0.2,
+    "lines.markersize": 6,
+    "lines.linewidth": 3.0,
+}
+rcParams.update(params)
+
+
+# Number of snapshots and elements
+newest_snap_name = max(glob.glob("output_*.hdf5"), key=os.path.getctime)
+n_snapshots = int(newest_snap_name.replace("output_", "").replace(".hdf5", "")) + 1
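+# (assumes snapshots are numbered contiguously as output_0000.hdf5 ... output_NNNN.hdf5)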
+n_elements = 9
+
+# Read the simulation data
+sim = h5py.File("output_0000.hdf5", "r")
+boxSize = sim["/Header"].attrs["BoxSize"][0]
+time = sim["/Header"].attrs["Time"][0]
+scheme = sim["/HydroScheme"].attrs["Scheme"]
+kernel = sim["/HydroScheme"].attrs["Kernel function"]
+neighbours = sim["/HydroScheme"].attrs["Kernel target N_ngb"]
+eta = sim["/HydroScheme"].attrs["Kernel eta"]
+git = sim["Code"].attrs["Git Revision"]
+stellar_mass = sim["/PartType4/Masses"][0]
+
+# Units
+unit_length_in_cgs = sim["/Units"].attrs["Unit length in cgs (U_L)"]
+unit_mass_in_cgs = sim["/Units"].attrs["Unit mass in cgs (U_M)"]
+unit_time_in_cgs = sim["/Units"].attrs["Unit time in cgs (U_t)"]
+unit_temp_in_cgs = sim["/Units"].attrs["Unit temperature in cgs (U_T)"]
+unit_vel_in_cgs = unit_length_in_cgs / unit_time_in_cgs
+unit_energy_in_cgs = unit_mass_in_cgs * unit_vel_in_cgs * unit_vel_in_cgs
+unit_length_in_si = 0.01 * unit_length_in_cgs
+unit_mass_in_si = 0.001 * unit_mass_in_cgs
+unit_time_in_si = unit_time_in_cgs
+unit_density_in_cgs = unit_mass_in_cgs * unit_length_in_cgs ** -3
+unit_pressure_in_cgs = unit_mass_in_cgs / unit_length_in_cgs * unit_time_in_cgs ** -2
+unit_int_energy_in_cgs = unit_energy_in_cgs / unit_mass_in_cgs
+unit_entropy_in_cgs = unit_energy_in_cgs / unit_temp_in_cgs
+Gyr_in_cgs = 3.155e16
+Msun_in_cgs = 1.989e33
+
+box_energy = zeros(n_snapshots)
+box_mass = zeros(n_snapshots)
+box_star_mass = zeros(n_snapshots)
+box_metal_mass = zeros(n_snapshots)
+element_mass = zeros((n_snapshots, n_elements))
+t = zeros(n_snapshots)
+
+# Read data from snapshots
+for i in range(n_snapshots):
+    print("reading snapshot " + str(i))
+    # Read the simulation data
+    sim = h5py.File("output_%04d.hdf5" % i, "r")
+    t[i] = sim["/Header"].attrs["Time"][0]
+    # ids = sim["/PartType0/ParticleIDs"][:]
+
+    masses = sim["/PartType0/Masses"][:]
+    box_mass[i] = np.sum(masses)
+
+    star_masses = sim["/PartType4/Masses"][:]
+    box_star_mass[i] = np.sum(star_masses)
+
+    metallicities = sim["/PartType0/Metallicities"][:]
+    box_metal_mass[i] = np.sum(metallicities * masses)
+
+    internal_energies = sim["/PartType0/InternalEnergies"][:]
+    box_energy[i] = np.sum(masses * internal_energies)
+
+# Plot the interesting quantities
+figure()
+
+# Box mass --------------------------------
+subplot(221)
+plot(
+    t[1:] * unit_time_in_cgs / Gyr_in_cgs,
+    (box_mass[1:] - box_mass[0]) * unit_mass_in_cgs / Msun_in_cgs,
+    linewidth=0.5,
+    color="k",
+    marker="*",
+    ms=0.5,
+    label="swift",
+)
+xlabel("${\\rm{Time}} (Gyr)$", labelpad=0)
+ylabel("Change in total gas particle mass (Msun)", labelpad=2)
+ticklabel_format(style="sci", axis="y", scilimits=(0, 0))
+
+# Box metal mass --------------------------------
+subplot(222)
+plot(
+    t[1:] * unit_time_in_cgs / Gyr_in_cgs,
+    (box_metal_mass[1:] - box_metal_mass[0]) * unit_mass_in_cgs / Msun_in_cgs,
+    linewidth=0.5,
+    color="k",
+    ms=0.5,
+    label="swift",
+)
+xlabel("${\\rm{Time}} (Gyr)$", labelpad=0)
+ylabel("Change in total metal mass of gas particles (Msun)", labelpad=2)
+ticklabel_format(style="sci", axis="y", scilimits=(0, 0))
+
+# Box energy --------------------------------
+subplot(223)
+plot(
+    t[1:] * unit_time_in_cgs / Gyr_in_cgs,
+    (box_energy[1:] - box_energy[0]) * unit_energy_in_cgs,
+    linewidth=0.5,
+    color="k",
+    ms=0.5,
+    label="swift",
+)
+xlabel("${\\rm{Time}} (Gyr)$", labelpad=0)
+ylabel("Change in total energy of gas particles (erg)", labelpad=2)
+ticklabel_format(style="sci", axis="y", scilimits=(0, 0))
+
+# Box mass --------------------------------
+subplot(224)
+plot(
+    t[1:] * unit_time_in_cgs / Gyr_in_cgs,
+    (box_mass[1:] - box_mass[0]) * unit_mass_in_cgs / Msun_in_cgs,
+    linewidth=0.5,
+    color="k",
+    marker="*",
+    ms=0.5,
+    label="gas",
+)
+plot(
+    t[1:] * unit_time_in_cgs / Gyr_in_cgs,
+    (box_star_mass[1:] - box_star_mass[n_snapshots - 1])
+    * unit_mass_in_cgs
+    / Msun_in_cgs,
+    linewidth=0.5,
+    color="r",
+    marker="*",
+    ms=0.5,
+    label="stars",
+)
+plot(
+    t[1:] * unit_time_in_cgs / Gyr_in_cgs,
+    (box_star_mass[1:] - box_star_mass[n_snapshots - 1] + box_mass[1:] - box_mass[0])
+    * unit_mass_in_cgs
+    / Msun_in_cgs,
+    linewidth=0.5,
+    color="g",
+    marker="*",
+    ms=0.5,
+    label="total",
+)
+xlabel("${\\rm{Time}} (Gyr)$", labelpad=0)
+ylabel("Change in total gas particle mass (Msun)", labelpad=2)
+ticklabel_format(style="sci", axis="y", scilimits=(0, 0))
+legend()
+
+savefig("box_evolution.png", dpi=200)
diff --git a/examples/MHDTests/CoolingHaloMHD_feedback/run.sh b/examples/MHDTests/CoolingHaloMHD_feedback/run.sh
new file mode 100755
index 0000000000000000000000000000000000000000..e854c6029153bd75017e950232b94d4680cb321f
--- /dev/null
+++ b/examples/MHDTests/CoolingHaloMHD_feedback/run.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+# Generate the initial conditions if they are not present.
+echo "Generating initial conditions for the MHD cloud"
+python3 makeIC_MWsize.py 450000 
+
+# Run SWIFT
+../../../swift --threads=16 --feedback --external-gravity --self-gravity --stars --star-formation --cooling --hydro --limiter --sync cooling_halo.yml 2>&1 | tee output.log