diff --git a/examples/SantaBarbara/README b/examples/SantaBarbara/README
index 973d09a438e8ff39d7c4a91c5a08d4dfc4a51a60..e5bba3752215c438c01ff35931d22901c3a9d0d3 100644
--- a/examples/SantaBarbara/README
+++ b/examples/SantaBarbara/README
@@ -14,3 +14,8 @@ particles will be generated at startup.
 
 MD5 check-sum of the ICS:
 ba9ab4f00a70d39fa601a4a59984b343  SantaBarbara.hdf5
+
+You can use the script run_velociraptor.sh to also run a basic 3D FoF
+with VELOCIraptor on your output data. You will need to set the
+VELOCIRAPTOR_PATH environment variable to the full path of the stf-gas
+binary (the variable is invoked directly as the executable).
diff --git a/examples/SantaBarbara/makeImage.py b/examples/SantaBarbara/makeImage.py
new file mode 100644
index 0000000000000000000000000000000000000000..db6416010447952b3edd6b235237d045b16bdefd
--- /dev/null
+++ b/examples/SantaBarbara/makeImage.py
@@ -0,0 +1,276 @@
+"""
+Makes an image of the Santa Barbara cluster.
+
+Requires py-sphviewer.
+
+Invoke as follows:
+
+python3 makeImage.py <name of hdf5 file> \
+                     <number of particle type (i.e. 0 or 1)> \
+                     <colour map to use (default viridis)> \
+                     <text color (default white)> \
+                     <image resolution (default 2048x2048)>
+"""
+
+import numpy as np
+import matplotlib.pyplot as plt
+import h5py
+import matplotlib
+
+from sphviewer.tools import QuickView
+from matplotlib.patches import Rectangle
+
+from typing import Tuple
+from collections import namedtuple
+
+
+# Set up our simulation data collection to keep stuff together
+SimulationData = namedtuple(
+    "SimulationData",
+    ["coordinates", "masses", "sph_name", "dark_matter_mass", "swift_name", "boxsize"],
+)
+
+
+def latex_float(f):
+    """
+    Taken from:
+    https://stackoverflow.com/questions/13490292/format-number-using-latex-notation-in-python.
+
+    Formats a float in LaTeX style.
+    """
+
+    float_str = "{0:.2g}".format(f)
+    if "e" in float_str:
+        base, exponent = float_str.split("e")
+        return r"{0} \times 10^{{{1}}}".format(base, int(exponent))
+    else:
+        return float_str
+
+
+def read_data_from_file(filename: str, part_type=0) -> SimulationData:
+    """
+    Reads the relevant data from the HDF5 file.
+    """
+    part_type_name = f"PartType{part_type}"
+
+    with h5py.File(filename, "r") as file:
+        coordinates, boxsize = move_box(file[f"{part_type_name}/Coordinates"][...].T)
+        masses = file[f"{part_type_name}/Masses"][...]
+
+        sph_name = file["HydroScheme"].attrs["Scheme"].decode("utf-8")
+        unit_mass = (
+            float(file["Units"].attrs["Unit mass in cgs (U_M)"]) / 2e33
+        )  # in M_sun
+
+        dark_matter_mass = float(file["PartType1/Masses"][0]) * unit_mass
+
+        code_revision = file["Code"].attrs["Git Revision"].decode("utf-8")
+        swift_name = f"SWIFT {code_revision}"
+
+        data = SimulationData(
+            coordinates=coordinates,
+            masses=masses,
+            sph_name=sph_name,
+            dark_matter_mass=dark_matter_mass,
+            swift_name=swift_name,
+            boxsize=boxsize,
+        )
+
+    return data
+
+
+def move_box(coordinates: np.ndarray) -> Tuple[np.ndarray, float]:
+    """
+    Takes the coordinates and shifts them by 20 code units in the x-y plane
+    (with periodic wrapping) to ensure that the zoomed-out version of the
+    cluster image is nicely framed.
+    """
+
+    boxsize = np.max(coordinates[0])
+    coordinates[0] -= 20
+    coordinates[1] -= 20
+    coordinates[0] %= boxsize
+    coordinates[1] %= boxsize
+
+    return coordinates, boxsize
+
+
+def generate_views(data: SimulationData, res=2048) -> Tuple[np.ndarray, np.ndarray]:
+    """
+    Generates the views on the data from py-sphviewer.
+
+    Returns the overall image for the whole box and then a zoomed region.
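+
+    A minimal usage sketch (the snapshot name is purely illustrative):
+
+        data = read_data_from_file("santabarbara_0153.hdf5", part_type=0)
+        img_all, img_zoomed = generate_views(data, res=1024)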
+    """
+
+    qv_all = QuickView(
+        data.coordinates,
+        data.masses,
+        r="infinity",
+        plot=False,
+        xsize=res,
+        ysize=res,
+        logscale=False,
+        p=0,
+        np=48,
+    )
+    zoomed_res = (res * 6) // 10
+    mask = np.logical_and(
+        np.logical_and(
+            data.coordinates[0] > (data.boxsize / 2 - 4 - 20),
+            data.coordinates[0] < (data.boxsize / 2 + 6 - 20),
+        ),
+        np.logical_and(
+            data.coordinates[1] > (data.boxsize / 2 - 3.5 - 20),
+            data.coordinates[1] < (data.boxsize / 2 + 6.5 - 20),
+        ),
+    )
+    qv_zoomed = QuickView(
+        data.coordinates.T[mask].T,
+        data.masses[mask],
+        r="infinity",
+        plot=False,
+        xsize=zoomed_res,
+        ysize=zoomed_res,
+        logscale=False,
+        np=48,
+    )
+
+    return qv_all.get_image(), qv_zoomed.get_image()
+
+
+def create_plot(data: SimulationData, res=2048, cmap="viridis", text_color="white"):
+    """
+    Creates a figure and axes object and returns them for you to do with what you wish.
+    """
+
+    img_all, img_zoomed = generate_views(data, res)
+
+    fig, ax = plt.subplots(figsize=(8, 8))
+
+    # Set up in "image" mode
+    ax.axis("off")
+    fig.subplots_adjust(0, 0, 1, 1)
+
+    ax.imshow(
+        np.log10(img_all + np.min(img_all[img_all != 0])),
+        origin="lower",
+        extent=[-1, 1, -1, 1],
+        cmap=cmap,
+    )
+
+    lower_left = [(-24 / (0.5 * data.boxsize)), (-23.5 / (0.5 * data.boxsize))]
+    zoom_rect = Rectangle(
+        lower_left,
+        10 / (0.5 * data.boxsize),
+        10 / (0.5 * data.boxsize),
+        linewidth=2,
+        edgecolor=text_color,
+        facecolor="none",
+    )
+    ax.add_patch(zoom_rect)
+
+    # Remove ticks as we want "image mode"
+    ax2 = fig.add_axes([0.35, 0.35, 0.6, 0.6], frame_on=True, xticks=[], yticks=[])
+
+    ax2.imshow(
+        np.log10(img_zoomed + np.min(img_zoomed[img_zoomed != 0])),
+        origin="lower",
+        extent=[-1, 1, -1, 1],
+        cmap=cmap,
+    )
+
+    # This ugly hack sets the box around the subfigure to be white
+    for child in ax2.get_children():
+        if isinstance(child, matplotlib.spines.Spine):
+            child.set_color(text_color)
+            child.set_linewidth(2)
+
+    # Draw lines between boxes
+
+    # Bottom Right
+    ax.plot(
+        [(-14 / (0.5 * data.boxsize)), 0.9],
+        [(-23.5 / (0.5 * data.boxsize)), -0.3],
+        lw=2,
+        color=text_color,
+    )
+    # Top Left
+    ax.plot(
+        [(-24 / (0.5 * data.boxsize)), -0.3],
+        [(-13.5 / (0.5 * data.boxsize)), 0.9],
+        lw=2,
+        color=text_color,
+    )
+
+    ax.text(0.95, -0.95, data.swift_name, color=text_color, ha="right")
+    formatted_dark_matter_mass = latex_float(data.dark_matter_mass)
+    ax.text(
+        -0.95,
+        0.95,
+        rf"M$_{{\rm DM}} = {formatted_dark_matter_mass}$ M$_\odot$",
+        color=text_color,
+        va="top",
+    )
+    ax.text(
+        -0.95,
+        -0.95,
+        data.sph_name + "\n" + r"Santa Barbara Cluster (re-run from Frenk+ 1999)",
+        color=text_color,
+    )
+
+    return fig, ax
+
+
+if __name__ == "__main__":
+    import sys
+
+    try:
+        filename = sys.argv[1]
+    except IndexError:
+        filename = "santabarbara_0153.hdf5"
+
+    try:
+        part_type = int(sys.argv[2])
+    except IndexError:
+        part_type = 0
+
+    try:
+        cmap = sys.argv[3]
+    except IndexError:
+        cmap = "viridis"
+
+    try:
+        text_color = sys.argv[4]
+    except IndexError:
+        text_color = "white"
+
+    try:
+        res = int(sys.argv[5])
+    except IndexError:
+        res = 2048
+
+    # Read in the data from file
+
+    try:
+        data = read_data_from_file(filename, part_type)
+    except KeyError:
+        # Must be a dark matter only run (no PartType0 in the snapshot)
+        part_type = 1
+        data = read_data_from_file(filename, part_type)
+
+    # Make the plot
+
+    fig, ax = create_plot(data, res, cmap, text_color)
+
+    fig.savefig(
+        f"SantaBarbara_{data.sph_name[:8]}_{cmap}_PartType{part_type}_res{res}.png",
+        dpi=res // 8,
+    )
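+
+    # Sanity note: with the 8 inch figure created in create_plot, dpi=res // 8
+    # saves the image at roughly res x res pixels.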
diff --git a/examples/SantaBarbara/plotSolution.py b/examples/SantaBarbara/plotSolution.py
new file mode 100644
index 0000000000000000000000000000000000000000..a23aa2089a0f82a9dad989134d1ebf11a97af2fe
--- /dev/null
+++ b/examples/SantaBarbara/plotSolution.py
@@ -0,0 +1,405 @@
+"""
+Plots the "solution" (i.e. some profiles) for the Santa Barbara cluster.
+
+Invoke as follows:
+
+python3 plotSolution.py <snapshot number> <catalogue directory> <number of bins (optional)>
+"""
+
+import matplotlib.pyplot as plt
+import numpy as np
+
+import h5py
+
+from collections import namedtuple
+from typing import Tuple
+
+try:
+    import makeImage
+
+    create_images = True
+except ImportError:
+    create_images = False
+
+# Simulation data
+SimulationParticleData = namedtuple(
+    "SimulationData", ["gas", "dark_matter", "metadata"]
+)
+ParticleData = namedtuple(
+    "ParticleData", ["coordinates", "radii", "masses", "densities", "energies"]
+)
+MetaData = namedtuple("MetaData", ["header", "code", "hydroscheme"])
+HaloData = namedtuple("HaloData", ["c", "Rvir", "Mvir", "center"])
+
+
+def get_energies(handle: h5py.File):
+    """
+    Gets the internal energies in physical SI units.
+    """
+    u = handle["PartType0/InternalEnergy"][:]
+    unit_length_in_cgs = handle["/Units"].attrs["Unit length in cgs (U_L)"]
+    unit_mass_in_cgs = handle["/Units"].attrs["Unit mass in cgs (U_M)"]
+    unit_time_in_cgs = handle["/Units"].attrs["Unit time in cgs (U_t)"]
+    gas_gamma = handle["/HydroScheme"].attrs["Adiabatic index"][0]
+    a = handle["/Cosmology"].attrs["Scale-factor"][0]
+
+    unit_length_in_si = 0.01 * unit_length_in_cgs
+    unit_mass_in_si = 0.001 * unit_mass_in_cgs
+    unit_time_in_si = unit_time_in_cgs
+
+    u *= unit_length_in_si ** 2 / unit_time_in_si ** 2
+    u /= a ** (3 * (gas_gamma - 1.))
+
+    return u
+
+
+def load_data(filename: str, center: np.ndarray) -> SimulationParticleData:
+    """
+    Loads the relevant data for making the profiles, as well as some metadata
+    for the plot.
+
+    Center is the center of the SB cluster and is used to calculate the radial
+    distances to the particles.
+    """
+
+    with h5py.File(filename, "r") as file:
+        gas_handle = file["PartType0"]
+        dm_handle = file["PartType1"]
+
+        gas_data = ParticleData(
+            coordinates=gas_handle["Coordinates"][...],
+            radii=get_radial_distances(gas_handle["Coordinates"][...], center),
+            masses=gas_handle["Masses"][...],
+            energies=get_energies(file),
+            densities=gas_handle["Density"][...],
+        )
+
+        dm_data = ParticleData(
+            coordinates=dm_handle["Coordinates"][...],
+            radii=get_radial_distances(dm_handle["Coordinates"][...], center),
+            masses=dm_handle["Masses"][...],
+            energies=None,
+            densities=None,
+        )
+
+        metadata = MetaData(
+            header=dict(file["Header"].attrs),
+            code=dict(file["Code"].attrs),
+            hydroscheme=dict(file["HydroScheme"].attrs),
+        )
+
+        simulation_data = SimulationParticleData(
+            gas=gas_data, dark_matter=dm_data, metadata=metadata
+        )
+
+    return simulation_data
+
+
+def get_halo_data(catalogue_filename: str) -> HaloData:
+    """
+    Gets the halo center of the largest halo (i.e. the SB cluster).
+
+    You will want the .properties file, probably
+
+    halo/santabarbara.properties
+
+    that is given by VELOCIraptor.
+    """
+
+    with h5py.File(catalogue_filename, "r") as file:
+        x = file["Xc"][0]
+        y = file["Yc"][0]
+        z = file["Zc"][0]
+        Mvir = file["Mass_200crit"][0]
+        Rvir = file["R_200crit"][0]
+        c = file["cNFW"][0]
+
+    return HaloData(c=c, Rvir=Rvir, Mvir=Mvir, center=np.array([x, y, z]))
+
+
+def get_radial_distances(coordinates: np.ndarray, center: np.ndarray) -> np.ndarray:
+    """
+    Gets the radial distances for all particles.
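+
+    These are plain Euclidean norms, |x - center|; no periodic wrapping is
+    applied, which is fine while the cluster sits far from the box edge.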
+    """
+    dx = coordinates - center
+
+    return np.sqrt(np.sum(dx * dx, axis=1))
+
+
+def get_radial_density_profile(radii, masses, bins: int) -> Tuple[np.ndarray, np.ndarray]:
+    """
+    Gets a radial mass density profile (used for both the gas and the dark
+    matter), using log-spaced bins similar to those used in comparable works.
+    """
+
+    bins = np.logspace(-2, 1, bins)
+
+    histogram, bin_edges = np.histogram(a=radii, weights=masses, bins=bins)
+
+    volumes = np.array(
+        [
+            (4. * np.pi / 3.) * (r_outer ** 3 - r_inner ** 3)
+            for r_outer, r_inner in zip(bin_edges[1:], bin_edges[:-1])
+        ]
+    )
+
+    return histogram / volumes, bin_edges  # densities
+
+
+def mu(T, H_frac, T_trans):
+    """
+    Get the mean molecular weight as a function of temperature.
+    """
+    if T > T_trans:
+        return 4. / (8. - 5. * (1. - H_frac))
+    else:
+        return 4. / (1. + 3. * H_frac)
+
+
+def T(u, metadata: MetaData):
+    """
+    Temperature of primordial gas.
+    """
+
+    gas_gamma = metadata.hydroscheme["Adiabatic index"][0]
+    H_frac = metadata.hydroscheme["Hydrogen mass fraction"][0]
+    T_trans = metadata.hydroscheme["Hydrogen ionization transition temperature"][0]
+
+    k_in_J_K = 1.38064852e-23
+    mH_in_kg = 1.6737236e-27
+
+    T_over_mu = (gas_gamma - 1.) * u * mH_in_kg / k_in_J_K
+    ret = np.ones(np.size(u)) * T_trans
+
+    # Enough energy to be ionized?
+    mask_ionized = T_over_mu > (T_trans + 1) / mu(T_trans + 1, H_frac, T_trans)
+    if np.sum(mask_ionized) > 0:
+        ret[mask_ionized] = T_over_mu[mask_ionized] * mu(T_trans * 10, H_frac, T_trans)
+
+    # Neutral gas?
+    mask_neutral = T_over_mu < (T_trans - 1) / mu((T_trans - 1), H_frac, T_trans)
+    if np.sum(mask_neutral) > 0:
+        ret[mask_neutral] = T_over_mu[mask_neutral] * mu(0, H_frac, T_trans)
+
+    return ret
+
+
+def get_radial_temperature_profile(
+    data: SimulationParticleData, bins: int
+) -> np.ndarray:
+    """
+    Gets the radial gas temperature profile, using the same log-spaced bins
+    as the density profile.
+    """
+
+    temperatures = T(data.gas.energies, data.metadata)
+    radii = data.gas.radii
+
+    bins = np.logspace(-2, 1, bins)
+
+    histogram, _ = np.histogram(a=radii, weights=temperatures, bins=bins)
+
+    counts, _ = np.histogram(a=radii, weights=np.ones_like(radii), bins=bins)
+
+    return histogram / counts  # need to get mean value in bin
+
+
+def get_radial_entropy_profile(data: SimulationParticleData, bins: int) -> np.ndarray:
+    """
+    Gets the radial gas entropy profile, using the same log-spaced bins as
+    the density profile.
+    """
+
+    gas_gamma = data.metadata.hydroscheme["Adiabatic index"][0]
+    gamma_minus_one = gas_gamma - 1.0
+
+    entropies = (
+        data.gas.energies * (gamma_minus_one) / data.gas.densities ** gamma_minus_one
+    )
+    print("Warning: Current entropy profile assumes all gas is ionised")
+    radii = data.gas.radii
+
+    bins = np.logspace(-2, 1, bins)
+
+    histogram, _ = np.histogram(a=radii, weights=entropies, bins=bins)
+
+    counts, _ = np.histogram(a=radii, weights=np.ones_like(radii), bins=bins)
+
+    return histogram / counts  # need to get mean value in bin
+
+
+def nfw(R, halo_data: HaloData):
+    """
+    NFW profile at radius R.
+    """
+
+    R_s = halo_data.Rvir / halo_data.c
+    rho_0 = (4 * np.pi * R_s ** 3) / (halo_data.Mvir)
+    rho_0 *= np.log(1 + halo_data.c) - halo_data.c / (halo_data.c + 1)
+    rho_0 = 1.0 / rho_0
+
+    ratio = R / R_s
+
+    return rho_0 / (ratio * (1 + ratio) ** 2)
+
+
+def create_plot(
+    data: SimulationParticleData,
+    halo_data: HaloData,
+    bins: int,
+    create_images: bool,
+    image_data: np.ndarray,
+):
+    """
+    Creates the figure and axes objects and plots the data on them.
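+
+    The 2x3 grid shows, in reading order: gas density, gas entropy, the
+    projected gas image (when py-sphviewer is available), gas temperature,
+    dark matter density against an NFW profile, and a panel of run metadata.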
+    """
+
+    fig, axes = plt.subplots(2, 3, figsize=(12, 8))
+
+    gas_density, bin_edges = get_radial_density_profile(
+        data.gas.radii, data.gas.masses, bins=bins
+    )
+    dm_density, _ = get_radial_density_profile(
+        data.dark_matter.radii, data.dark_matter.masses, bins=bins
+    )
+    temperature = get_radial_temperature_profile(data, bins=bins)
+    entropy = get_radial_entropy_profile(data, bins=bins)
+
+    bin_centers = [0.5 * (x + y) for x, y in zip(bin_edges[:-1], bin_edges[1:])]
+    nfw_R = np.logspace(-2, 1, bins * 100)
+    nfw_rho = nfw(nfw_R, halo_data)
+
+    axes[0][0].loglog()
+    axes[0][0].plot(nfw_R, 0.1 * nfw_rho, ls="dashed", color="grey")
+    axes[0][0].scatter(bin_centers, gas_density)
+    axes[0][0].set_ylabel(r"$\rho_{\rm gas} (R)$ [$10^{10}$ M$_\odot$ Mpc$^{-3}$]")
+    axes[0][0].set_xlabel(r"R [Mpc]")
+    axes[0][0].set_xlim(0.01, 10)
+
+    axes[0][1].semilogx()
+    axes[0][1].scatter(bin_centers, np.log(entropy))
+    axes[0][1].set_ylabel(
+        r"Entropy $\log(A$ [K ($10^{10}$ M$_\odot$)$^{2/3}$ Mpc$^{-2}$])"
+    )
+    axes[0][1].set_xlabel(r"R [Mpc]")
+    axes[0][1].set_xlim(0.01, 10)
+
+    if create_images:
+        axes[0][2].imshow(np.log10(image_data))
+
+    axes[0][2].set_xticks([])
+    axes[0][2].set_yticks([])
+
+    axes[1][0].loglog()
+    axes[1][0].scatter(bin_centers, temperature)
+    axes[1][0].set_ylabel(r"$T_{\rm gas} (R)$ [K]")
+    axes[1][0].set_xlabel(r"R [Mpc]")
+    axes[1][0].set_xlim(0.01, 10)
+
+    axes[1][1].loglog()
+    axes[1][1].scatter(bin_centers, dm_density)
+    axes[1][1].plot(nfw_R, 0.9 * nfw_rho, ls="dashed", color="grey")
+    axes[1][1].set_ylabel(r"$\rho_{\rm DM} (R)$ [$10^{10}$ M$_\odot$ Mpc$^{-3}$]")
+    axes[1][1].set_xlabel(r"R [Mpc]")
+    axes[1][1].set_xlim(0.01, 10)
+    axes[1][1].text(
+        0.02,
+        5,
+        "$c_{{vir}} = {:2.2f}$\n$R_{{vir}} = {:2.2f}$ Mpc\n$M_{{vir}} = {:2.2f}$ $10^{{10}}$ M$_\odot$".format(
+            halo_data.c, halo_data.Rvir, halo_data.Mvir
+        ),
+        va="bottom",
+        ha="left",
+    )
+
+    axes[1][2].text(
+        -0.49,
+        0.7,
+        "Santa Barbara with $\\gamma={:2.2f}$ in 3D".format(
+            data.metadata.hydroscheme["Adiabatic index"][0]
+        ),
+    )
+
+    # Wrap the (long) scheme name every four words so it fits in the panel
+    scheme_list = data.metadata.hydroscheme["Scheme"].decode("utf-8").split(" ")
+    i = 4
+    while i < len(scheme_list):
+        scheme_list.insert(i, "\n")
+        i += 4 + 1
+    wrapped_scheme = " ".join(scheme_list)
+    wrapped_scheme = wrapped_scheme.replace("\n ", "\n")
+
+    axes[1][2].text(-0.49, 0.8, wrapped_scheme)
+
+    axes[1][2].plot([-0.49, 0.1], [0.62, 0.62], "k-", lw=1)
+
+    axes[1][2].text(
+        -0.49, 0.5, f"SWIFT {data.metadata.code['Git Revision'].decode('utf-8')}"
+    )
+
+    axes[1][2].text(
+        -0.49,
+        0.3,
+        data.metadata.hydroscheme["Kernel function"].decode("utf-8"),
+        fontsize=10,
+    )
+    axes[1][2].text(
+        -0.49,
+        0.2,
+        "{:2.3f} neighbours ($\\eta={:3.3f}$)".format(
+            data.metadata.hydroscheme["Kernel target N_ngb"][0],
+            data.metadata.hydroscheme["Kernel eta"][0],
+        ),
+    )
+    axes[1][2].set_xlim(-0.5, 0.5)
+    axes[1][2].set_ylim(0, 1)
+    axes[1][2].axis("off")
+
+    fig.tight_layout()
+
+    return fig, axes
+
+
+if __name__ == "__main__":
+    import sys
+
+    filename = "santabarbara_{:04d}.hdf5".format(int(sys.argv[1]))
+    catalogue_filename = f"{sys.argv[2]}/santabarbara.properties"
+
+    try:
+        bins = int(sys.argv[3])
+    except IndexError:
+        bins = 25
+
+    halo_data = get_halo_data(catalogue_filename)
+    simulation_data = load_data(filename, halo_data.center)
+
+    if create_images:
+        data = makeImage.read_data_from_file(filename, part_type=0)
+        _, image_data = makeImage.generate_views(data)
+        del data
+    else:
+        image_data = None
+
+    fig, _ = create_plot(
+        data=simulation_data,
+        halo_data=halo_data,
+        bins=bins,
+        create_images=create_images,
+        image_data=image_data,
+    )
+
+    fig.savefig("santabarbara.png", dpi=300)
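+
+    # Example invocation (paths assume the VELOCIraptor output layout
+    # produced by run_velociraptor.sh):
+    #     python3 plotSolution.py 153 halo
+    # This reads santabarbara_0153.hdf5 and halo/santabarbara.properties,
+    # and writes santabarbara.png to the working directory.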
diff --git a/examples/SantaBarbara/run.sh b/examples/SantaBarbara/run.sh
new file mode 100755
index 0000000000000000000000000000000000000000..f98b13aac0ea15658319a57b07417d305d3ff509
--- /dev/null
+++ b/examples/SantaBarbara/run.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+# Run SWIFT
+../swift -c -s -r -G -t 28 santa_barbara.yml
+
diff --git a/examples/SantaBarbara/run_velociraptor.sh b/examples/SantaBarbara/run_velociraptor.sh
new file mode 100644
index 0000000000000000000000000000000000000000..3b7ca06ec8125d05066762f20c7324f9faa42348
--- /dev/null
+++ b/examples/SantaBarbara/run_velociraptor.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+mkdir -p halo
+${VELOCIRAPTOR_PATH} -I 2 -i santabarbara_0153 -C velociraptor_cfg.cfg -o ./halo/santabarbara
diff --git a/examples/SantaBarbara/velociraptor_cfg.cfg b/examples/SantaBarbara/velociraptor_cfg.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..4b9b441b71cc65a85a9731018d38ffc2f003c0ff
--- /dev/null
+++ b/examples/SantaBarbara/velociraptor_cfg.cfg
@@ -0,0 +1,143 @@
+#configuration file.
+#It is suggested that you alter this file as necessary, as not all options will be desired and some conflict.
+#This file is simply meant to show the options available.
+
+################################
+#input related
+################################
+#input is from a cosmological simulation, so parameters like box size, h and Omega_m can be used to calculate length and density scales
+Cosmological_input=1
+
+#Type of snapshot to read. Ignored when using within SWIFT.
+HDF_name_convention=6 # ILLUSTRIS 0, GADGETX 1, EAGLE 2, GIZMO 3, SIMBA 4, MUFASA 5, SWIFTEAGLE 6
+
+Particle_search_type=2 #search all particles, see allvars for other types
+Baryon_searchflag=0 #if 1 search for baryons separately using phase-space search when identifying substructures, 2 allows special treatment in field FOF linking and phase-space substructure search, 0 treat the same as dark matter particles
+Search_for_substructure=0 #if 0, end search once field objects are found
+FoF_Field_search_type=5 #5 3DFOF search for field halos, 4 for 6DFOF clean up of field halos, 3 for 6DFOF with velocity scale distinct for each halo
+Unbind_flag=0 #if 1, run unbinding (here off)
+Halo_core_search=0 #0 off, 1 flags a halo as merging, 2 searches for separate 6DFOF cores in field haloes and assigns particles to each merging "halo"
+Significance_level=1.0 #how significant a substructure is relative to Poisson noise. Values >= 1 are fine.
+
+################################
+# unit options, should always be provided
+################################
+
+# This is only for i/o. Specifies what units the code was running in.
+# These should be set to whatever internal units we use.
+# They have no impact on the way the code runs.
+Length_unit_to_kpc=1000. #conversion of output length units to kpc
+Velocity_to_kms=1.0 #conversion of output velocity units to km/s
+Mass_to_solarmass=1e+10 #conversion of output mass units to solar masses
+
+# units conversion from input to desired internal unit.
+# These should be set to 1 unless a conversion is expected.
+Length_unit=1.0 #default length unit
+Velocity_unit=1.0 #default velocity unit
+Mass_unit=1.0 #default mass unit
+
+# These are ignored when running within SWIFT.
+# When using the standalone code, G and H must match the values used in the run.
+Gravity=4.300927e+01 # In internal units (here 10^10 Msun, km/s, Mpc)
+Hubble_unit=100.0 # This is H0 / h in internal units.
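+
+# Worked example: with Mass_to_solarmass=1e+10 and Mass_unit=1.0 above, a
+# mass of 1.0 in code units is reported in the catalogue as 1e10 solar masses.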
+
+################################
+#search related options
+################################
+
+#how to search a simulation
+#also useful for zoom simulations or simulations of individual objects; setting this flag means no field structure search is run
+Singlehalo_search=0 #if file is a single halo in which one wishes to search for substructure
+#additional option for field haloes
+Keep_FOF=0 #if a field 6DFOF search is done, allows one to keep the structures found in the 3DFOF (can be interpreted as the inter-halo stellar mass when only a stellar search is used).
+
+#minimum size for structures
+Minimum_size=256 #minimum number of particles in a structure
+Minimum_halo_size=-1 #set if field halos have a different minimum size, otherwise set to -1.
+
+#for field fof halo search
+Halo_linking_length_factor=2.0 #factor by which Physical_linking_length is changed when searching for field halos. Typical values are ~2 when using an iterative substructure search.
+Halo_velocity_linking_length_factor=5.0 #for the 6D FOF halo search, increase ellv relative to the substructure search
+
+#for mean field estimates and quantities related to the local velocity density distribution function estimator; rarely need to change these
+Cell_fraction = 0.01 #fraction of field fof halo used to determine mean velocity distribution function. Typical values are ~0.005-0.02
+Grid_type=1 #normal entropy based grid, shouldn't have to change
+Nsearch_velocity=32 #number of velocity neighbours used to calculate local velocity distribution function. Typical values are ~32
+Nsearch_physical=256 #number of physical neighbours from which the nearest velocity neighbour set is drawn. Typical values are 128-512
+
+#for substructure search, rarely ever need to change this
+FoF_search_type=1 #default phase-space FOF search. Don't really need to change
+Iterative_searchflag=1 #iterative substructure search: find initial candidate substructures with smaller linking lengths, then expand the search region
+Outlier_threshold=2.5 #outlier threshold for a particle to be considered residing in substructure, that is how dynamically distinct a particle is. Typical values are >2
+Velocity_ratio=2.0 #ratio of speeds used in phase-space FOF
+Velocity_opening_angle=0.10 #angle between velocities. 18 degrees here, typical values are ~10-30
+Physical_linking_length=0.10 #physical linking length. If reading periodic volumes in gadget/hdf/ramses, in units of the effective inter-particle spacing. Otherwise in user defined code units. Here set to 0.10 as the iterative flag is on; values of 0.1-0.3 are typical.
+Velocity_linking_length=0.20 #where scaled by structure dispersion
+
+#for iterative substructure search, rarely ever need to change this
+Iterative_threshold_factor=1.0 #change in threshold value when using iterative search. Here no increase in threshold, iterative or not
+Iterative_linking_length_factor=2.0 #increase in the final linking length; the final iterative substructure search will use sqrt(2.25)*this factor
+Iterative_Vratio_factor=1.0 #change in Vratio when using iterative search. No change in vratio
+Iterative_ThetaOp_factor=1.0 #change in velocity opening angle. No change in velocity opening angle
+
+#for checking for halo merger remnants, which are defined as large, well separated phase-space density maxima
+
+#if searching for cores, the linking lengths below likely do not need to change much
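+
+# Worked example: with Halo_core_ncellfac=0.05 below, a 10,000-particle halo
+# requires a local 6DFOF core to contain at least 500 particles.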
+Use_adaptive_core_search=2 #calculate dispersions in configuration & velocity space to determine linking lengths
+Halo_core_ellx_fac=1.0 #how linking lengths are changed when searching for local 6DFOF cores
+Halo_core_ellv_fac=1.0 #how velocity lengths based on dispersions are changed when searching for local 6DFOF cores
+Halo_core_ncellfac=0.05 #fraction of total halo particle number setting min size of a local 6DFOF core
+Halo_core_adaptive_sigma_fac=2.0 #used when running fully adaptive core search with phase-space tensors, specifies the width of the physical linking length in units of the configuration-space dispersion (think of this as how many sigma to include). Typical values are ~2
+Halo_core_num_loops=3 #allows the core search to iterate, shrinking the velocity linking length used until the number of cores identified decreases or this limit is reached. Allows the adaptive search with a larger linking length to be robust. Typical values are 3-5
+Halo_core_loop_ellv_fac=0.75 #factor by which the velocity linking length is decreased when running loops for the core search. Typical values are ~0.75
+
+################################
+#Unbinding options (VELOCIraptor is able to accurately identify tidal debris so particles need not be bound to a structure)
+################################
+
+#unbinding related items
+
+Min_bound_mass_frac=0.2 #minimum bound mass fraction, not yet implemented
+#alpha factor used to determine whether a particle is "bound": alpha*T+W<0. For standard subhalo catalogues use >0.9, but if interested in tidal debris use 0.2-0.5
+Allowed_kinetic_potential_ratio=0.2
+#run unbinding of field structures, aka halos
+Bound_halos=0
+#simple Plummer softening length when calculating gravitational energy. For a cosmological simulation with periodic boundaries, this is a fraction of the inter-particle spacing
+Softening_length=0.00263
+#don't keep background potential when unbinding
+Keep_background_potential=0
+
+################################
+#Calculation of properties related options
+################################
+#when calculating properties, for field objects calculate inclusive masses
+Inclusive_halo_masses=1 #calculate inclusive masses
+#if 1, output distances are comoving per little h; here physical units are used
+Comoving_units=0
+
+################################
+#output related
+################################
+
+Write_group_array_file=0 #write a group array file
+Separate_output_files=0 #separate output into field and substructure files similar to subfind
+Binary_output=2 #binary output 1, ascii 0, and HDF 2
+
+#halo ids are adjusted by this value * 1000000000000 (or 1000000 if the code is compiled with the LONGINTS option turned off)
+#to ensure that halo ids are temporally unique. So if you had 100 snapshots, for snap 100 set this to 100 and 100*1000000000000 will
+#be added to the halo id as set for this snapshot, so halo 1 becomes halo 100*1000000000000+1 and halo 1 of snap 0 would just have ID=1
+Snapshot_value=1
+
+################################
+#other options
+################################
+Verbose=0 #how talkative do you want the code to be, 0 not much, 1 a lot, 2 chatterbox
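+
+# Note: taken together, Search_for_substructure=0, Unbind_flag=0 and
+# FoF_Field_search_type=5 (see the search options above) reduce this
+# configuration to the basic 3DFOF field-halo finder described in the README.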