... | ... | @@ -144,4 +144,34 @@ mpirun -np 16 ../../swift --pin --cosmology --hydro --self-gravity -v 1 --thread |
|
|
# Full physics:
|
|
|
mpirun -np 16 ../../swift --pin --cosmology --eagle -v 1 --threads=14 eagle_12.yml
|
|
|
|
|
|
``` |
|
|
\ No newline at end of file |
|
|
```
|
|
|
|
|
|
### DINE
|
|
|
|
|
|
This is a cluster of AMD EPYC nodes. Here is a SLURM submission script using a recent
Intel toolchain; the build details are the same as for COSMA8.
|
|
|
```
#!/bin/bash -l

#SBATCH -J SWIFT-EAGLE-25
#SBATCH --ntasks=8
#SBATCH --cpus-per-task=32
#SBATCH -o job.%J.dump
#SBATCH -e job.%J.err
#SBATCH -p bluefield1
#SBATCH -A durham
#SBATCH --exclusive
#SBATCH -t 4:00:00

module purge
module load intel_comp/2022.1.2 compiler mpi
module load gsl/2.4 fftw/3.3.9epyc parmetis/4.0.3-64bit parallel_hdf5/1.12.0 ucx/1.13.0rc2

export I_MPI_PIN_DOMAIN=auto

mpirun -np $SLURM_NTASKS ../../../swift_mpi --hydro --self-gravity --stars -v 1 --threads=16 \
    eagle_25.yml

exit
```
|
|
|
Note that setting `I_MPI_PIN_DOMAIN` is needed for pinning with Intel MPI.
|
|
\ No newline at end of file |