Commit af271cdd authored by Matthieu Schaller

Updated default run scripts with the new policy and parameters

parent 8190b0e1
2 merge requests: !136 Master, !79 First version of the multiple time-stepping
@@ -34,19 +34,19 @@ do
     # Sod-Shock runs
     if [ ! -e SodShock_${cpu}.dump ]
     then
-        ./swift -r 1000 -t $cpu -f SodShock/sodShock.hdf5 -m 0.01 -w 5000 -d 1e-4 > SodShock_fixed_${cpu}.dump
+        ./swift -t $cpu -f SodShock/sodShock.hdf5 -m 0.01 -w 5000 -c 1. -d 1e-7 -e 0.01 > SodShock_fixed_${cpu}.dump
     fi

     # Sedov blast
     if [ ! -e SedovBlast_${cpu}.dump ]
     then
-        ./swift -r 4096 -t $cpu -f SedovBlast/sedov.hdf5 -m 0.02 -w 5000 -d 5e-5 > SedovBlast_fixed_${cpu}.dump
+        ./swift -t $cpu -f SedovBlast/sedov.hdf5 -m 0.02 -w 5000 -c 1. -d 1e-7 -e 0.01 > SedovBlast_fixed_${cpu}.dump
     fi

     # Cosmological volume
     if [ ! -e CosmoVolume_${cpu}.dump ]
     then
-        ./swift -r 256 -t $cpu -f CosmoVolume/cosmoVolume.hdf5 -m 0.6 -w 5000 -d 1e-8 > CosmoVolume_fixed_${cpu}.dump
+        ./swift -t $cpu -f CosmoVolume/cosmoVolume.hdf5 -m 0.6 -w 5000 -c 1. -d 1e-7 -e 0.01 > CosmoVolume_fixed_${cpu}.dump
     fi
 done
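For reference, a minimal sketch of how the updated single-node invocation sits inside its scaling loop. Only the loop body and the $cpu variable are visible in the hunk above; the loop construct and the NCORES bound are assumptions, the reading of -c as the final simulation time is inferred from the $FINALTIME variable in the second script, and the meanings of -d and -e are not stated in the diff.

#!/bin/bash
# Sketch only: the surrounding loop is assumed, not shown in the diff.
NCORES=12                      # hypothetical number of cores to scale over

for cpu in $(seq 1 $NCORES)    # assumed loop; the hunk context only shows "do"
do
    # Sod-Shock run with the updated flags: the fixed step count (-r 1000) is
    # replaced by an end time (-c 1.), and -d/-e take the new default values.
    if [ ! -e SodShock_${cpu}.dump ]
    then
        ./swift -t $cpu -f SodShock/sodShock.hdf5 -m 0.01 -w 5000 \
                -c 1. -d 1e-7 -e 0.01 > SodShock_fixed_${cpu}.dump
    fi
done

The second script touched by this commit, the LSF batch-submission benchmark below, receives the same substitution of -r $RUNS by -c $FINALTIME and switches from the swift_fixdt binaries to swift / swift_mpi.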
 #!/bin/bash
 # Set the number of runs
-RUNS=100
+FINALTIME=1.
 # Cores per node
 CPN=12
@@ -23,43 +23,43 @@ for cpu in $(seq 1 $CPN)
 do
     if [ ! -e ${PREFIX}_${QUEUE}_1x${cpu}.dump ]
     then
-        bsub -oo ${PREFIX}_${QUEUE}_1x${cpu}.dump -q ${QUEUE} -P ${PROJECT} -x -n 1 -R "span[ptile=1]" ./swift_fixdt -r $RUNS -t $cpu -f ${INPUT} -m 0.705 -w 6000 -z 300 -d 1e-8
+        bsub -oo ${PREFIX}_${QUEUE}_1x${cpu}.dump -q ${QUEUE} -P ${PROJECT} -x -n 1 -R "span[ptile=1]" ./swift -c $FINALTIME -t $cpu -f ${INPUT} -m 0.705 -w 6000 -z 300 -d 1e-7 -e 0.01
     fi
 done

 # Multi-node runs
 if [ ! -e ${PREFIX}_${QUEUE}_2x${cpu}.dump ]
 then
-    bsub -oo ${PREFIX}_${QUEUE}_2x${CPN}.dump -q ${QUEUE} -P ${PROJECT} -x -W 02:00 -n 2 -R "span[ptile=1]" mpirun -np 2 ./swift_fixdt_mpi -r $RUNS -t $CPN -g "2 1 1" -f ${INPUT} -m 0.705 -w 6000 -z 300 -d 1e-8
+    bsub -oo ${PREFIX}_${QUEUE}_2x${CPN}.dump -q ${QUEUE} -P ${PROJECT} -x -W 02:00 -n 2 -R "span[ptile=1]" mpirun -np 2 ./swift_mpi -c $FINALTIME -t $CPN -g "2 1 1" -f ${INPUT} -m 0.705 -w 6000 -z 300 -d 1e-7 -e 0.01
 fi

 if [ ! -e ${PREFIX}_${QUEUE}_4x${cpu}.dump ]
 then
-    bsub -oo ${PREFIX}_${QUEUE}_4x${CPN}.dump -q ${QUEUE} -P ${PROJECT} -x -W 02:00 -n 4 -R "span[ptile=1]" mpirun -np 4 ./swift_fixdt_mpi -r $RUNS -t $CPN -g "2 2 1" -f ${INPUT} -m 0.705 -w 6000 -z 300 -d 1e-8
+    bsub -oo ${PREFIX}_${QUEUE}_4x${CPN}.dump -q ${QUEUE} -P ${PROJECT} -x -W 02:00 -n 4 -R "span[ptile=1]" mpirun -np 4 ./swift_mpi -c $FINALTIME -t $CPN -g "2 2 1" -f ${INPUT} -m 0.705 -w 6000 -z 300 -d 1e-7 -e 0.01
 fi

 if [ ! -e ${PREFIX}_${QUEUE}_8x${cpu}.dump ]
 then
-    bsub -oo ${PREFIX}_${QUEUE}_8x${CPN}.dump -q ${QUEUE} -P ${PROJECT} -x -W 02:00 -n 8 -R "span[ptile=1]" mpirun -np 8 ./swift_fixdt_mpi -r $RUNS -t $CPN -g "2 2 2" -f ${INPUT} -m 0.705 -w 6000 -z 300 -d 1e-8
+    bsub -oo ${PREFIX}_${QUEUE}_8x${CPN}.dump -q ${QUEUE} -P ${PROJECT} -x -W 02:00 -n 8 -R "span[ptile=1]" mpirun -np 8 ./swift_mpi -c $FINALTIME -t $CPN -g "2 2 2" -f ${INPUT} -m 0.705 -w 6000 -z 300 -d 1e-7 -e 0.01
 fi

 if [ ! -e ${PREFIX}_${QUEUE}_16x${cpu}.dump ]
 then
-    bsub -oo ${PREFIX}_${QUEUE}_16x${CPN}.dump -q ${QUEUE} -P ${PROJECT} -x -W 02:00 -n 16 -R "span[ptile=1]" mpirun -np 16 ./swift_fixdt_mpi -r $RUNS -t $CPN -g "4 2 2" -f ${INPUT} -m 0.705 -w 6000 -z 300 -d 1e-8
+    bsub -oo ${PREFIX}_${QUEUE}_16x${CPN}.dump -q ${QUEUE} -P ${PROJECT} -x -W 02:00 -n 16 -R "span[ptile=1]" mpirun -np 16 ./swift_mpi -c $FINALTIME -t $CPN -g "4 2 2" -f ${INPUT} -m 0.705 -w 6000 -z 300 -d 1e-7 -e 0.01
 fi

 if [ ! -e ${PREFIX}_${QUEUE}_32x${cpu}.dump ]
 then
-    bsub -oo ${PREFIX}_${QUEUE}_32x${CPN}.dump -q ${QUEUE} -P ${PROJECT} -x -W 02:00 -n 32 -R "span[ptile=1]" mpirun -np 32 ./swift_fixdt_mpi -r $RUNS -t $CPN -g "4 4 2" -f ${INPUT} -m 0.705 -w 6000 -z 300 -d 1e-8
+    bsub -oo ${PREFIX}_${QUEUE}_32x${CPN}.dump -q ${QUEUE} -P ${PROJECT} -x -W 02:00 -n 32 -R "span[ptile=1]" mpirun -np 32 ./swift_mpi -c $FINALTIME -t $CPN -g "4 4 2" -f ${INPUT} -m 0.705 -w 6000 -z 300 -d 1e-7 -e 0.01
 fi

 if [ ! -e ${PREFIX}_${QUEUE}_64x${cpu}.dump ]
 then
-    bsub -oo ${PREFIX}_${QUEUE}_64x${CPN}.dump -q ${QUEUE} -P ${PROJECT} -x -W 02:00 -n 64 -R "span[ptile=1]" mpirun -np 64 ./swift_fixdt_mpi -r $RUNS -t $CPN -g "4 4 4" -f ${INPUT} -m 0.705 -w 6000 -z 300 -d 1e-8
+    bsub -oo ${PREFIX}_${QUEUE}_64x${CPN}.dump -q ${QUEUE} -P ${PROJECT} -x -W 02:00 -n 64 -R "span[ptile=1]" mpirun -np 64 ./swift_mpi -c $FINALTIME -t $CPN -g "4 4 4" -f ${INPUT} -m 0.705 -w 6000 -z 300 -d 1e-7 -e 0.01
 fi

 if [ ! -e ${PREFIX}_${QUEUE}_128x${cpu}.dump ]
 then
-    bsub -oo ${PREFIX}_${QUEUE}_128x${CPN}.dump -q ${QUEUE} -P ${PROJECT} -x -W 02:00 -n 128 -R "span[ptile=1]" mpirun -np 128 ./swift_fixdt_mpi -r $RUNS -t $CPN -g "8 4 4" -f ${INPUT} -m 0.705 -w 6000 -z 300 -d 1e-8
+    bsub -oo ${PREFIX}_${QUEUE}_128x${CPN}.dump -q ${QUEUE} -P ${PROJECT} -x -W 02:00 -n 128 -R "span[ptile=1]" mpirun -np 128 ./swift_mpi -c $FINALTIME -t $CPN -g "8 4 4" -f ${INPUT} -m 0.705 -w 6000 -z 300 -d 1e-7 -e 0.01
 fi