From 3fd41c714db0e17cd3beb76b94941cef94fe6234 Mon Sep 17 00:00:00 2001
From: Guillermo Lara
Date: Tue, 9 Jul 2024 18:01:34 +0000
Subject: [PATCH] Add Urania machine

---
 support/Machines/Urania.yaml    | 11 ++++++
 support/SubmitScripts/Urania.sh | 65 +++++++++++++++++++++++++++++++++
 2 files changed, 76 insertions(+)
 create mode 100644 support/Machines/Urania.yaml
 create mode 100644 support/SubmitScripts/Urania.sh

diff --git a/support/Machines/Urania.yaml b/support/Machines/Urania.yaml
new file mode 100644
index 0000000000000..786d27fcf9305
--- /dev/null
+++ b/support/Machines/Urania.yaml
@@ -0,0 +1,11 @@
+# Distributed under the MIT License.
+# See LICENSE.txt for details.
+
+Machine:
+  Name: Urania
+  Description: |
+    Supercomputer at the Max Planck Computing and Data Facility.
+  DefaultProcsPerNode: 72
+  DefaultQueue: "p.urania"
+  DefaultTimeLimit: "1-00:00:00"
+  LaunchCommandSingleNode: ["srun", "-n", "1"]
diff --git a/support/SubmitScripts/Urania.sh b/support/SubmitScripts/Urania.sh
new file mode 100644
index 0000000000000..906920be95b46
--- /dev/null
+++ b/support/SubmitScripts/Urania.sh
@@ -0,0 +1,65 @@
+{% extends "SubmitTemplateBase.sh" %}
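
# Note on the core layout used in this script: each Urania node provides
# 72 cores (see DefaultProcsPerNode and --cpus-per-task below). The run
# command reserves two cores per node (35 and 71) for Charm++ communication
# threads via +commap, so CHARM_PPN is set to SLURM_CPUS_PER_TASK - 2 = 70
# worker threads, pinned to cores 0-34 and 36-70 via +pemap.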