Skip to content

Commit

Permalink
Add Urania machine
Browse files Browse the repository at this point in the history
  • Loading branch information
guilara committed Jul 30, 2024
1 parent 801b12a commit 3fd41c7
Show file tree
Hide file tree
Showing 2 changed files with 76 additions and 0 deletions.
11 changes: 11 additions & 0 deletions support/Machines/Urania.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# Distributed under the MIT License.
# See LICENSE.txt for details.

Machine:
  Name: Urania
  Description: |
    Supercomputer at the Max Planck Computing and Data Facility.
  # Urania nodes expose 72 CPUs per node (see --cpus-per-task=72 in the
  # corresponding submit script).
  DefaultProcsPerNode: 72
  DefaultQueue: "p.urania"
  DefaultTimeLimit: "1-00:00:00"
  LaunchCommandSingleNode: ["srun", "-n", "1"]
65 changes: 65 additions & 0 deletions support/SubmitScripts/Urania.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
{% extends "SubmitTemplateBase.sh" %}

# Distributed under the MIT License.
# See LICENSE.txt for details.

# Supercomputer at the Max Planck Computing and Data Facility.
# More information:
# https://docs.mpcdf.mpg.de/doc/computing/clusters/systems/Gravitational_Physics_ACR.html

{% block head %}
# Standard output and error:
#SBATCH -o ./tjob.out.%j
#SBATCH -e ./tjob.err.%j
# Initial working directory:
#SBATCH -D ./
# Number of nodes; one task (Charm++ SMP process) per node:
#SBATCH --nodes {{ num_nodes | default(1) }}
#SBATCH --ntasks-per-node=1
#SBATCH --ntasks-per-core=1
#SBATCH --cpus-per-task=72
# Memory usage [MB] of the job is required:
# NOTE(review): 240000 MB per node requested — confirm this matches the
# available memory on Urania nodes.
#SBATCH --mem=240000
# Wall clock limit:
#SBATCH -t {{ time_limit | default("1-00:00:00") }}
# Partition (queue) to submit to:
#SBATCH -p {{ queue | default("p.debug") }}
{% endblock %}

{% block charm_ppn %}
# Reserve two cores per node for Charm++ communication threads, so the
# number of worker threads (PEs) per process is CPUs-per-task minus 2.
# This matches the two cores given to '+commap' in the run command below.
CHARM_PPN=$(expr ${SLURM_CPUS_PER_TASK} - 2)
{% endblock %}

{% block list_modules %}
# Load compiler and MPI modules with explicit version specifications,
# consistently with the versions used to build the executable.
module purge
module load gcc/11
module load impi/2021.7
module load boost/1.79
module load gsl/1.16
module load cmake/3.26
module load hdf5-serial/1.12.2
module load anaconda/3/2021.11

# Load Spack environment with the remaining dependencies.
# NOTE(review): these are user-specific paths under /u/guilara — consider
# moving the Spack environment and Charm++ build to a shared location so
# other users can run this submit script unchanged.
source /u/guilara/repos/spack/share/spack/setup-env.sh
spack env activate env3_spectre_impi

# Define Charm++ paths (MPI SMP build, per the directory name)
export CHARM_ROOT=/u/guilara/charm_impi_2/mpi-linux-x86_64-smp
export PATH=$PATH:/u/guilara/charm_impi_2/mpi-linux-x86_64-smp/bin

# SpECTRE directories
export SPECTRE_HOME=/u/guilara/repos/spectre
export SPECTRE_BUILD_DIR=${SPECTRE_HOME}/build_develop
# Load the Python virtual environment in the SpECTRE repository
source $SPECTRE_HOME/env/bin/activate
{% endblock %}

{% block run_command %}
# Launch one Charm++ SMP process per node. With 72 CPUs per task and
# CHARM_PPN = 70, the worker threads are pinned to cores 0-34 and 36-70
# (70 cores total) and the two communication threads to cores 35 and 71.
srun -n ${SLURM_NTASKS} ${SPECTRE_EXECUTABLE} \
--input-file ${SPECTRE_INPUT_FILE} \
++ppn ${CHARM_PPN} +pemap 0-34,36-70 +commap 35,71 \
${SPECTRE_CHECKPOINT:+ +restart "${SPECTRE_CHECKPOINT}"} > tjob.out
{% endblock %}

0 comments on commit 3fd41c7

Please sign in to comment.