Skip to content

Commit

Permalink
Publishes matlab-deps support for MATLAB R2024b.
Browse files Browse the repository at this point in the history
  • Loading branch information
epaganon authored and Prabhakar Kumar committed Sep 2, 2024
1 parent c9ea7d8 commit 9dd6ed2
Show file tree
Hide file tree
Showing 16 changed files with 708 additions and 1 deletion.
2 changes: 1 addition & 1 deletion .github/workflows/matlab-deps-r2024a-ubuntu22.04.yml
Original file line number Diff line number Diff line change
Expand Up @@ -27,4 +27,4 @@ jobs:
matlab_release_tag: 'r2024a'
os_info_tag: 'ubuntu22.04'
is_default_os: true
should_add_latest_tag: true
should_add_latest_tag: false
30 changes: 30 additions & 0 deletions .github/workflows/matlab-deps-r2024b-ubi8.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
# Copyright 2024 The MathWorks, Inc.
# Build & Publish matlab-deps/r2024b/ubi8
name: matlab-deps-r2024b-ubi8

# Define when builds will occur:
on:
  # Run workflow when there is a push to the 'main' branch & push includes changes to any files in described path
  push:
    branches:
      - 'main'
    paths:
      - 'matlab-deps/r2024b/ubi8/**'

  # Run at 00:00 on every Monday (1st Day of the Week) (See: crontab.guru)
  schedule:
    - cron: '0 0 * * 1'

  # Allow manual triggering from the Actions tab.
  workflow_dispatch:

jobs:
  build-and-publish-docker-image:
    # Delegate the build to the shared reusable workflow.
    uses: ./.github/workflows/build-and-publish-docker-image.yml
    secrets: inherit
    with:
      docker_build_context: './matlab-deps/r2024b/ubi8'
      base_image_name: mathworks/matlab-deps
      matlab_release_tag: 'r2024b'
      os_info_tag: 'ubi8'
      # ubuntu22.04 is the default OS for r2024b, not ubi8.
      is_default_os: false
      should_add_latest_tag: false
30 changes: 30 additions & 0 deletions .github/workflows/matlab-deps-r2024b-ubi9.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
# Copyright 2024 The MathWorks, Inc.
# Build & Publish matlab-deps/r2024b/ubi9
name: matlab-deps-r2024b-ubi9

# Define when builds will occur:
on:
  # Run workflow when there is a push to the 'main' branch & push includes changes to any files in described path
  push:
    branches:
      - 'main'
    paths:
      - 'matlab-deps/r2024b/ubi9/**'

  # Run at 00:00 on every Monday (1st Day of the Week) (See: crontab.guru)
  schedule:
    - cron: '0 0 * * 1'

  # Allow manual triggering from the Actions tab.
  workflow_dispatch:

jobs:
  build-and-publish-docker-image:
    # Delegate the build to the shared reusable workflow.
    uses: ./.github/workflows/build-and-publish-docker-image.yml
    secrets: inherit
    with:
      docker_build_context: './matlab-deps/r2024b/ubi9'
      base_image_name: mathworks/matlab-deps
      matlab_release_tag: 'r2024b'
      os_info_tag: 'ubi9'
      # ubuntu22.04 is the default OS for r2024b, not ubi9.
      is_default_os: false
      should_add_latest_tag: false
30 changes: 30 additions & 0 deletions .github/workflows/matlab-deps-r2024b-ubuntu20.04.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
# Copyright 2024 The MathWorks, Inc.
# Build & Publish matlab-deps/r2024b/ubuntu20.04
name: matlab-deps-r2024b-ubuntu20.04

# Define when builds will occur:
on:
  # Run workflow when there is a push to the 'main' branch & push includes changes to any files in described path
  push:
    branches:
      - 'main'
    paths:
      - 'matlab-deps/r2024b/ubuntu20.04/**'

  # Run at 00:00 on every Monday (1st Day of the Week) (See: crontab.guru)
  schedule:
    - cron: '0 0 * * 1'

  # Allow manual triggering from the Actions tab.
  workflow_dispatch:

jobs:
  build-and-publish-docker-image:
    # Delegate the build to the shared reusable workflow.
    uses: ./.github/workflows/build-and-publish-docker-image.yml
    secrets: inherit
    with:
      docker_build_context: './matlab-deps/r2024b/ubuntu20.04'
      base_image_name: mathworks/matlab-deps
      matlab_release_tag: 'r2024b'
      os_info_tag: 'ubuntu20.04'
      # ubuntu22.04 is the default OS for r2024b, not ubuntu20.04.
      is_default_os: false
      should_add_latest_tag: false
30 changes: 30 additions & 0 deletions .github/workflows/matlab-deps-r2024b-ubuntu22.04.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
# Copyright 2024 The MathWorks, Inc.
# Build & Publish matlab-deps/r2024b/ubuntu22.04
name: matlab-deps-r2024b-ubuntu22.04

# Define when builds will occur:
on:
  # Run workflow when there is a push to the 'main' branch & push includes changes to any files in described path
  push:
    branches:
      - 'main'
    paths:
      - 'matlab-deps/r2024b/ubuntu22.04/**'

  # Run at 00:00 on every Monday (1st Day of the Week) (See: crontab.guru)
  schedule:
    - cron: '0 0 * * 1'

  # Allow manual triggering from the Actions tab.
  workflow_dispatch:

jobs:
  build-and-publish-docker-image:
    # Delegate the build to the shared reusable workflow.
    uses: ./.github/workflows/build-and-publish-docker-image.yml
    secrets: inherit
    with:
      docker_build_context: './matlab-deps/r2024b/ubuntu22.04'
      base_image_name: mathworks/matlab-deps
      matlab_release_tag: 'r2024b'
      os_info_tag: 'ubuntu22.04'
      # ubuntu22.04 is the default OS for r2024b, so it also receives the
      # 'latest' tag on the mathworks/matlab-deps repository.
      is_default_os: true
      should_add_latest_tag: true
37 changes: 37 additions & 0 deletions matlab-deps/r2024b/aws-batch/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
# Copyright 2024 The MathWorks, Inc.

FROM nvidia/cuda:12.2.2-base-ubuntu22.04

LABEL maintainer="The MathWorks"

# Suppress interactive apt prompts during the image build.
ENV DEBIAN_FRONTEND="noninteractive" TZ="Etc/UTC"

# base-dependencies.txt lists libraries required by MATLAB, except for:
# csh - Support MATLAB's Engine API for C and Fortran. See https://www.mathworks.com/help/matlab/matlab_external/introducing-matlab-engine.html
# gcc, g++, gfortran - Support Simulink code generation and mex with gcc, g++, or gfortran.
# locales, locales-all - Provide extended locales support
# python3, python3-pip - Python and pip are to run/install the AWS CLI.
# unzip, zip - Used to stage input/output data.
COPY base-dependencies.txt /tmp/base-dependencies.txt

# Install dependencies and clear the apt caches in the same layer to keep the image small.
RUN apt-get update && apt-get install --no-install-recommends -y $(cat /tmp/base-dependencies.txt) \
    && apt-get clean && apt-get -y autoremove && rm -rf /var/lib/apt/lists/*

WORKDIR /

# Install the AWS CLI to use for staging input and output data.
# --no-cache-dir keeps pip's download cache out of the image layer.
RUN pip3 install --no-cache-dir awscli

# Create directories for the MATLAB install and for the JobStorageLocation.
# Use the ENV key=value form; the space-separated form is deprecated.
ENV JOB_STORAGE_LOCATION=/usr/local/JobStorageLocation
ENV MATLAB_INSTALL_LOCATION=/usr/local/matlab
# Create both directories and make the JobStorageLocation writable in a single layer.
RUN mkdir -p ${MATLAB_INSTALL_LOCATION} ${JOB_STORAGE_LOCATION} \
    && chmod -R +wx ${JOB_STORAGE_LOCATION}

# To avoid inadvertently polluting the / directory, use the JobStorageLocation
# while running MATLAB.
WORKDIR ${JOB_STORAGE_LOCATION}

COPY stageDataAndRunJob.sh /usr/local/stageDataAndRunJob.sh
RUN chmod +x /usr/local/stageDataAndRunJob.sh
ENTRYPOINT ["/usr/local/stageDataAndRunJob.sh"]
60 changes: 60 additions & 0 deletions matlab-deps/r2024b/aws-batch/base-dependencies.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
ca-certificates
libasound2
libatomic1
libc6
libcairo-gobject2
libcairo2
libcap2
libcrypt1
libcups2
libdrm2
libfontconfig1
libfribidi0
libgbm1
libgdk-pixbuf-2.0-0
libgl1
libglib2.0-0
libgstreamer-plugins-base1.0-0
libgstreamer1.0-0
libgtk-3-0
libice6
libltdl7
libnettle8
libnspr4
libnss3
libpam0g
libpango-1.0-0
libpangocairo-1.0-0
libpangoft2-1.0-0
libpixman-1-0
libsndfile1
libtirpc3
libudev1
libuuid1
libwayland-client0
libxcomposite1
libxcursor1
libxdamage1
libxfixes3
libxfont2
libxft2
libxinerama1
libxrandr2
libxt6
libxtst6
libxxf86vm1
locales
locales-all
make
net-tools
procps
sudo
unzip
zlib1g
csh
g++
gcc
gfortran
python3
python3-pip
zip
122 changes: 122 additions & 0 deletions matlab-deps/r2024b/aws-batch/stageDataAndRunJob.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
#!/bin/sh
#
# Makes the file (passed as 1st arg) executable and runs it
#
# This script uses the following environment variables set by the submit MATLAB code:
# PARALLEL_SERVER_S3_BUCKET - The S3 Bucket.
# PARALLEL_SERVER_S3_PREFIX - The prefix under which the job's files are stored in the S3 bucket.
# PARALLEL_SERVER_JOB_LOCATION - The job's storage location.
# PARALLEL_SERVER_TASK_ID_OFFSET - The offset of the Task's ID relative to the value of AWS_BATCH_JOB_ARRAY_INDEX.

# Copyright 2024 The MathWorks, Inc.

# The first argument is the path to the job wrapper.
JOB_WRAPPER=${1}

if [ -n "${AWS_BATCH_JOB_ARRAY_INDEX}" ]; then
    # This is an array job.
    # The Task ID is equal to AWS_BATCH_JOB_ARRAY_INDEX + PARALLEL_SERVER_TASK_ID_OFFSET.
    TASK_ID=$((AWS_BATCH_JOB_ARRAY_INDEX + PARALLEL_SERVER_TASK_ID_OFFSET))
else
    # This is not an array job.
    # The Task ID is equal to PARALLEL_SERVER_TASK_ID_OFFSET.
    TASK_ID=${PARALLEL_SERVER_TASK_ID_OFFSET}
fi
echo "Task ID is ${TASK_ID}"

# Export PARALLEL_SERVER_TASK_LOCATION because parallel.cluster.generic.independentDecodeFcn requires it.
export PARALLEL_SERVER_TASK_LOCATION="${PARALLEL_SERVER_JOB_LOCATION}/Task${TASK_ID}";

# Determine which input file to copy from S3 for this task.
LIST_INPUT_FILES_CMD="aws s3api list-objects-v2 --bucket ${PARALLEL_SERVER_S3_BUCKET} --prefix ${PARALLEL_SERVER_S3_PREFIX}/stageIn/${PARALLEL_SERVER_JOB_LOCATION}/Task --query 'Contents[].{Key: Key}' --output text"
echo "Listing all the task input files for this MATLAB job using the command ${LIST_INPUT_FILES_CMD}"
# eval is required so the quoted --query argument embedded in the string is parsed correctly.
LIST_INPUT_FILES_OUTPUT=$(eval "${LIST_INPUT_FILES_CMD}")

LIST_INPUT_FILES_EXIT_CODE=${?}
if [ "${LIST_INPUT_FILES_EXIT_CODE}" -ne 0 ] ; then
    echo "Listing the task input files failed. Exiting with code ${LIST_INPUT_FILES_EXIT_CODE}"
    exit "${LIST_INPUT_FILES_EXIT_CODE}"
fi

# Input files are either per-task ("TaskN.in.mat") or shared by a range of
# tasks ("TaskN-M.in.mat"). Dots are escaped so they match literally.
GROUPED_TASK_INPUT_FILES=$(echo "${LIST_INPUT_FILES_OUTPUT}" | grep -e "Task[0-9]\+-[0-9]\+\.in\.mat" --only-matching)
SINGLE_TASK_INPUT_FILES=$(echo "${LIST_INPUT_FILES_OUTPUT}" | grep -e "Task[0-9]\+\.in\.mat" --only-matching)

# Unquoted expansion is intentional: the variable holds a whitespace-separated
# list of file names, and field splitting iterates over them.
for FILE in ${SINGLE_TASK_INPUT_FILES}
do
    if [ "${FILE}" = "Task${TASK_ID}.in.mat" ] ; then
        INPUT_FILE_TO_DOWNLOAD=${FILE}
        break
    fi
done

if [ -z "${INPUT_FILE_TO_DOWNLOAD}" ] ; then
    # No exact per-task file; look for a grouped file whose range covers this task.
    for FILE in ${GROUPED_TASK_INPUT_FILES}
    do
        # Extract the start and end ranges
        START_RANGE=$(echo "${FILE}" | grep -e "[0-9]\+" --only-matching | head -n 1)
        END_RANGE=$(echo "${FILE}" | grep -e "[0-9]\+" --only-matching | tail -n 1)

        if [ "${TASK_ID}" -ge "${START_RANGE}" ] && [ "${TASK_ID}" -le "${END_RANGE}" ]; then
            INPUT_FILE_TO_DOWNLOAD=${FILE}
            break
        fi
    done
fi

if [ -z "${INPUT_FILE_TO_DOWNLOAD}" ] ; then
    echo "We could not find an input file for this Task in S3. Exiting with code 1"
    exit 1;
fi

echo "Determined the task input file for this task to be ${INPUT_FILE_TO_DOWNLOAD}."

# Copy input files from S3 to JobStorageLocation.
JOB_STORAGE_LOCATION="/usr/local/JobStorageLocation"
S3_COPY_CMD="aws s3 cp s3://${PARALLEL_SERVER_S3_BUCKET}/${PARALLEL_SERVER_S3_PREFIX}/stageIn ${JOB_STORAGE_LOCATION}/ \
    --recursive \
    --exclude \"${PARALLEL_SERVER_JOB_LOCATION}/Task*.*\" \
    --include \"${PARALLEL_SERVER_JOB_LOCATION}/${INPUT_FILE_TO_DOWNLOAD}\""

echo "Copying files from S3 to JobStorageLocation using command: ${S3_COPY_CMD}"
eval "${S3_COPY_CMD}"

S3_COPY_EXIT_CODE=${?}
if [ "${S3_COPY_EXIT_CODE}" -ne 0 ] ; then
    echo "Copy of input files from S3 to JobStorageLocation failed. Exiting with code ${S3_COPY_EXIT_CODE}"
    exit "${S3_COPY_EXIT_CODE}"
fi

chmod +x "${JOB_WRAPPER}"

echo "Executing the job wrapper script ${JOB_WRAPPER}"
"${JOB_WRAPPER}"

# Store the exit code from the JobWrapper so we can exit with it later.
JOB_WRAPPER_EXIT_CODE=${?}

# Zip files up to transfer to S3.
ZIP_CMD="zip -jr ${JOB_STORAGE_LOCATION}/${PARALLEL_SERVER_TASK_LOCATION}.zip ${JOB_STORAGE_LOCATION}/${PARALLEL_SERVER_JOB_LOCATION}/* \
    -x \\*.in.mat \
    -x \\*independentJobWrapper.sh"
echo "Zipping task output files using command: ${ZIP_CMD}"
eval "${ZIP_CMD}"

ZIP_CMD_EXIT_CODE=${?}
if [ "${ZIP_CMD_EXIT_CODE}" -ne 0 ] ; then
    echo "Zipping task output files failed. Exiting with exit code ${ZIP_CMD_EXIT_CODE}"
    exit "${ZIP_CMD_EXIT_CODE}"
fi

# Copy zipped job output files to S3
S3_COPY_CMD="aws s3 cp ${JOB_STORAGE_LOCATION}/${PARALLEL_SERVER_TASK_LOCATION}.zip s3://${PARALLEL_SERVER_S3_BUCKET}/${PARALLEL_SERVER_S3_PREFIX}/stageOut/${PARALLEL_SERVER_TASK_LOCATION}.zip"
echo "Copying zipped output files from JobStorageLocation to S3 using command: ${S3_COPY_CMD}"
eval "${S3_COPY_CMD}"

S3_COPY_EXIT_CODE=${?}
if [ "${S3_COPY_EXIT_CODE}" -ne 0 ] ; then
    echo "Copy of output files from JobStorageLocation to S3 failed. Exiting with exit code ${S3_COPY_EXIT_CODE}"
    exit "${S3_COPY_EXIT_CODE}"
fi

echo "Exiting with the exit code received from the JobWrapper: ${JOB_WRAPPER_EXIT_CODE}"
exit "${JOB_WRAPPER_EXIT_CODE}"
28 changes: 28 additions & 0 deletions matlab-deps/r2024b/ubi8/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
# Copyright 2024 The MathWorks, Inc.
FROM registry.access.redhat.com/ubi8/ubi:latest

LABEL maintainer="The MathWorks"

ENV TZ="Etc/UTC"

COPY base-dependencies.txt /tmp/base-dependencies.txt

# Install MATLAB's run-time dependencies and clean the yum cache in a single layer.
RUN yum update --disableplugin=subscription-manager -y \
    && yum install --disableplugin=subscription-manager -y $(cat /tmp/base-dependencies.txt) \
    && yum --disableplugin=subscription-manager clean all -y

# Default to the minimal C locale (ENV key=value form; the space form is deprecated).
ENV LANG=C
# Ensure a populated /etc/machine-id and the X keyboard data directory exist.
RUN [ -s /etc/machine-id ] || dbus-uuidgen > /etc/machine-id
RUN [ -d /usr/share/X11/xkb ] || mkdir -p /usr/share/X11/xkb

# Uncomment the following RUN yum statement to enable code generation capabilities,
# or if you will be compiling your own mex files with gcc, g++, or gfortran.
#
#RUN yum install --disableplugin=subscription-manager -y gcc.x86_64 gcc-c++.x86_64 gcc-gfortran.x86_64 && yum --disableplugin=subscription-manager clean all -y

# Uncomment the following line if you require the fuse filesystem
#RUN yum install --disableplugin=subscription-manager -y fuse-libs.x86_64 && yum --disableplugin=subscription-manager clean all -y

# Uncomment to resolve certain license manager issues
#RUN ln -s /lib64/ld-linux-x86-64.so.2 /lib64/ld-lsb-x86-64.so.3

Loading

0 comments on commit 9dd6ed2

Please sign in to comment.