Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Rolling mlserver back from 1.3.5 to 1.3.2 #93

Open
wants to merge 17 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 12 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
123 changes: 61 additions & 62 deletions mlserver/rockcraft.yaml
Original file line number Diff line number Diff line change
@@ -1,25 +1,26 @@
# Based on https://github.com/SeldonIO/MLServer/blob/1.3.5/Dockerfile
# Based on https://github.com/SeldonIO/MLServer/blob/1.3.2/Dockerfile
# Operator version is 1.17.1; the version for this rock can be found in
# https://github.com/SeldonIO/seldon-core/blob/v1.17.1/operator/config/manager/configmap.yaml
name: mlserver
summary: An image for Seldon MLServer Huggingface
summary: An image for Seldon MLServer
description: |
This image is used as part of the Charmed Kubeflow product.
version: 1.3.5_20.04_1 # <upstream-version>_<base-version>_<Charmed-KF-version>
version: 1.3.2
license: Apache-2.0
base: ubuntu:20.04
base: ubuntu@20.04
run-user: _daemon_
services:
mlserver:
override: replace
summary: "mlserver service"
startup: enabled
command: bash -c 'export PATH=/opt/conda/bin/:/opt/mlserver/.local/bin:${PATH}:/usr/bin && export PYTHONPATH=/opt/mlserver/.local/lib/python3.8/site-packages/:${PYTHONPATH} && eval $(/opt/conda/bin/conda shell.bash hook 2> /dev/null) && mlserver start ${MLSERVER_MODELS_DIR}'
command: bash -c 'source venv/bin/activate && mlserver start ${MLSERVER_MODELS_DIR}'
working-dir: "/opt/mlserver"
user: "mlserver"
environment:
MLSERVER_ENV_TARBALL: "/mnt/models/environment.tar.gz"
PATH: "$PATH:/opt/mlserver/venv/bin"
MLSERVER_MODELS_DIR: "/mnt/models/"
LD_LIBRARY_PATH: "/usr/local/nvidia/lib64:/opt/conda/lib/python3.8/site-packages/nvidia/cuda_runtime/lib:$LD_LIBRARY_PATH"
LD_LIBRARY_PATH: "/opt/mlserver/venv/lib/python3.8/site-packages/nvidia/cuda_runtime/lib:$LD_LIBRARY_PATH"
TRANSFORMERS_CACHE: "/opt/mlserver/.cache"
NUMBA_CACHE_DIR: "/opt/mlserver/.cache"
platforms:
Expand All @@ -30,80 +31,66 @@ parts:
plugin: nil
source: https://github.com/SeldonIO/MLServer
source-type: git
source-tag: 1.3.5
build-packages:
source-tag: 1.3.2
overlay-packages:
- python3-dev
- python3-distutils
build-packages:
- python3-setuptools
- python3-venv
- python3-pip
# TO-DO: Verify need for the packages below
# - libgomp
# - mesa-libGL
# - glib2-devel
# - shadow-utils
- ffmpeg
override-build: |

mkdir -p ${CRAFT_PART_INSTALL}/opt/mlserver/dist
mkdir -p ${CRAFT_PART_INSTALL}/opt/mlserver/requirements
cp ${CRAFT_PART_SRC}/setup.py .
cp ${CRAFT_PART_SRC}/MANIFEST.in .
cp ${CRAFT_PART_SRC}/README.md .

sed -i \
-e 's/\bpython\b/python3.8/g' \
hack/build-wheels.sh

./hack/build-wheels.sh ${CRAFT_PART_INSTALL}/opt/mlserver/dist
cp -R ./requirements ${CRAFT_PART_INSTALL}/opt/mlserver

override-stage: |
export PYTHON_VERSION="3.8.16"
export CONDA_VERSION="22.11.1"
export MINIFORGE_VERSION="${CONDA_VERSION}-4"
export MLSERVER_PATH=opt/mlserver
export CONDA_PATH=opt/conda
export PATH=/opt/mlserver/.local/bin:/opt/conda/bin:$PATH

# Install Conda, Python 3.8 and FFmpeg
curl -L -o ~/miniforge3.sh https://github.com/conda-forge/miniforge/releases/download/${MINIFORGE_VERSION}/Miniforge3-${MINIFORGE_VERSION}-Linux-x86_64.sh
chmod +x ~/miniforge3.sh
bash ~/miniforge3.sh -b -u -p ${CONDA_PATH}
rm ~/miniforge3.sh
${CONDA_PATH}/bin/conda install --yes conda=${CONDA_VERSION} python=${PYTHON_VERSION} ffmpeg
${CONDA_PATH}/bin/conda clean -tipy
mkdir -p etc/profile.d
ln -sf ${CONDA_PATH}/etc/profile.d/conda.sh etc/profile.d/conda.sh
echo ". ${CONDA_PATH}/etc/profile.d/conda.sh" >> ~/.bashrc
echo "PATH=${PATH}" >> ~/.bashrc
${CONDA_PATH}/bin/conda init bash
echo "conda activate base" >> ~/.bashrc
chgrp -R root opt/conda && chmod -R g+rw opt/conda

# install required wheels
export PATH=/opt/mlserver/venv/bin:$PATH

# Create and activate a virtual environment
mkdir -p ${MLSERVER_PATH}
mkdir -p ./wheels
cp -p ${CRAFT_PART_INSTALL}/opt/mlserver/dist/mlserver-*.whl ./wheels
cp -p ${CRAFT_PART_INSTALL}/opt/mlserver/dist/mlserver_huggingface-*.whl ./wheels
cp -p ${CRAFT_PART_INSTALL}/opt/mlserver/dist/mlserver_mlflow-*.whl ./wheels
cp -p ${CRAFT_PART_INSTALL}/opt/mlserver/dist/mlserver_sklearn-*.whl ./wheels
cp -p ${CRAFT_PART_INSTALL}/opt/mlserver/dist/mlserver_xgboost-*.whl ./wheels

# setup pip to be from conda
. ${CONDA_PATH}/etc/profile.d/conda.sh
pip install --prefix ${MLSERVER_PATH}/.local --upgrade pip wheel 'setuptools<65.6.0'
pip install --prefix ${MLSERVER_PATH}/.local $(ls "./wheels/mlserver-"*.whl)
pip install --prefix ${MLSERVER_PATH}/.local $(ls "./wheels/mlserver_huggingface-"*.whl)
pip install --prefix ${MLSERVER_PATH}/.local $(ls "./wheels/mlserver_mlflow-"*.whl)
pip install --prefix ${MLSERVER_PATH}/.local $(ls "./wheels/mlserver_sklearn-"*.whl)
pip install --prefix ${MLSERVER_PATH}/.local $(ls "./wheels/mlserver_xgboost-"*.whl)
chown -R root:root ${MLSERVER_PATH} && chmod -R 777 ${MLSERVER_PATH}

# conda writes shebangs with its path everywhere, and in crafting, that will be, for example:
# #!/root/stage/opt/conda/...
#
# Snip off the /root/stage part
bash -c "grep -R -E '/root/stage' opt/ 2>/dev/null | grep -v Bin | awk '{split(\$0,out,\":\"); print out[1]}' | uniq | xargs -I{} sed -i -e 's/\/root\/stage//' {}"
python3.8 -m venv ${MLSERVER_PATH}/venv
source ${MLSERVER_PATH}/venv/bin/activate

# Upgrade pip and install setuptools and wheel
pip install --upgrade pip wheel 'setuptools<65.6.0'

# Install required wheels
#mkdir -p ./wheels
#cp -p ${CRAFT_PART_INSTALL}/opt/mlserver/dist/mlserver{,-huggingface,-mlflow,-sklearn,-xgboost}-*.whl ./wheels

# replace first line of mlserver script with reference to installed Conda python
export CONDA_PYTHON="#\!\/opt\/conda\/bin\/python"
sed -i "1s/.*/${CONDA_PYTHON}/" ${MLSERVER_PATH}/.local/bin/mlserver

pip install --no-cache-dir $(ls "${CRAFT_PART_INSTALL}/opt/mlserver/dist/mlserver-"*.whl)
pip install --no-cache-dir $(ls "${CRAFT_PART_INSTALL}/opt/mlserver/dist/mlserver_huggingface-"*.whl)
pip install --no-cache-dir $(ls "${CRAFT_PART_INSTALL}/opt/mlserver/dist/mlserver_mlflow-"*.whl)
pip install --no-cache-dir $(ls "${CRAFT_PART_INSTALL}/opt/mlserver/dist/mlserver_sklearn-"*.whl)
pip install --no-cache-dir $(ls "${CRAFT_PART_INSTALL}/opt/mlserver/dist/mlserver_xgboost-"*.whl)
pip install --no-cache-dir -r ${CRAFT_PART_INSTALL}/opt/mlserver/requirements/docker.txt

# clean wheels
rm -rf ./wheels
# Clear pip cache
pip cache purge
rm -rf /root/.cache/pip

# Adjust permissions
chown -R root:root ${MLSERVER_PATH} && chmod -R 777 ${MLSERVER_PATH}

BON4 marked this conversation as resolved.
Show resolved Hide resolved
# Update mlserver script to reference the correct Python executable
export PYTHON_PATH="/opt/mlserver/venv/bin/python3.8"
sed -i "1s|.*|#!${PYTHON_PATH}|" ${MLSERVER_PATH}/venv/bin/mlserver

override-prime: |
# copy all artifacts
cp -rp ${CRAFT_STAGE}/opt .
Expand All @@ -116,6 +103,18 @@ parts:
cp ${CRAFT_PART_SRC}/hack/generate_dotenv.py hack/
cp ${CRAFT_PART_SRC}/hack/activate-env.sh hack/

# non-root user for this rock should be 'mlserver'
non-root-user:
plugin: nil
after: [ mlserver ]
overlay-script: |
groupadd -R $CRAFT_OVERLAY -g 1000 mlserver
useradd -R $CRAFT_OVERLAY -M -r -u 1000 -g mlserver mlserver
override-prime: |
craftctl default
chown -R 1000:0 opt/mlserver
chmod -R 776 opt/mlserver

security-team-requirement:
plugin: nil
override-build: |
Expand Down
52 changes: 23 additions & 29 deletions mlserver/tests/test_rock.py
BON4 marked this conversation as resolved.
Show resolved Hide resolved
Original file line number Diff line number Diff line change
@@ -1,42 +1,36 @@
# Copyright 2022 Canonical Ltd.
# Copyright 2024 Canonical Ltd.
# See LICENSE file for licensing details.
#
#

from pathlib import Path

import os
import logging
import random
import pytest
import string
import subprocess
import yaml

from pytest_operator.plugin import OpsTest
from charmed_kubeflow_chisme.rock import CheckRock

@pytest.fixture()
def rock_test_env(tmpdir):
"""Yields a temporary directory and random docker container name, then cleans them up after."""
container_name = "".join([str(i) for i in random.choices(string.ascii_lowercase, k=8)])
yield tmpdir, container_name

try:
subprocess.run(["docker", "rm", container_name])
except Exception:
pass
# tmpdir fixture we use here should clean up the other files for us

@pytest.mark.abort_on_fail
def test_rock(ops_test: OpsTest, rock_test_env):
def test_rock():
"""Test rock."""
temp_dir, container_name = rock_test_env
check_rock = CheckRock("rockcraft.yaml")
rock_image = check_rock.get_image_name()
rock_image = check_rock.get_name()
rock_version = check_rock.get_version()
LOCAL_ROCK_IMAGE = f"{check_rock.get_image_name()}:{check_rock.get_version()}"
LOCAL_ROCK_IMAGE = f"{rock_image}:{rock_version}"

# assert the rock contains the expected files
subprocess.run(
[
"docker",
"run",
"--rm",
LOCAL_ROCK_IMAGE,
"exec",
"ls",
"-la",
"/licenses/license.txt",
],
check=True,
)

# create rock filesystem
subprocess.run(["docker", "run", LOCAL_ROCK_IMAGE, "exec", "ls", "-la", "/opt/mlserver/.local/lib/python3.8/site-packages/mlserver"], check=True)
subprocess.run(["docker", "run", LOCAL_ROCK_IMAGE, "exec", "ls", "-la", "/opt/mlserver/.local/bin/mlserver"], check=True)
subprocess.run(
["docker", "run", "--rm", LOCAL_ROCK_IMAGE, "exec", "ls", "-la", "/opt/mlserver"],
check=True,
)
8 changes: 8 additions & 0 deletions mlserver/tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -48,3 +48,11 @@ deps =
commands =
# run rock tests
pytest -v --tb native --show-capture=all --log-cli-level=INFO {posargs} {toxinidir}/tests

[testenv:integration]
passenv = *
allowlist_externals =
echo
commands =
# TODO: Implement integration tests here
echo "WARNING: This is a placeholder test - no test is implemented here."
Loading