Merge pull request #1065 from opendatacube/cherrypicks-from18
Cherrypicks from18
SpacemanPaul authored Aug 23, 2024
2 parents 78648aa + 2e8a663 commit a9eacd5
Showing 54 changed files with 3,366 additions and 440 deletions.
11 changes: 9 additions & 2 deletions .dockerignore
@@ -1,3 +1,10 @@
datacube_wms/wms_cfg_local.py
.pytest_cache
*/__pycache__
**/.pytest_cache
**/__pycache__
.hypothesis

venv
.venv

**/.pixi
.git
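The new **/.pytest_cache and **/__pycache__ patterns exclude those directories at any depth, where the old */__pycache__ only matched one level below the context root. A quick way to confirm what still reaches the build context — a throwaway sketch, assuming BuildKit; the Dockerfile.context name is illustrative and not part of this repo:

# Copy the whole build context into a scratch image and list anything the ignore rules should have dropped.
cat > Dockerfile.context <<'EOF'
FROM busybox
COPY . /ctx
RUN find /ctx -name __pycache__ -o -name .pytest_cache -o -name .pixi
EOF
docker build -f Dockerfile.context --progress=plain -t ows-context-check .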
1 change: 0 additions & 1 deletion .env_simple
@@ -14,7 +14,6 @@ POSTGRES_USER=opendatacubeusername
SERVER_DB_USERNAME=opendatacubeusername
POSTGRES_PASSWORD=opendatacubepassword
POSTGRES_DB="odc_postgres,odc_postgis"
READY_PROBE_DB=odc_postgis

#################
# OWS CFG Config
2 changes: 1 addition & 1 deletion .github/workflows/complementary-config-test.yaml
@@ -63,5 +63,5 @@ jobs:
docker compose -f docker-compose.yaml -f docker-compose.cleandb.yaml up -d --wait
docker compose -f docker-compose.yaml -f docker-compose.cleandb.yaml exec -T ows /bin/sh -c "datacube system init; datacube system check"
docker compose -f docker-compose.yaml -f docker-compose.cleandb.yaml exec -T ows /bin/sh -c "curl https://raw.githubusercontent.com/GeoscienceAustralia/dea-config/master/dev/services/wms/inventory.json -o /tmp/inventory.json"
docker compose -f docker-compose.yaml -f docker-compose.db.yaml exec -T ows /bin/sh -c "cd /code; ./compare-cfg.sh"
docker compose -f docker-compose.yaml -f docker-compose.db.yaml exec -T ows /bin/sh -c "cd /code && ./compare-cfg.sh"
docker compose -f docker-compose.yaml -f docker-compose.cleandb.yaml down
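The change from ';' to '&&' makes the chained command fail fast: with ';' the shell presses on after a failed cd and whatever runs next determines the exit status, while '&&' stops at the first failure and returns its non-zero status to the CI step. A minimal illustration with a hypothetical path:

sh -c "cd /no/such/dir; echo 'second command still runs'"     # echo runs despite the failed cd; overall exit status is 0
sh -c "cd /no/such/dir && echo 'second command never runs'"   # stops at the failed cd; overall exit status is non-zero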
4 changes: 2 additions & 2 deletions .github/workflows/lint.yml
@@ -8,25 +8,25 @@ on:
- 'develop-1.9'
paths:
- '**'
- '.github/workflows/lint.yml'
- '!docs/**'
- '!*.rst'
- '!*.md'
- '!datacube_ows/__init__.py'
- '!.github/**'
- '.github/workflows/lint.yml'

push:
branches:
- 'master'
- 'develop-1.9'
paths:
- '**'
- '.github/workflows/lint.yml'
- '!docs/**'
- '!*.rst'
- '!*.md'
- '!datacube_ows/__init__.py'
- '!.github/**'
- '.github/workflows/lint.yml'

jobs:
pylint:
12 changes: 6 additions & 6 deletions .github/workflows/pyspy-profiling.yaml
@@ -8,24 +8,24 @@ on:
- 'develop-1.9'
paths:
- '**'
- '.github/workflows/pyspy-profiling.yaml'
- '!docs/**'
- '!*.rst'
- '!*.md'
- '!datacube_ows/__init__.py'
- '!.github/**'
- '.github/workflows/pyspy-profiling.yaml'

push:
branches:
- 'master'
paths:
- '**'
- '.github/workflows/pyspy-profiling.yaml'
- '!docs/**'
- '!*.rst'
- '!*.md'
- '!datacube_ows/__init__.py'
- '!.github/**'
- '.github/workflows/pyspy-profiling.yaml'

jobs:
build:
@@ -56,9 +56,9 @@ jobs:
export LOCAL_UID=$(id -u $USER)
export LOCAL_GID=$(id -g $USER)
export $(grep -v '^#' .env_simple | xargs)
echo "::set-output name=PID::$(docker inspect --format '{{.State.Pid}}' $(docker inspect -f '{{.Name}}' \
$(docker-compose -f docker-compose.yaml -f docker-compose.db.yaml -f docker-compose.pyspy.yaml ps -q ows) \
| cut -c2-))"
echo "PID=$(docker inspect --format '{{.State.Pid}}' $(docker inspect -f '{{.Name}}' \
$(docker compose -f docker-compose.yaml -f docker-compose.db.yaml -f docker-compose.pyspy.yaml ps -q ows) \
| cut -c2-))" > $GITHUB_OUTPUT
- name: Run py-spy profiling (stage 1 - run profiling service)
timeout-minutes: 1
@@ -68,7 +68,7 @@
export LOCAL_GID=$(id -g $USER)
export $(grep -v '^#' .env_simple | xargs)
docker compose -f docker-compose.yaml -f docker-compose.db.yaml -f docker-compose.pyspy.yaml \
exec -T ows /bin/sh -c "cd /code;./test_urls.sh &"
exec -T ows /bin/sh -c "cd /code && ./test_urls.sh &"
docker compose -f docker-compose.yaml -f docker-compose.db.yaml -f docker-compose.pyspy.yaml \
run pyspy record -f speedscope -o ./artifacts/profile.json --duration 30 \
--pid ${{steps.set-output-container-id.outputs.PID}} --subprocesses
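Besides switching from docker-compose to the docker compose v2 plugin, this step replaces the deprecated ::set-output workflow command with a write to the GITHUB_OUTPUT file. A minimal sketch of the mechanism, with an illustrative PID value:

# Deprecated: emitting a workflow command on stdout.
echo "::set-output name=PID::12345"
# Current: write a key=value line to the file GitHub Actions exposes as $GITHUB_OUTPUT;
# the profiling step above then reads it as ${{steps.set-output-container-id.outputs.PID}}.
echo "PID=12345" >> "$GITHUB_OUTPUT"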
4 changes: 2 additions & 2 deletions .github/workflows/test-prod.yaml
@@ -7,24 +7,24 @@ on:
- 'master'
paths:
- '**'
- '.github/workflows/test-prod.yaml'
- '!docs/**'
- '!*.rst'
- '!*.md'
- '!datacube_ows/__init__.py'
- '!.github/**'
- '.github/workflows/test-prod.yaml'

push:
branches:
- 'master'
paths:
- '**'
- '.github/workflows/test-prod.yaml'
- '!docs/**'
- '!*.rst'
- '!*.md'
- '!datacube_ows/__init__.py'
- '!.github/**'
- '.github/workflows/test-prod.yaml'

env:
ORG: opendatacube
6 changes: 3 additions & 3 deletions .github/workflows/test.yml
@@ -48,14 +48,14 @@ jobs:
# the production image
- name: Build dev OWS image
run: |
docker build \
docker build --build-arg ENVIRONMENT=test \
--tag ${ORG}/${IMAGE}:_builder \
.
- name: Test and lint dev OWS image
run: |
mkdir artifacts
docker run -e LOCAL_UID=$(id -u $USER) -e LOCAL_GID=$(id -g $USER) -v ${PWD}/artifacts:/mnt/artifacts ${ORG}/${IMAGE}:_builder /bin/sh -c "cd /code;./check-code.sh"
docker run -e LOCAL_UID=$(id -u $USER) -e LOCAL_GID=$(id -g $USER) -v ${PWD}/artifacts:/mnt/artifacts ${ORG}/${IMAGE}:_builder /bin/sh -c "cd /code && ./check-code.sh"
mv ./artifacts/coverage.xml ./artifacts/coverage-unit.xml
- name: Dockerized Integration Pytest
@@ -64,7 +64,7 @@
export LOCAL_GID=$(id -g $USER)
export $(grep -v '^#' .env_simple | xargs)
docker compose -f docker-compose.yaml -f docker-compose.db.yaml up -d --wait --build
docker compose -f docker-compose.yaml -f docker-compose.db.yaml exec -T ows /bin/sh -c "cd /code;./check-code-all.sh"
docker compose -f docker-compose.yaml -f docker-compose.db.yaml exec -T ows /bin/sh -c "cd /code && ./check-code-all.sh"
docker compose -f docker-compose.yaml -f docker-compose.db.yaml down
- name: Upload All coverage to Codecov
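The dev image is now built with ENVIRONMENT=test, which the reworked Dockerfile below uses to include the test extras and keep pip and git in the image; without the argument, ENVIRONMENT defaults to deployment and that tooling is stripped. Hypothetical local builds mirroring the two cases (the image tags are illustrative):

docker build --build-arg ENVIRONMENT=test -t datacube-ows:dev .   # test build: installs .[ops,test], keeps pip and git
docker build -t datacube-ows:deploy .                             # deployment build: .[ops] only; source tree, pip and git removed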
96 changes: 42 additions & 54 deletions Dockerfile
@@ -1,92 +1,80 @@
# Note that this is now pinned to a fixed version. Remember to check for new versions periodically.
FROM ghcr.io/osgeo/gdal:ubuntu-small-3.8.5 AS builder
FROM ghcr.io/osgeo/gdal:ubuntu-small-3.9.1 AS builder

# Setup build env for postgresql-client-14
# Environment is test or deployment.
ARG ENVIRONMENT=deployment

# Setup build env for postgresql-client-16
USER root
RUN apt-get update -y \
&& DEBIAN_FRONTEND=noninteractive apt-get install -y --fix-missing --no-install-recommends \
git \
# For pybabel
python3-distutils \
# For Psycopg2
libpq-dev python3-dev \
gcc \
python3-pip \
postgresql-client-14 \
postgresql-client-16 \
# For Pyproj build \
proj-bin proj-data libproj-dev \
proj-bin libproj-dev \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /var/dpkg/* /var/tmp/* /var/log/dpkg.log

ENV GDAL_DISABLE_READDIR_ON_OPEN="EMPTY_DIR"

# Copy source code and install it
WORKDIR /code
COPY . /code

RUN echo "version=\"$(python3 setup.py --version)\"" > datacube_ows/_version.py
RUN pip install --no-cache-dir .[ops,test]

## Only install pydev requirements if arg PYDEV_DEBUG is set to 'yes'
ARG PYDEV_DEBUG="no"
RUN if [ "$PYDEV_DEBUG" = "yes" ]; then \
pip install --no-cache-dir .[dev] \
;fi
WORKDIR /build

RUN pip freeze
RUN python3 -m pip --disable-pip-version-check -q wheel --no-binary psycopg2 psycopg2 \
&& ([ "$ENVIRONMENT" = "deployment" ] || \
python3 -m pip --disable-pip-version-check -q wheel --no-binary pyproj pyproj)

# Should match builder base.
FROM ghcr.io/osgeo/gdal:ubuntu-small-3.8.5
FROM ghcr.io/osgeo/gdal:ubuntu-small-3.9.1

RUN apt-get update -y \
&& DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
# Environment is test or deployment.
ARG ENVIRONMENT=deployment
RUN export DEBIAN_FRONTEND=noninteractive \
&& apt-get update -y \
&& apt-get install -y --no-install-recommends \
git \
gosu \
python3-pip \
tini \
&& ([ "$ENVIRONMENT" = "deployment" ] || \
apt-get install -y --no-install-recommends \
proj-bin) \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /var/dpkg/* /var/tmp/* /var/log/dpkg.log

# Add login-script for UID/GID-remapping.
COPY --chown=root:root --link docker/files/remap-user.sh /usr/local/bin/remap-user.sh

# all the python pip installed libraries
COPY --from=builder /usr/local/lib/python3.10/dist-packages /usr/local/lib/python3.10/dist-packages
COPY --from=builder /usr/lib/python3/dist-packages /usr/lib/python3/dist-packages
COPY --from=builder /usr/lib/python3.10/distutils/* /usr/lib/python3.10/distutils/
# postgres client
COPY --from=builder /usr/lib/postgresql /usr/lib/postgresql
COPY --from=builder /usr/share/postgresql /usr/share/postgresql
# perl5 is used for pg_isready
COPY --from=builder /usr/share/perl5 /usr/share/perl5
COPY --from=builder /usr/bin/pg_isready /usr/bin/pg_isready
# datacube cli
COPY --from=builder /usr/local/bin/datacube /usr/local/bin/datacube
# datacube-ows cli
COPY --from=builder /usr/local/bin/datacube-ows /usr/local/bin/datacube-ows
# datacube-ows-update cli
COPY --from=builder /usr/local/bin/datacube-ows-update /usr/local/bin/datacube-ows-update
# datacube-ows-cfg check
COPY --from=builder /usr/local/bin/datacube-ows-cfg /usr/local/bin/datacube-ows-cfg
# flask cli
COPY --from=builder /usr/local/bin/flask /usr/local/bin/flask
# gunicorn cli
COPY --from=builder /usr/local/bin/gunicorn /usr/local/bin/gunicorn
# pybabel cli
COPY --from=builder /usr/local/bin/pybabel /usr/local/bin/pybabel

# Copy source code and install it
WORKDIR /code
COPY . /code

## Only install pydev requirements if arg PYDEV_DEBUG is set to 'yes'
ARG PYDEV_DEBUG="no"
COPY --from=builder --link /build/*.whl ./
RUN EXTRAS=$([ "$ENVIRONMENT" = "deployment" ] || echo ",test") && \
python3 -m pip --disable-pip-version-check install ./*.whl --break-system-packages && \
rm ./*.whl && \
echo "version=\"$(python3 setup.py --version)\"" > datacube_ows/_version.py && \
python3 -m pip --disable-pip-version-check install --no-cache-dir ".[ops$EXTRAS]" --break-system-packages && \
([ "$PYDEV_DEBUG" != "yes" ] || \
python3 -m pip --disable-pip-version-check install --no-cache-dir .[dev] --break-system-packages) && \
python3 -m pip freeze && \
([ "$ENVIRONMENT" != "deployment" ] || \
(rm -rf /code/* /code/.git* && \
apt-get purge -y \
git \
git-man \
python3-pip))

# Configure user
RUN useradd -m -s /bin/bash ows
WORKDIR "/home/ows"
WORKDIR "/home/ubuntu"

ENV GDAL_DISABLE_READDIR_ON_OPEN="EMPTY_DIR" \
CPL_VSIL_CURL_ALLOWED_EXTENSIONS=".tif, .tiff" \
GDAL_HTTP_MAX_RETRY="10" \
GDAL_HTTP_RETRY_DELAY="1"

RUN chown 1000:100 /dev/shm

ENTRYPOINT ["/usr/local/bin/remap-user.sh"]
CMD ["gunicorn", "-b", "0.0.0.0:8000", "--workers=3", "--threads=2", "-k", "gevent", "--timeout", "121", "--pid", "/home/ows/gunicorn.pid", "--log-level", "info", "--worker-tmp-dir", "/dev/shm", "--config", "python:datacube_ows.gunicorn_config", "datacube_ows.wsgi"]
CMD ["gunicorn", "-b", "0.0.0.0:8000", "--workers=3", "-k", "gevent", "--timeout", "121", "--pid", "/home/ubuntu/gunicorn.pid", "--log-level", "info", "--worker-tmp-dir", "/dev/shm", "--config", "python:datacube_ows.gunicorn_config", "datacube_ows.wsgi"]
26 changes: 26 additions & 0 deletions Dockerfile.micromamba
@@ -0,0 +1,26 @@
FROM mambaorg/micromamba:1.5.8
COPY --chown=$MAMBA_USER:$MAMBA_USER env.micromamba.yaml /tmp/env.yaml
RUN --mount=type=cache,target=/opt/conda/pkgs micromamba install -y -n base -f /tmp/env.yaml && \
micromamba clean --all --yes --force-pkgs-dirs && \
# find /home/mambauser/.mamba/pkgs -type d \( -name test -o -name tests \) -print0 | xargs -0 rm -rf && \
find /opt/conda/lib -type d \( -name test -o -name tests \) -print0 | xargs -0 rm -rf && \
rm -rf /opt/conda/lib/libpython3* /opt/conda/include /opt/conda/share/{gir-1.0,poppler,man}
# TODO: pieces of botocore (98Mb) and scipy (72Mb) can likely be removed

ARG MAMBA_DOCKERFILE_ACTIVATE=1 # (otherwise python will not be found)


COPY --chown=$MAMBA_USER:$MAMBA_USER . /tmp/code

ARG PSEUDO_VERSION # strongly recommended to update based on git describe

RUN SETUPTOOLS_SCM_PRETEND_VERSION_FOR_DATACUBE_OWS=${PSEUDO_VERSION} pip install /tmp/code #-e .[test]
#RUN pip install /code
#python -c 'import uuid; print(uuid.uuid4())' > /tmp/my_uuid

ENV GDAL_DISABLE_READDIR_ON_OPEN="EMPTY_DIR" \
CPL_VSIL_CURL_ALLOWED_EXTENSIONS=".tif, .tiff" \
GDAL_HTTP_MAX_RETRY="10" \
GDAL_HTTP_RETRY_DELAY="1"

CMD ["gunicorn", "-b", "0.0.0.0:8000", "--workers=3", "-k", "gthread", "--timeout", "121", "--pid", "/tmp/gunicorn.pid", "--log-level", "info", "--worker-tmp-dir", "/dev/shm", "--config", "python:datacube_ows.gunicorn_config", "datacube_ows.wsgi"]