fix(ecmwf-realtime): Update India fileprefix #1261

Workflow file for this run

name: Python CI
on:
  push:
    branches: []
    paths-ignore:
      - 'README.md'
    tags:
      - 'v*'
  pull_request:
    branches: []
    paths-ignore:
      - 'README.md'
  workflow_dispatch:
# Specify concurrency such that only one workflow can run at a time
# * Different workflow files are not affected
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
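# Worked example (illustrative): for a push to a hypothetical branch 'feature-x',
# the group above resolves to 'Python CI-refs/heads/feature-x', so a second push
# to that branch cancels the in-progress run, while runs on other refs and runs
# of other workflow files are unaffected.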
# Registry for storing Container images
env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}
# Ensure the GitHub token can remove packages
permissions:
  packages: write
jobs:
  # Define a dependencies job that runs on all branches and PRs
  # * Installs dependencies and caches them
  build-venv:
    runs-on: ubuntu-latest
    container: quay.io/condaforge/miniforge3:latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
      # Restore cached virtualenv, if available
      # * The pyproject.toml hash is part of the cache key, invalidating
      #   the cache if the file changes
      - name: Restore cached virtualenv
        id: restore-cache
        uses: actions/cache/restore@v3
        with:
          path: ./venv
          key: ${{ runner.os }}-venv-${{ hashFiles('**/pyproject.toml') }}
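      # Illustrative example of the key above: on the ubuntu-latest runner,
      # runner.os is 'Linux' and hashFiles() yields a SHA-256 digest of
      # pyproject.toml, giving a key of the form 'Linux-venv-<digest>';
      # editing pyproject.toml changes the digest and forces a fresh venv build.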
      # Should mirror the build-venv stage in the Containerfile
      - name: Build venv
        run: |
          apt -qq update && apt -qq install -y build-essential
          conda create -p ./venv python=3.12
          ./venv/bin/python -m pip install --upgrade -q pip wheel setuptools
        if: steps.restore-cache.outputs.cache-hit != 'true'
      # Should mirror the build-reqs stage in the Containerfile
      # * Except this installs the dev dependencies as well
      - name: Install all dependencies
        run: |
          conda install -p ./venv -q -y eccodes zarr
          ./venv/bin/python -m pip install -q .[dev] --no-binary=nwp-consumer
        if: steps.restore-cache.outputs.cache-hit != 'true'
      # Cache the virtualenv for future runs
      - name: Cache virtualenv
        uses: actions/cache/save@v3
        with:
          path: ./venv
          key: ${{ steps.restore-cache.outputs.cache-primary-key }}
        if: steps.restore-cache.outputs.cache-hit != 'true'
  # Define a unittest job that runs on all branches and PRs
  test-unit:
    runs-on: ubuntu-latest
    container: quay.io/condaforge/miniforge3:latest
    needs: build-venv
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
      # Restore cached virtualenv
      - name: Restore cached virtualenv
        uses: actions/cache/restore@v3
        with:
          path: ./venv
          key: ${{ runner.os }}-venv-${{ hashFiles('**/pyproject.toml') }}
      - name: Install package
        run: ./venv/bin/python -m pip install -q .
      # Run unittests
      # * Produce JUnit XML report
      - name: Run unit tests
        env:
          ECCODES_DEFINITION_PATH: ${{ github.workspace }}/venv/share/eccodes/definitions
        run: ./venv/bin/python -m xmlrunner discover -s src/nwp_consumer -p "test_*.py" --output-file ut-report.xml
      # Create test summary to be visualised on the job summary screen on GitHub
      # * Runs even if previous steps fail
      - name: Create test summary
        uses: test-summary/action@v2
        with:
          paths: "*t-report.xml"
          show: "fail, skip"
        if: always()
  # Define an autotagger job that runs on merged pull requests
  tag:
    runs-on: ubuntu-latest
    needs: test-unit
    if: |
      github.event_name == 'pull_request' &&
      github.event.action == 'closed' &&
      github.event.pull_request.merged == true
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: Bump version and push tag
        uses: RueLaLa/auto-tagger@master
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          GITHUB_PR_NUMBER: ${{ github.event.number }}
  # Define an integration test job that runs only on version tags or manual dispatches
  test-integration:
    runs-on: ubuntu-latest
    container: quay.io/condaforge/miniforge3:latest
    needs: build-venv
    if: |
      github.event_name == 'workflow_dispatch' ||
      (contains(github.ref, 'refs/tags/v') && github.event_name == 'push')
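    # Worked example (illustrative): pushing a tag 'v1.2.3' sets github.ref to
    # 'refs/tags/v1.2.3', which contains 'refs/tags/v', so the job runs; a plain
    # branch push satisfies neither clause and the job is skipped.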
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
      # Restore cached virtualenv
      - name: Restore cached virtualenv
        uses: actions/cache/restore@v3
        with:
          path: ./venv
          key: ${{ runner.os }}-venv-${{ hashFiles('**/pyproject.toml') }}
      - name: Install package
        run: ./venv/bin/python -m pip install -q .
      # Run integration tests
      # * Requires secrets to be set in the repository settings
      # * Produce JUnit XML report
      - name: Run integration tests
        env:
          LOGLEVEL: "debug"
          RAW_DIR: "/tmp/raw"
          ZARR_DIR: "/tmp/zarr"
          CEDA_FTP_PASS: ${{ secrets.CEDA_FTP_PASS }}
          CEDA_FTP_USER: ${{ secrets.CEDA_FTP_USER }}
          METOFFICE_API_KEY: ${{ secrets.METOFFICE_API_KEY }}
          METOFFICE_ORDER_ID: ${{ secrets.METOFFICE_ORDER_ID }}
          ECMWF_API_KEY: ${{ secrets.ECMWF_API_KEY }}
          ECMWF_API_EMAIL: ${{ secrets.ECMWF_API_EMAIL }}
          ECMWF_API_URL: ${{ secrets.ECMWF_API_URL }}
        run: ./venv/bin/python -m xmlrunner discover -s src/test_integration -p "test_*.py" --output-file it-report.xml
      # Create test summary to be visualised on the job summary screen on GitHub
      # * Runs even if previous steps fail
      - name: Create test summary
        uses: test-summary/action@v2
        with:
          paths: "*t-report.xml"
          show: "fail, skip"
        if: always()
# Define a "build-container" job that runs on branch commits only
# * Builds and pushes an OCI Container image to the registry defined in the environment variables
# * Only runs if test job passes
build-container:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
needs: test-unit
if: github.event_name != 'pull_request'
steps:
# Do a non-shallow clone of the repo to ensure tags are present
# * This allows setuptools-git-versioning to automatically set the version
- name: Checkout repository
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up Buildx
uses: docker/setup-buildx-action@v2
- name: Log in to the Container registry
uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
# Tag the built image according to the event type
# * If the event is a valid version tag, use the tag name
# * If the event is a branch commit, use the commit sha
- name: Extract metadata (tags, labels) for Container
id: meta
uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=ref,event=branch
type=semver,pattern={{version}}
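      # Illustrative examples of the resulting tags: a push to branch 'main'
      # should produce '<REGISTRY>/<IMAGE_NAME>:main' via the branch rule, and a
      # tag push of 'v1.2.3' should produce '<REGISTRY>/<IMAGE_NAME>:1.2.3' via
      # the semver rule.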
      # Build and push the Container image to the registry
      # * Creates a multiplatform-aware image
      # * Semantic versioning is handled via the metadata action
      # * The image layers are cached between action runs with the following strategy
      # *   - On push to main, also push build cache
      # *   - On push to other branches, only pull build cache
      - name: Build and push Container image and cache
        uses: docker/build-push-action@v4
        if: github.event_name == 'push' && github.ref == 'refs/heads/main'
        with:
          context: .
          file: Containerfile
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          platforms: linux/amd64,linux/arm64
          cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:buildcache
          cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:buildcache,mode=max
      - name: Build and push container image
        uses: docker/build-push-action@v4
        if: github.event_name != 'push' || github.ref != 'refs/heads/main'
        with:
          context: .
          file: Containerfile
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          platforms: linux/amd64,linux/arm64
          cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:buildcache
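      # Illustrative effect of the two steps above: a push to 'main' both reads
      # and refreshes the shared ':buildcache' registry cache (cache-to with
      # mode=max), while any other eligible event only reads it, so builds from
      # other refs cannot overwrite the shared cache.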
# Define a "build-wheel" job that runs on version tags
# * Only runs if integration test job passes
build-wheel:
runs-on: ubuntu-latest
needs: test-integration
if: contains(github.ref, 'refs/tags/v') && github.event_name == 'push'
steps:
# Do a non-shallow clone of the repo to ensure tags are present
# * This allows setuptools-git-versioning to automatically set the version
- name: Checkout repository
uses: actions/checkout@v3
with:
fetch-depth: 0
# Restore cached virtualenv
- name: Restore cached virtualenv
uses: actions/cache/restore@v3
with:
path: ./venv
key: ${{ runner.os }}-venv-${{ hashFiles('**/pyproject.toml') }}
# Building the wheel dynamically assigns the version according to git
# * The setuptools_git_versioning package reads the git tags and assigns the version
# * The version is then used in the wheel filename and made available in the package
# * setuptools_git_versioning is configured in pyproject.toml
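      # A minimal sketch of the pyproject.toml configuration this relies on
      # (illustrative assumption; the repository's actual settings may differ):
      #   [build-system]
      #   requires = ["setuptools", "wheel", "setuptools-git-versioning"]
      #   build-backend = "setuptools.build_meta"
      #
      #   [tool.setuptools-git-versioning]
      #   enabled = true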
      - name: Build wheel
        run: ./venv/bin/python -m pip wheel . --no-deps --wheel-dir dist
      - name: Upload wheel
        uses: actions/upload-artifact@v3
        with:
          name: wheel
          path: dist/*.whl
      - name: Publish wheel
        uses: pypa/[email protected]
        with:
          user: __token__
          password: ${{ secrets.PYPI_API_TOKEN }}