convert add_limit to pipe step based limiting #7453
Workflow file for this run
name: dest | redshift, postgres and fs

on:
  pull_request:
    branches:
      - master
      - devel
  workflow_dispatch:
  schedule:
    - cron: '0 2 * * *'

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
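  # with cancel-in-progress and the per-PR group key above, a new push to the same PR cancels any run still in flight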

env:
  DLT_SECRETS_TOML: ${{ secrets.DLT_SECRETS_TOML }}

  # For s3 compatible tests
  TESTS__R2_AWS_ACCESS_KEY_ID: a4950a5003b26f5a71ac97ef3848ff4c
  TESTS__R2_AWS_SECRET_ACCESS_KEY: ${{ secrets.CLOUDFLARE_R2_SECRET_ACCESS_KEY }}
  TESTS__R2_ENDPOINT_URL: https://9830548e4e4b582989be0811f2a0a97f.r2.cloudflarestorage.com
  TESTS__R2_REGION_NAME: us-east-1

  # RUNTIME__SENTRY_DSN: https://[email protected]/4504819859914752
  RUNTIME__LOG_LEVEL: ERROR
  RUNTIME__DLTHUB_TELEMETRY_ENDPOINT: ${{ secrets.RUNTIME__DLTHUB_TELEMETRY_ENDPOINT }}

  # Test redshift and filesystem with all buckets
  # postgres runs again here so we can test on mac/windows
  ACTIVE_DESTINATIONS: "[\"redshift\", \"postgres\", \"duckdb\", \"filesystem\", \"dummy\"]"
  # note that all buckets are enabled for testing
  ALL_FILESYSTEM_DRIVERS: "[\"memory\", \"file\", \"r2\", \"s3\", \"gs\", \"az\", \"abfss\", \"gdrive\"]" # excludes sftp
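  # these JSON-style lists are presumably consumed by the load test suite to select which destinations and bucket drivers to exercise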

jobs:
  get_docs_changes:
    name: docs changes
    uses: ./.github/workflows/get_docs_changes.yml
    # Tests that require credentials do not run in forks
    if: ${{ !github.event.pull_request.head.repo.fork || contains(github.event.pull_request.labels.*.name, 'ci from fork')}}

  run_loader:
    name: dest | redshift, postgres and fs tests
    needs: get_docs_changes
    if: needs.get_docs_changes.outputs.changes_outside_docs == 'true'
    strategy:
      fail-fast: false
    defaults:
      run:
        shell: bash
    runs-on: "ubuntu-latest"

    steps:
      - name: Check out
        uses: actions/checkout@master

      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.10.x"

      - name: Install Poetry
        uses: snok/[email protected]
        with:
          virtualenvs-create: true
          virtualenvs-in-project: true
          installer-parallel: true
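
      # virtualenvs-in-project keeps the venv at .venv, which is the path the disabled cache step below would restore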
      # - name: Load cached venv
      #   id: cached-poetry-dependencies
      #   uses: actions/cache@v3
      #   with:
      #     # path: ${{ steps.pip-cache.outputs.dir }}
      #     path: .venv
      #     key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }}-redshift

      - name: Install dependencies
        # if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
        run: poetry install --no-interaction -E redshift -E postgis -E postgres -E gs -E s3 -E az -E parquet -E duckdb -E cli -E filesystem --with sentry-sdk --with pipeline -E deltalake

      - name: create secrets.toml
        run: pwd && echo "$DLT_SECRETS_TOML" > tests/.dlt/secrets.toml
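
      # essential tests run on ordinary PRs; the full suite runs only when the 'ci full' label is set or on the nightly schedule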
      - run: |
          poetry run pytest tests/load --ignore tests/load/sources -m "essential"
        name: Run essential tests Linux
        if: ${{ ! (contains(github.event.pull_request.labels.*.name, 'ci full') || github.event_name == 'schedule')}}

      - run: |
          poetry run pytest tests/load --ignore tests/load/sources
        name: Run all tests Linux
        if: ${{ contains(github.event.pull_request.labels.*.name, 'ci full') || github.event_name == 'schedule'}}