Example: chess production #2133
Workflow file for this run
name: test redshift, postgres and filesystem buckets

on:
  pull_request:
    branches:
      - master
      - devel
  workflow_dispatch:
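
# dlt resolves configuration from environment variables, with double underscores
# separating config sections: DESTINATION__POSTGRES__CREDENTIALS corresponds to
# destination.postgres.credentials in secrets.toml. Secret values below are
# injected from the repository's GitHub Actions secrets.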
env:
  DESTINATION__POSTGRES__CREDENTIALS: postgresql://[email protected]:5432/dlt_data
  DESTINATION__DUCKDB__CREDENTIALS: duckdb:///_storage/test_quack.duckdb
  DESTINATION__REDSHIFT__CREDENTIALS: postgresql://[email protected]:5439/dlt_ci
  DESTINATION__FILESYSTEM__CREDENTIALS__AWS_ACCESS_KEY_ID: AKIAT4QMVMC4J46G55G4
  DESTINATION__FILESYSTEM__CREDENTIALS__AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
  DESTINATION__FILESYSTEM__CREDENTIALS__AZURE_STORAGE_ACCOUNT_NAME: dltdata
  DESTINATION__FILESYSTEM__CREDENTIALS__AZURE_STORAGE_ACCOUNT_KEY: ${{ secrets.AZURE_STORAGE_ACCOUNT_KEY }}

  # For s3 compatible tests
  TESTS__R2_AWS_ACCESS_KEY_ID: a4950a5003b26f5a71ac97ef3848ff4c
  TESTS__R2_AWS_SECRET_ACCESS_KEY: ${{ secrets.CLOUDFLARE_R2_SECRET_ACCESS_KEY }}
  TESTS__R2_ENDPOINT_URL: https://9830548e4e4b582989be0811f2a0a97f.r2.cloudflarestorage.com

  # DESTINATION__ATHENA__CREDENTIALS__AWS_ACCESS_KEY_ID: AKIAT4QMVMC4J46G55G4
  # DESTINATION__ATHENA__CREDENTIALS__AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
  # DESTINATION__ATHENA__CREDENTIALS__REGION_NAME: eu-central-1
  # DESTINATION__ATHENA__QUERY_RESULT_BUCKET: s3://dlt-athena-output

  # postgres and redshift use the same password, so it is shared via the top-level CREDENTIALS section
  CREDENTIALS__PROJECT_ID: chat-analytics-rasa-ci
  CREDENTIALS__CLIENT_EMAIL: chat-analytics-loader@chat-analytics-rasa-ci.iam.gserviceaccount.com
  CREDENTIALS__PRIVATE_KEY: ${{ secrets.BQ_CRED_PRIVATE_KEY }}
  CREDENTIALS__PASSWORD: ${{ secrets.PG_PASSWORD }}

  RUNTIME__SENTRY_DSN: https://[email protected]/4504819859914752
  RUNTIME__LOG_LEVEL: ERROR
  RUNTIME__DLTHUB_TELEMETRY_SEGMENT_WRITE_KEY: TLJiyRkGVZGCi2TtjClamXpFcxAA1rSB

  # Test redshift and filesystem with all buckets
  # postgres runs again here so we can test on mac/windows
  ACTIVE_DESTINATIONS: "[\"redshift\", \"postgres\", \"duckdb\", \"filesystem\", \"dummy\"]"
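
# Job flow: get_docs_changes (a reusable workflow) checks whether the PR touches
# anything outside the docs, run_loader runs the destination test suite, and
# matrix_job_required_check rolls the matrix results up into a single status.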
jobs:
  get_docs_changes:
    uses: ./.github/workflows/get_docs_changes.yml
    # Tests that require credentials do not run in forks
    if: ${{ !github.event.pull_request.head.repo.fork }}

  run_loader:
    name: test destinations redshift, postgres and filesystem
    needs: get_docs_changes
    if: needs.get_docs_changes.outputs.changes_outside_docs == 'true'
    strategy:
      fail-fast: false
      matrix:
        os: ["ubuntu-latest"]
        # os: ["ubuntu-latest", "macos-latest", "windows-latest"]
    defaults:
      run:
        shell: bash
    runs-on: ${{ matrix.os }}
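
    # bash is the default shell for run steps; the Windows test step below overrides it with cmd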
    steps:
      - name: Check out
        uses: actions/checkout@master

      - name: Setup Python
        # id so the venv cache key below can read the resolved python version
        id: setup-python
        uses: actions/setup-python@v4
        with:
          python-version: "3.10.x"

      - name: Install Poetry
        uses: snok/[email protected]
        with:
          virtualenvs-create: true
          virtualenvs-in-project: true
          installer-parallel: true

      - name: Load cached venv
        id: cached-poetry-dependencies
        uses: actions/cache@v3
        with:
          # path: ${{ steps.pip-cache.outputs.dir }}
          path: .venv
          key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }}-redshift
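          # the trailing -redshift keeps this venv cache separate from caches built
          # by workflows that install a different set of extras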

      - name: Install dependencies
        # if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
        run: poetry install --no-interaction -E redshift -E gs -E s3 -E az -E parquet -E duckdb -E cli
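        # extras roughly match what is tested: redshift and duckdb drivers, gs/s3/az
        # for the filesystem destination's buckets, parquet for the file format, cli tools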

      # - name: Install self
      #   run: poetry install --no-interaction

      - run: |
          poetry run pytest tests/load
        if: runner.os != 'Windows'
        name: Run tests Linux/macOS

      - run: |
          poetry run pytest tests/load
        if: runner.os == 'Windows'
        name: Run tests Windows
        shell: cmd
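
  # Always-run aggregate job: it fails only if a needed job failed or was cancelled,
  # so a single status check can be marked as required regardless of the matrix shape.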
  matrix_job_required_check:
    name: Redshift, PostgreSQL and DuckDB tests
    needs: run_loader
    runs-on: ubuntu-latest
    if: always()
    steps:
      - name: Check matrix job results
        if: contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled')
        run: |
          echo "One or more matrix job tests failed or were cancelled. You may need to re-run them." && exit 1