From 188c2b34734af131baaa55dbbc4feb27702ba572 Mon Sep 17 00:00:00 2001 From: Pankaj Date: Thu, 15 Aug 2024 01:17:24 +0530 Subject: [PATCH] cleanup --- .github/workflows/test.yml | 1049 +++++++++++--------- .github/workflows/test_kubernetes.yml | 23 - dev/Dockerfile.postgres_profile_docker_k8s | 2 +- dev/dags/dbt/jaffle_shop/profiles.yml | 12 +- scripts/test/kubernetes-setup.sh | 93 +- scripts/test/values.yaml | 18 - 6 files changed, 580 insertions(+), 617 deletions(-) delete mode 100644 .github/workflows/test_kubernetes.yml delete mode 100644 scripts/test/values.yaml diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 000b5acca..e9b737e7b 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,493 +1,556 @@ -#name: test -# -#on: -# push: # Run on pushes to the default branch -# branches: [main] -# pull_request_target: # Also run on pull requests originated from forks -# branches: [main] -# -#concurrency: -# group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} -# cancel-in-progress: true -# -#jobs: -# Authorize: -# environment: ${{ github.event_name == 'pull_request_target' && -# github.event.pull_request.head.repo.full_name != github.repository && -# 'external' || 'internal' }} -# runs-on: ubuntu-latest -# steps: -# - run: true -# -# Type-Check: -# runs-on: ubuntu-latest -# steps: -# - uses: actions/checkout@v3 -# with: -# ref: ${{ github.event.pull_request.head.sha || github.ref }} -# -# - uses: actions/setup-python@v3 -# with: -# python-version: "3.9" -# architecture: "x64" -# -# - run: pip3 install hatch -# - run: hatch run tests.py3.9-2.7:type-check -# -# Run-Unit-Tests: -# runs-on: ubuntu-latest -# strategy: -# matrix: -# python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] -# airflow-version: ["2.4", "2.5", "2.6", "2.7", "2.8", "2.9"] -# exclude: -# - python-version: "3.11" -# airflow-version: "2.4" -# - python-version: "3.11" -# airflow-version: "2.5" -# # Apache Airflow versions prior to 2.9.0 
have not been tested with Python 3.12. -# # Official support for Python 3.12 and the corresponding constraints.txt are available only for Apache Airflow >= 2.9.0. -# # See: https://github.com/apache/airflow/tree/2.9.0?tab=readme-ov-file#requirements -# # See: https://github.com/apache/airflow/tree/2.8.4?tab=readme-ov-file#requirements -# - python-version: "3.12" -# airflow-version: "2.4" -# - python-version: "3.12" -# airflow-version: "2.5" -# - python-version: "3.12" -# airflow-version: "2.6" -# - python-version: "3.12" -# airflow-version: "2.7" -# - python-version: "3.12" -# airflow-version: "2.8" -# steps: -# - uses: actions/checkout@v3 -# with: -# ref: ${{ github.event.pull_request.head.sha || github.ref }} -# -# - uses: actions/cache@v3 -# with: -# path: | -# ~/.cache/pip -# .local/share/hatch/ -# key: unit-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} -# -# - name: Set up Python ${{ matrix.python-version }} -# uses: actions/setup-python@v4 -# with: -# python-version: ${{ matrix.python-version }} -# -# - name: Install packages and dependencies -# run: | -# python -m pip install uv -# uv pip install --system hatch -# hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze -# -# - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} -# run: | -# hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-cov -# -# - name: Upload coverage to Github -# uses: actions/upload-artifact@v2 -# with: -# name: coverage-unit-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} -# path: .coverage -# -# Run-Integration-Tests: -# needs: Authorize -# runs-on: ubuntu-latest -# strategy: -# matrix: -# python-version: ["3.8", "3.9", "3.10", "3.11"] -# airflow-version: ["2.4", "2.5", "2.6", "2.7", "2.8", "2.9"] -# exclude: -# - python-version: "3.11" -# 
airflow-version: "2.4" -# - python-version: "3.11" -# airflow-version: "2.5" -# services: -# postgres: -# image: postgres -# env: -# POSTGRES_PASSWORD: postgres -# options: >- -# --health-cmd pg_isready -# --health-interval 10s -# --health-timeout 5s -# --health-retries 5 -# ports: -# - 5432:5432 -# steps: -# - uses: actions/checkout@v3 -# with: -# ref: ${{ github.event.pull_request.head.sha || github.ref }} -# -# - uses: actions/cache@v3 -# with: -# path: | -# ~/.cache/pip -# .local/share/hatch/ -# key: integration-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} -# -# - name: Set up Python ${{ matrix.python-version }} -# uses: actions/setup-python@v4 -# with: -# python-version: ${{ matrix.python-version }} -# -# - name: Install packages and dependencies -# run: | -# python -m pip install uv -# uv pip install --system hatch -# hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze -# -# - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} -# run: | -# hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-setup -# hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration -# env: -# AIRFLOW__COSMOS__ENABLE_CACHE_DBT_LS: 0 -# AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ -# AIRFLOW_CONN_EXAMPLE_CONN: postgres://postgres:postgres@0.0.0.0:5432/postgres -# AIRFLOW_CONN_AWS_S3_CONN: ${{ secrets.AIRFLOW_CONN_AWS_S3_CONN }} -# AIRFLOW_CONN_GCP_GS_CONN: ${{ secrets.AIRFLOW_CONN_GCP_GS_CONN }} -# AIRFLOW_CONN_AZURE_ABFS_CONN: ${{ secrets.AIRFLOW_CONN_AZURE_ABFS_CONN }} -# DATABRICKS_HOST: mock -# DATABRICKS_WAREHOUSE_ID: mock -# DATABRICKS_TOKEN: mock -# DATABRICKS_CLUSTER_ID: mock -# AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 -# PYTHONPATH: 
/home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH -# COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} -# POSTGRES_HOST: localhost -# POSTGRES_USER: postgres -# POSTGRES_PASSWORD: postgres -# POSTGRES_DB: postgres -# POSTGRES_SCHEMA: public -# POSTGRES_PORT: 5432 -# -# - name: Upload coverage to Github -# uses: actions/upload-artifact@v2 -# with: -# name: coverage-integration-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} -# path: .coverage -# -# Run-Integration-Tests-Expensive: -# needs: Authorize -# runs-on: ubuntu-latest -# strategy: -# matrix: -# python-version: ["3.11"] -# airflow-version: ["2.6"] -# -# services: -# postgres: -# image: postgres -# env: -# POSTGRES_PASSWORD: postgres -# options: >- -# --health-cmd pg_isready -# --health-interval 10s -# --health-timeout 5s -# --health-retries 5 -# ports: -# - 5432:5432 -# -# steps: -# - uses: actions/checkout@v3 -# with: -# ref: ${{ github.event.pull_request.head.sha || github.ref }} -# - uses: actions/cache@v3 -# with: -# path: | -# ~/.cache/pip -# .local/share/hatch/ -# key: integration-expensive-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} -# -# - name: Set up Python ${{ matrix.python-version }} -# uses: actions/setup-python@v4 -# with: -# python-version: ${{ matrix.python-version }} -# -# - name: Install packages and dependencies -# run: | -# python -m pip install uv -# uv pip install --system hatch -# hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze -# -# - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} -# run: | -# hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-setup -# DATABRICKS_UNIQUE_ID="${{github.run_id}}" hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version 
}}:test-integration-expensive -# env: -# AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ -# AIRFLOW_CONN_EXAMPLE_CONN: postgres://postgres:postgres@0.0.0.0:5432/postgres -# AIRFLOW_CONN_AWS_S3_CONN: ${{ secrets.AIRFLOW_CONN_AWS_S3_CONN }} -# AIRFLOW_CONN_GCP_GS_CONN: ${{ secrets.AIRFLOW_CONN_GCP_GS_CONN }} -# AIRFLOW_CONN_AZURE_ABFS_CONN: ${{ secrets.AIRFLOW_CONN_AZURE_ABFS_CONN }} -# PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH -# AIRFLOW_CONN_DATABRICKS_DEFAULT: ${{ secrets.AIRFLOW_CONN_DATABRICKS_DEFAULT }} -# DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }} -# AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 -# COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} -# POSTGRES_HOST: localhost -# POSTGRES_USER: postgres -# POSTGRES_PASSWORD: postgres -# POSTGRES_DB: postgres -# POSTGRES_SCHEMA: public -# POSTGRES_PORT: 5432 -# -# - name: Upload coverage to Github -# uses: actions/upload-artifact@v2 -# with: -# name: coverage-integration-expensive-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} -# path: .coverage -# -# env: -# AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ -# AIRFLOW_CONN_EXAMPLE_CONN: postgres://postgres:postgres@0.0.0.0:5432/postgres -# PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH -# AIRFLOW_CONN_DATABRICKS_DEFAULT: ${{ secrets.AIRFLOW_CONN_DATABRICKS_DEFAULT }} -# DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }} -# -# Run-Integration-Tests-Sqlite: -# needs: Authorize -# runs-on: ubuntu-latest -# strategy: -# matrix: -# python-version: ["3.11"] -# airflow-version: ["2.7"] -# -# steps: -# - uses: actions/checkout@v3 -# with: -# ref: ${{ github.event.pull_request.head.sha || github.ref }} -# - uses: actions/cache@v3 -# with: -# path: | -# ~/.cache/pip -# .local/share/hatch/ -# key: integration-sqlite-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ 
hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} -# -# - name: Set up Python ${{ matrix.python-version }} -# uses: actions/setup-python@v4 -# with: -# python-version: ${{ matrix.python-version }} -# -# - name: Install packages and dependencies -# run: | -# python -m pip install uv -# uv pip install --system hatch -# hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze -# -# - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} -# run: | -# hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-sqlite-setup -# hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-sqlite -# env: -# AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ -# AIRFLOW_CONN_EXAMPLE_CONN: postgres://postgres:postgres@0.0.0.0:5432/postgres -# AIRFLOW_CONN_AWS_S3_CONN: ${{ secrets.AIRFLOW_CONN_AWS_S3_CONN }} -# AIRFLOW_CONN_GCP_GS_CONN: ${{ secrets.AIRFLOW_CONN_GCP_GS_CONN }} -# AIRFLOW_CONN_AZURE_ABFS_CONN: ${{ secrets.AIRFLOW_CONN_AZURE_ABFS_CONN }} -# AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 -# PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH -# COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} -# DATABRICKS_CLUSTER_ID: mock -# DATABRICKS_HOST: mock -# DATABRICKS_WAREHOUSE_ID: mock -# DATABRICKS_TOKEN: mock -# POSTGRES_HOST: localhost -# POSTGRES_USER: postgres -# POSTGRES_PASSWORD: postgres -# POSTGRES_DB: postgres -# POSTGRES_SCHEMA: public -# POSTGRES_PORT: 5432 -# -# - name: Upload coverage to Github -# uses: actions/upload-artifact@v2 -# with: -# name: coverage-integration-sqlite-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} -# path: .coverage -# -# env: -# AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ -# AIRFLOW_CONN_EXAMPLE_CONN: postgres://postgres:postgres@0.0.0.0:5432/postgres -# 
PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH -# -# Run-Integration-Tests-DBT-1-5-4: -# needs: Authorize -# runs-on: ubuntu-latest -# strategy: -# matrix: -# python-version: [ "3.11" ] -# airflow-version: [ "2.7" ] -# services: -# postgres: -# image: postgres -# env: -# POSTGRES_PASSWORD: postgres -# options: >- -# --health-cmd pg_isready -# --health-interval 10s -# --health-timeout 5s -# --health-retries 5 -# ports: -# - 5432:5432 -# -# steps: -# - uses: actions/checkout@v3 -# with: -# ref: ${{ github.event.pull_request.head.sha || github.ref }} -# - uses: actions/cache@v3 -# with: -# path: | -# ~/.cache/pip -# .local/share/hatch/ -# key: integration-dbt-1-5-4-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} -# -# - name: Set up Python ${{ matrix.python-version }} -# uses: actions/setup-python@v4 -# with: -# python-version: ${{ matrix.python-version }} -# -# - name: Install packages and dependencies -# run: | -# python -m pip install uv -# uv pip install --system hatch -# hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze -# -# - name: Test Cosmos against Airflow ${{ matrix.airflow-version }}, Python ${{ matrix.python-version }} and dbt 1.5.4 -# run: | -# hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-dbt-1-5-4 -# env: -# AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ -# AIRFLOW_CONN_EXAMPLE_CONN: postgres://postgres:postgres@0.0.0.0:5432/postgres -# AIRFLOW_CONN_AWS_S3_CONN: ${{ secrets.AIRFLOW_CONN_AWS_S3_CONN }} -# AIRFLOW_CONN_GCP_GS_CONN: ${{ secrets.AIRFLOW_CONN_GCP_GS_CONN }} -# AIRFLOW_CONN_AZURE_ABFS_CONN: ${{ secrets.AIRFLOW_CONN_AZURE_ABFS_CONN }} -# AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 -# PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH -# COSMOS_CONN_POSTGRES_PASSWORD: ${{ 
secrets.COSMOS_CONN_POSTGRES_PASSWORD }} -# DATABRICKS_CLUSTER_ID: mock -# DATABRICKS_HOST: mock -# DATABRICKS_WAREHOUSE_ID: mock -# DATABRICKS_TOKEN: mock -# POSTGRES_HOST: localhost -# POSTGRES_USER: postgres -# POSTGRES_PASSWORD: postgres -# POSTGRES_DB: postgres -# POSTGRES_SCHEMA: public -# POSTGRES_PORT: 5432 -# -# - name: Upload coverage to Github -# uses: actions/upload-artifact@v2 -# with: -# name: coverage-integration-dbt-1-5-4-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} -# path: .coverage -# -# env: -# AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ -# AIRFLOW_CONN_EXAMPLE_CONN: postgres://postgres:postgres@0.0.0.0:5432/postgres -# PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH -# -# Run-Performance-Tests: -# needs: Authorize -# runs-on: ubuntu-latest -# strategy: -# matrix: -# python-version: ["3.11"] -# airflow-version: ["2.7"] -# num-models: [1, 10, 50, 100] -# services: -# postgres: -# image: postgres -# env: -# POSTGRES_PASSWORD: postgres -# options: >- -# --health-cmd pg_isready -# --health-interval 10s -# --health-timeout 5s -# --health-retries 5 -# ports: -# - 5432:5432 -# steps: -# - uses: actions/checkout@v3 -# with: -# ref: ${{ github.event.pull_request.head.sha || github.ref }} -# - uses: actions/cache@v3 -# with: -# path: | -# ~/.cache/pip -# .local/share/hatch/ -# key: perf-test-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} -# -# - name: Set up Python ${{ matrix.python-version }} -# uses: actions/setup-python@v4 -# with: -# python-version: ${{ matrix.python-version }} -# -# - name: Install packages and dependencies -# run: | -# python -m pip install uv -# uv pip install --system hatch -# hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze -# -# - name: Run performance tests against against Airflow ${{ matrix.airflow-version }} 
and Python ${{ matrix.python-version }} -# id: run-performance-tests -# run: | -# hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-performance-setup -# hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-performance -# -# # read the performance results and set them as an env var for the next step -# # format: NUM_MODELS={num_models}\nTIME={end - start}\n -# cat /tmp/performance_results.txt > $GITHUB_STEP_SUMMARY -# env: -# AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ -# AIRFLOW_CONN_EXAMPLE_CONN: postgres://postgres:postgres@0.0.0.0:5432/postgres -# AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 -# PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH -# COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} -# POSTGRES_HOST: localhost -# POSTGRES_USER: postgres -# POSTGRES_PASSWORD: postgres -# POSTGRES_DB: postgres -# POSTGRES_SCHEMA: public -# POSTGRES_PORT: 5432 -# MODEL_COUNT: ${{ matrix.num-models }} -# env: -# AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ -# AIRFLOW_CONN_EXAMPLE_CONN: postgres://postgres:postgres@0.0.0.0:5432/postgres -# PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH -# -# Code-Coverage: -# if: github.event.action != 'labeled' -# needs: -# - Run-Unit-Tests -# - Run-Integration-Tests -# - Run-Integration-Tests-Expensive -# runs-on: ubuntu-latest -# steps: -# - uses: actions/checkout@v3 -# with: -# ref: ${{ github.event.pull_request.head.sha || github.ref }} -# - name: Set up Python 3.11 -# uses: actions/setup-python@v3 -# with: -# python-version: "3.11" -# - name: Install coverage -# run: | -# pip3 install coverage -# - name: Download all coverage artifacts -# uses: actions/download-artifact@v2 -# with: -# path: ./coverage -# - name: Combine coverage -# run: | -# coverage combine ./coverage/coverage*/.coverage -# coverage report -# coverage xml -# - name: Upload 
coverage to Codecov -# uses: codecov/codecov-action@v3 -# with: -# fail_ci_if_error: true -# token: ${{ secrets.CODECOV_TOKEN }} -# files: coverage.xml +name: test + +on: + push: # Run on pushes to the default branch + branches: [main] + pull_request_target: # Also run on pull requests originated from forks + branches: [main] + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + Authorize: + environment: ${{ github.event_name == 'pull_request_target' && + github.event.pull_request.head.repo.full_name != github.repository && + 'external' || 'internal' }} + runs-on: ubuntu-latest + steps: + - run: true + + Type-Check: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha || github.ref }} + + - uses: actions/setup-python@v3 + with: + python-version: "3.9" + architecture: "x64" + + - run: pip3 install hatch + - run: hatch run tests.py3.9-2.7:type-check + + Run-Unit-Tests: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + airflow-version: ["2.4", "2.5", "2.6", "2.7", "2.8", "2.9"] + exclude: + - python-version: "3.11" + airflow-version: "2.4" + - python-version: "3.11" + airflow-version: "2.5" + # Apache Airflow versions prior to 2.9.0 have not been tested with Python 3.12. + # Official support for Python 3.12 and the corresponding constraints.txt are available only for Apache Airflow >= 2.9.0. 
+ # See: https://github.com/apache/airflow/tree/2.9.0?tab=readme-ov-file#requirements + # See: https://github.com/apache/airflow/tree/2.8.4?tab=readme-ov-file#requirements + - python-version: "3.12" + airflow-version: "2.4" + - python-version: "3.12" + airflow-version: "2.5" + - python-version: "3.12" + airflow-version: "2.6" + - python-version: "3.12" + airflow-version: "2.7" + - python-version: "3.12" + airflow-version: "2.8" + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha || github.ref }} + + - uses: actions/cache@v3 + with: + path: | + ~/.cache/pip + .local/share/hatch/ + key: unit-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install packages and dependencies + run: | + python -m pip install uv + uv pip install --system hatch + hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze + + - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} + run: | + hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-cov + + - name: Upload coverage to Github + uses: actions/upload-artifact@v2 + with: + name: coverage-unit-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} + path: .coverage + + Run-Integration-Tests: + needs: Authorize + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.8", "3.9", "3.10", "3.11"] + airflow-version: ["2.4", "2.5", "2.6", "2.7", "2.8", "2.9"] + exclude: + - python-version: "3.11" + airflow-version: "2.4" + - python-version: "3.11" + airflow-version: "2.5" + services: + postgres: + image: postgres + env: + POSTGRES_PASSWORD: postgres + options: >- + --health-cmd pg_isready + --health-interval 10s + 
--health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha || github.ref }} + + - uses: actions/cache@v3 + with: + path: | + ~/.cache/pip + .local/share/hatch/ + key: integration-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install packages and dependencies + run: | + python -m pip install uv + uv pip install --system hatch + hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze + + - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} + run: | + hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-setup + hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration + env: + AIRFLOW__COSMOS__ENABLE_CACHE_DBT_LS: 0 + AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ + AIRFLOW_CONN_EXAMPLE_CONN: postgres://postgres:postgres@0.0.0.0:5432/postgres + AIRFLOW_CONN_AWS_S3_CONN: ${{ secrets.AIRFLOW_CONN_AWS_S3_CONN }} + AIRFLOW_CONN_GCP_GS_CONN: ${{ secrets.AIRFLOW_CONN_GCP_GS_CONN }} + AIRFLOW_CONN_AZURE_ABFS_CONN: ${{ secrets.AIRFLOW_CONN_AZURE_ABFS_CONN }} + DATABRICKS_HOST: mock + DATABRICKS_WAREHOUSE_ID: mock + DATABRICKS_TOKEN: mock + DATABRICKS_CLUSTER_ID: mock + AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 + PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH + COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} + POSTGRES_HOST: localhost + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: postgres + POSTGRES_SCHEMA: public + POSTGRES_PORT: 5432 + + - name: Upload 
coverage to Github + uses: actions/upload-artifact@v2 + with: + name: coverage-integration-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} + path: .coverage + + Run-Integration-Tests-Expensive: + needs: Authorize + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.11"] + airflow-version: ["2.6"] + + services: + postgres: + image: postgres + env: + POSTGRES_PASSWORD: postgres + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 + + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha || github.ref }} + - uses: actions/cache@v3 + with: + path: | + ~/.cache/pip + .local/share/hatch/ + key: integration-expensive-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install packages and dependencies + run: | + python -m pip install uv + uv pip install --system hatch + hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze + + - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} + run: | + hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-setup + DATABRICKS_UNIQUE_ID="${{github.run_id}}" hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-expensive + env: + AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ + AIRFLOW_CONN_EXAMPLE_CONN: postgres://postgres:postgres@0.0.0.0:5432/postgres + AIRFLOW_CONN_AWS_S3_CONN: ${{ secrets.AIRFLOW_CONN_AWS_S3_CONN }} + AIRFLOW_CONN_GCP_GS_CONN: ${{ secrets.AIRFLOW_CONN_GCP_GS_CONN }} + AIRFLOW_CONN_AZURE_ABFS_CONN: ${{ secrets.AIRFLOW_CONN_AZURE_ABFS_CONN 
}} + PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH + AIRFLOW_CONN_DATABRICKS_DEFAULT: ${{ secrets.AIRFLOW_CONN_DATABRICKS_DEFAULT }} + DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }} + AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 + COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} + POSTGRES_HOST: localhost + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: postgres + POSTGRES_SCHEMA: public + POSTGRES_PORT: 5432 + + - name: Upload coverage to Github + uses: actions/upload-artifact@v2 + with: + name: coverage-integration-expensive-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} + path: .coverage + + env: + AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ + AIRFLOW_CONN_EXAMPLE_CONN: postgres://postgres:postgres@0.0.0.0:5432/postgres + PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH + AIRFLOW_CONN_DATABRICKS_DEFAULT: ${{ secrets.AIRFLOW_CONN_DATABRICKS_DEFAULT }} + DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }} + + Run-Integration-Tests-Sqlite: + needs: Authorize + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.11"] + airflow-version: ["2.7"] + + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha || github.ref }} + - uses: actions/cache@v3 + with: + path: | + ~/.cache/pip + .local/share/hatch/ + key: integration-sqlite-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install packages and dependencies + run: | + python -m pip install uv + uv pip install --system hatch + hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze + + - name: Test Cosmos against 
Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} + run: | + hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-sqlite-setup + hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-sqlite + env: + AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ + AIRFLOW_CONN_EXAMPLE_CONN: postgres://postgres:postgres@0.0.0.0:5432/postgres + AIRFLOW_CONN_AWS_S3_CONN: ${{ secrets.AIRFLOW_CONN_AWS_S3_CONN }} + AIRFLOW_CONN_GCP_GS_CONN: ${{ secrets.AIRFLOW_CONN_GCP_GS_CONN }} + AIRFLOW_CONN_AZURE_ABFS_CONN: ${{ secrets.AIRFLOW_CONN_AZURE_ABFS_CONN }} + AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 + PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH + COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} + DATABRICKS_CLUSTER_ID: mock + DATABRICKS_HOST: mock + DATABRICKS_WAREHOUSE_ID: mock + DATABRICKS_TOKEN: mock + POSTGRES_HOST: localhost + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: postgres + POSTGRES_SCHEMA: public + POSTGRES_PORT: 5432 + + - name: Upload coverage to Github + uses: actions/upload-artifact@v2 + with: + name: coverage-integration-sqlite-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} + path: .coverage + + env: + AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ + AIRFLOW_CONN_EXAMPLE_CONN: postgres://postgres:postgres@0.0.0.0:5432/postgres + PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH + + Run-Integration-Tests-DBT-1-5-4: + needs: Authorize + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [ "3.11" ] + airflow-version: [ "2.7" ] + services: + postgres: + image: postgres + env: + POSTGRES_PASSWORD: postgres + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 + + steps: + - uses: actions/checkout@v3 + with: + 
ref: ${{ github.event.pull_request.head.sha || github.ref }} + - uses: actions/cache@v3 + with: + path: | + ~/.cache/pip + .local/share/hatch/ + key: integration-dbt-1-5-4-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install packages and dependencies + run: | + python -m pip install uv + uv pip install --system hatch + hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze + + - name: Test Cosmos against Airflow ${{ matrix.airflow-version }}, Python ${{ matrix.python-version }} and dbt 1.5.4 + run: | + hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-dbt-1-5-4 + env: + AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ + AIRFLOW_CONN_EXAMPLE_CONN: postgres://postgres:postgres@0.0.0.0:5432/postgres + AIRFLOW_CONN_AWS_S3_CONN: ${{ secrets.AIRFLOW_CONN_AWS_S3_CONN }} + AIRFLOW_CONN_GCP_GS_CONN: ${{ secrets.AIRFLOW_CONN_GCP_GS_CONN }} + AIRFLOW_CONN_AZURE_ABFS_CONN: ${{ secrets.AIRFLOW_CONN_AZURE_ABFS_CONN }} + AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 + PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH + COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} + DATABRICKS_CLUSTER_ID: mock + DATABRICKS_HOST: mock + DATABRICKS_WAREHOUSE_ID: mock + DATABRICKS_TOKEN: mock + POSTGRES_HOST: localhost + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: postgres + POSTGRES_SCHEMA: public + POSTGRES_PORT: 5432 + + - name: Upload coverage to Github + uses: actions/upload-artifact@v2 + with: + name: coverage-integration-dbt-1-5-4-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} + path: .coverage + + env: + AIRFLOW_HOME: 
/home/runner/work/astronomer-cosmos/astronomer-cosmos/ + AIRFLOW_CONN_EXAMPLE_CONN: postgres://postgres:postgres@0.0.0.0:5432/postgres + PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH + + Run-Performance-Tests: + needs: Authorize + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.11"] + airflow-version: ["2.7"] + num-models: [1, 10, 50, 100] + services: + postgres: + image: postgres + env: + POSTGRES_PASSWORD: postgres + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha || github.ref }} + - uses: actions/cache@v3 + with: + path: | + ~/.cache/pip + .local/share/hatch/ + key: perf-test-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install packages and dependencies + run: | + python -m pip install uv + uv pip install --system hatch + hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze + + - name: Run performance tests against against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} + id: run-performance-tests + run: | + hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-performance-setup + hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-performance + + # read the performance results and set them as an env var for the next step + # format: NUM_MODELS={num_models}\nTIME={end - start}\n + cat /tmp/performance_results.txt > $GITHUB_STEP_SUMMARY + env: + AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ + AIRFLOW_CONN_EXAMPLE_CONN: 
postgres://postgres:postgres@0.0.0.0:5432/postgres + AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 + PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH + COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} + POSTGRES_HOST: localhost + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: postgres + POSTGRES_SCHEMA: public + POSTGRES_PORT: 5432 + MODEL_COUNT: ${{ matrix.num-models }} + env: + AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ + AIRFLOW_CONN_EXAMPLE_CONN: postgres://postgres:postgres@0.0.0.0:5432/postgres + PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH + + Run-Kubernetes-Tests: + needs: Authorize + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [ "3.11" ] + airflow-version: [ "2.8" ] + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha || github.ref }} + - uses: actions/cache@v3 + with: + path: | + ~/.cache/pip + .local/share/hatch/ + key: coverage-integration-kubernetes-test-${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.airflow-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('cosmos/__init__.py') }} + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Create KinD cluster + uses: container-tools/kind-action@v1 + + - name: Install packages and dependencies + run: | + python -m pip install uv + uv pip install --system hatch + hatch -e tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }} run pip freeze + + - name: Run kubernetes tests against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }} + run: | + sh ./scripts/test/kubernetes-setup.sh + hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-kubernetes + env: + AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/ + AIRFLOW_CONN_EXAMPLE_CONN: postgres://postgres:postgres@0.0.0.0:5432/postgres + 
AIRFLOW_CONN_AWS_S3_CONN: ${{ secrets.AIRFLOW_CONN_AWS_S3_CONN }} + AIRFLOW_CONN_GCP_GS_CONN: ${{ secrets.AIRFLOW_CONN_GCP_GS_CONN }} + AIRFLOW_CONN_AZURE_ABFS_CONN: ${{ secrets.AIRFLOW_CONN_AZURE_ABFS_CONN }} + AIRFLOW__CORE__DAGBAG_IMPORT_TIMEOUT: 90.0 + PYTHONPATH: /home/runner/work/astronomer-cosmos/astronomer-cosmos/:$PYTHONPATH + COSMOS_CONN_POSTGRES_PASSWORD: ${{ secrets.COSMOS_CONN_POSTGRES_PASSWORD }} + DATABRICKS_CLUSTER_ID: mock + DATABRICKS_HOST: mock + DATABRICKS_WAREHOUSE_ID: mock + DATABRICKS_TOKEN: mock + POSTGRES_HOST: localhost + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: postgres + POSTGRES_SCHEMA: public + POSTGRES_PORT: 5432 + + - name: Upload coverage to Github + uses: actions/upload-artifact@v2 + with: + name: coverage-integration-kubernetes-test-${{ matrix.python-version }}-${{ matrix.airflow-version }} + path: .coverage + + Code-Coverage: + if: github.event.action != 'labeled' + needs: + - Run-Unit-Tests + - Run-Integration-Tests + - Run-Integration-Tests-Expensive + - Run-Kubernetes-Tests + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha || github.ref }} + - name: Set up Python 3.11 + uses: actions/setup-python@v3 + with: + python-version: "3.11" + - name: Install coverage + run: | + pip3 install coverage + - name: Download all coverage artifacts + uses: actions/download-artifact@v2 + with: + path: ./coverage + - name: Combine coverage + run: | + coverage combine ./coverage/coverage*/.coverage + coverage report + coverage xml + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v3 + with: + fail_ci_if_error: true + token: ${{ secrets.CODECOV_TOKEN }} + files: coverage.xml diff --git a/.github/workflows/test_kubernetes.yml b/.github/workflows/test_kubernetes.yml deleted file mode 100644 index 3987752b5..000000000 --- a/.github/workflows/test_kubernetes.yml +++ /dev/null @@ -1,23 +0,0 @@ -name: Kubernetes Integration Tests - - -on: - 
push: # Run on pushes to the default branch - branches: [kube_mode_ci] -# pull_request_target: # Also run on pull requests originated from forks -# branches: [kube_mode_ci] - - -jobs: - run-kubernets-tests: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Kubernetes KinD Cluster - uses: container-tools/kind-action@v1 - - name: Run tests - run: | - sh ./scripts/test/kubernetes-setup.sh - pip install hatch - hatch -e tests.py3.9-2.9 run pip freeze - hatch run tests.py3.9-2.9:test-kubernetes diff --git a/dev/Dockerfile.postgres_profile_docker_k8s b/dev/Dockerfile.postgres_profile_docker_k8s index 1634616c5..7375851c6 100644 --- a/dev/Dockerfile.postgres_profile_docker_k8s +++ b/dev/Dockerfile.postgres_profile_docker_k8s @@ -4,7 +4,7 @@ RUN pip install dbt-postgres==1.8.2 psycopg2==2.9.3 pytz ENV POSTGRES_DATABASE=postgres ENV POSTGRES_HOST=postgres.default.svc.cluster.local -ENV POSTGRES_PASSWORD= +ENV POSTGRES_PASSWORD=postgres ENV POSTGRES_PORT=5432 ENV POSTGRES_SCHEMA=public ENV POSTGRES_USER=postgres diff --git a/dev/dags/dbt/jaffle_shop/profiles.yml b/dev/dags/dbt/jaffle_shop/profiles.yml index db1f34acd..43f0aae7d 100644 --- a/dev/dags/dbt/jaffle_shop/profiles.yml +++ b/dev/dags/dbt/jaffle_shop/profiles.yml @@ -16,9 +16,9 @@ postgres_profile: outputs: dev: type: postgres - dbname: postgres #'{{ env_var(''POSTGRES_DATABASE'') }}' - host: postgres.default.svc.cluster.local #'{{ env_var(''POSTGRES_HOST'') }}' - pass: postgres #'{{ env_var(''POSTGRES_PASSWORD'') }}' - port: 5432 #'{{ env_var(''POSTGRES_PORT'') | as_number }}' - schema: postgres # '{{ env_var(''POSTGRES_SCHEMA'') }}' - user: postgres #'{{ env_var(''POSTGRES_USER'') }}' + dbname: '{{ env_var(''POSTGRES_DATABASE'') }}' + host: '{{ env_var(''POSTGRES_HOST'') }}' + pass: '{{ env_var(''POSTGRES_PASSWORD'') }}' + port: '{{ env_var(''POSTGRES_PORT'') | as_number }}' + schema: '{{ env_var(''POSTGRES_SCHEMA'') }}' + user: '{{ env_var(''POSTGRES_USER'') }}' diff --git 
a/scripts/test/kubernetes-setup.sh b/scripts/test/kubernetes-setup.sh index ce5899d72..b0e37d0bb 100644 --- a/scripts/test/kubernetes-setup.sh +++ b/scripts/test/kubernetes-setup.sh @@ -1,93 +1,34 @@ #!/bin/bash +# Print each command before executing it +# Exit the script immediately if any command exits with a non-zero status (for debugging purposes) set -x set -e -#check_nodes_ready() { -# # Get the list of node statuses -# node_statuses=$(kubectl get nodes --no-headers | awk '{print $2}') -# # Check if all nodes are in the "Ready" state -# for status in $node_statuses; do -# if [ "$status" != "Ready" ]; then -# return 1 -# fi -# done -# return 0 -#} -# -#wait_for_nodes_ready() { -# local max_attempts=60 -# local interval=5 -# local attempt=0 -# -# echo "Waiting for nodes in the kind cluster to be in 'Ready' state..." -# -# while [ $attempt -lt $max_attempts ]; do -# if check_nodes_ready; then -# echo "All nodes in the kind cluster are in 'Ready' state." -# return 0 -# else -# echo "Nodes are not yet ready. Checking again in $interval seconds..." -# sleep $interval -# attempt=$((attempt + 1)) -# fi -# done -# -# echo "Timeout waiting for nodes in the kind cluster to be in 'Ready' state." -# return 1 -#} -# -#kubectl config set-context default -# -## Create a docker image containing the dbt project files and dbt profile -#cd dev && docker build -t dbt-jaffle-shop:1.0.0 -f Dockerfile.postgres_profile_docker_k8s . 
-## Make the build image available in the Kind K8s cluster -#kind load docker-image dbt-jaffle-shop:1.0.0 -# -## Deploy a Postgres pod to Kind -##helm repo add bitnami https://charts.bitnami.com/bitnami -##helm repo update -##helm install postgres bitnami/postgresql --set postgresqlExtendedConf.huge_pages="off" # -f scripts/test/values.yaml -# -## Retrieve the Postgres password and set it as an environment variable -##POSTGRES_PASSWORD=$(kubectl get secret --namespace default postgres-postgresql -o jsonpath="{.data.postgres-password}" | base64 -d) -##export POSTGRES_PASSWORD -# -#kubectl create secret generic postgres-secrets --from-literal=host=postgres-postgresql.default.svc.cluster.local --from-literal=password=$POSTGRES_PASSWORD -# -#sleep 120 -## Expose the Postgres to the host running Docker/Kind -##kubectl port-forward --namespace default postgres-postgresql-0 5432:5432 & -##kubectl port-forward --namespace default svc/postgres-postgresql 5432:5432 & -##wait_for_nodes_ready -## -### Wait for the kind cluster to be in 'Ready' state -##wait_for_nodes_ready -# -## For Debugging -#echo "nodes" -#kubectl get nodes -#echo "helm" -#helm list -#echo "pod service" -#kubectl get pods --namespace default -#kubectl get svc --namespace default -#echo "pg log" -#kubectl logs postgres-postgresql-0 -c postgresql -#kubectl describe pod postgres-postgresql-0 - - -kubectl create secret generic postgres-secrets --from-literal=host=postgres-postgresql.default.svc.cluster.local --from-literal=password=postgres +# Create a Kubernetes secret named 'postgres-secrets' with the specified literals for host and password +kubectl create secret generic postgres-secrets \ + --from-literal=host=postgres-postgresql.default.svc.cluster.local \ + --from-literal=password=postgres +# Apply the PostgreSQL deployment configuration from the specified YAML file kubectl apply -f scripts/test/postgres-deployment.yaml +# Build the Docker image with tag 'dbt-jaffle-shop:1.0.0' using the specified 
Dockerfile cd dev && docker build -t dbt-jaffle-shop:1.0.0 -f Dockerfile.postgres_profile_docker_k8s . + +# Load the Docker image into the local KIND cluster kind load docker-image dbt-jaffle-shop:1.0.0 +# Retrieve the name of the PostgreSQL pod using the label selector 'app=postgres' +# The output is filtered to get the first pod's name POD_NAME=$(kubectl get pods -n default -l app=postgres -o jsonpath='{.items[0].metadata.name}') +# Print the name of the PostgreSQL pod echo "$POD_NAME" -kubectl port-forward --namespace default "$POD_NAME" 5432:5432 & +# Forward port 5432 from the PostgreSQL pod to the local machine's port 5432 +# This allows local access to the PostgreSQL instance running in the pod +kubectl port-forward --namespace default "$POD_NAME" 5432:5432 & +# List all pods in the default namespace to verify the status of pods kubectl get pod diff --git a/scripts/test/values.yaml b/scripts/test/values.yaml deleted file mode 100644 index 44fcfa83d..000000000 --- a/scripts/test/values.yaml +++ /dev/null @@ -1,18 +0,0 @@ -primary: - livenessProbe: - initialDelaySeconds: 120 - timeoutSeconds: 5 - periodSeconds: 10 - successThreshold: 1 - readinessProbe: - enabled: true - initialDelaySeconds: 120 - periodSeconds: 10 - timeoutSeconds: 5 - successThreshold: 1 - startupProbe: - enabled: true - initialDelaySeconds: 120 - periodSeconds: 10 - timeoutSeconds: 5 - successThreshold: 1