Added sql-server tests for dbt 1.3.0 to 1.8.0 #581
Workflow file for this run
name: CI test package
on:
  workflow_dispatch:
  # all PRs, important to note that `pull_request_target` workflows
  # will run in the context of the target branch of a PR
  pull_request_target:
env:
  # These are configured in GitHub secrets
  DBT_PROFILES_DIR: ${{ github.workspace }}/integration_test_project
  GITHUB_SHA_OVERRIDE: ${{ github.event.pull_request.head.sha }} # We need the commit hash of the pull request branch's head; in a pull_request_target trigger, the GITHUB_SHA env var always points at the base branch
  DBT_ENV_SECRET_SNOWFLAKE_TEST_ACCOUNT: ${{ secrets.SNOWFLAKE_TEST_ACCOUNT }}
  DBT_ENV_SECRET_SNOWFLAKE_TEST_USER: ${{ secrets.SNOWFLAKE_TEST_USER }}
  DBT_ENV_SECRET_SNOWFLAKE_TEST_PASSWORD: ${{ secrets.SNOWFLAKE_TEST_PASSWORD }}
  DBT_ENV_SECRET_SNOWFLAKE_TEST_ROLE: ${{ secrets.SNOWFLAKE_TEST_ROLE }}
  DBT_ENV_SECRET_SNOWFLAKE_TEST_DATABASE: ${{ secrets.SNOWFLAKE_TEST_DATABASE }}
  DBT_ENV_SECRET_SNOWFLAKE_TEST_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_WAREHOUSE }}
  DBT_ENV_SECRET_DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }}
  DBT_ENV_SECRET_DATABRICKS_HTTP_PATH: ${{ secrets.DATABRICKS_HTTP_PATH }}
  DBT_ENV_SECRET_DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
  DBT_ENV_SECRET_GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
  # Env var to test version
  LAST_RELEASE_SUPPORTED_DBT_VERSION: 1_8_0 # A dbt version supported by both the last release and this one
  # Env vars to test invocations model
  DBT_CLOUD_PROJECT_ID: 123
  DBT_CLOUD_JOB_ID: ABC
  DBT_CLOUD_RUN_REASON: "String with 'quotes' !"
  TEST_ENV_VAR_1: TEST_VALUE
  TEST_ENV_VAR_NUMBER: 3
  TEST_ENV_VAR_EMPTY: ""
  TEST_ENV_VAR_WITH_QUOTE: "Triggered via Apache Airflow by task 'trigger_dbt_cloud_job_run' in the airtable_ingest DAG."
  DBT_ENV_CUSTOM_ENV_FAVOURITE_DBT_PACKAGE: dbt_artifacts
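  # The DBT_CLOUD_* and TEST_ENV_VAR_* values above are dummy inputs; the integration
  # tests are assumed to check that the invocations model captures them correctly.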
jobs:
  integration:
    strategy:
      fail-fast: false # Don't cancel the other warehouses if one fails
      matrix:
        warehouse: ["snowflake", "bigquery", "postgres"]
    runs-on: ubuntu-latest
    environment:
      name: Approve Integration Tests
    permissions:
      contents: "read"
      id-token: "write"
    services:
      postgres:
        image: postgres
        env:
          POSTGRES_PASSWORD: postgres
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432
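    # The postgres service is published on localhost:5432; the postgres target in the
    # integration_test_project profiles is assumed to connect to it with the password above.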
    steps:
      - name: Get latest release
        uses: rez0n/actions-github-release@main
        id: latest_release
        env:
          token: ${{ secrets.GITHUB_TOKEN }}
          repository: ${{ github.repository }}
          type: "stable"
      - name: Checkout latest release
        uses: actions/checkout@v3
        with:
          ref: ${{ steps.latest_release.outputs.release }}
      - uses: actions/setup-python@v4
        with:
          python-version: "3.8.x"
          architecture: "x64"
      - name: Install tox
        run: python3 -m pip install tox
      - id: auth-on-release
        if: ${{ matrix.warehouse == 'bigquery' }}
        uses: google-github-actions/auth@v1
        with:
          workload_identity_provider: ${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }}
          service_account: ${{ secrets.GCP_SERVICE_ACCOUNT }}
      - name: Build tables for latest release
        env:
          DBT_VERSION: "noversion"
        run: tox -e integration_${{ matrix.warehouse }}
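      # Upgrade-path check: the tables are first built from the latest stable release
      # (DBT_VERSION "noversion" presumably lets the tox environment use its default dbt
      # install), then the PR code is checked out and run on top of those tables below.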
      - name: Checkout
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }} # Check out the code of the PR
      # Need to get the auth file again
      - id: auth-on-pr
        if: ${{ matrix.warehouse == 'bigquery' }}
        uses: google-github-actions/auth@v1
        with:
          workload_identity_provider: ${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }}
          service_account: ${{ secrets.GCP_SERVICE_ACCOUNT }}
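      # tox environment names are composed as integration_<warehouse>_<dbt version>,
      # e.g. integration_postgres_1_8_0; the environments are assumed to be defined in tox.ini.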
      - name: Run Tests on PR
        run: tox -e integration_${{ matrix.warehouse }}_${{ env.LAST_RELEASE_SUPPORTED_DBT_VERSION }}
  single-run-different-versions:
    needs: integration
    strategy:
      fail-fast: false # Don't cancel the other warehouses if one fails
      matrix:
        warehouse: ["snowflake", "bigquery", "postgres"]
        # When supporting a new version, update the list here
        version: ["1_3_0", "1_4_0", "1_5_0", "1_6_0", "1_7_0", "1_8_0"]
    runs-on: ubuntu-latest
    environment:
      name: Approve Integration Tests
    permissions:
      contents: "read"
      id-token: "write"
    services:
      postgres:
        image: postgres
        env:
          POSTGRES_PASSWORD: postgres
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432
    steps:
      - uses: actions/setup-python@v4
        with:
          python-version: "3.8.x"
          architecture: "x64"
      - name: Install tox
        run: python3 -m pip install tox
      - name: Install SQL Server
        run: docker run -e "ACCEPT_EULA=Y" -e "MSSQL_SA_PASSWORD=123" -p 1433:1433 -d mcr.microsoft.com/mssql/server:2022-latest
      - name: Install Microsoft ODBC
        run: sudo ACCEPT_EULA=Y apt-get install msodbcsql18 -y
      - name: Checkout
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }} # Check out the code of the PR
      # Need to get the auth file again
      - id: auth-on-pr
        if: ${{ matrix.warehouse == 'bigquery' }}
        uses: google-github-actions/auth@v1
        with:
          workload_identity_provider: ${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }}
          service_account: ${{ secrets.GCP_SERVICE_ACCOUNT }}
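      # DBT_VERSION below is exported into the tox run; tox.ini is assumed to use it to
      # pin the dbt core and adapter versions matching the matrix entry.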
      - name: Run Tests on PR
        env:
          DBT_VERSION: ${{ matrix.version }}
        run: tox -e integration_${{ matrix.warehouse }}_${{ matrix.version }}
  # Databricks doesn't work well with the matrix strategy, so we keep the old integration testing setup without versioning
  # integration-databricks:
  #   runs-on: ubuntu-latest
  #   environment:
  #     name: Approve Integration Tests
  #   steps:
  #     - name: Checkout
  #       uses: actions/checkout@v3
  #       with:
  #         ref: ${{ github.event.pull_request.head.sha }} # Check out the code of the PR
  #     - uses: actions/setup-python@v4
  #       with:
  #         python-version: '3.8.x'
  #         architecture: 'x64'
  #     - name: Install tox
  #       run: python3 -m pip install tox
  #     - name: Run Databricks Tests
  #       env:
  #         DBT_VERSION: ''
  #       run: tox -e integration_databricks
  integration-sqlserver:
    strategy:
      fail-fast: false # Don't cancel the other warehouses if one fails
      matrix:
        # When supporting a new version, update the list here
        version: ["1_3_0", "1_4_0", "1_7_0", "1_8_0"]
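        # 1_5_0 and 1_6_0 are omitted above, presumably because there is no matching
        # dbt-sqlserver adapter release for those dbt versions.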
    runs-on: ubuntu-latest
    environment:
      name: Approve Integration Tests
    steps:
      - uses: actions/setup-python@v4
        with:
          python-version: "3.8.x"
          architecture: "x64"
      - name: Install SQL Server
        uses: Particular/[email protected]
        with:
          connection-string-env-var: SQL_SERVER_CONNECTION_STRING
          catalog: dbt_artifact_integrationtests
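      # The login created below (user dbt, password 123) is assumed to match the
      # sqlserver target in the integration_test_project profiles.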
      - name: Create DBT User
        shell: pwsh
        run: |
          echo "Create dbt login with sysadmin"
          sqlcmd -Q "CREATE LOGIN dbt WITH PASSWORD = '123', CHECK_POLICY = OFF, CHECK_EXPIRATION = OFF" -d "dbt_artifact_integrationtests"
          sqlcmd -Q "ALTER SERVER ROLE sysadmin ADD MEMBER dbt" -d "dbt_artifact_integrationtests"
      - name: Install tox
        run: python3 -m pip install tox
      - name: Install Microsoft ODBC
        run: sudo ACCEPT_EULA=Y apt-get install msodbcsql18 -y
      - name: Checkout
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }} # Check out the code of the PR
      - name: Run Tests on PR
        env:
          DBT_VERSION: ${{ matrix.version }}
        run: tox -e integration_sqlserver_${{ matrix.version }}