
feat: optimize data insertion with batch processing #1187

name: Data Stack CI

on:
  push:

jobs:
  deploy-dagster:
    uses: ./.github/workflows/deploy.yml
    with:
      app: Dagster (Production)
      alias: dagster-production
      branch: main
    secrets: inherit

  deploy-metabase:
    uses: ./.github/workflows/deploy.yml
    with:
      app: Production - Metabase
      alias: metabase-production
      branch: main
    secrets: inherit

  build-and-upload-dbt-doc:
    name: Build and Upload DBT Documentation
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.9'

      - name: Install dbt
        run: |
          python -m pip install --upgrade pip
          pip install dbt-core dbt-postgres dbt-duckdb

      - name: Install dbt dependencies
        working-directory: analytics/dbt
        run: |
          dbt deps

      - name: Build DBT Docs
        working-directory: analytics/dbt
        run: |
          dbt docs generate
          mkdir -p ../dbt_docs
          mv target/* ../dbt_docs/

      - name: Configure AWS CLI
        env:
          CELLAR_KEY_ID: ${{ secrets.CELLAR_KEY_ID }}
          CELLAR_KEY_SECRET: ${{ secrets.CELLAR_KEY_SECRET }}
        run: |
          aws configure set aws_access_key_id $CELLAR_KEY_ID
          aws configure set aws_secret_access_key $CELLAR_KEY_SECRET

      - name: Upload DBT Docs to Cellar
        env:
          DBT_DOC_BUCKET_NAME: ${{ vars.DBT_DOC_BUCKET_NAME }}
        run: |
          aws s3 cp analytics/dbt_docs/index.html s3://$DBT_DOC_BUCKET_NAME/index.html --acl public-read --endpoint-url https://cellar-c2.services.clever-cloud.com
          aws s3 cp analytics/dbt_docs/manifest.json s3://$DBT_DOC_BUCKET_NAME/manifest.json --acl public-read --endpoint-url https://cellar-c2.services.clever-cloud.com
          aws s3 cp analytics/dbt_docs/catalog.json s3://$DBT_DOC_BUCKET_NAME/catalog.json --acl public-read --endpoint-url https://cellar-c2.services.clever-cloud.com
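
The two deploy jobs delegate to a single reusable workflow at ./.github/workflows/deploy.yml, passing the app name, its alias, and the branch to deploy, and forwarding the repository's secrets with secrets: inherit. The callee is not shown in this change; a minimal sketch of the workflow_call interface it would need in order to accept those inputs could look like the following (the deploy step body is a placeholder, not the repository's actual implementation):

# Sketch of ./.github/workflows/deploy.yml (assumed shape, not the actual file)
name: Deploy

on:
  workflow_call:
    inputs:
      app:
        required: true
        type: string
      alias:
        required: true
        type: string
      branch:
        required: true
        type: string

jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
        with:
          ref: ${{ inputs.branch }}
      - name: Deploy ${{ inputs.app }}
        # Placeholder for the real deploy command; with secrets: inherit the caller's
        # secrets are available here without being declared in workflow_call.
        run: echo "deploying ${{ inputs.app }} as ${{ inputs.alias }}"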
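
Because index.html, manifest.json, and catalog.json are uploaded with --acl public-read to Cellar (Clever Cloud's S3-compatible object storage), the generated dbt documentation should be reachable over plain HTTPS once the job has run. Assuming Cellar's usual virtual-hosted bucket URLs of the form https://<bucket>.cellar-c2.services.clever-cloud.com (an assumption, not something stated in this workflow), a follow-up step could smoke-test the published site:

# Hypothetical extra step, not part of the workflow above; the bucket URL scheme is assumed
- name: Check published DBT Docs
  env:
    DBT_DOC_BUCKET_NAME: ${{ vars.DBT_DOC_BUCKET_NAME }}
  run: |
    curl --fail --silent --show-error "https://$DBT_DOC_BUCKET_NAME.cellar-c2.services.clever-cloud.com/index.html" > /dev/null
    curl --fail --silent --show-error "https://$DBT_DOC_BUCKET_NAME.cellar-c2.services.clever-cloud.com/manifest.json" > /dev/null
    curl --fail --silent --show-error "https://$DBT_DOC_BUCKET_NAME.cellar-c2.services.clever-cloud.com/catalog.json" > /dev/null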