connectors_up_to_date.yml
name: Connectors up-to-date

concurrency:
  group: ${{ github.workflow }}
  cancel-in-progress: false

on:
  schedule:
    # Runs every Saturday at 12:00 UTC
    - cron: "0 12 * * 6"
  workflow_dispatch:
    inputs:
      connectors-options:
        description: "Options to pass to the 'airbyte-ci connectors' command group."
        default: "--language=python --language=low-code --language=manifest-only"
jobs:
  generate_matrix:
    name: Generate matrix
    runs-on: ubuntu-latest
    outputs:
      generated_matrix: ${{ steps.generate_matrix.outputs.generated_matrix }}
    steps:
      - name: Checkout Airbyte
        uses: actions/checkout@v4
      - name: Run airbyte-ci connectors list [SCHEDULED TRIGGER]
        if: github.event_name == 'schedule'
        id: airbyte-ci-connectors-list-scheduled
        uses: ./.github/actions/run-airbyte-ci
        with:
          context: "master"
          subcommand: 'connectors --language=python --language=low-code --language=manifest-only --metadata-query="\"-rc.\" not in data.dockerImageTag and \"source-declarative-manifest\" not in data.dockerRepository" list --output=selected_connectors.json'
      - name: Run airbyte-ci connectors list [MANUAL TRIGGER]
        if: github.event_name == 'workflow_dispatch'
        id: airbyte-ci-connectors-list-workflow-dispatch
        uses: ./.github/actions/run-airbyte-ci
        with:
          context: "master"
          subcommand: 'connectors ${{ github.event.inputs.connectors-options }} --metadata-query="\"-rc.\" not in data.dockerImageTag and \"source-declarative-manifest\" not in data.dockerRepository" list --output=selected_connectors.json'
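      # Note: the --metadata-query passed to both list commands excludes release candidates
      # (docker image tags containing "-rc.") and the source-declarative-manifest repository.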
      # We generate a dynamic matrix from the list of selected connectors.
      # A matrix is required here because the number of connectors is large: running them all in a single job would exceed the maximum job time limit of 6 hours.
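      # Illustrative example: assuming selected_connectors.json is a flat JSON array of connector
      # technical names such as ["source-faker", "source-pokeapi"], the jq command below emits:
      #   {"include": [{"connector_names": "--name=source-faker --name=source-pokeapi"}]}
      # with one "connector_names" entry per batch of up to 100 connectors.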
      - name: Generate matrix - 100 connectors per job
        id: generate_matrix
        run: |
          matrix=$(jq -c -r '{include: [.[] | "--name=" + .] | to_entries | group_by(.key / 100 | floor) | map(map(.value) | {"connector_names": join(" ")})}' selected_connectors.json)
          echo "generated_matrix=$matrix" >> "$GITHUB_OUTPUT"

  run_connectors_up_to_date:
    needs: generate_matrix
    name: Connectors up-to-date
    runs-on: connector-up-to-date-medium
    continue-on-error: true
    strategy:
      matrix: ${{ fromJson(needs.generate_matrix.outputs.generated_matrix) }}
    permissions:
      pull-requests: write
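    # Assumption: pull-requests write access is granted so the up-to-date command can open and
    # auto-merge version-bump PRs (--create-prs --auto-merge); in practice the action may use the
    # GH_PAT_MAINTENANCE_OSS secret passed below rather than the job's GITHUB_TOKEN.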
    steps:
      - name: Checkout Airbyte
        uses: actions/checkout@v4
      - name: Run airbyte-ci connectors up-to-date [WORKFLOW]
        id: airbyte-ci-connectors-up-to-date-workflow-dispatch
        uses: ./.github/actions/run-airbyte-ci
        with:
          context: "master"
          dagger_cloud_token: ${{ secrets.DAGGER_CLOUD_TOKEN_CACHE_3 }}
          docker_hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}
          docker_hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
          gcp_gsm_credentials: ${{ secrets.GCP_GSM_CREDENTIALS }}
          gcs_credentials: ${{ secrets.METADATA_SERVICE_PROD_GCS_CREDENTIALS }}
          github_token: ${{ secrets.GH_PAT_MAINTENANCE_OSS }}
          sentry_dsn: ${{ secrets.SENTRY_AIRBYTE_CI_DSN }}
          s3_build_cache_access_key_id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }}
          s3_build_cache_secret_key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }}
          subcommand: "connectors --concurrency=10 ${{ matrix.connector_names }} up-to-date --create-prs --auto-merge"