feat: initial logical to physical compile #2512
name: Engine (wren + sparrow) CI
on:
  pull_request:
    branches: [main]
  push:
    branches: [main]
  merge_group:
    branches: [main]
  workflow_call:
  workflow_dispatch:
# In case of concurrent workflow runs (consecutive pushes to a PR),
# keep the latest run and cancel the older ones.
# See https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
permissions:
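  # Presumably needed by skip-duplicate-actions to look up and cancel workflow runs.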
  actions: write
  # Used by clippy action to report lint errors.
  checks: write
  contents: read
  # For pushing Docker images to ghcr.io.
  packages: write
env:
  IMAGE: ghcr.io/${{ github.repository }}/engine-integration-tests
jobs:
  integration_skip:
    name: Integration Tests Skip Duplicates
    runs-on: ubuntu-20.04
    # Map a step output to a job output
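    # `skipped_by` describes the duplicate run that caused the skip (as JSON),
    # so `run_id` surfaces the id of that run.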
    outputs:
      should_skip: ${{ steps.skip_check.outputs.should_skip }}
      run_id: ${{ fromJSON(steps.skip_check.outputs.skipped_by).id }}
    steps:
      - id: skip_check
        uses: fkirc/skip-duplicate-actions@v5
        with:
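          # Roughly: skip when an earlier successful run already covers the change
          # (or none of the paths below changed); never skip because of concurrently
          # running workflows, and never skip scheduled runs.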
          concurrent_skipping: "never"
          skip_after_successful_duplicate: "true"
          paths: '["tests/integration/**", "Dockerfile.release", ".github/workflows/ci_engine.yml"]'
          do_not_skip: '["schedule"]'
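  # The build jobs delegate to reusable workflows, which must declare an
  # `on: workflow_call` trigger to be callable here.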
  build_wren:
    name: Build wren (golang)
    uses: ./.github/workflows/reusable_ci_wren.yml
  build_sparrow:
    name: Build sparrow (rust)
    uses: ./.github/workflows/reusable_ci_rust.yml
  run_integration_tests:
    name: Run integration tests
    needs: [integration_skip, build_wren, build_sparrow]
    runs-on: ubuntu-latest
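    # Run unless every needed job reported should_skip == 'true'.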
    if: ${{ needs.integration_skip.outputs.should_skip != 'true' || needs.build_wren.outputs.should_skip != 'true' || needs.build_sparrow.outputs.should_skip != 'true' }}
    steps:
      - name: Checkout sources
        uses: actions/checkout@v3
      - name: Get wren binary
        uses: actions/download-artifact@v3
        with:
          name: manager-binary
          path: release/linux/amd64/
      - name: Get Sparrow binary
        uses: actions/download-artifact@v3
        with:
          name: engine-debug
          path: release/linux/amd64/
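      # Install the buf CLI and regenerate code from the protobuf definitions under
      # proto/ (buf generate presumably uses the buf.gen.yaml template in that directory).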
      - name: Install Buf
        uses: bufbuild/[email protected]
        with:
          version: "1.17.0"
          buf_user: ${{ secrets.BUF_USER }}
          buf_api_token: ${{ secrets.BUF_API_TOKEN }}
          github_token: ${{ secrets.GITHUB_TOKEN }}
      - name: Generate Protos
        run: buf generate
        working-directory: proto
      - name: Install GoLang
        uses: actions/setup-go@v3
        with:
          go-version: "1.19"
          cache: true
          cache-dependency-path: wren/go.sum
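      # `go generate ./ent` presumably invokes entgo.io's code generation to rebuild
      # wren's ORM code from the schema under wren/ent.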
      - name: Generate ent
        run: go generate ./ent
        working-directory: wren
      - name: Copy NOTICE
        run: cp NOTICE ./wren/
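      # Pre-create the data directory that docker compose mounts into the containers
      # (mapped to /data; see the workaround note below) and open its permissions so
      # the containers and the runner user can both write to it.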
      - name: Create data dir with correct permissions
        run: |
          mkdir -p tests/integration/data/tmp
          chmod -R a+rw tests/integration/
          echo "::group::Data Dir"
          ls -laR tests/integration/data
          echo "::endgroup::"
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Docker compose up for integration tests
        run: make ci/integration/tests/docker-compose-up
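      # Poll `docker ps` every 10 seconds until the pulsar container reports healthy;
      # fail the job if it is not healthy within 240 seconds.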
      - name: Check docker containers healthcheck
        run: timeout 240s sh -c 'until docker ps | grep pulsar | grep -q healthy; do echo "Waiting for container to be healthy..."; sleep 10; done'
      - name: Show kaskada container logs (before tests)
        run: make ci/integration/tests/docker-compose-logs-kaskada-only
      # Workaround: The db file `kaskada.db` gets generated from within the kaskada container and is owned by the container's `root`.
      # Before we run any tests we make sure that all files under tests/integration/data (mapped to /data in the container) are
      # set to the user and group *of the user on the GitHub runner* and made -rw- for all.
      # A better fix would probably be to match the container's user and group to those of the user and group on the GitHub runner.
      - name: Run integration tests
        id: integration-tests
        run: |
          echo "::group::Data Dir"
          sudo chown -R $USER:$(id -g -n $USER) tests/integration/data
          chmod -R a+rw tests/integration/
          ls -laR tests/integration/data
          echo "::endgroup::"
          make ci/integration/tests/run/api
      - name: Show kaskada container logs in case of failure
        if: failure() && steps.integration-tests.outcome != 'success'
        run: make ci/integration/tests/docker-compose-logs-kaskada-only
      - name: Docker compose down for integration tests
        run: make ci/integration/tests/docker-compose-down
  run_integration_tests_s3:
    name: Run S3 integration tests
    needs: [integration_skip, build_wren, build_sparrow]
    runs-on: ubuntu-latest
    if: ${{ needs.integration_skip.outputs.should_skip != 'true' || needs.build_wren.outputs.should_skip != 'true' || needs.build_sparrow.outputs.should_skip != 'true' }}
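    # This job mirrors run_integration_tests, but brings up the S3-flavored docker
    # compose stack (which adds minio) and runs the api-s3 test target.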
    steps:
      - name: Checkout sources
        uses: actions/checkout@v3
      - name: Get wren binary
        uses: actions/download-artifact@v3
        with:
          name: manager-binary
          path: release/linux/amd64/
      - name: Get Sparrow binary
        uses: actions/download-artifact@v3
        with:
          name: engine-debug
          path: release/linux/amd64/
      - name: Install Buf
        uses: bufbuild/[email protected]
        with:
          version: "1.10.0"
          buf_user: ${{ secrets.BUF_USER }}
          buf_api_token: ${{ secrets.BUF_API_TOKEN }}
          github_token: ${{ secrets.GITHUB_TOKEN }}
      - name: Generate Protos
        run: buf generate
        working-directory: proto
      - name: Install GoLang
        uses: actions/setup-go@v3
        with:
          go-version: "1.19"
          cache: true
          cache-dependency-path: wren/go.sum
      - name: Generate ent
        run: go generate ./ent
        working-directory: wren
      - name: Copy NOTICE
        run: cp NOTICE ./wren/
      - name: Create data dir with correct permissions
        run: |
          mkdir -p tests/integration/data/tmp
          chmod -R a+rw tests/integration/
          echo "::group::Data Dir"
          ls -laR tests/integration/data
          echo "::endgroup::"
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Docker compose up for integration tests
        run: make ci/integration/tests/docker-compose-up-s3
      - name: Check docker containers (pulsar) healthcheck
        run: timeout 240s sh -c 'until docker ps | grep pulsar | grep -q healthy; do echo "Waiting for pulsar container to be healthy..."; sleep 10; done'
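      # minio provides the S3-compatible object storage these tests run against,
      # so wait for it to become healthy as well.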
      - name: Check docker containers (minio) healthcheck
        run: timeout 240s sh -c 'until docker ps | grep minio | grep -q healthy; do echo "Waiting for minio container to be healthy..."; sleep 10; done'
      - name: Show kaskada container logs
        run: make ci/integration/tests/docker-compose-s3-logs-kaskada-only
      # Workaround: The db file `kaskada.db` gets generated from within the kaskada container and is owned by the container's `root`.
      # Before we run any tests we make sure that all files under tests/integration/data (mapped to /data in the container) are
      # set to the user and group *of the user on the GitHub runner* and made -rw- for all.
      # A better fix would probably be to match the container's user and group to those of the user and group on the GitHub runner.
      - name: Run S3 integration tests
        id: s3-integration-tests
        run: |
          echo "::group::Data Dir"
          sudo chown -R $USER:$(id -g -n $USER) tests/integration/data
          chmod -R a+rw tests/integration/
          ls -laR tests/integration/data
          echo "::endgroup::"
          make ci/integration/tests/run/api-s3
      - name: Show kaskada container logs in case of failure
        if: failure() && steps.s3-integration-tests.outcome != 'success'
        run: make ci/integration/tests/docker-compose-s3-logs-kaskada-only
      - name: Docker compose down for integration tests
        run: make ci/integration/tests/docker-compose-down-postgres-s3