diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 36a5467607..79f8dc5a2a 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -94,15 +94,24 @@ jobs:
           path: |
             /var/lib/docker/volumes/sentry-postgres/_data
             /var/lib/docker/volumes/sentry-clickhouse/_data
+            /var/lib/docker/volumes/sentry-kafka/_data

       - name: Install ${{ env.LATEST_TAG }}
-        run: ./install.sh
+        env:
+          SKIP_DB_MIGRATIONS: ${{ steps.restore_cache.outputs.cache-hit == 'true' && '1' || '' }}
+        run: |
+          # This is for the cache restore on Kafka to work in older releases
+          docker run --rm -v "sentry-kafka:/data" busybox chown -R 1000:1000 /data
+          ./install.sh

       - name: Prepare Docker Volume Caching
         run: |
           # Set permissions for docker volumes so we can cache and restore
           sudo chmod o+x /var/lib/docker
           sudo chmod -R o+rx /var/lib/docker/volumes
+          # Set tar ownership for it to be able to read
+          # From: https://github.com/actions/toolkit/issues/946#issuecomment-1726311681
+          sudo chown root /usr/bin/tar && sudo chmod u+s /usr/bin/tar

       - name: Save DB Volumes Cache
         if: steps.restore_cache.outputs.cache-hit != 'true'
@@ -112,12 +121,22 @@ jobs:
           path: |
             /var/lib/docker/volumes/sentry-postgres/_data
             /var/lib/docker/volumes/sentry-clickhouse/_data
+            /var/lib/docker/volumes/sentry-kafka/_data

       - name: Checkout current ref
         uses: actions/checkout@v4

       - name: Install current ref
-        run: ./install.sh
+        run: |
+          # This is for the cache restore on Kafka to work in older releases
+          docker run --rm -v "sentry-kafka:/data" busybox chown -R 1000:1000 /data
+          ./install.sh
+
+      - name: Inspect failure
+        if: failure()
+        run: |
+          docker compose ps
+          docker compose logs

   integration-test:
     if: github.repository_owner == 'getsentry'
@@ -192,19 +211,24 @@ jobs:
           path: |
             /var/lib/docker/volumes/sentry-postgres/_data
             /var/lib/docker/volumes/sentry-clickhouse/_data
+            /var/lib/docker/volumes/sentry-kafka/_data

       - name: Install self-hosted
-        uses: nick-fields/retry@v3
-        with:
-          timeout_minutes: 10
-          max_attempts: 3
-          command: ./install.sh
+        env:
+          SKIP_DB_MIGRATIONS: ${{ steps.restore_cache.outputs.cache-hit == 'true' && '1' || '' }}
+        run: |
+          # This is for the cache restore on Kafka to work in older releases
+          docker run --rm -v "sentry-kafka:/data" busybox chown -R 1000:1000 /data
+          ./install.sh

       - name: Prepare Docker Volume Caching
         run: |
           # Set permissions for docker volumes so we can cache and restore
           sudo chmod o+x /var/lib/docker
           sudo chmod -R o+rx /var/lib/docker/volumes
+          # Set tar ownership for it to be able to read
+          # From: https://github.com/actions/toolkit/issues/946#issuecomment-1726311681
+          sudo chown root /usr/bin/tar && sudo chmod u+s /usr/bin/tar

       - name: Save DB Volumes Cache
         if: steps.restore_cache.outputs.cache-hit != 'true'
@@ -214,6 +238,7 @@ jobs:
           path: |
             /var/lib/docker/volumes/sentry-postgres/_data
             /var/lib/docker/volumes/sentry-clickhouse/_data
+            /var/lib/docker/volumes/sentry-kafka/_data

       - name: Integration Test
         run: |
diff --git a/install/set-up-and-migrate-database.sh b/install/set-up-and-migrate-database.sh
index 770bfbdc61..a1cc8323e8 100644
--- a/install/set-up-and-migrate-database.sh
+++ b/install/set-up-and-migrate-database.sh
@@ -1,16 +1,17 @@
 echo "${_group}Setting up / migrating database ..."

-# Fixes https://github.com/getsentry/self-hosted/issues/2758, where a migration fails due to indexing issue
-$dc up --wait postgres
+if [[ -z "${SKIP_DB_MIGRATIONS:-}" ]]; then
+  # Fixes https://github.com/getsentry/self-hosted/issues/2758, where a migration fails due to indexing issue
+  $dc up --wait postgres

-os=$($dc exec postgres cat /etc/os-release | grep 'ID=debian')
-if [[ -z $os ]]; then
-  echo "Postgres image debian check failed, exiting..."
-  exit 1
-fi
+  os=$($dc exec postgres cat /etc/os-release | grep 'ID=debian')
+  if [[ -z $os ]]; then
+    echo "Postgres image debian check failed, exiting..."
+    exit 1
+  fi

-# Using django ORM to provide broader support for users with external databases
-$dcr web shell -c "
+  # Using django ORM to provide broader support for users with external databases
+  $dcr web shell -c "
 from django.db import connection

 with connection.cursor() as cursor:
@@ -18,16 +19,18 @@ with connection.cursor() as cursor:
     cursor.execute('DROP INDEX IF EXISTS sentry_groupedmessage_project_id_id_515aaa7e_uniq;')
 "

-if [[ -n "${CI:-}" || "${SKIP_USER_CREATION:-0}" == 1 ]]; then
-  $dcr web upgrade --noinput --create-kafka-topics
-  echo ""
-  echo "Did not prompt for user creation. Run the following command to create one"
-  echo "yourself (recommended):"
-  echo ""
-  echo "  $dc_base run --rm web createuser"
-  echo ""
+  if [[ -n "${CI:-}" || "${SKIP_USER_CREATION:-0}" == 1 ]]; then
+    $dcr web upgrade --noinput --create-kafka-topics
+    echo ""
+    echo "Did not prompt for user creation. Run the following command to create one"
+    echo "yourself (recommended):"
+    echo ""
+    echo "  $dc_base run --rm web createuser"
+    echo ""
+  else
+    $dcr web upgrade --create-kafka-topics
+  fi
 else
-  $dcr web upgrade --create-kafka-topics
+  echo "Skipped DB migrations due to SKIP_DB_MIGRATIONS=$SKIP_DB_MIGRATIONS"
 fi
-
 echo "${_endgroup}"
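Note (not part of the patch): a minimal sketch of exercising the new SKIP_DB_MIGRATIONS gate outside CI, assuming a previous successful ./install.sh run already populated the sentry-postgres, sentry-clickhouse, and sentry-kafka volumes, i.e. the local equivalent of the cache-hit case above.

  # Usage sketch only; assumes existing, already-migrated Docker volumes.
  # Any non-empty value skips the migration block, because the script only
  # checks [[ -z "${SKIP_DB_MIGRATIONS:-}" ]]; CI passes '1' on a cache hit.
  SKIP_DB_MIGRATIONS=1 ./install.sh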