diff --git a/.github/workflows/container-smoke-test.yml b/.github/workflows/container-smoke-test.yml new file mode 100644 index 000000000..c0dcf877a --- /dev/null +++ b/.github/workflows/container-smoke-test.yml @@ -0,0 +1,37 @@ +name: Smoke test Docker image + +on: + pull_request: + push: + branches: [ main ] + +jobs: + smoke-test-container: + runs-on: ubuntu-latest + env: + BUILDKIT_PROGRESS: plain + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Build and start services + run: docker compose --file=container/compose.yml up --build --wait --detach + + - name: Smoke test registry + run: | + response=$( + curl --silent --verbose --fail --request POST \ + --header 'Content-Type: application/vnd.schemaregistry.v1+json' \ + --data '{"schema": "{\"type\": \"record\", \"name\": \"Obj\", \"fields\":[{\"name\": \"age\", \"type\": \"int\"}]}"}' \ + http://localhost:8081/subjects/test-key/versions + ) + echo "$response" + [[ $response == '{"id":1}' ]] || exit 1 + echo "Ok!" + + - name: Smoke test REST proxy + run: | + response=$(curl --silent --verbose --fail http://localhost:8082/topics) + echo "$response" + [[ $response == '["_schemas","__consumer_offsets"]' ]] || exit 1 + echo "Ok!" 
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index d8f211fb7..7d0230b41 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -83,7 +83,7 @@ To use your development code, you need to set up a Kafka server and run Karapace virtual environment: ``` -docker-compose -f ./container/docker-compose.yml up -d kafka +docker compose -f ./container/compose.yml up -d kafka karapace karapace.config.json ``` diff --git a/README.rst b/README.rst index e1e4ca9aa..ae0c706c5 100644 --- a/README.rst +++ b/README.rst @@ -59,9 +59,9 @@ To get you up and running with the latest build of Karapace, a docker image is a # Fetch the latest release docker pull ghcr.io/aiven/karapace:latest -An example setup including configuration and Kafka connection is available as docker-compose example:: +An example setup including configuration and Kafka connection is available as compose example:: - docker-compose -f ./container/docker-compose.yml up -d + docker compose -f ./container/compose.yml up -d Then you should be able to reach two sets of endpoints: diff --git a/container/Dockerfile b/container/Dockerfile index 0120dca8e..10b7702fb 100644 --- a/container/Dockerfile +++ b/container/Dockerfile @@ -65,6 +65,8 @@ RUN pip3 install --no-deps /build/karapace-wheel/*.whl && rm -rf /build/karapace COPY ./container/start.sh /opt/karapace RUN chmod 500 /opt/karapace/start.sh && chown karapace:karapace /opt/karapace/start.sh +COPY ./container/healthcheck.py /opt/karapace + WORKDIR /opt/karapace USER karapace @@ -74,3 +76,6 @@ ARG COMMIT LABEL org.opencontainers.image.created=$CREATED \ org.opencontainers.image.version=$VERSION \ org.opencontainers.image.revision=$COMMIT + +HEALTHCHECK --interval=10s --timeout=30s --retries=3 --start-period=60s \ + CMD python3 healthcheck.py http://localhost:$KARAPACE_PORT/_health || exit 1 diff --git a/container/docker-compose.yml b/container/compose.yml similarity index 95% rename from container/docker-compose.yml rename to container/compose.yml index 
1df42af08..852d87ea8 100644 --- a/container/docker-compose.yml +++ b/container/compose.yml @@ -56,6 +56,9 @@ services: karapace-registry: image: ghcr.io/aiven/karapace:develop + build: + context: .. + dockerfile: container/Dockerfile entrypoint: - /bin/bash - /opt/karapace/start.sh @@ -78,6 +81,9 @@ services: karapace-rest: image: ghcr.io/aiven/karapace:develop + build: + context: .. + dockerfile: container/Dockerfile entrypoint: - /bin/bash - /opt/karapace/start.sh diff --git a/container/healthcheck.py b/container/healthcheck.py new file mode 100644 index 000000000..2d08fb29d --- /dev/null +++ b/container/healthcheck.py @@ -0,0 +1,36 @@ +""" +Copyright (c) 2023 Aiven Ltd +See LICENSE for details +""" +from __future__ import annotations + +from http import HTTPStatus +from typing import Final + +import aiohttp +import asyncio +import sys + +timeout: Final = aiohttp.ClientTimeout(total=2) + + +async def check_ok(url: str) -> None: + async with aiohttp.ClientSession(timeout=timeout) as session: + response = await session.get(url) + if response.status != HTTPStatus.OK: + print( + f"Server responded with non-OK {response.status=} {url=}", + file=sys.stderr, + ) + raise SystemExit(1) + print("Ok!", file=sys.stderr) + + +def main() -> None: + url = sys.argv[1] + print(f"Checking {url=}", file=sys.stderr) + asyncio.run(check_ok(url)) + + +if __name__ == "__main__": + main() diff --git a/performance-test/README.rst b/performance-test/README.rst index a5d981da5..f3da26c6e 100644 --- a/performance-test/README.rst +++ b/performance-test/README.rst @@ -5,7 +5,7 @@ Install development requirements per instructions from `README.rst <../README.rs Requires Kafka and Zookeeper running in the containers:: cd ../container - docker-compose start zookeeper kafka + docker compose start zookeeper kafka Create if necessary the `_schemas` topic to Kafka:: docker exec -it bash diff --git a/tests/integration/utils/kafka_server.py b/tests/integration/utils/kafka_server.py index f0d043af7..8b114e023 100644 
--- a/tests/integration/utils/kafka_server.py +++ b/tests/integration/utils/kafka_server.py @@ -145,7 +145,7 @@ def configure_and_start_kafka( ] ) - # Keep in sync with containers/docker-compose.yml + # Keep in sync with container/compose.yml kafka_ini = { "broker.id": 1, "broker.rack": "local", diff --git a/website/source/install.rst b/website/source/install.rst index 69fd9b941..bc0102318 100644 --- a/website/source/install.rst +++ b/website/source/install.rst @@ -12,9 +12,9 @@ To get you up and running with the latest build of Karapace, a docker image is a # Fetch the latest release docker pull ghcr.io/aiven/karapace:latest -An example setup including configuration and Kafka connection is available as docker-compose example:: +An example setup including configuration and Kafka connection is available as compose example:: - docker-compose -f ./container/docker-compose.yml up -d + docker compose -f ./container/compose.yml up -d Then you should be able to reach two sets of endpoints: