Address G115 findings #3477

Workflow file for this run

name: Unit, Integration, and E2E Tests
on:
  pull_request:
    branches:
      - main
      - "release-v*" # release branches
  push:
    paths-ignore:
      - 'README.md'
      - 'docs/**'
    branches:
      - main
      - "release-v*" # release branches
jobs:
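  # Unit tests: build the project and run the Go unit test suite with coverage.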
  unit:
    runs-on: ubuntu-latest
    steps:
      - name: Check out code
        uses: actions/checkout@v4
      - name: Install Go
        uses: actions/setup-go@v5
        with:
          go-version: '1.21.x'
          cache: true
          check-latest: true
      - name: Install Trivy
        run: make install-trivy
      - name: Build
        run: make build
      - name: Test
        run: make test-unit-coverage
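  # Container image builds: verify that each supporting image in the matrix bakes
  # successfully with docker buildx.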
  test-docker-build:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: true
      max-parallel: 4
      matrix:
        image:
          - base
          - git
          - image-processing
          - waiter
    steps:
      - uses: actions/checkout@v4
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to container registry
        run: echo ${{ secrets.GITHUB_TOKEN }} | docker login -u ${{ github.repository_owner }} --password-stdin ghcr.io
      - name: Build Image
        working-directory: images/${{ matrix.image }}
        run: |
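          # image references must be lowercase, so normalize the repository owner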
          NAMESPACE=$(tr '[:upper:]' '[:lower:]' <<<${{ github.repository_owner }})
          IMAGE=test-build/base-${{ matrix.image }} NAMESPACE="${NAMESPACE}" docker buildx bake --file ../docker-bake.hcl
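  # Integration tests: run against a kind cluster for every supported Kubernetes
  # and Tekton LTS combination in the matrix.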
  integration:
    strategy:
      fail-fast: false
      matrix:
        kubernetes:
          - v1.27.11
          - v1.29.2
        tekton:
          # oldest LTS that exists at the time of our planned next release
          - v0.53.5
          # newest LTS that exists at the time of our planned next release
          - v0.59.2 # RETAIN-COMMENT: TEKTON_NEWEST_LTS
      max-parallel: 4
    runs-on: ubuntu-latest
    steps:
      - name: Check out code
        uses: actions/checkout@v4
      - name: Install Go
        uses: actions/setup-go@v5
        with:
          go-version: '1.21.x'
          cache: true
          check-latest: true
      - name: Install Ko
        uses: ko-build/setup-ko@v0.6
        with:
          version: v0.15.2
      - name: Install kubectl
        uses: azure/setup-kubectl@v4
        with:
          version: ${{ matrix.kubernetes }}
      - name: Create kind cluster
        uses: helm/kind-action@v1
        with:
          version: v0.22.0
          node_image: kindest/node:${{ matrix.kubernetes }}
          cluster_name: kind
          wait: 120s
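      # Sanity check: fail early with diagnostics if the control-plane node never becomes Ready.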
      - name: Verify kind cluster
        run: |
          echo "# Using KinD context..."
          kubectl config use-context "kind-kind"
          echo "# KinD nodes:"
          kubectl get nodes
          NODE_STATUS=$(kubectl get node kind-control-plane -o json | jq -r '.status.conditions[] | select(.type == "Ready") | .status')
          if [ "${NODE_STATUS}" != "True" ]; then
            echo "# Node is not ready:"
            kubectl describe node kind-control-plane
            echo "# Pods:"
            kubectl get pod -A
            echo "# Events:"
            kubectl get events -A
            exit 1
          fi
      - name: Install Tekton
        env:
          TEKTON_VERSION: ${{ matrix.tekton }}
        run: |
          make kind-tekton
          kubectl -n tekton-pipelines rollout status deployment tekton-pipelines-controller --timeout=1m
          kubectl -n tekton-pipelines rollout status deployment tekton-pipelines-webhook --timeout=1m
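      # Build the helper images with ko, load them directly into the kind node via
      # KO_DOCKER_REPO=kind.local, then run the integration suite against them.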
      - name: Test
        run: |
          # host.docker.internal does not work in a GitHub action
          docker exec kind-control-plane bash -c "echo '172.17.0.1 host.docker.internal' >>/etc/hosts"
          # Build and load the Git, Bundle, and Image Processing images
          export GIT_CONTAINER_IMAGE="$(KO_DOCKER_REPO=kind.local ko publish ./cmd/git)"
          export BUNDLE_CONTAINER_IMAGE="$(KO_DOCKER_REPO=kind.local ko publish ./cmd/bundle)"
          export IMAGE_PROCESSING_CONTAINER_IMAGE="$(KO_DOCKER_REPO=kind.local ko publish ./cmd/image-processing)"
          make test-integration
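  # End-to-end tests: exercise the full build flow on a kind cluster with an
  # in-cluster registry, for every Kubernetes/Tekton combination in the matrix.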
  e2e:
    strategy:
      fail-fast: false
      matrix:
        kubernetes:
          - v1.27.11
          - v1.29.2
        tekton:
          # oldest LTS that exists at the time of our planned next release
          - v0.53.5
          # newest LTS that exists at the time of our planned next release
          - v0.59.2 # RETAIN-COMMENT: TEKTON_NEWEST_LTS
      max-parallel: 4
    runs-on: ubuntu-latest
    steps:
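      # Free up runner disk space; the e2e run builds and pulls a significant number of container images.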
      - name: Maximize build space
        uses: easimon/maximize-build-space@fc881a613ad2a34aca9c9624518214ebc21dfc0c
        with:
          root-reserve-mb: 30720
          swap-size-mb: 1024
          remove-android: "true"
          remove-codeql: "true"
          remove-docker-images: "true"
          remove-dotnet: "true"
          remove-haskell: "true"
      - name: Check out code
        uses: actions/checkout@v4
      - name: Install Go
        uses: actions/setup-go@v5
        with:
          go-version: '1.21.x'
          cache: true
          check-latest: true
      - name: Install kubectl
        uses: azure/setup-kubectl@v4
        with:
          version: ${{ matrix.kubernetes }}
      - name: Create kind cluster
        uses: helm/kind-action@v1
        with:
          version: v0.22.0
          node_image: kindest/node:${{ matrix.kubernetes }}
          cluster_name: kind
          config: test/kind/config.yaml
          wait: 120s
      - name: Verify kind cluster
        run: |
          echo "# Using KinD context..."
          kubectl config use-context "kind-kind"
          echo "# KinD nodes:"
          kubectl get nodes
          NODE_STATUS=$(kubectl get node kind-control-plane -o json | jq -r '.status.conditions[] | select(.type == "Ready") | .status')
          if [ "${NODE_STATUS}" != "True" ]; then
            echo "# Node is not ready:"
            kubectl describe node kind-control-plane
            echo "# Pods:"
            kubectl get pod -A
            echo "# Events:"
            kubectl get events -A
            exit 1
          fi
      - name: Install Tekton
        env:
          TEKTON_VERSION: ${{ matrix.tekton }}
        run: |
          make kind-tekton
          kubectl -n tekton-pipelines rollout status deployment tekton-pipelines-controller --timeout=1m
          kubectl -n tekton-pipelines rollout status deployment tekton-pipelines-webhook --timeout=1m
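      # Deploy an in-cluster registry that the e2e builds push their images to.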
      - name: Install Registry
        run: |
          kubectl apply -f test/data/registry.yaml
          kubectl -n registry rollout status deployment registry --timeout=1m
      - name: Install Ko
        uses: ko-build/setup-ko@v0.6
        with:
          version: v0.15.2
      - name: Install Shipwright Build
        run: |
          make install-controller-kind
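          # tolerate a rollout-status timeout here; real failures will show up in the
          # test step and in the log collection below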
          kubectl -n shipwright-build rollout status deployment shipwright-build-controller --timeout=1m || true
          kubectl -n shipwright-build rollout status deployment shipwright-build-webhook --timeout=1m || true
      - name: Test
        run: |
          kubectl create namespace shp-e2e
          export TEST_NAMESPACE=shp-e2e
          export TEST_IMAGE_REPO=registry.registry.svc.cluster.local:32222/shipwright-io/build-e2e
          export TEST_IMAGE_REPO_INSECURE=true
          export TEST_E2E_TIMEOUT_MULTIPLIER=2
          make test-e2e
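      # On failure, dump the controller and webhook logs (including previous logs of any restarted container) to aid debugging.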
      - name: Build controller logs
        if: ${{ failure() }}
        run: |
          echo "# Pods:"
          kubectl -n shipwright-build get pod
          PODS=$(kubectl -n shipwright-build get pod -o json)
          POD_NAME=$(echo "${PODS}" | jq -r '.items[] | select(.metadata.name | startswith("shipwright-build-controller-")) | .metadata.name')
          if [ "${POD_NAME}" != "" ]; then
            RESTART_COUNT=$(echo "${PODS}" | jq -r ".items[] | select(.metadata.name == \"${POD_NAME}\") | .status.containerStatuses[0].restartCount")
            if [ "${RESTART_COUNT}" != "0" ]; then
              echo "# Build Controller Previous logs:"
              kubectl -n shipwright-build logs "${POD_NAME}" --previous || true
            fi
            echo "# Build Controller Logs:"
            kubectl -n shipwright-build logs "${POD_NAME}"
          else
            echo "# Pod is missing, there are no logs to retrieve, bailing out..."
          fi
          WEBHOOK_POD_NAME=$(echo "${PODS}" | jq -r '.items[] | select(.metadata.name | startswith("shipwright-build-webhook-")) | .metadata.name')
          if [ "${WEBHOOK_POD_NAME}" != "" ]; then
            RESTART_COUNT=$(echo "${PODS}" | jq -r ".items[] | select(.metadata.name == \"${WEBHOOK_POD_NAME}\") | .status.containerStatuses[0].restartCount")
            if [ "${RESTART_COUNT}" != "0" ]; then
              echo "# Build Webhook Previous logs:"
              kubectl -n shipwright-build logs "${WEBHOOK_POD_NAME}" --previous || true
            fi
            echo "# Build Webhook Logs:"
            kubectl -n shipwright-build logs "${WEBHOOK_POD_NAME}"
          else
            echo "# Pod is missing, there are no logs to retrieve, bailing out..."
          fi