manual pytest execution #3035

Workflow file for this run

name: manual pytest execution
env:
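  # Consumed by the AWS CLI in the "test" job when it runs
  # `aws eks update-kubeconfig` against the lab EKS cluster.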
  # AWS credentials
  AWS_EKS_NAME: tip-wlan-main
  AWS_DEFAULT_OUTPUT: json
  AWS_DEFAULT_REGION: ap-south-1
  AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }}
  AWS_ACCESS_KEY_ID: ${{ secrets.AWS_CLIENT_ID }}
  AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_CLIENT_KEY }}
on:
  workflow_dispatch:
    inputs:
      testbed:
        default: "basic-01"
        description: "testbed to execute tests against"
        required: false
      firmware:
        default: "next-latest"
        description: "target firmware version, specified as <branch>-<commit | latest>"
        required: false
      marker_expression:
        default: ""
        description: "marker expression that will be passed to pytest's -m"
        required: false
      additional_args:
        default: ""
        description: "additional arguments that will be passed to the pytest execution"
        required: false
      upload_report:
        type: boolean
        default: false
        description: "check if the report should be uploaded to S3"
        required: false
      tests_release:
        required: false
        default: ""
        description: "Tests release branch to use (e.g. 'release/v2.8.0' or 'master'). If left empty, the latest release branch is used"
defaults:
  run:
    shell: bash
jobs:
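  # vars: resolve which tests release branch to check out; if no branch is
  # passed as an input, the latest remote release/v* branch is used.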
  vars:
    runs-on: ubuntu-latest
    outputs:
      tests_release: ${{ steps.vars.outputs.tests_release }}
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: set variables
        id: vars
        run: |
          LATEST_TESTS_RELEASE=$(git branch -r | grep 'release/v' | sed 's!\s*origin/!!' | tail -1)
          if [[ -z "${{ github.event.inputs.tests_release }}" ]]; then
            echo "Tests release was not passed, using branch $LATEST_TESTS_RELEASE"
            echo "tests_release=$LATEST_TESTS_RELEASE" >> $GITHUB_OUTPUT
          else
            echo "Tests release was passed - ${{ github.event.inputs.tests_release }}"
            echo "tests_release=${{ github.event.inputs.tests_release }}" >> $GITHUB_OUTPUT
          fi
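  # build: build and push the testing Docker image from the selected tests
  # release branch; the "test" job later pulls cloud-sdk-nightly tagged with
  # this run's ID, so the composite action presumably tags the image that way.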
  build:
    runs-on: ubuntu-latest
    needs: ["vars"]
    steps:
      - uses: actions/checkout@v3
        with:
          ref: ${{ needs.vars.outputs.tests_release }}
      - name: build and push Docker image
        uses: ./.github/actions/build-and-push-docker
        with:
          registry: tip-tip-wlan-cloud-docker-repo.jfrog.io
          registry_user: ${{ secrets.DOCKER_USER_NAME }}
          registry_password: ${{ secrets.DOCKER_USER_PASSWORD }}
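  # test: runs on a self-hosted lab runner; installs the tooling it needs
  # (JRE, AWS CLI, kubectl), points kubectl at the EKS cluster and then
  # executes the pytest suite in a per-run namespace via the run-tests
  # composite action.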
  test:
    runs-on: [self-hosted, small]
    needs: [build]
    timeout-minutes: 1440
    steps:
      - uses: actions/checkout@v3
      - name: install JRE
        run: |
          sudo apt-get update
          sudo apt-get install -y default-jre
      - name: install aws CLI tool
        run: |
          curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
          unzip awscliv2.zip
          sudo ./aws/install
      - name: get EKS access credentials
        run: aws eks update-kubeconfig --name ${{ env.AWS_EKS_NAME }}
      - name: install kubectl
        run: |
          # TODO WIFI-7839 revert to using stable when issue is resolved on AWS CLI side
          curl -s -LO "https://dl.k8s.io/release/v1.27.6/bin/linux/amd64/kubectl"
          sudo install -o root -g root -m 0755 kubectl /usr/local/bin/kubectl
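      # The timestamp recorded here bounds the --since-time window of the
      # failure-only log dumps further down.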
      - name: save time for logs gathering
        id: logs_start_time
        run: |
          echo "time=$(date --iso-8601=s)" >> $GITHUB_OUTPUT
      - name: tests env info
        run: |
          export NAMESPACE=manual-${{ github.run_id }}-${{ github.event.inputs.testbed }}
          echo "Namespace name - $NAMESPACE"
          echo "Link to related job in Kubernetes dashboard - https://k8s-dashboard.lab.wlan.tip.build/#/job/$NAMESPACE/testing?namespace=$NAMESPACE"
          echo "To get logs, open the link above and press the 'View logs' button in the top right corner"
          echo "To have the logs auto-update, choose that option from the dropdown menu behind the three dots in the top right corner of the log viewer"
      - name: run tests
        uses: ./.github/actions/run-tests
        with:
          namespace: "manual-${{ github.run_id }}-${{ github.event.inputs.testbed }}"
          testbed: "${{ github.event.inputs.testbed }}"
          marker_expression: "${{ github.event.inputs.marker_expression }}"
          configuration: "${{ secrets.LAB_INFO_JSON }}"
          testing_docker_image: tip-tip-wlan-cloud-docker-repo.jfrog.io/cloud-sdk-nightly:${{ github.run_id }}
          additional_args: "-o firmware=${{ github.event.inputs.firmware }} ${{ github.event.inputs.additional_args }}"
          allure_results_artifact_name: allure-results
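      # On failure, dump the logs of the OpenWiFi SDK deployments in the
      # openwifi-qa01 namespace, limited to output produced since the run
      # started.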
      - name: show gw logs
        if: failure()
        run: kubectl -n openwifi-qa01 logs deployment/owgw --since-time ${{ steps.logs_start_time.outputs.time }}
      - name: show fms logs
        if: failure()
        run: kubectl -n openwifi-qa01 logs deployment/owfms --since-time ${{ steps.logs_start_time.outputs.time }}
      - name: show prov logs
        if: failure()
        run: kubectl -n openwifi-qa01 logs deployment/owprov --since-time ${{ steps.logs_start_time.outputs.time }}
      - name: show analytics logs
        if: failure()
        run: kubectl -n openwifi-qa01 logs deployment/owanalytics --since-time ${{ steps.logs_start_time.outputs.time }}
      - name: show subscription (userportal) logs
        if: failure()
        run: kubectl -n openwifi-qa01 logs deployment/owsub --since-time ${{ steps.logs_start_time.outputs.time }}
      - name: show sec logs
        if: failure()
        run: kubectl -n openwifi-qa01 logs deployment/owsec --since-time ${{ steps.logs_start_time.outputs.time }}
      # necessary because `if:` conditionals in composite actions are currently not respected
      - name: get tests logs
        if: always()
        continue-on-error: true
        run: |
          podname=$(kubectl get pods -n manual-${{ github.run_id }}-${{ github.event.inputs.testbed }} --no-headers -o custom-columns=":metadata.name" -l job-name=testing | sed "s/pod\///")
          kubectl logs --timestamps -n manual-${{ github.run_id }}-${{ github.event.inputs.testbed }} $podname || true
      - name: delete namespace
        if: always()
        continue-on-error: true
        run: kubectl delete ns --ignore-not-found=true --wait manual-${{ github.run_id }}-${{ github.event.inputs.testbed }}
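  # report: always runs after "test"; downloads the Allure results, renders
  # the Allure report, uploads it as a workflow artifact and, if requested via
  # the upload_report input, pushes it to S3.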
  report:
    runs-on: ubuntu-latest
    needs: [ test ]
    if: always()
    steps:
      - name: checkout testing repo
        uses: actions/checkout@v3
      - uses: actions/download-artifact@v3
        with:
          name: allure-results
          path: allure-results
      - name: generate Allure report
        uses: ./.github/actions/generate-allure-report
        with:
          results_path: ./allure-results
      - name: upload Allure report as artifact
        uses: actions/upload-artifact@v3
        with:
          name: allure-report
          path: allure-report
      - name: upload to S3
        # github.event.inputs values are strings, so compare explicitly rather
        # than relying on the truthiness of "false"
        if: github.event.inputs.upload_report == 'true'
        uses: ./.github/actions/allure-report-to-s3
        with:
          test_type: manual
          testbed: "${{ github.event.inputs.testbed }}"
          report_path: allure-report
          s3_access_key_id: ${{ secrets.ALLURE_S3_ACCESS_KEY_ID }}
          s3_access_key_secret: ${{ secrets.ALLURE_S3_ACCESS_KEY_SECRET }}
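  # cleanup: always runs after "test"; the cleanup-docker composite action
  # presumably removes this run's testing image from the registry.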
  cleanup:
    needs: [test]
    runs-on: ubuntu-latest
    if: always()
    steps:
      - uses: actions/checkout@v3
      - name: cleanup Docker image
        uses: ./.github/actions/cleanup-docker
        with:
          registry_user: ${{ secrets.DOCKER_USER_NAME }}
          registry_password: ${{ secrets.DOCKER_USER_PASSWORD }}