Add edgecore_eap111 to sanity and fix testbed name in regression
* Added edgecore_eap111 to the sanity overview page

Signed-off-by: jitendracandela <[email protected]>

* Added edgecore_eap111 to sanity and a new schedule for nightly sanity

Signed-off-by: jitendracandela <[email protected]>

* Added edgecore_eap111 to the performance overview page

Signed-off-by: jitendracandela <[email protected]>

* Added edgecore_eap111 to performance and a new schedule for performance

Signed-off-by: jitendracandela <[email protected]>

* Fixed syntax error

Signed-off-by: jitendracandela <[email protected]>

* Added edgecore_eap111 to regression and a new schedule for regression

Signed-off-by: jitendracandela <[email protected]>

* Added proper testbed name as per the lab info file for regression

Signed-off-by: jitendracandela <[email protected]>

---------

Signed-off-by: jitendracandela <[email protected]>
jitendracandela authored Apr 1, 2024
1 parent 67dad64 commit cc4e1e7
Showing 4 changed files with 666 additions and 84 deletions.
4 changes: 2 additions & 2 deletions .allure-overview/overviews.json
@@ -1,7 +1,7 @@
{
"sanity": {
"OVERVIEW_TITLE": "'OpenWifi sanity results'",
"OVERVIEW_TESTBEDS": ["edgecore_oap101-6e","edgecore_eap101","cig_wf188n","cig_wf196","edgecore_eap102","edgecore_eap104","cig_wf186w","hfcl_ion4xe","yuncore_fap655","yuncore_ax820"]
"OVERVIEW_TESTBEDS": ["edgecore_oap101-6e","edgecore_eap101","cig_wf188n","cig_wf196","edgecore_eap102","edgecore_eap104","cig_wf186w","hfcl_ion4xe","yuncore_fap655","yuncore_ax820","edgecore_eap111"]
},
"interop": {
"OVERVIEW_TITLE": "'OpenWifi interop results'",
@@ -15,6 +15,6 @@

"performance": {
"OVERVIEW_TITLE": "'OpenWifi performance results'",
"OVERVIEW_TESTBEDS": ["edgecore_oap101-6e","edgecore_eap101","cig_wf188n","cig_wf196","edgecore_eap102","edgecore_eap104","cig_wf186w","hfcl_ion4xe","yuncore_fap655","yuncore_ax820"]
"OVERVIEW_TESTBEDS": ["edgecore_oap101-6e","edgecore_eap101","cig_wf188n","cig_wf196","edgecore_eap102","edgecore_eap104","cig_wf186w","hfcl_ion4xe","yuncore_fap655","yuncore_ax820","edgecore_eap111"]
}
}
199 changes: 187 additions & 12 deletions .github/workflows/performance.yml
@@ -22,7 +22,7 @@ on:
description: "revision of the Open Wifi Helm chart"
ap_models:
required: true
default: "cig_wf188n,cig_wf196,hfcl_ion4xe,yuncore_fap655,yuncore_ax820,edgecore_oap101-6e,edgecore_eap102,edgecore_eap101,edgecore_eap104,cig_wf186w"
default: "cig_wf188n,cig_wf196,hfcl_ion4xe,yuncore_fap655,yuncore_ax820,edgecore_oap101-6e,edgecore_eap102,edgecore_eap101,edgecore_eap104,cig_wf186w,edgecore_eap111"
description: "the AP models to test"
ap_version:
required: true
@@ -41,7 +41,7 @@ on:
default: ""
description: "Tests release branch to use (i.e. 'release/v2.8.0' or 'master'). If left empty, latest release branch is used"
schedule:
- cron: "30 18 * * 2-5"
- cron: "30 18 * * 1-4"

jobs:
# Set vars
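Note: the schedule change above moves the nightly trigger from "30 18 * * 2-5" (18:30 UTC, Tuesday through Friday) to "30 18 * * 1-4" (18:30 UTC, Monday through Thursday), matching the reshuffled per-day AP model slots further down. Annotated for readability only (the trailing comment is not part of the workflow file):

  - cron: "30 18 * * 1-4"   # minute 30, hour 18 UTC, any day of month, any month, weekdays Mon(1) through Thu(4)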
@@ -82,16 +82,14 @@ jobs:
echo "Current slot is $SLOT"
# choose AP models as per the day and slot if the scheduled run gets triggered else provided.
if [[ "$DOW" -eq "2" ]]; then
AP_MODELS="${{ github.event.inputs.ap_models || 'edgecore_oap101-6e,cig_wf188n,hfcl_ion4xe' }}"
if [[ "$DOW" -eq "1" ]]; then
AP_MODELS="${{ github.event.inputs.ap_models || 'cig_wf188n,edgecore_oap101-6e,hfcl_ion4xe' }}"
elif [[ "$DOW" -eq "2" ]]; then
AP_MODELS="${{ github.event.inputs.ap_models || 'edgecore_eap101,yuncore_fap655,yuncore_ax820' }}"
elif [[ "$DOW" -eq "3" ]]; then
AP_MODELS="${{ github.event.inputs.ap_models || 'yuncore_fap655,yuncore_ax820' }}"
AP_MODELS="${{ github.event.inputs.ap_models || 'edgecore_eap104,cig_wf186w,edgecore_eap111' }}"
elif [[ "$DOW" -eq "4" ]]; then
AP_MODELS="${{ github.event.inputs.ap_models || 'edgecore_eap102,cig_wf196,edgecore_eap101' }}"
elif [[ "$DOW" -eq "5" ]]; then
AP_MODELS="${{ github.event.inputs.ap_models || 'edgecore_eap104,cig_wf186w' }}"
else
AP_MODELS="${{ github.event.inputs.ap_models || 'cig_wf188n,cig_wf196,hfcl_ion4xe,yuncore_fap655,yuncore_ax820,edgecore_oap101-6e,edgecore_eap102,edgecore_eap101,edgecore_eap104,cig_wf186w' }}"
AP_MODELS="${{ github.event.inputs.ap_models || 'edgecore_eap102,cig_wf196' }}"
fi
echo "List of AP models to test - $AP_MODELS"
AP_MODELS=$(echo $AP_MODELS | sed "s/,/\",\"/g" | sed 's/^/[\"/g' | sed 's/$/\"]/g')
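Note: the last line of the hunk above converts the comma-separated AP model list into a JSON-style array string, so later jobs can gate themselves with contains(fromJSON(...)), as in the if: conditions below. A standalone sketch of the same sed chain, run on an arbitrary sample list for illustration only:

  AP_MODELS="edgecore_eap104,cig_wf186w,edgecore_eap111"
  AP_MODELS=$(echo $AP_MODELS | sed "s/,/\",\"/g" | sed 's/^/[\"/g' | sed 's/$/\"]/g')
  echo "$AP_MODELS"   # prints ["edgecore_eap104","cig_wf186w","edgecore_eap111"]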
@@ -1896,12 +1894,189 @@ jobs:
if: failure()
run: kubectl -n openwifi-${{ needs.vars.outputs.existing_controller }} logs deployment/owsec

test-edgecore-eap111:
needs: [ "vars", "build" ]
runs-on: [ self-hosted, small ]
timeout-minutes: 1440
if: "!cancelled() && contains(fromJSON(needs.vars.outputs.ap_models), 'edgecore_eap111')"
env:
AP_MODEL: edgecore_eap111
steps:
- name: Set AP model output
id: ap_model
run: |
echo "model=${AP_MODEL}" >> $GITHUB_OUTPUT
- uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.8"



# TODO WIFI-7839 delete when issue is resolved on AWS CLI side
- name: install kubectl
run: |
curl -s -LO "https://dl.k8s.io/release/v1.27.6/bin/linux/amd64/kubectl"
sudo install -o root -g root -m 0755 kubectl /usr/local/bin/kubectl
- name: install aws CLI tool
run: |
curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
unzip awscliv2.zip
sudo ./aws/install
- name: get EKS access credentials
run: aws eks update-kubeconfig --name ${{ env.AWS_EKS_NAME }}

- name: prepare namespace name
id: namespace
run: |
NAMESPACE="performance-${{ github.run_id }}-$(echo ${{ steps.ap_model.outputs.model }} | tr '[:upper:]' '[:lower:]' | tr '_' '-')"
echo "name=${NAMESPACE}" >> $GITHUB_OUTPUT
- name: prepare configuration
run: |
cat << EOF > lab_info.json
${{ secrets.LAB_INFO_JSON }}
EOF
- name: run tests dataplane_tests
uses: ./.github/actions/run-tests
if: "!cancelled() && contains(fromJSON(needs.vars.outputs.marker_expressions), 'dataplane_tests')"
with:
namespace: ${{ steps.namespace.outputs.name }}-dtt
testbed: basic-6
marker_expression: "performance and dataplane_tests"
configuration_file: "./lab_info.json"
testing_docker_image: tip-tip-wlan-cloud-docker-repo.jfrog.io/cloud-sdk-nightly:${{ github.run_id }}
additional_args: '-o firmware="${{ needs.vars.outputs.ap_version }}"'
allure_results_artifact_name: "allure-results-${{ steps.ap_model.outputs.model }}-dataplane_tests"
dns_records_to_resolve: "sec-${{ needs.vars.outputs.existing_controller }}.cicd.lab.wlan.tip.build gw-${{ needs.vars.outputs.existing_controller }}.cicd.lab.wlan.tip.build fms-${{ needs.vars.outputs.existing_controller }}.cicd.lab.wlan.tip.build"

# necessary because if conditionals in composite actions are currently not respected
- name: get tests logs
if: always()
continue-on-error: true
run: |
podname=$(kubectl get pods -n ${{ steps.namespace.outputs.name }}-dtt --no-headers -o custom-columns=":metadata.name" -l job-name=testing | sed "s/pod\///")
kubectl logs --timestamps -n ${{ steps.namespace.outputs.name }}-dtt $podname || true
- name: delete namespace
if: always()
continue-on-error: true
run: kubectl delete ns --ignore-not-found=true --wait ${{ steps.namespace.outputs.name }}-dtt

- name: run tests peak_throughput_tests
uses: ./.github/actions/run-tests
if: "!cancelled() && contains(fromJSON(needs.vars.outputs.marker_expressions), 'peak_throughput_tests')"
with:
namespace: ${{ steps.namespace.outputs.name }}-ssdbt
testbed: basic-6
marker_expression: "performance and peak_throughput_tests"
configuration_file: "./lab_info.json"
testing_docker_image: tip-tip-wlan-cloud-docker-repo.jfrog.io/cloud-sdk-nightly:${{ github.run_id }}
additional_args: '-o firmware="${{ needs.vars.outputs.ap_version }}"'
allure_results_artifact_name: "allure-results-${{ steps.ap_model.outputs.model }}-peak_throughput_tests"
dns_records_to_resolve: "sec-${{ needs.vars.outputs.existing_controller }}.cicd.lab.wlan.tip.build gw-${{ needs.vars.outputs.existing_controller }}.cicd.lab.wlan.tip.build fms-${{ needs.vars.outputs.existing_controller }}.cicd.lab.wlan.tip.build"

# necessary because if conditionals in composite actions are currently not respected
- name: get tests logs
if: always()
continue-on-error: true
run: |
podname=$(kubectl get pods -n ${{ steps.namespace.outputs.name }}-ssdbt --no-headers -o custom-columns=":metadata.name" -l job-name=testing | sed "s/pod\///")
kubectl logs --timestamps -n ${{ steps.namespace.outputs.name }}-ssdbt $podname || true
- name: delete namespace
if: always()
continue-on-error: true
run: kubectl delete ns --ignore-not-found=true --wait ${{ steps.namespace.outputs.name }}-ssdbt

- name: run tests client_scale_tests
uses: ./.github/actions/run-tests
if: "!cancelled() && contains(fromJSON(needs.vars.outputs.marker_expressions), 'client_scale_tests')"
with:
namespace: ${{ steps.namespace.outputs.name }}-wct
testbed: basic-6
marker_expression: "performance and client_scale_tests"
configuration_file: "./lab_info.json"
testing_docker_image: tip-tip-wlan-cloud-docker-repo.jfrog.io/cloud-sdk-nightly:${{ github.run_id }}
additional_args: '-o firmware="${{ needs.vars.outputs.ap_version }}"'
allure_results_artifact_name: "allure-results-${{ steps.ap_model.outputs.model }}-client_scale_tests"
dns_records_to_resolve: "sec-${{ needs.vars.outputs.existing_controller }}.cicd.lab.wlan.tip.build gw-${{ needs.vars.outputs.existing_controller }}.cicd.lab.wlan.tip.build fms-${{ needs.vars.outputs.existing_controller }}.cicd.lab.wlan.tip.build"

# necessary because if conditionals in composite actions are currently not respected
- name: get tests logs
if: always()
continue-on-error: true
run: |
podname=$(kubectl get pods -n ${{ steps.namespace.outputs.name }}-wct --no-headers -o custom-columns=":metadata.name" -l job-name=testing | sed "s/pod\///")
kubectl logs --timestamps -n ${{ steps.namespace.outputs.name }}-wct $podname || true
- name: delete namespace
if: always()
continue-on-error: true
run: kubectl delete ns --ignore-not-found=true --wait ${{ steps.namespace.outputs.name }}-wct

- name: run tests dual_band_tests
uses: ./.github/actions/run-tests
if: "!cancelled() && contains(fromJSON(needs.vars.outputs.marker_expressions), 'dual_band_tests')"
with:
namespace: ${{ steps.namespace.outputs.name }}-wct
testbed: basic-6
marker_expression: "performance and dual_band_tests"
configuration_file: "./lab_info.json"
testing_docker_image: tip-tip-wlan-cloud-docker-repo.jfrog.io/cloud-sdk-nightly:${{ github.run_id }}
additional_args: '-o firmware="${{ needs.vars.outputs.ap_version }}"'
allure_results_artifact_name: "allure-results-${{ steps.ap_model.outputs.model }}-dual_band_tests"
dns_records_to_resolve: "sec-${{ needs.vars.outputs.existing_controller }}.cicd.lab.wlan.tip.build gw-${{ needs.vars.outputs.existing_controller }}.cicd.lab.wlan.tip.build fms-${{ needs.vars.outputs.existing_controller }}.cicd.lab.wlan.tip.build"

# necessary because if conditionals in composite actions are currently not respected
- name: get tests logs
if: always()
continue-on-error: true
run: |
podname=$(kubectl get pods -n ${{ steps.namespace.outputs.name }}-wct --no-headers -o custom-columns=":metadata.name" -l job-name=testing | sed "s/pod\///")
kubectl logs --timestamps -n ${{ steps.namespace.outputs.name }}-wct $podname || true
- name: delete namespace
if: always()
continue-on-error: true
run: kubectl delete ns --ignore-not-found=true --wait ${{ steps.namespace.outputs.name }}-wct

- name: show gw logs
if: failure()
run: kubectl -n openwifi-${{ needs.vars.outputs.existing_controller }} logs deployment/owgw

- name: show fms logs
if: failure()
run: kubectl -n openwifi-${{ needs.vars.outputs.existing_controller }} logs deployment/owfms

- name: show prov logs
if: failure()
run: kubectl -n openwifi-${{ needs.vars.outputs.existing_controller }} logs deployment/owprov

- name: show analytics logs
if: failure()
run: kubectl -n openwifi-${{ needs.vars.outputs.existing_controller }} logs deployment/owanalytics

- name: show subscription (userportal) logs
if: failure()
run: kubectl -n openwifi-${{ needs.vars.outputs.existing_controller }} logs deployment/owsub

- name: show sec logs
if: failure()
run: kubectl -n openwifi-${{ needs.vars.outputs.existing_controller }} logs deployment/owsec


# Save reports
report:
if: "!cancelled()"
runs-on: ubuntu-latest
needs: [vars, test-cig-wf188n, test-edgecore-oap101-6e, test-cig-wf196, test-edgecore-eap102, test-hfcl-ion4xe, test-edgecore-eap101, test-yuncore-fap655, test-edgecore-eap104, test-yuncore-ax820, test-cig-wf186w]
needs: [vars, test-cig-wf188n, test-edgecore-oap101-6e, test-cig-wf196, test-edgecore-eap102, test-hfcl-ion4xe, test-edgecore-eap101, test-yuncore-fap655, test-edgecore-eap104, test-yuncore-ax820, test-cig-wf186w, test-edgecore-eap111]
strategy:
fail-fast: false
matrix:
@@ -1976,7 +2151,7 @@ jobs:

# Cleanup
cleanup:
needs: [test-cig-wf188n, test-edgecore-oap101-6e, test-cig-wf196, test-edgecore-eap102, test-hfcl-ion4xe, test-edgecore-eap101, test-yuncore-fap655, test-edgecore-eap104, test-yuncore-ax820, test-cig-wf186w]
needs: [test-cig-wf188n, test-edgecore-oap101-6e, test-cig-wf196, test-edgecore-eap102, test-hfcl-ion4xe, test-edgecore-eap101, test-yuncore-fap655, test-edgecore-eap104, test-yuncore-ax820, test-cig-wf186w, test-edgecore-eap111]
runs-on: ubuntu-latest
if: always()
steps: