fix: added workflow for escu tests (#95)
1 parent 31f938c · commit 97175ef
Showing 2 changed files with 257 additions and 1 deletion.
@@ -0,0 +1,256 @@
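# Reusable workflow (invoked via workflow_call) that runs ESCU
# (Enterprise Security Content Update) detection tests for a given TA
# build on Argo Workflows, against the latest supported Splunk version.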
name: escu-manual-workflow
on:
  workflow_call:
    inputs:
      TA_BUILD:
        description: 'TA build number (e.g. s3://ta-production-artifacts/ta-apps/{ta-name}-$build_number$.spl)'
        required: true
        type: string
      TESTS:
        description: 'Comma-separated list of detections to run (e.g. detection1,detection2,detection3)'
        required: true
        type: string
    secrets:
      AWS_ACCESS_KEY_ID:
        description: AWS access key id
        required: true
      AWS_DEFAULT_REGION:
        description: AWS default region
        required: true
      AWS_SECRET_ACCESS_KEY:
        description: AWS secret access key
        required: true
      OTHER_TA_REQUIRED_CONFIGS:
        description: other required configs
        required: true

jobs:
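  # meta: resolves the supported-Splunk test matrix; only the latest
  # supported Splunk version is consumed by run-escu-tests below.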
  meta:
    runs-on: ubuntu-latest
    outputs:
      matrix_supportedSplunk: ${{ steps.matrix.outputs.latestSplunk }}
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          submodules: false
          persist-credentials: false
      - name: matrix
        id: matrix
        uses: splunk/[email protected]

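  # test-inventory: lists the test types present under tests/ and exposes
  # one boolean output per directory; the escu output gates run-escu-tests.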
  test-inventory:
    runs-on: ubuntu-latest
    outputs:
      escu: ${{ steps.testset.outputs.escu }}
    steps:
      - uses: actions/checkout@v3
      - id: testset
        name: testsets
        run: |
          find tests -type d -maxdepth 1 -mindepth 1 | sed 's|^tests/||g' | while read -r TESTSET; do echo "::set-output name=$TESTSET::true"; echo "$TESTSET::true"; done
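  # setup: derives the Argo connection settings, the add-on name (from
  # package/default/app.conf), a per-run job name, and the S3 path of the
  # TA build under test.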
  setup:
    runs-on: ubuntu-latest
    container:
      image: ghcr.io/splunk/workflow-engine-base:2.0.3
    outputs:
      argo-server: ${{ steps.test-setup.outputs.argo-server }}
      argo-http1: ${{ steps.test-setup.outputs.argo-http1 }}
      argo-secure: ${{ steps.test-setup.outputs.argo-secure }}
      argo-href: ""
      argo-base-href: ${{ steps.test-setup.outputs.argo-base-href }}
      argo-workflow-tmpl-name: ${{ steps.test-setup.outputs.argo-workflow-tmpl-name }}
      argo-namespace: ${{ steps.test-setup.outputs.argo-namespace }}
      addon-name: ${{ steps.test-setup.outputs.addon-name }}
      job-name: ${{ steps.test-setup.outputs.job-name }}
      labels: ${{ steps.test-setup.outputs.labels }}
      addon-upload-path: ${{ steps.test-setup.outputs.addon-upload-path }}
      directory-path: ${{ steps.test-setup.outputs.directory-path }}
      s3-bucket: ${{ steps.test-setup.outputs.s3-bucket }}
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: recursive
      - name: setup for test
        id: test-setup
        shell: bash
        run: |
          echo "::set-output name=argo-server::argo.wfe.splgdi.com:443"
          echo "::set-output name=argo-http1::true"
          echo "::set-output name=argo-secure::true"
          echo "::set-output name=argo-base-href::\'\'"
          echo "::set-output name=argo-namespace::workflows"
          echo "::set-output name=argo-workflow-tmpl-name::ta-workflow"
          ADDON_NAME=$(crudini --get package/default/app.conf id name | tr '[:lower:]' '[:upper:]')
          if [[ -n $(echo "${ADDON_NAME}" | awk -F 'SPLUNK_TA_' '{print $2}') ]];
          then
              ADDON_NAME=$(echo "${ADDON_NAME}" | awk -F 'SPLUNK_TA_' '{print $2}')
          elif [[ -n $(echo "${ADDON_NAME}" | awk -F '_FOR_SPLUNK' '{print $1}') ]];
          then
              ADDON_NAME=$(echo "${ADDON_NAME}" | awk -F '_FOR_SPLUNK' '{print $1}')
          fi
          echo "::set-output name=addon-name::\"$ADDON_NAME\""
          JOB_NAME=$(echo "$ADDON_NAME" | tail -c 16)-$(echo "${GITHUB_SHA}" | tail -c 8)-TEST-TYPE-${GITHUB_RUN_ID}
          JOB_NAME=${JOB_NAME//[_.]/-}
          echo "::set-output name=job-name::wf-$JOB_NAME"
          LABELS="addon-name=${ADDON_NAME}"
          echo "::set-output name=labels::$LABELS"
          ADDON_BUILD_NAME=$(crudini --get package/default/app.conf id name)
          ADDON_UPLOAD_PATH="s3://ta-production-artifacts/ta-apps/${ADDON_BUILD_NAME}-${{ inputs.TA_BUILD }}.spl"
          echo "::set-output name=addon-upload-path::$ADDON_UPLOAD_PATH"
          echo "::set-output name=directory-path::/tmp"
          echo "::set-output name=s3-bucket::ta-production-artifacts"
  run-escu-tests:
    if: ${{ needs.test-inventory.outputs.escu == 'true' }}
    needs:
      - test-inventory
      - setup
      - meta
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        splunk:
          - ${{ fromJson(needs.meta.outputs.matrix_supportedSplunk) }}
    container:
      image: ghcr.io/splunk/workflow-engine-base:2.0.3
    env:
      ARGO_SERVER: ${{ needs.setup.outputs.argo-server }}
      ARGO_HTTP1: ${{ needs.setup.outputs.argo-http1 }}
      ARGO_SECURE: ${{ needs.setup.outputs.argo-secure }}
      ARGO_BASE_HREF: ${{ needs.setup.outputs.argo-href }}
      ARGO_NAMESPACE: ${{ needs.setup.outputs.argo-namespace }}
      TEST_TYPE: "escu"
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: recursive
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
      - name: Read secrets from AWS Secrets Manager into environment variables
        id: get-argo-token
        run: |
          ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString')
          echo "::set-output name=argo-token::$ARGO_TOKEN"
      - name: create job name
        id: create-job-name
        shell: bash
        run: |
          RANDOM_STRING=$(head -3 /dev/urandom | tr -cd '[:lower:]' | cut -c -4)
          JOB_NAME=${{ needs.setup.outputs.job-name }}-${RANDOM_STRING}
          JOB_NAME=${JOB_NAME//TEST-TYPE/${{ env.TEST_TYPE }}}
          JOB_NAME=${JOB_NAME//[_.]/-}
          JOB_NAME=$(echo "$JOB_NAME" | tr '[:upper:]' '[:lower:]')
          echo "::set-output name=job-name::$JOB_NAME"
      - name: run-tests
        id: run-tests
        env:
          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
        uses: splunk/wfe-test-runner-action@master
        with:
          splunk: ${{ matrix.splunk.version }}
          test-type: ${{ env.TEST_TYPE }}
          test-args: "-tf ${{ inputs.TESTS }}"
          job-name: ${{ steps.create-job-name.outputs.job-name }}
          labels: ${{ needs.setup.outputs.labels }}
          workflow-tmpl-name: ${{ needs.setup.outputs.argo-workflow-tmpl-name }}
          workflow-template-ns: ${{ needs.setup.outputs.argo-namespace }}
          delay-destroy: "No"
          addon-url: ${{ needs.setup.outputs.addon-upload-path }}
          addon-name: ${{ needs.setup.outputs.addon-name }}
          vendor-version: ${{ matrix.vendor-version.image }}
          sc4s-version: "No"
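      # If the Argo workflow's pod was deleted mid-run (e.g. node eviction),
      # the next two steps detect that and resubmit the workflow once.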
      - name: Check if pod was deleted
        id: is-pod-deleted
        if: always()
        shell: bash
        env:
          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
        run: |
          set -o xtrace
          if argo watch ${{ steps.run-tests.outputs.workflow-name }} -n workflows | grep "pod deleted"; then
            echo "::set-output name=retry-workflow::true"
          fi
      - name: Retrying workflow
        id: retry-wf
        shell: bash
        env:
          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
        if: always()
        run: |
          set -o xtrace
          set +e
          if [[ "${{ steps.is-pod-deleted.outputs.retry-workflow }}" == "true" ]]
          then
            WORKFLOW_NAME=$(argo resubmit -v -o json -n workflows "${{ steps.run-tests.outputs.workflow-name }}" | jq -r .metadata.name)
            echo "::set-output name=workflow-name::$WORKFLOW_NAME"
            argo logs --follow "${WORKFLOW_NAME}" -n workflows || echo "... there was an error fetching logs, the workflow is still in progress. please wait for the workflow to complete ..."
          else
            echo "No retry required"
            argo wait "${{ steps.run-tests.outputs.workflow-name }}" -n workflows
            argo watch "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | grep "test-addon"
          fi
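      # Poll the (possibly resubmitted) workflow until it leaves the
      # Running/Pending phases, so the job does not proceed before the
      # workflow has actually finished.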
      - name: check if workflow completed
        env:
          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
        if: always()
        shell: bash
        run: |
          set +e
          # shellcheck disable=SC2157
          if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then
            WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }}
          else
            WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}"
          fi
          ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase')
          echo "Status of workflow:" "$ARGO_STATUS"
          while [ "$ARGO_STATUS" == "Running" ] || [ "$ARGO_STATUS" == "Pending" ]
          do
            echo "... argo Workflow ${WORKFLOW_NAME} is running, waiting for it to complete."
            argo wait "${WORKFLOW_NAME}" -n workflows || true
            ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase')
          done
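      # The Argo workflow uploads test results and logs to S3; fetch them
      # locally so they can be attached to the run as artifacts.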
      - name: pull artifacts from s3 bucket
        if: always()
        run: |
          echo "pulling artifacts"
          aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/
          tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }}
      - name: pull logs from s3 bucket
        if: always()
        run: |
          # shellcheck disable=SC2157
          if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then
            WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }}
          else
            WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}"
          fi
          echo "pulling logs"
          mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs
          aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive
      - uses: actions/upload-artifact@v3
        if: always()
        with:
          name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests artifacts
          path: |
            ${{ needs.setup.outputs.directory-path }}/test-results
      - uses: actions/upload-artifact@v3
        if: always()
        with:
          name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests logs
          path: |
            ${{ needs.setup.outputs.directory-path }}/argo-logs
      - name: Test Report
        uses: dorny/test-reporter@v1
        if: always()
        with:
          name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} test report
          path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml"
          reporter: java-junit
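Since this workflow is triggered only via workflow_call, it has to be invoked from a caller workflow in a repository that holds the AWS secrets. A minimal sketch of such a caller follows; the owner/repo path, ref, and filename in `uses:` are assumptions, since the commit page does not show where the file lives:

name: escu-manual-run
on:
  workflow_dispatch:
    inputs:
      TA_BUILD:
        description: 'TA build number'
        required: true
        type: string
      TESTS:
        description: 'Comma-separated list of detections to run'
        required: true
        type: string
jobs:
  call-escu:
    # Placeholder path: replace OWNER/REPO and the ref with the real location.
    uses: OWNER/REPO/.github/workflows/escu-manual-workflow.yml@main
    with:
      TA_BUILD: ${{ inputs.TA_BUILD }}
      TESTS: ${{ inputs.TESTS }}
    secrets:
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      OTHER_TA_REQUIRED_CONFIGS: ${{ secrets.OTHER_TA_REQUIRED_CONFIGS }}

The caller simply forwards its own workflow_dispatch inputs and repository secrets straight through to the reusable workflow.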