Upload unit test reports to DataDog (#1059)
Unit tests produce test results in JUnit XML format. These files can be
ingested by DataDog to power its CI analysis, unlocking flaky test
detection, regression tracking, per-platform analysis, and more!
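
For reference, here is a minimal sketch of a JUnit-style report, assuming the gtest --gtest_output=xml flavor; the suite and case names are invented for illustration:

# A minimal, hand-written JUnit-style report for illustration only;
# real files are produced by the test binaries, not by this snippet.
cat > example_unittests.xml <<'EOF'
<testsuites name="AllTests" tests="2" failures="1">
  <testsuite name="ExampleSuite" tests="2" failures="1">
    <testcase name="PassingCase" classname="ExampleSuite" time="0.05" />
    <testcase name="FailingCase" classname="ExampleSuite" time="0.05">
      <failure message="expected true, got false" />
    </testcase>
  </testsuite>
</testsuites>
EOF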

The upload must happen in a separately triggered workflow, since that
workflow has the access needed to upload the reports; in a workflow
running for a forked PR, this access is restricted.
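
A minimal sketch of the upload step such a workflow might run, assuming DataDog's datadog-ci CLI; the actual upload workflow is not part of this commit, and the service name, tags, and paths are illustrative:

# Hypothetical upload step. It runs in a separately triggered workflow
# (e.g. on workflow_run) so the DataDog API key secret is available even
# when the test results came from a forked PR.
export DD_API_KEY="<repository secret>"
datadog-ci junit upload \
  --service cobalt \
  --tags "platform:linux-x64x11" --tags "os:linux" \
  unit-test-results/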

Initially, reports from unit tests run on-host on linux, evergreen, and
win32 will be uploaded.

b/290997541
oxve committed Aug 1, 2023
1 parent 193dca5 commit 3865d14
Showing 3 changed files with 21 additions and 35 deletions.
34 changes: 20 additions & 14 deletions .github/actions/on_host_test/action.yaml
@@ -60,12 +60,15 @@ runs:
       run: |
         echo "PYTHONPATH=$GITHUB_WORKSPACE" >> $GITHUB_ENV
         echo "TEST_RESULTS_DIR=${GITHUB_WORKSPACE}/unit-test-results" >> $GITHUB_ENV
+        echo "XML_FILES_DIR=${GITHUB_WORKSPACE}/unit-test-results/${{ matrix.platform }}/${{ matrix.shard }}" >> $GITHUB_ENV
         echo "COVERAGE_DIR=${GITHUB_WORKSPACE}/coverage" >> $GITHUB_ENV
-        echo "TEST_REPORT_FILE=${GITHUB_WORKSPACE}/${{matrix.platform}}-${{matrix.shard}}" >> $GITHUB_ENV
     - name: Run Tests
+      id: run-tests
       shell: bash
       run: |
         set -x
+        test_type=""
         # Starboard toolchains are downloaded to a different dir on github. Create a symlink to reassure our tooling that everything is fine.
         if [ -d /root/starboard-toolchains ]; then
           ln -s /root/starboard-toolchains /github/home/starboard-toolchains
@@ -85,28 +88,31 @@ runs:
         elif [[ "${{matrix.shard}}" == 'evergreen-as-blackbox' ]]; then
           xvfb-run -a --server-args="-screen 0 1920x1080x24i +render +extension GLX -noreset" python3 $GITHUB_WORKSPACE/cobalt/black_box_tests/black_box_tests.py --platform ${{matrix.target_platform}} --config ${{matrix.config}} ${loader_args} --test_set evergreen
         elif [[ "${{matrix.shard}}" == 'coverage' ]]; then
-          xvfb-run -a --server-args="-screen 0 1920x1080x24i +render +extension GLX -noreset" python3 ${GITHUB_WORKSPACE}/starboard/tools/testing/test_runner.py --platform ${{matrix.target_platform}} --config ${{matrix.config}} -r ${loader_args} --xml_output_dir=${TEST_RESULTS_DIR} --coverage_dir=${COVERAGE_DIR} --coverage_report
+          xvfb-run -a --server-args="-screen 0 1920x1080x24i +render +extension GLX -noreset" python3 ${GITHUB_WORKSPACE}/starboard/tools/testing/test_runner.py --platform ${{matrix.target_platform}} --config ${{matrix.config}} -r ${loader_args} --xml_output_dir=${XML_FILES_DIR} --coverage_dir=${COVERAGE_DIR} --coverage_report
         else
+          test_type=unit_tests
           if [[ "${{inputs.os}}" == 'windows' ]]; then
-            python3 ${GITHUB_WORKSPACE}/starboard/tools/testing/test_runner.py --platform ${{matrix.target_platform}} --config ${{matrix.config}} -s ${{matrix.shard}} -r
+            python3 ${GITHUB_WORKSPACE}/starboard/tools/testing/test_runner.py --platform ${{matrix.target_platform}} --config ${{matrix.config}} -s ${{matrix.shard}} -r --xml_output_dir=${XML_FILES_DIR}
           else
-            xvfb-run -a --server-args="-screen 0 1920x1080x24i +render +extension GLX -noreset" python3 ${GITHUB_WORKSPACE}/starboard/tools/testing/test_runner.py --platform ${{matrix.target_platform}} --config ${{matrix.config}} -s ${{matrix.shard}} -r ${loader_args} --xml_output_dir=${TEST_RESULTS_DIR}
+            xvfb-run -a --server-args="-screen 0 1920x1080x24i +render +extension GLX -noreset" python3 ${GITHUB_WORKSPACE}/starboard/tools/testing/test_runner.py --platform ${{matrix.target_platform}} --config ${{matrix.config}} -s ${{matrix.shard}} -r ${loader_args} --xml_output_dir=${XML_FILES_DIR}
           fi
         fi
-    - name: Process unit test results
-      if: failure()
+        echo "test_type=$test_type" >> $GITHUB_OUTPUT
+    - name: Populate TAGS for unit test report
+      if: ${{ steps.run-tests.outputs.test_type == 'unit_tests' }}
       shell: bash
       run: |
         set -x
-        echo "Saving unit test report to ${TEST_REPORT_FILE}"
-        python3 ${GITHUB_WORKSPACE}/starboard/tools/testing/test_report_parser.py ${TEST_RESULTS_DIR} > ${TEST_REPORT_FILE}
-    - name: Upload unit test report
+        # Set tags for test differentiation.
+        tags="platform:${{ matrix.platform }}"
+        tags="${tags},os:${{ inputs.os }}"
+        echo $tags > ${TEST_RESULTS_DIR}/${{ matrix.platform }}/TAGS
+    - name: Archive unit test results
       uses: actions/upload-artifact@v3
-      if: failure()
+      if: ${{ steps.run-tests.outputs.test_type == 'unit_tests' }}
       with:
-        name: unit-test-reports
-        path: ${{env.TEST_REPORT_FILE}}
-    - name: Upload coverage html report
+        name: unit-test-results
+        path: ${{env.TEST_RESULTS_DIR}}/
+    - name: Archive coverage html report
       if: success() && matrix.shard == 'coverage'
       uses: actions/upload-artifact@v3
       with:
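
Taken together, the action now archives a single unit-test-results artifact whose layout might look like the sketch below; the platform, shard, and file names are illustrative:

# Illustrative artifact layout after this change:
#   unit-test-results/                 <- TEST_RESULTS_DIR, archived whole
#     linux-x64x11/
#       TAGS                           <- "platform:linux-x64x11,os:linux"
#       0/                             <- XML_FILES_DIR for shard 0
#         base_unittests.xml
#       1/
#         net_unittests.xml
find unit-test-results -type f   # would list every report plus the TAGS files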
20 changes: 0 additions & 20 deletions .github/workflows/main.yaml
@@ -320,23 +320,3 @@ jobs:
         uses: ./.github/actions/on_host_test
         with:
           os: linux
-
-  # Gets unit test report from on host tests and prints it.
-  on-host-unit-test-report:
-    needs: [on-host-test]
-    permissions: {}
-    if: failure()
-    runs-on: ubuntu-latest
-    steps:
-      - name: Collect Unit Test Reports
-        uses: actions/download-artifact@v3
-        with:
-          name: unit-test-reports
-          path: unit-test-reports
-      - name: Print Unit Test Reports
-        run: |
-          for filename in ${GITHUB_WORKSPACE}/unit-test-reports/*; do
-            basename $filename
-            cat $filename
-            echo
-          done
2 changes: 1 addition & 1 deletion starboard/tools/testing/test_runner.py
@@ -477,7 +477,7 @@ def MakeLauncher():
       logging.info(("Xml results for this test will "
                     "be logged to '%s'."), test_result_xml_path)
     elif self.xml_output_dir:
-      xml_output_subdir = os.path.join(self.xml_output_dir, target_name)
+      xml_output_subdir = os.path.join(self.xml_output_dir)
       try:
         os.makedirs(xml_output_subdir)
       except OSError as ose:
