diff --git a/.github/workflows/fedora-copr-build.yml b/.github/workflows/fedora-copr-build.yml index b1e75b45..4a9cf819 100644 --- a/.github/workflows/fedora-copr-build.yml +++ b/.github/workflows/fedora-copr-build.yml @@ -129,7 +129,7 @@ jobs: --description "`cat project-description.md`" \ --unlisted-on-hp on \ --enable-net on \ - --runtime-repo-dependency "https://download.copr.fedorainfracloud.org/results/%40fedora-llvm-team/llvm-compat-packages/fedora-\$releasever-\$basearch" \ + --runtime-repo-dependency "https://download.copr.fedorainfracloud.org/results/%40fedora-llvm-team/llvm-compat-packages/\$distname-\$releasever-\$basearch" \ --multilib on \ --appstream off \ --delete-after-days 32 \ diff --git a/.github/workflows/generate-snapshot-tarballs.yml b/.github/workflows/generate-snapshot-tarballs.yml deleted file mode 100644 index 412d7201..00000000 --- a/.github/workflows/generate-snapshot-tarballs.yml +++ /dev/null @@ -1,148 +0,0 @@ -name: "Generate LLVM snapshot tarballs" - -# PURPOSE: -# -# We want to provide LLVM snapshot packagers and distributors with *daily* -# source tarballs that are easy to consume. Typically, packagers have to clone -# the whole LLVM monorepo themselves and run the "git archive" command to -# generate source tarballs for each LLVM component. Those tarballs are the -# input to the packaging system (e.g. DEB, RPM, etc.). With this workflow we -# can provide the daily source tarballs to the community similar to the source -# tarballs of regular releases. Everything a packager needs to know is the -# current date (YYYYMMDD) and go ahead and download the LLVM component of -# choice, i.e.: -# -# https://github.com/fedora-llvm-team/llvm-snapshots/releases/download/source-snapshot/clang-tools-extra-20210417.src.tar.xz -# -# Notice the absence of the LLVM version. To get it, a packager can download -# -# https://github.com/fedora-llvm-team/llvm-snapshots/releases/download/source-snapshot/llvm-release-20210417.txt -# -# To get the git revision standalone: -# -# https://github.com/fedora-llvm-team/llvm-snapshots/releases/download/source-snapshot/llvm-git-revision-20210417.txt -# -# The subtle benefit of this naming convention is that you don't need to know -# the LLVM version before downloading the source tarball. I mean, how could -# you know the LLVM version of a daily snapshot upfront? In fact, the source -# tarball for "clang-tools-extra" contains no version information whatsoever -# and yet it requires a special LLVM version which is why we provide the -# "llvm-release-.txt" files. -# -# WHAT: -# -# At 00:00 in the morning, this workflow creates source tarballs for the -# latest stable commit of all LLVM components of the current "main" branch and -# uploads them as assets to a pre-release called "source-snapshot". (A -# pre-release won't show up on the github repositories front-page.) The assets -# that are seven days or older will be deleted on each run. If the workflow -# runs twice a day, the old assets of the day will also be deleted. If the -# "source-snapshot" release doesn't exist, it will be created automatically. 
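A minimal sketch of how a packager might consume these daily assets, assuming only the release layout described in the comment above; the date, component name, and local file names below are illustrative, not taken from the workflow itself:

```python
#!/bin/env python3
# Illustrative sketch: fetch a daily source tarball plus its matching version
# files using the YYYYMMDD naming convention described above. The concrete
# date and component are assumptions for this example.
import urllib.request

yyyymmdd = "20210417"            # the only thing a packager needs to know
component = "clang-tools-extra"  # any LLVM component name
base = ("https://github.com/fedora-llvm-team/llvm-snapshots/"
        "releases/download/source-snapshot")

# LLVM version and git revision of that day's snapshot
version = urllib.request.urlopen(
    f"{base}/llvm-release-{yyyymmdd}.txt").read().decode().strip()
revision = urllib.request.urlopen(
    f"{base}/llvm-git-revision-{yyyymmdd}.txt").read().decode().strip()
print(f"snapshot {yyyymmdd}: LLVM {version} at commit {revision}")

# Download the component tarball; note that the URL carries no version number.
urllib.request.urlretrieve(f"{base}/{component}-{yyyymmdd}.src.tar.xz",
                           f"{component}-{yyyymmdd}.src.tar.xz")
```

Note that elsewhere in this patch the version files move from the `source-snapshot` release to `snapshot-version-sync` (see the URL changes in `snapshot_manager/snapshot_manager/util.py`), so the base URL above reflects the pre-patch convention only.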
- -on: - schedule: - # Everyday at 00:00am - # See https://docs.github.com/en/actions/reference/events-that-trigger-workflows#schedule - - cron: '0 0 * * *' - - workflow_dispatch: - inputs: - commit_hash: - description: 'Commit hash to use without tests' - required: true - default: origin/main - type: string - -permissions: - # For release assets to be deletable we need this permission - contents: write - -jobs: - - # In order to re-build source snapshots and upload them, we must first delete - # the old ones from today; otherwise there would be a conflict. As a measure - # of not storing old snapshots for too long we'll delete older ones here as - # well. - delete-old-tarballs-and-todays: - name: "Delete old and today's tarballs" - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 1 - - - uses: ./.github/actions/prepare-python - - - name: "delete assets older than 33 days and from today" - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - ./scripts/delete-assets.py \ - --token ${{ secrets.GITHUB_TOKEN }} \ - --project ${{ github.repository }} \ - --release-name source-snapshot \ - --delete-older 33 \ - --delete-today - - generate-source-tarballs: - name: Generate snapshot tarballs - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 1 - path: llvm-snapshots - - - uses: ./llvm-snapshots/.github/actions/prepare-python - with: - checkout-path: llvm-snapshots - - - name: Determine good commit (on schedule only) - uses: ./llvm-snapshots/.github/actions/get-good-commit - if: github.event_name != 'workflow_dispatch' - id: good-commit - with: - token: ${{ secrets.GITHUB_TOKEN }} - checkout-path: llvm-snapshots - github-project: llvm/llvm-project - start-ref: main - max-tries: 500 - - - name: "Variables and functions" - shell: bash -e {0} - run: | - if [[ "${{github.event_name}}" == "workflow_dispatch" ]]; then - echo "commit_hash=${{inputs.commit_hash}}" >> $GITHUB_ENV - else - echo "commit_hash=${{ steps.good-commit.outputs.good-commit }}" >> $GITHUB_ENV - fi - - - name: "checkout llvm/llvm-project" - uses: actions/checkout@v4 - with: - repository: llvm/llvm-project - ref: main - token: ${{ secrets.GITHUB_TOKEN }} - submodules: true - path: llvm-project - - - name: "fetch LLVM revision to archive" - run: git -C llvm-project fetch --depth=1 --no-tags origin ${{ env.commit_hash }} - - - name: "create source-snapshot tarballs" - run: | - llvm-project/llvm/utils/release/export.sh \ - --git-ref ${{ env.commit_hash }} \ - --template '${PROJECT}-${YYYYMMDD}.src.tar.xz' - - - name: >- - upload source-snapshots and version files to the 'source-snapshot' - pre-release of ${{ github.repository }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - llvm-snapshots/scripts/upload-source-snapshots.py \ - --token ${{ secrets.GITHUB_TOKEN }} \ - --project ${{ github.repository }} \ - --release-name source-snapshot \ - --yyyymmdd "$(date +%Y%m%d)" diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml new file mode 100644 index 00000000..a8615433 --- /dev/null +++ b/.github/workflows/pre-commit.yml @@ -0,0 +1,14 @@ +name: pre-commit + +on: + pull_request: + push: + branches: [main] + +jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: ./.github/actions/prepare-python + - uses: pre-commit/action@v3.0.1 diff --git a/.github/workflows/retest.yml b/.github/workflows/retest.yml index 1b2755e9..57b0ad7b 100644 --- a/.github/workflows/retest.yml +++ 
b/.github/workflows/retest.yml @@ -78,6 +78,7 @@ jobs: shell: bash -e {0} env: GITHUB_TOKEN: ${{ secrets.GH_TEST_TOKEN }} + chroots: ${{ env.chroots }} run: | python3 snapshot_manager/main.py \ --github-repo ${GITHUB_REPOSITORY} \ diff --git a/.github/workflows/sync-on-llvm-version.yml b/.github/workflows/sync-on-llvm-version.yml new file mode 100644 index 00000000..c2e404f6 --- /dev/null +++ b/.github/workflows/sync-on-llvm-version.yml @@ -0,0 +1,106 @@ +name: "Sync on LLVM version" + +on: + schedule: + # Everyday at 00:00am + # See https://docs.github.com/en/actions/reference/events-that-trigger-workflows#schedule + - cron: '0 0 * * *' + + workflow_dispatch: + inputs: + commit_hash: + description: 'Commit hash to use without tests' + required: true + default: main + type: string + +permissions: + # For release assets to be deletable we need this permission + contents: write + +jobs: + + # In order to re-build source snapshots and upload them, we must first delete + # the old ones from today; otherwise there would be a conflict. As a measure + # of not storing old snapshots for too long we'll delete older ones here as + # well. + regenerate-assets: + name: "(Re)generate assets" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 1 + + - uses: ./.github/actions/prepare-python + + - name: "delete assets older than 33 days and from today" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + ./scripts/delete-assets.py \ + --token ${{ secrets.GITHUB_TOKEN }} \ + --project ${{ github.repository }} \ + --release-name snapshot-version-sync \ + --delete-older 33 \ + --delete-today + + - name: Determine good commit (on schedule only) + uses: ./.github/actions/get-good-commit + id: good-commit + with: + token: ${{ secrets.GITHUB_TOKEN }} + checkout-path: . + github-project: llvm/llvm-project + start-ref: main + max-tries: 500 + + - name: "Generate snapshot version info" + shell: bash -e {0} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + if [[ "${{github.event_name}}" == "workflow_dispatch" ]]; then + commit_hash=${{inputs.commit_hash}} + else + commit_hash=${{ steps.good-commit.outputs.good-commit }} + fi + + if [[ "$commit_hash" =~ ^[0-9a-f]{40}$ ]]; then + echo "commit_hash looks like a SHA1. No need to resolve: ${commit_hash}" + else + echo "commit_hash doesn't look like a SHA1 (maybe it is a branch or tag name). 
Trying to resolve it: ${commit_hash}" + # See https://docs.github.com/de/rest/commits/commits?apiVersion=2022-11-28#list-branches-for-head-commit + commit_hash=`curl -L \ + -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer ${{env.GITHUB_TOKEN}}" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + https://api.github.com/repos/llvm/llvm-project/commits/${commit_hash}/branches-where-head \ + | jq -r '.[0].commit.sha'` + fi + + echo "commit_hash=${commit_hash}" >> $GITHUB_ENV + + yyyymmdd=$(date +%Y%m%d) + versionfile=LLVMVersion.cmake + url=https://raw.githubusercontent.com/llvm/llvm-project/${commit_hash}/cmake/Modules/${versionfile} + echo "Getting ${url}" + curl -sL -o ${versionfile} ${url} + + echo "Version file:" + cat ${versionfile} + + llvm_snapshot_git_revision=${commit_hash} + llvm_snapshot_version=`grep -ioP 'set\(\s*LLVM_VERSION_(MAJOR|MINOR|PATCH)\s\K[0-9]+' ${versionfile} | paste -sd '.'` + + echo "${llvm_snapshot_version}" > llvm-release-${yyyymmdd}.txt + echo "${llvm_snapshot_git_revision}" > llvm-git-revision-${yyyymmdd}.txt + + echo "llvm_release=`cat llvm-release-${yyyymmdd}.txt`" + echo "llvm_git_revision=`cat llvm-git-revision-${yyyymmdd}.txt`" + + ./scripts/upload-source-snapshots.py \ + --token ${{ secrets.GITHUB_TOKEN }} \ + --project ${{ github.repository }} \ + --release-name snapshot-version-sync \ + --yyyymmdd "$(date +%Y%m%d)" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d57823cf..ede07872 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,35 +1,58 @@ # See https://pre-commit.com for more information # See https://pre-commit.com/hooks.html for more hooks repos: -- repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 - hooks: - - id: trailing-whitespace - - id: end-of-file-fixer - - id: check-yaml - - id: check-added-large-files - args: ['--maxkb=3000'] - - id: check-docstring-first - - id: name-tests-test - - id: requirements-txt-fixer + - repo: https://github.com/PyCQA/isort + rev: "5.13.2" + hooks: + - id: isort -# See https://tmt.readthedocs.io/en/latest/guide.html#checking-data-validity -- repo: https://github.com/teemtee/tmt.git - rev: 1.32.2 - hooks: - - id: tmt-lint + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: check-ast + - id: check-case-conflict + - id: check-docstring-first + - id: check-executables-have-shebangs + - id: check-merge-conflict + - id: check-symlinks + - id: trailing-whitespace + - id: end-of-file-fixer + - id: mixed-line-ending + - id: fix-byte-order-marker + - id: detect-private-key + - id: check-toml + - id: check-yaml + args: + - "--allow-multiple-documents" + - id: check-added-large-files + args: ['--maxkb=3000'] + - id: check-docstring-first + - id: name-tests-test + - id: requirements-txt-fixer -# See https://black.readthedocs.io/en/stable/integrations/source_version_control.html -# Using this mirror lets us use mypyc-compiled black, which is about 2x faster -- repo: https://github.com/psf/black-pre-commit-mirror - rev: 24.3.0 - hooks: - - id: black - # It is recommended to specify the latest version of Python - # supported by your project here, or alternatively use - # pre-commit's default_language_version, see - # https://pre-commit.com/#top_level-default_language_version - language_version: python3.12 - force-exclude: "^snapshot_manager/tests/(test_logs|testing-farm-logs)/" + - repo: https://github.com/asottile/pyupgrade + rev: v3.15.2 + hooks: + - id: pyupgrade + args: + - "--py311-plus" + # See 
https://tmt.readthedocs.io/en/latest/guide.html#checking-data-validity + - repo: https://github.com/teemtee/tmt.git + rev: 1.32.2 + hooks: + - id: tmt-lint + + # See https://black.readthedocs.io/en/stable/integrations/source_version_control.html + # Using this mirror lets us use mypyc-compiled black, which is about 2x faster + - repo: https://github.com/psf/black-pre-commit-mirror + rev: 24.3.0 + hooks: + - id: black + # It is recommended to specify the latest version of Python + # supported by your project here, or alternatively use + # pre-commit's default_language_version, see + # https://pre-commit.com/#top_level-default_language_version + language_version: python3.12 + force-exclude: "^snapshot_manager/tests/(test_logs|testing-farm-logs)/" exclude: "^(snapshot_manager/tests/(test_logs|testing-farm-logs)/|media)" diff --git a/README.adoc b/README.adoc index be31e231..0cb18976 100644 --- a/README.adoc +++ b/README.adoc @@ -5,7 +5,6 @@ :showtitle: :homepage: https://github.com/fedora-llvm-team/llvm-snapshots -image:https://github.com/fedora-llvm-team/llvm-snapshots/actions/workflows/generate-snapshot-tarballs.yml/badge.svg[link="https://github.com/fedora-llvm-team/llvm-snapshots/actions/workflows/generate-snapshot-tarballs.yml"] image:https://github.com/fedora-llvm-team/llvm-snapshots/actions/workflows/fedora-copr-build.yml/badge.svg[link="https://github.com/fedora-llvm-team/llvm-snapshots/actions/workflows/fedora-copr-build.yml"] image:https://github.com/fedora-llvm-team/llvm-snapshots/actions/workflows/check-snapshots.yml/badge.svg[link="https://github.com/fedora-llvm-team/llvm-snapshots/actions/workflows/check-snapshots.yml"] image:https://github.com/fedora-llvm-team/llvm-snapshots/actions/workflows/python-format-and-tests.yml/badge.svg[link="https://github.com/fedora-llvm-team/llvm-snapshots/actions/workflows/python-format-and-tests.yml"] diff --git a/requirements.txt b/requirements.txt index b4759d84..0f335094 100644 --- a/requirements.txt +++ b/requirements.txt @@ -22,11 +22,11 @@ colorama==0.4.6 # typer commonmark==0.9.1 # via rich -copr==1.130 +copr==1.132 # via # -r requirements.txt.in # copr-cli -copr-cli==1.110 +copr-cli==1.112 # via -r requirements.txt.in cryptography==42.0.4 # via pyjwt @@ -72,9 +72,9 @@ python-dateutil==2.8.2 # via pandas pytz==2023.4 # via pandas -regex==2023.12.25 +regex==2024.5.15 # via -r requirements.txt.in -requests==2.31.0 +requests==2.32.3 # via # -r requirements.txt.in # copr @@ -87,8 +87,6 @@ rich==12.6.0 # via typer shellingham==1.5.4 # via typer -simplejson==3.19.2 - # via copr-cli six==1.16.0 # via # copr @@ -103,7 +101,7 @@ typing-extensions==4.8.0 # via pygithub tzdata==2023.4 # via pandas -urllib3==1.26.18 +urllib3==1.26.19 # via # pygithub # requests diff --git a/requirements.txt.in b/requirements.txt.in index 7b80f716..bf3be32c 100644 --- a/requirements.txt.in +++ b/requirements.txt.in @@ -8,8 +8,8 @@ pandas plotly==5.22.0 copr-cli tft-cli==0.0.16 -regex==2023.12.25 +regex==2024.5.15 munch==4.0.0 -copr==1.130 -requests==2.31.0 +copr==1.132 +requests==2.32.3 fnc diff --git a/scripts/create-diagrams.py b/scripts/create-diagrams.py index 69112bff..e8cafe26 100755 --- a/scripts/create-diagrams.py +++ b/scripts/create-diagrams.py @@ -4,14 +4,14 @@ import argparse import re from datetime import datetime +from pathlib import Path +import numpy as np import pandas as pd import plotly.express as px -import plotly.io as pio import plotly.graph_objects as go +import plotly.io as pio from plotly.offline import plot -from pathlib import Path -import 
numpy as np # %% @@ -127,7 +127,7 @@ def add_html_header_menu( all_packages (str]): All the packages names for which to generate a menu entry plotly_div_id (str, optional): Plotly's HTML div's ID. Defaults to "plotly_div_id". """ - replace_me = '
llvm+clang+compiler-rt+libomp' ) + header_menu += ' | llvm (big-merge)' header_menu += "
" header_menu += replace_me @@ -250,6 +251,7 @@ def create_index_page(all_packages: list[str], filepath: str = "index.html") ->
Last updated: {last_updated} @@ -321,7 +323,7 @@ def main() -> None: # To debug, uncomment the following: # fig.show() # break - filepath = "fig-{}.html".format(package_name) + filepath = f"fig-{package_name}.html" save_figure(fig=fig, filepath=filepath) add_html_header_menu(filepath=filepath, all_packages=all_packages) @@ -358,6 +360,12 @@ def main() -> None: save_figure(fig=fig, filepath=filepath) add_html_header_menu(filepath=filepath, all_packages=all_packages) + # Create dedicated big-merge figure with nothing else in it. + fig = create_figure(df=df_big_merge) + filepath = "fig-big-merge.html" + save_figure(fig=fig, filepath=filepath) + add_html_header_menu(filepath=filepath, all_packages=all_packages) + # Create an index HTML overview page that links to each figure page create_index_page(all_packages=all_packages, filepath="index.html") diff --git a/scripts/delete-assets.py b/scripts/delete-assets.py index 38f6bc8c..fb6cabef 100755 --- a/scripts/delete-assets.py +++ b/scripts/delete-assets.py @@ -1,10 +1,11 @@ #!/bin/env python3 -from github import Github, UnknownObjectException import argparse import datetime import sys +from github import Github, UnknownObjectException + def delete_assets( token: str, project: str, release_name: str, delete_older: int, delete_today: bool diff --git a/scripts/functions.sh b/scripts/functions.sh index d08a929f..bf14bf69 100644 --- a/scripts/functions.sh +++ b/scripts/functions.sh @@ -92,408 +92,6 @@ function has_all_good_builds(){ diff -bus /tmp/expected.txt /tmp/actual.txt } -# Max. allowed bytes per code snippet in a broken snapshot issue comment body. -# If we don't shorten the snippets, the comment body will be too long for a -# github comment. -function max_context_bytes() { - echo 3000 -} - -# Shortens a given file to at max. 3000 byte. NOTE: 3000 comes from -# max_context_bytes(). -function shorten_file() { - local file_path=$1 - truncate --size="<`max_context_bytes`" $file_path -} - -#region error causes - -# For a given project this function prints causes for all cases of errors it can -# automatically identify (e.g. copr_timeout, network_issue). The caller needs to -# make sure the labels exists before adding them to an issue. If you pass an -# additional file name, we will write every error cause with additional -# information to it -# (;;;;). The -# context file contains the lines before and after the error in the build log. -function get_error_causes(){ - local project=$1 - local causes_file=$2 - local grep_opts="-n --context=3" - local monitor_file=$(mktemp) - local context_file=$(mktemp) - local log_file=$(mktemp) - - >&2 echo "Start getting error causes from Copr monitor..." - - [[ -n "$causes_file" ]] && truncate --size 0 $causes_file - - copr monitor \ - --output-format json \ - --fields chroot,name,state,url_build_log,url_build,build_id $project \ - > $monitor_file - - cat $monitor_file | jq -r '.[] | select(.state == "failed") | to_entries | map(.value | if . then . else "NOTFOUND" end) | @tsv' \ - | while IFS=$'\t' read -r build_id chroot package_name state build_url build_log_url; do - >&2 cat <&2 echo "Cause: $cause" - >&2 echo "Context File: $context_file" - echo $line >> $causes_file - # For the next error we need to make room an create a new context file - context_file=$(mktemp) - fi - got_cause=1 - } - - # Surround context file with markdown code fence and shorten it - # appropriately. 
- function wrap_file_in_md_code_fence() { - local context_file=$1 - shorten_file $context_file - sed -i '1s;^;```\n;' $context_file - echo -e '\n```\n' >> $context_file - } - - # Treat errors with no build logs as unknown and tell user to visit the - # build URL manually. - if [ "$build_log_url" == "NOTFOUND" ]; then - # See https://github.com/fedora-copr/log-detective-website/issues/73#issuecomment-1889042206 - source_build_log_url="https://download.copr.fedorainfracloud.org/results/$project/srpm-builds/$(printf "%08d" $build_id)/builder-live.log.gz" - >&2 echo "No build log found. Falling back to scanning the SRPM build log: $source_build_log_url". - - source_build_log_file=$(mktemp) - curl -sL $source_build_log_url | gunzip -c > $source_build_log_file - - cat <> $context_file -

No build log available

-Sorry, but this build contains no build log file, please consult the build page to find out more. - -

Errors in SRPM build log

-We've scanned the SRPM build log for error: (case insesitive) and here's what we've found: - -\`\`\` -$(grep --context=3 -i 'error:' $source_build_log_file | head --bytes=`max_context_bytes`) -\`\`\` -EOF - store_cause "srpm_build_issue" - continue; - fi - - curl -sL $build_log_url | gunzip -c > $log_file - - # Check for timeout - if [ -n "$(grep $grep_opts '!! Copr timeout' $log_file | tee $context_file)" ]; then - wrap_file_in_md_code_fence $context_file - store_cause "copr_timeout" - - # Check for network issues - elif [ -n "$(grep $grep_opts 'Errors during downloading metadata for repository' $log_file | tee $context_file)" ]; then - wrap_file_in_md_code_fence $context_file - store_cause "network_issue" - - # Check for dependency issues - elif [ -n "$(grep $grep_opts -P '(No matching package to install:|Not all dependencies satisfied|No match for argument:)' $log_file | tee $context_file)" ]; then - wrap_file_in_md_code_fence $context_file - store_cause "dependency_issue" - - # Check for test issues - elif [ -n "$(pcre2grep -n --after-context=10 -M '(Failed Tests|Unexpectedly Passed Tests).*(\n|.)*Total Discovered Tests:' $log_file | tee $context_file)" ]; then - wrap_file_in_md_code_fence $context_file - sed -i '1s;^;### Failing tests\n\n;' $context_file - - echo "" >> $context_file - echo "### Test output" >> $context_file - echo "" >> $context_file - echo -e '\n```\n' >> $context_file - # Extend the context by the actual test errors - local test_output_file=$(mktemp) - sed -n -e '/\(\*\)\{20\} TEST [^\*]* FAILED \*\{20\}/,/\*\{20\}/ p' $log_file > $test_output_file - shorten_file $test_output_file - cat $test_output_file >> $context_file - echo -e '\n```\n' >> $context_file - store_cause "test" - - - # TODO: Feel free to add your check here... - # elif [ -n "$(grep $grep_opts 'MY PATTERN' $log_file | tee $context_file)" ]; then - # wrap_file_in_md_code_fence $context_file - # store_cause "MY_ERROR" - - fi - - if [ "$got_cause" == "0" ]; then - cat < $context_file -### Build log tail - -Sometimes the end of the build log contains useful information. - -\`\`\` -$(tail --bytes=`max_context_bytes` $log_file) -\`\`\` - -### RPM build errors - -If we have found RPM build errors in the log file, you'll find them here. - -\`\`\` -$(sed -n -e '/RPM build errors/,/Finish:/ p' $log_file | head --bytes=`max_context_bytes`) -\`\`\` - -### Errors to look into - -If we have found the term error: (case insentitive) in the build log, -you'll find all occurrences here together with the preceding lines. - -\`\`\` -$(grep --before-context=1 -i 'error:' $log_file | head --bytes=`max_context_bytes`) -\`\`\` -EOF - store_cause "unknown" - fi - - done | sort | uniq - - >&2 echo "Done getting error causes from Copr monitor." -} - -function get_arch_from_chroot() { - local chroot=$1 - echo $chroot | grep -ioP '[^-]+-[0-9,rawhide]+-\K[^\s]+' -} - -function get_os_from_chroot() { - local chroot=$1 - echo $chroot | grep -ioP '[^-]+-[0-9,rawhide]+' -} - -# Prints the marker after a broken snapshot issue comment body when the updates -# shall follow. -function update_marker() { - echo '' -} - -# Takes a file with error causes and promotes unknown build causes as their own -# comment on the given issue. 
-# -# A causes file looks like a semicolon separated list file: -# -# network_issue;llvm;fedora-rawhide-i386;https://download.copr.fedorainfracloud.org/results/@fedora-llvm-team/llvm-snapshots-big-merge-20240105/fedora-rawhide-i386/06865034-llvm/builder-live.log.gz;/tmp/tmp.v17rnmc4rp -# copr_timeout;llvm;fedora-39-ppc64le;https://download.copr.fedorainfracloud.org/results/@fedora-llvm-team/llvm-snapshots-big-merge-20240105/fedora-39-ppc64le/06865030-llvm/builder-live.log.gz;/tmp/tmp.PMXc0b7uEE -function report_build_issues() { - local github_repo=$1 - local issue_num=$2 - local causes_file_path=$3 - local maintainer_handle=$4 - local comment_body_file=$(mktemp) - - # To store all important causes like "unknown" - local sorted_causes_file=$(mktemp) - grep -P '^unknown;' $causes_file_path | sort --stable --ignore-case > $sorted_causes_file - # To store the rest of causes - grep -Pv '^unknown;' $causes_file_path | sort --stable --ignore-case >> $sorted_causes_file - - # Store existing comment body in a file and continously append to that file - # before making it the new issue comment. - gh --repo $github_repo issue view $issue_num --json body --jq ".body" > $comment_body_file - - # Shorten body until update marker because we're gonna re-add all errors again. - sed -i "/$(update_marker)/q" $comment_body_file - - # For older issues where the comment marker is not there yet, we'll simply add - # it on purpose here. - echo "$(update_marker)" >> $comment_body_file - - echo "

Last updated: $(date)

" >> $comment_body_file - - echo "
    " >> $comment_body_file - - archs="" - oses="" - package_names="" - error_causes="" - prev_cause="" - - >&2 echo "Begin reporting build issues from causes file: $causes_file_path..." - while IFS=';' read -r cause package_name chroot build_log_url build_url build_id context_file; - do - >&2 cat <' $comment_body_file)" != "" ]; then - # >&2 echo "Comment body already contains entry for this cause/package/chroot combination. Continuing" - # continue; - # fi - - # Wrap "more interesting" build log snippets in a
    block - details_begin="
    " - if [ "$cause" == "unknown" ]; then - # details_begin="
    " - details_begin="
    " - fi - - build_log_entry="(see build log)" - if [ "$build_log_url" == "NOTFOUND" ]; then - build_log_entry="(see build)" - fi - - heading="" - if [ "$prev_cause" != "$cause" ]; then - heading="

$cause

    " - fi - prev_cause=$cause - - cat <> $comment_body_file -$heading - -
  1. -$details_begin - -$package_name on $chroot $build_log_entry - - -$(cat $context_file) - - -
  2. -EOF - done < $sorted_causes_file - - echo "
" >> $comment_body_file - - if [ "$archs" != "" ]; then - create_labels_for_archs $github_repo "$archs" - create_labels_for_oses $github_repo "$oses" - create_labels_for_projects $github_repo "$package_names" - create_labels_for_error_causes $github_repo "$error_causes" - - os_labels=`for os in $oses; do echo -n " --add-label os/$os "; done` - arch_labels=`for arch in $archs; do echo -n " --add-label arch/$arch " ; done` - project_labels=`for project in $package_names; do echo -n " --add-label project/$project "; done` - error_labels=`for cause in $error_causes; do echo -n " --add-label error/$cause "; done` - - cat $comment_body_file - - gh --repo $github_repo \ - issue edit $issue_num \ - --body-file $comment_body_file $os_labels $arch_labels $project_labels $error_labels - - fi - >&2 echo "Done updating issue comment for issue number $issue_num in $github_repo" -} - -# This function inspects causes of build errors and adds a comment to today's -# issue. Maybe we can identify new causes for errors by inspecting the build -# logs. -function handle_error_causes() { - local github_repo=$1 - local strategy=$2 - local maintainer_handle=$3 - local copr_project_today=$4 - local causes_file=$5 - local issue_number=`todays_issue_number $github_repo $strategy` - local comment_file=`mktemp` - - >&2 echo "TODAY'S ISSUE IS $issue_number" - - >&2 echo "Begin handling error causes." - - # If no error causes file was passed, process build logs to get - # error causes. Also ensure the error cause labels are created. - if [[ -z "$causes_file" || ! -f "$causes_file" ]]; then - causes_file=`mktemp` - error_causes="`get_error_causes $copr_project_today $causes_file`" - create_labels_for_error_causes $github_repo "$error_causes" - fi - - # Turn some error causes into their own comment. - report_build_issues \ - $github_repo \ - "$issue_number" \ - "$causes_file" \ - "$maintainer_handle" - - >&2 echo "Done handling error causes." -} - -#endregion -#region labels - -# Iterates over the given labels and creates or edits each label in the list -# with the given prefix and color. 
-function _create_labels() { - local repo=$1 - local labels="$2" - local label_prefix=$3 - local color=$4 - - # Deduplicate labels - for label in $(echo $labels | tr ' ' '\n' | sort | uniq | tr '\n' ' '); do - local label_name=$label_prefix$label - >&2 echo "Creating label: repo=$repo name=$label_name color=$color" - gh --repo $repo label create $label_name --color $color --force - done - -} - -function create_labels_for_error_causes() { - local repo=$1 - local error_causes="$2" - _create_labels $repo "$error_causes" "error/" "FBCA04" -} - -function create_labels_for_archs() { - local repo=$1 - local archs="$2" - _create_labels $repo "$archs" "arch/" "C5DEF5" -} - -function create_labels_for_oses() { - local repo=$1 - local oses="$2" - _create_labels $repo "$oses" "os/" "F9D0C4" -} - -function create_labels_for_projects() { - local repo=$1 - local projects="$2" - _create_labels $repo "$projects" "project/" "BFDADC" -} - -function create_labels_for_strategies() { - local repo=$1 - local strategies="$2" - _create_labels $repo "$strategies" "strategy/" "FFFFFF" -} #endregion # This installs the gh client for Fedora as described here: diff --git a/scripts/get-build-stats.py b/scripts/get-build-stats.py index 23076093..d424a4e1 100755 --- a/scripts/get-build-stats.py +++ b/scripts/get-build-stats.py @@ -1,12 +1,13 @@ #!/bin/env python3 -import os -from datetime import datetime import argparse -import sys -from copr.v3 import Client, CoprNoResultException import calendar +import os +import sys import time +from datetime import datetime + +from copr.v3 import Client, CoprNoResultException def gather_build_stats( @@ -69,7 +70,7 @@ def gather_build_stats( def main(): defaut_yyyymmdd = datetime.today().strftime("%Y%m%d") default_copr_ownername = "@fedora-llvm-team" - default_copr_projectname = "llvm-snapshots-incubator-{}".format(defaut_yyyymmdd) + default_copr_projectname = f"llvm-snapshots-incubator-{defaut_yyyymmdd}" parser = argparse.ArgumentParser( description="Print stats for a snapshot run in CVS format for further consumption" @@ -80,7 +81,7 @@ def main(): dest="copr_ownername", type=str, default=default_copr_ownername, - help="copr ownername to use (default: {})".format(default_copr_ownername), + help=f"copr ownername to use (default: {default_copr_ownername})", ) parser.add_argument( diff --git a/scripts/get-good-commit.py b/scripts/get-good-commit.py index aecace9a..82d4cfa5 100755 --- a/scripts/get-good-commit.py +++ b/scripts/get-good-commit.py @@ -1,9 +1,10 @@ #!/bin/env python3 import argparse +import logging import sys + from github import Github -import logging def get_good_commit( @@ -38,7 +39,7 @@ def get_good_commit( """ ) - required_checks = set((check, "success") for check in required_checks) + required_checks = {(check, "success") for check in required_checks} for i in range(0, max_tries): commit = repo.get_commit(sha=next_sha) commit_url = f"https://github.com/{project}/commit/{commit.sha}" @@ -49,9 +50,9 @@ def get_good_commit( ) # Makes sure the required checks are among the ones that have been run # on the commit. 
- actual_checks = set( + actual_checks = { (status.context, status.state) for status in commit.get_statuses() - ) + } if not required_checks.issubset(actual_checks): logging.warning( f"- Ignoring commit because of missing or failed check(s): {required_checks - actual_checks}" diff --git a/scripts/upload-source-snapshots.py b/scripts/upload-source-snapshots.py index a2643434..b55f4501 100755 --- a/scripts/upload-source-snapshots.py +++ b/scripts/upload-source-snapshots.py @@ -1,11 +1,12 @@ #!/bin/env python3 -from github import Github, UnknownObjectException import argparse import datetime import os from glob import glob +from github import Github, UnknownObjectException + def main(args) -> None: g = Github(login_or_token=args.token) @@ -14,11 +15,11 @@ def main(args) -> None: yyyymmdd = args.yyyymmdd release_name = args.release_name tag_name = release_name - print("uploading assets for yyyymmdd='{}'".format(yyyymmdd)) + print(f"uploading assets for yyyymmdd='{yyyymmdd}'") try: release = repo.get_release(release_name) except UnknownObjectException as ex: - print("release '{}' not found but creating it now".format(release_name)) + print(f"release '{release_name}' not found but creating it now") release = repo.create_git_release( prerelease=True, name=release_name, @@ -28,7 +29,7 @@ def main(args) -> None: ) else: dir = os.getenv(key="GITHUB_WORKSPACE", default=".") - print("looking for source snapshots in directory: {}".format(dir)) + print(f"looking for source snapshots in directory: {dir}") glob_patterns = [ "*-{}.src.tar.xz", "llvm-release-{}.txt", @@ -38,7 +39,7 @@ def main(args) -> None: for pattern in glob_patterns: for name in glob(pattern.format(yyyymmdd)): path = os.path.join(dir, name) - print("uploading path: {}".format(path)) + print(f"uploading path: {path}") release.upload_asset(path=path) diff --git a/scripts/workflow-artifact-exists.py b/scripts/workflow-artifact-exists.py index 5ed07767..d2825e0c 100755 --- a/scripts/workflow-artifact-exists.py +++ b/scripts/workflow-artifact-exists.py @@ -1,12 +1,13 @@ #!/bin/env python3 import argparse -from github import Github -from datetime import date import re import sys +from datetime import date from pprint import pprint +from github import Github + def workflow_artifact_exists( token: str, diff --git a/snapshot_manager/main.py b/snapshot_manager/main.py index cdae698b..e98d0393 100644 --- a/snapshot_manager/main.py +++ b/snapshot_manager/main.py @@ -1,7 +1,7 @@ import argparse +import datetime import logging import sys -import datetime import snapshot_manager.config as config import snapshot_manager.snapshot_manager as snapshot_manager diff --git a/snapshot_manager/snapshot_manager/build_status.py b/snapshot_manager/snapshot_manager/build_status.py index 1750fc0d..a8bada2f 100644 --- a/snapshot_manager/snapshot_manager/build_status.py +++ b/snapshot_manager/snapshot_manager/build_status.py @@ -71,7 +71,12 @@ class ErrorCause(enum.StrEnum): ISSUE_DEPENDENCY = "dependency_issue" ISSUE_TEST = "test" ISSUE_DOWNSTREAM_PATCH_APPLICATION = "downstream_patch_application" - ISSUE_INSTALLED_BUT_UNPACKAGED_FILES_FOUND = "installed_but_unpackaged_files_found" + ISSUE_RPM__INSTALLED_BUT_UNPACKAGED_FILES_FOUND = ( + "rpm__installed_but_unpackaged_files_found" + ) + ISSUE_RPM__DIRECTORY_NOT_FOUND = "rpm__directory_not_found" + ISSUE_RPM__FILE_NOT_FOUND = "rpm__file_not_found" + ISSUE_CMAKE_ERROR = "cmake_error" ISSUE_UNKNOWN = "unknown" @classmethod @@ -332,7 +337,63 @@ def handle_golden_file(cause: ErrorCause, ctx: str) -> tuple[ErrorCause, 
str]: logging.info(" Checking for installed but unackaged files...") ret, ctx, _ = util.grep_file( - pattern=r"(?s)RPM build errors:\n Installed \(but unpackaged\) file\(s\) found:.*Finish", + pattern=r"(?s)RPM build errors:\n.* Installed \(but unpackaged\) file\(s\) found:.*Finish", + extra_args="-Pzo", + filepath=build_log_file, + ) + if ret == 0: + # Remove trailing binary zero + ctx = ctx.rstrip("\x00") + return handle_golden_file( + ErrorCause.ISSUE_RPM__INSTALLED_BUT_UNPACKAGED_FILES_FOUND, + util.fenced_code_block(ctx), + ) + + logging.info(" Checking for alternative installed but unackaged files...") + ret, ctx, _ = util.grep_file( + pattern=r"(?s)Checking for unpackaged file(s):.*Installed (but unpackaged) file(s) found:.*\n\n", + extra_args="-Pzo", + filepath=build_log_file, + ) + if ret == 0: + # Remove trailing binary zero + ctx = ctx.rstrip("\x00") + return handle_golden_file( + ErrorCause.ISSUE_RPM__INSTALLED_BUT_UNPACKAGED_FILES_FOUND, + util.fenced_code_block(ctx), + ) + + logging.info(" Checking for directory not found...") + ret, ctx, _ = util.grep_file( + pattern=r"(?s)RPM build errors:\n.* Directory not found: /builddir/.*Finish", + extra_args="-Pzo", + filepath=build_log_file, + ) + if ret == 0: + # Remove trailing binary zero + ctx = ctx.rstrip("\x00") + return handle_golden_file( + ErrorCause.ISSUE_RPM__DIRECTORY_NOT_FOUND, + util.fenced_code_block(ctx), + ) + + logging.info(" Checking for file not found...") + ret, ctx, _ = util.grep_file( + pattern=r"(?s)RPM build errors:\n.* File not found: /builddir/.*Finish", + extra_args="-Pzo", + filepath=build_log_file, + ) + if ret == 0: + # Remove trailing binary zero + ctx = ctx.rstrip("\x00") + return handle_golden_file( + ErrorCause.ISSUE_RPM__FILE_NOT_FOUND, + util.fenced_code_block(ctx), + ) + + logging.info(" Checking for CMake error...") + ret, ctx, _ = util.grep_file( + pattern=r"(?s)CMake Error at.*Configuring incomplete, errors occurred!", extra_args="-Pzo", filepath=build_log_file, ) @@ -340,7 +401,7 @@ def handle_golden_file(cause: ErrorCause, ctx: str) -> tuple[ErrorCause, str]: # Remove trailing binary zero ctx = ctx.rstrip("\x00") return handle_golden_file( - ErrorCause.ISSUE_INSTALLED_BUT_UNPACKAGED_FILES_FOUND, + ErrorCause.ISSUE_CMAKE_ERROR, util.fenced_code_block(ctx), ) diff --git a/snapshot_manager/snapshot_manager/config.py b/snapshot_manager/snapshot_manager/config.py index d0cbff64..bff0a634 100644 --- a/snapshot_manager/snapshot_manager/config.py +++ b/snapshot_manager/snapshot_manager/config.py @@ -2,13 +2,13 @@ config """ -import datetime import dataclasses +import datetime @dataclasses.dataclass(kw_only=True) class Config: - chroot_pattern: str = r"^fedora-(rawhide|[0-9]+)" + chroot_pattern: str = r"^(fedora-(rawhide|[0-9]+)|rhel-9-)" """Regular expression to select chroots from all chroots currently supported on Copr.""" packages: list[str] = dataclasses.field( @@ -66,7 +66,7 @@ class Config: label_prefix_in_testing: str = "in_testing/" label_prefix_tested_on: str = "tested_on/" - label_prefix_failed_on: str = "failed_on/" + label_prefix_tests_failed_on: str = "tests_failed_on/" label_prefix_llvm_release: str = "release/" diff --git a/snapshot_manager/snapshot_manager/github_graphql.py b/snapshot_manager/snapshot_manager/github_graphql.py index 531788be..a303eed9 100644 --- a/snapshot_manager/snapshot_manager/github_graphql.py +++ b/snapshot_manager/snapshot_manager/github_graphql.py @@ -2,10 +2,9 @@ GithubGraphQL """ -import pathlib -import os import logging -import fnc +import os +import 
pathlib from typing import Any, Union import fnc @@ -68,7 +67,7 @@ def encoding(self) -> str: return self.__encoding def run_from_file( - self, filename: str, variables: dict[str, Union[str, int]] = None, **kwargs + self, filename: str, variables: dict[str, str | int] = None, **kwargs ) -> Any: """ Read the query/mutation from the given file and execute it with the variables @@ -85,7 +84,7 @@ def run_from_file( variables (dict): The variables to be applied to the query/mutation. **kwargs: key-value pairs (e.g. {raise_on_error=True}) """ - with open(file=filename, mode="r", encoding=self.encoding) as file_handle: + with open(file=filename, encoding=self.encoding) as file_handle: query = file_handle.read() return self.run(query, variables, **kwargs) @@ -104,9 +103,7 @@ def close(self): """Closes the session.""" self.__session.close() - def run( - self, query: str, variables: dict[str, Union[str, int]] = None, **kwargs - ) -> dict: + def run(self, query: str, variables: dict[str, str | int] = None, **kwargs) -> dict: """ Execute the query with the variables applied. If not requested otherwise the plain result is returned. If you want to raise an exception in case diff --git a/snapshot_manager/snapshot_manager/github_util.py b/snapshot_manager/snapshot_manager/github_util.py index cdadd912..5046160b 100644 --- a/snapshot_manager/snapshot_manager/github_util.py +++ b/snapshot_manager/snapshot_manager/github_util.py @@ -3,23 +3,23 @@ """ import datetime -import os +import enum import logging -import fnc -import typing +import os import pathlib -import enum +import typing +import fnc import github import github.GithubException import github.Issue import github.IssueComment -import github.Repository -import github.PaginatedList import github.Label +import github.PaginatedList +import github.Repository -import snapshot_manager.config as config import snapshot_manager.build_status as build_status +import snapshot_manager.config as config import snapshot_manager.github_graphql as github_graphql import snapshot_manager.util as util @@ -120,7 +120,7 @@ def initial_comment(self) -> str: For each cause we will list the packages and the relevant log excerpts.
Use of labels
Let's assume a unit test test in upstream LLVM was broken. -We will then add these labels to this issue: error/test, arch/x86_64, os/fedora-rawhide, project/llvm. +We will then add these labels to this issue: error/test, build_failed_on/fedora-rawhide-x86_64, project/llvm. If you manually restart a build in Copr and can bring it to a successful state, we will automatically remove the aforementioned labels.
@@ -137,6 +137,16 @@ def initial_comment(self) -> str: def last_updated_html(cls) -> str: return f"

Last updated: {datetime.datetime.now().isoformat()}

" + def issue_title(self, strategy: str = None, yyyymmdd: str = None) -> str: + """Constructs the issue title we want to use""" + if strategy is None: + strategy = self.config.build_strategy + if yyyymmdd is None: + yyyymmdd = self.config.yyyymmdd + llvm_release = util.get_release_for_yyyymmdd(yyyymmdd) + llvm_git_revision = util.get_git_revision_for_yyyymmdd(yyyymmdd) + return f"Snapshot for {yyyymmdd}, v{llvm_release}, {llvm_git_revision[:7]} ({strategy})" + def create_or_get_todays_github_issue( self, maintainer_handle: str, @@ -154,11 +164,9 @@ def create_or_get_todays_github_issue( repo = self.gh_repo logging.info("Creating issue for today") - llvm_release = util.get_release_for_yyyymmdd(self.config.yyyymmdd) - llvm_git_revision = util.get_git_revision_for_yyyymmdd(self.config.yyyymmdd) issue = repo.create_issue( assignee=maintainer_handle, - title=f"Snapshot for {self.config.yyyymmdd}, v{llvm_release}, {llvm_git_revision[:7]} ({strategy})", + title=self.issue_title(), body=self.initial_comment, ) self.create_labels_for_strategies(labels=[strategy]) @@ -236,11 +244,10 @@ def get_label_names_on_issue( def get_error_label_names_on_issue(self, issue: github.Issue.Issue) -> list[str]: return self.get_label_names_on_issue(issue, prefix="error/") - def get_os_label_names_on_issue(self, issue: github.Issue.Issue) -> list[str]: - return self.get_label_names_on_issue(issue, prefix="os/") - - def get_arch_label_names_on_issue(self, issue: github.Issue.Issue) -> list[str]: - return self.get_label_names_on_issue(issue, prefix="arch/") + def get_build_failed_on_names_on_issue( + self, issue: github.Issue.Issue + ) -> list[str]: + return self.get_label_names_on_issue(issue, prefix="build_failed_on/") def get_project_label_names_on_issue(self, issue: github.Issue.Issue) -> list[str]: return self.get_label_names_on_issue(issue, prefix="project/") @@ -252,11 +259,11 @@ def create_labels_for_error_causes( labels=labels, prefix="error/", color="FBCA04", **kw_args ) - def create_labels_for_oses( + def create_labels_for_build_failed_on( self, labels: list[str], **kw_args ) -> list[github.Label.Label]: return self.create_labels( - labels=labels, prefix="os/", color="F9D0C4", **kw_args + labels=labels, prefix="build_failed_on/", color="F9D0C4", **kw_args ) def create_labels_for_projects( @@ -273,13 +280,6 @@ def create_labels_for_strategies( labels=labels, prefix="strategy/", color="FFFFFF", *kw_args ) - def create_labels_for_archs( - self, labels: list[str], **kw_args - ) -> list[github.Label.Label]: - return self.create_labels( - labels=labels, prefix="arch/", color="C5DEF5", *kw_args - ) - def create_labels_for_in_testing( self, labels: list[str], **kw_args ) -> list[github.Label.Label]: @@ -300,12 +300,12 @@ def create_labels_for_tested_on( *kw_args, ) - def create_labels_for_failed_on( + def create_labels_for_tests_failed_on( self, labels: list[str], **kw_args ) -> list[github.Label.Label]: return self.create_labels( labels=labels, - prefix=self.config.label_prefix_failed_on, + prefix=self.config.label_prefix_tests_failed_on, color="D93F0B", *kw_args, ) @@ -504,7 +504,7 @@ def label_in_testing(self, chroot: str) -> str: return f"{self.config.label_prefix_in_testing}{chroot}" def label_failed_on(self, chroot: str) -> str: - return f"{self.config.label_prefix_failed_on}{chroot}" + return f"{self.config.label_prefix_tests_failed_on}{chroot}" def label_tested_on(self, chroot: str) -> str: return f"{self.config.label_prefix_tested_on}{chroot}" diff --git 
a/snapshot_manager/snapshot_manager/snapshot_manager.py b/snapshot_manager/snapshot_manager/snapshot_manager.py old mode 100755 new mode 100644 index 895aceab..d4792221 --- a/snapshot_manager/snapshot_manager/snapshot_manager.py +++ b/snapshot_manager/snapshot_manager/snapshot_manager.py @@ -139,7 +139,10 @@ def retest( new_comment_body = self.remove_chroot_html_comment( comment_body=new_comment_body, chroot=chroot ) - issue.edit(body=new_comment_body) + issue.edit( + body=new_comment_body, + title=self.github.issue_title(strategy=strategy, yyyymmdd=yyyymmdd), + ) # Kick off a new workflow run and pass the exact date in YYYYMMDD # form because we don't know if the issue was for today @@ -222,7 +225,7 @@ def check_todays_builds(self) -> None: {build_status_matrix} {tf.TestingFarmRequest.dict_to_html_comment(requests)} """ - issue.edit(body=comment_body) + issue.edit(body=comment_body, title=self.github.issue_title()) logging.info("Filter testing-farm requests by chroot of interest") new_requests = dict() @@ -294,7 +297,7 @@ def check_todays_builds(self) -> None: in_testing = f"{self.config.label_prefix_in_testing}{chroot}" tested_on = f"{self.config.label_prefix_tested_on}{chroot}" - failed_on = f"{self.config.label_prefix_failed_on}{chroot}" + failed_on = f"{self.config.label_prefix_tests_failed_on}{chroot}" # Gather build IDs associated with this chroot. # We'll attach them a new testing-farm request, and for a recovered @@ -388,7 +391,7 @@ def check_todays_builds(self) -> None: {build_status_matrix} {tf.TestingFarmRequest.dict_to_html_comment(requests)} """ - issue.edit(body=comment_body) + issue.edit(body=comment_body, title=self.github.issue_title()) logging.info("Checking if issue can be closed") # issue.update() @@ -406,7 +409,11 @@ def check_todays_builds(self) -> None: msg = f"@{self.config.maintainer_handle}, all required packages have been successfully built and tested on all required chroots. We'll close this issue for you now as completed. Congratulations!" logging.info(msg) issue.create_comment(body=msg) - issue.edit(state="closed", state_reason="completed") + issue.edit( + state="closed", + state_reason="completed", + title=self.github.issue_title(), + ) # TODO(kwk): Promotion of issue goes here. 
else: logging.info("Cannot close issue yet.") @@ -422,8 +429,9 @@ def handle_labels( logging.info("Gather labels based on the errors we've found") error_labels = list({f"error/{err.err_cause}" for err in errors}) project_labels = list({f"project/{err.package_name}" for err in errors}) - os_labels = list({f"os/{err.os}" for err in errors}) - arch_labels = list({f"arch/{err.arch}" for err in errors}) + build_failed_on_labels = list( + {f"build_failed_on/{err.chroot}" for err in errors} + ) strategy_labels = [f"strategy/{self.config.build_strategy}"] llvm_release = util.get_release_for_yyyymmdd(self.config.yyyymmdd) other_labels: list[str] = [ @@ -437,13 +445,12 @@ def handle_labels( labels=["broken_snapshot_detected"], color="F46696", prefix="" ) self.github.create_labels_for_error_causes(error_labels) - self.github.create_labels_for_oses(os_labels) + self.github.create_labels_for_build_failed_on(build_failed_on_labels) self.github.create_labels_for_projects(project_labels) - self.github.create_labels_for_archs(arch_labels) self.github.create_labels_for_strategies(strategy_labels) self.github.create_labels_for_in_testing(all_chroots) self.github.create_labels_for_tested_on(all_chroots) - self.github.create_labels_for_failed_on(all_chroots) + self.github.create_labels_for_tests_failed_on(all_chroots) self.github.create_labels_for_llvm_releases([llvm_release]) # Remove old labels from issue if they no longer apply. This is great @@ -453,11 +460,15 @@ def handle_labels( labels_to_be_removed: list[str] = [] old_error_labels = self.github.get_error_label_names_on_issue(issue=issue) old_project_labels = self.github.get_project_label_names_on_issue(issue=issue) - old_arch_labels = self.github.get_arch_label_names_on_issue(issue=issue) + old_build_failed_labels = self.github.get_build_failed_on_names_on_issue( + issue=issue + ) labels_to_be_removed.extend(set(old_error_labels) - set(error_labels)) labels_to_be_removed.extend(set(old_project_labels) - set(project_labels)) - labels_to_be_removed.extend(set(old_arch_labels) - set(arch_labels)) + labels_to_be_removed.extend( + set(old_build_failed_labels) - set(build_failed_on_labels) + ) for label in labels_to_be_removed: logging.info(f"Removing label that no longer applies: {label}") @@ -467,8 +478,7 @@ def handle_labels( labels_to_add = ( error_labels + project_labels - + os_labels - + arch_labels + + build_failed_on_labels + strategy_labels + other_labels ) diff --git a/snapshot_manager/snapshot_manager/testing_farm_util.py b/snapshot_manager/snapshot_manager/testing_farm_util.py index c0ff2705..7093c3f0 100644 --- a/snapshot_manager/snapshot_manager/testing_farm_util.py +++ b/snapshot_manager/snapshot_manager/testing_farm_util.py @@ -2,26 +2,25 @@ testing_farm_util """ +import dataclasses +import datetime import enum +import json import logging -import re -import string -import uuid import os -import json +import pathlib import re -import dataclasses -import datetime +import string import urllib.parse -import pathlib +import uuid +import xml.etree.ElementTree as ET from typing import ClassVar -import regex import github.Issue -import xml.etree.ElementTree as ET +import regex -import snapshot_manager.util as util import snapshot_manager.config as config +import snapshot_manager.util as util @dataclasses.dataclass(kw_only=True, unsafe_hash=True) @@ -201,6 +200,7 @@ def make( --arch {util.chroot_arch(chroot)} \ --plan /tests/snapshot-gating \ --environment COPR_PROJECT={config.copr_projectname} \ + --environment COPR_CHROOT={chroot} \ --context 
distro={util.chroot_os(chroot)} \ --context arch={util.chroot_arch(chroot)} \ --no-wait \ @@ -348,7 +348,26 @@ def is_arch_supported(cls, arch: str, ranch: str) -> bool: @classmethod def get_compose(cls, chroot: str) -> str: + """ + Returns the testing farm compose for the given chroot + + For the redhat ranch see this list: https://api.testing-farm.io/v0.1/composes/redhat + For the public ranch see this list: https://api.testing-farm.io/v0.1/composes/public + + Examples: + + >>> TestingFarmRequest.get_compose("fedora-rawhide-x86_64") + 'Fedora-Rawhide' + >>> TestingFarmRequest.get_compose("fedora-39-x86_64") + 'Fedora-39' + >>> TestingFarmRequest.get_compose("rhel-9-aarch") + 'RHEL-9-Nightly' + """ util.expect_chroot(chroot) + + if util.chroot_name(chroot) == "rhel": + return f"RHEL-{util.chroot_version(chroot)}-Nightly" + if util.chroot_version(chroot) == "rawhide": return "Fedora-Rawhide" return util.chroot_os(chroot).capitalize() @@ -579,6 +598,13 @@ def from_output(cls, string: str) -> tuple["TestingFarmWatchResult", str]: >>> s = base64.b64decode(s).decode() >>> TestingFarmWatchResult.from_output(s) (, None) + >>> s='''8J+UjiBhcGkgaHR0cHM6Ly9hcGkuZGV2LnRlc3RpbmctZmFybS5pby92MC4xL3JlcXVlc3RzLzk3 + ... YTdjYzI0LTY5MjYtNDA1OS04NGFjLWQwMDc4Mjk3YzMxOQrwn5qAIHJlcXVlc3QgaXMgcnVubmlu + ... Zwrwn5qiIGFydGlmYWN0cyBodHRwczovL2FydGlmYWN0cy5kZXYudGVzdGluZy1mYXJtLmlvLzk3 + ... YTdjYzI0LTY5MjYtNDA1OS04NGFjLWQwMDc4Mjk3YzMxOQo=''' + >>> s = base64.b64decode(s).decode() + >>> TestingFarmWatchResult.from_output(s) + (, 'https://artifacts.dev.testing-farm.io/97a7cc24-6926-4059-84ac-d0078297c319') """ string = clean_testing_farm_output(string) for watch_result in TestingFarmWatchResult.all_watch_results(): diff --git a/snapshot_manager/snapshot_manager/util.py b/snapshot_manager/snapshot_manager/util.py index f1332217..8859d46a 100644 --- a/snapshot_manager/snapshot_manager/util.py +++ b/snapshot_manager/snapshot_manager/util.py @@ -2,17 +2,17 @@ util """ +import datetime +import functools import logging +import os import pathlib +import re import shlex import subprocess -import os -import re -import datetime -import functools -import requests import regex +import requests import snapshot_manager.file_access as file_access @@ -376,7 +376,7 @@ def chroot_arch(chroot: str) -> str: def get_git_revision_for_yyyymmdd(yyyymmdd: str) -> str: """Get LLVM commit hash for the given date""" yyyymmdd = get_yyyymmdd_from_string(yyyymmdd) - url = f"https://github.com/fedora-llvm-team/llvm-snapshots/releases/download/source-snapshot/llvm-git-revision-{yyyymmdd}.txt" + url = f"https://github.com/fedora-llvm-team/llvm-snapshots/releases/download/snapshot-version-sync/llvm-git-revision-{yyyymmdd}.txt" logging.info(f"Getting URL {url}") response = requests.get(url) return response.text.strip() @@ -386,7 +386,7 @@ def get_git_revision_for_yyyymmdd(yyyymmdd: str) -> str: def get_release_for_yyyymmdd(yyyymmdd: str) -> str: """Get LLVM release (e.g. 
19.0.0) for the given date""" yyyymmdd = get_yyyymmdd_from_string(yyyymmdd) - url = f"https://github.com/fedora-llvm-team/llvm-snapshots/releases/download/source-snapshot/llvm-release-{yyyymmdd}.txt" + url = f"https://github.com/fedora-llvm-team/llvm-snapshots/releases/download/snapshot-version-sync/llvm-release-{yyyymmdd}.txt" logging.info(f"Getting URL {url}") response = requests.get(url) return response.text.strip() diff --git a/snapshot_manager/tests/base_test.py b/snapshot_manager/tests/base_test.py index 9189fc6d..1b09eff8 100644 --- a/snapshot_manager/tests/base_test.py +++ b/snapshot_manager/tests/base_test.py @@ -3,12 +3,12 @@ """ import contextlib +import logging import os import pathlib +import sys import typing import unittest -import logging -import sys import snapshot_manager.config as config diff --git a/snapshot_manager/tests/build_status_test.py b/snapshot_manager/tests/build_status_test.py index e59093ab..0b316bb3 100644 --- a/snapshot_manager/tests/build_status_test.py +++ b/snapshot_manager/tests/build_status_test.py @@ -13,10 +13,11 @@ def test_get_cause_from_build_log(self): causes = [e.value for e in ErrorCause] - # TODO(kwk): Find good example log files for these two error causes + # TODO(kwk): Find good example log files for these three error causes # (existing ones have timed out and were already deleted) causes.remove(ErrorCause.ISSUE_NETWORK) causes.remove(ErrorCause.ISSUE_SRPM_BUILD) + causes.remove(ErrorCause.ISSUE_UNKNOWN) for expectedCause in causes: with self.subTest(expectedCause=expectedCause): diff --git a/snapshot_manager/tests/config_test.py b/snapshot_manager/tests/config_test.py index 84bc4028..9be2a9ea 100644 --- a/snapshot_manager/tests/config_test.py +++ b/snapshot_manager/tests/config_test.py @@ -3,6 +3,7 @@ import datetime import tests.base_test as base_test + import snapshot_manager.config as config diff --git a/snapshot_manager/tests/copr_util_test.py b/snapshot_manager/tests/copr_util_test.py index f2bbb51d..6d6698e7 100644 --- a/snapshot_manager/tests/copr_util_test.py +++ b/snapshot_manager/tests/copr_util_test.py @@ -3,8 +3,9 @@ import uuid import tests.base_test as base_test -import snapshot_manager.copr_util as copr_util + import snapshot_manager.config as config +import snapshot_manager.copr_util as copr_util class TestCopr(base_test.TestBase): diff --git a/snapshot_manager/tests/file_access_test.py b/snapshot_manager/tests/file_access_test.py index db8083aa..615a24ed 100644 --- a/snapshot_manager/tests/file_access_test.py +++ b/snapshot_manager/tests/file_access_test.py @@ -2,8 +2,8 @@ import tests.base_test as base_test -import snapshot_manager.file_access as file_access import snapshot_manager.build_status as build_status +import snapshot_manager.file_access as file_access class TestFileAccess(base_test.TestBase): @@ -17,6 +17,7 @@ def load_tests(loader, tests, ignore): See https://stackoverflow.com/a/27171468 """ import doctest + import snapshot_manager.file_access tests.addTests(doctest.DocTestSuite(snapshot_manager.file_access)) diff --git a/snapshot_manager/tests/github_util_test.py b/snapshot_manager/tests/github_util_test.py index 138ff53d..db47846e 100644 --- a/snapshot_manager/tests/github_util_test.py +++ b/snapshot_manager/tests/github_util_test.py @@ -5,6 +5,7 @@ import uuid import tests.base_test as base_test + import snapshot_manager.github_util as github_util @@ -82,7 +83,7 @@ def test_flip_test_label(self): all_chroots = [chroot] logging.info("Creating test labels") gh.create_labels_for_in_testing(all_chroots) - 
+        gh.create_labels_for_tests_failed_on(all_chroots)
         gh.create_labels_for_tested_on(all_chroots)
 
         in_testing = gh.label_in_testing(chroot=chroot)
diff --git a/snapshot_manager/tests/snapshot_manager_test.py b/snapshot_manager/tests/snapshot_manager_test.py
index d76acf60..751aa564 100644
--- a/snapshot_manager/tests/snapshot_manager_test.py
+++ b/snapshot_manager/tests/snapshot_manager_test.py
@@ -3,8 +3,9 @@
 import datetime
 
 import tests.base_test as base_test
-import snapshot_manager.snapshot_manager as snapshot_manager
+
 import snapshot_manager.github_util as github_util
+import snapshot_manager.snapshot_manager as snapshot_manager
 
 
 class TestSnapshotManager(base_test.TestBase):
diff --git a/snapshot_manager/tests/test_logs/cause_cmake_error.golden.txt b/snapshot_manager/tests/test_logs/cause_cmake_error.golden.txt
new file mode 100644
index 00000000..600a8d40
--- /dev/null
+++ b/snapshot_manager/tests/test_logs/cause_cmake_error.golden.txt
@@ -0,0 +1,13 @@
+
+```
+CMake Error at /usr/share/cmake/Modules/FindPackageHandleStandardArgs.cmake:230 (message):
+Jun 02 02:16:32 Could NOT find Threads (missing: Threads_FOUND)
+Jun 02 02:16:32 Call Stack (most recent call first):
+Jun 02 02:16:32 /usr/share/cmake/Modules/FindPackageHandleStandardArgs.cmake:600 (_FPHSA_FAILURE_MESSAGE)
+Jun 02 02:16:32 /usr/share/cmake/Modules/FindThreads.cmake:226 (FIND_PACKAGE_HANDLE_STANDARD_ARGS)
+Jun 02 02:16:32 /builddir/build/BUILD/llvm-19.0.0~pre20240602.g0310f7f2d0c56a-build/llvm-project-0310f7f2d0c56a5697710251cec9803cbf7b4d56/openmp/runtime/cmake/config-ix.cmake:158 (find_package)
+Jun 02 02:16:32 /builddir/build/BUILD/llvm-19.0.0~pre20240602.g0310f7f2d0c56a-build/llvm-project-0310f7f2d0c56a5697710251cec9803cbf7b4d56/openmp/runtime/CMakeLists.txt:279 (include)
+Jun 02 02:16:32
+Jun 02 02:16:32
+Jun 02 02:16:32 -- Configuring incomplete, errors occurred!
+```
diff --git a/snapshot_manager/tests/test_logs/cause_cmake_error.log.gz b/snapshot_manager/tests/test_logs/cause_cmake_error.log.gz
new file mode 100644
index 00000000..e2a3e835
Binary files /dev/null and b/snapshot_manager/tests/test_logs/cause_cmake_error.log.gz differ
diff --git a/snapshot_manager/tests/test_logs/cause_rpm__directory_not_found.golden.txt b/snapshot_manager/tests/test_logs/cause_rpm__directory_not_found.golden.txt
new file mode 100644
index 00000000..02dbcc7f
--- /dev/null
+++ b/snapshot_manager/tests/test_logs/cause_rpm__directory_not_found.golden.txt
@@ -0,0 +1,15 @@
+
+```
+RPM build errors:
+    Directory not found: /builddir/build/BUILDROOT/llvm-19.0.0~pre20240528.g1de1ee9cbabd64-1.fc41.s390x/usr/lib/clang/19/bin
+Finish: rpmbuild llvm-19.0.0~pre20240528.g1de1ee9cbabd64-1.fc41.src.rpm
+Finish: build phase for llvm-19.0.0~pre20240528.g1de1ee9cbabd64-1.fc41.src.rpm
+INFO: chroot_scan: 1 files copied to /var/lib/copr-rpmbuild/results/chroot_scan
+INFO: /var/lib/mock/fedora-rawhide-s390x-1716860898.527168/root/var/log/dnf5.log
+ERROR: Exception(/var/lib/copr-rpmbuild/results/llvm-19.0.0~pre20240528.g1de1ee9cbabd64-1.fc41.src.rpm) Config(fedora-rawhide-s390x) 494 minutes 28 seconds
+INFO: Results and/or logs in: /var/lib/copr-rpmbuild/results
+INFO: Cleaning up build root ('cleanup_on_failure=True')
+Start: clean chroot
+INFO: unmounting tmpfs.
+Finish
+```
diff --git a/snapshot_manager/tests/test_logs/cause_rpm__directory_not_found.log.gz b/snapshot_manager/tests/test_logs/cause_rpm__directory_not_found.log.gz
new file mode 100644
index 00000000..b27010d7
Binary files /dev/null and b/snapshot_manager/tests/test_logs/cause_rpm__directory_not_found.log.gz differ
diff --git a/snapshot_manager/tests/test_logs/cause_rpm__file_not_found.golden.txt b/snapshot_manager/tests/test_logs/cause_rpm__file_not_found.golden.txt
new file mode 100644
index 00000000..946c843b
--- /dev/null
+++ b/snapshot_manager/tests/test_logs/cause_rpm__file_not_found.golden.txt
@@ -0,0 +1,21 @@
+
+```
+RPM build errors:
+    absolute symlink: /usr/bin/clang-format-diff -> /usr/share/clang/clang-format-diff.py
+    File not found: /builddir/build/BUILDROOT/llvm-19.0.0~pre20240528.g1de1ee9cbabd64-1.fc39.i386/usr/lib/clang/19/bin/hwasan_symbolize
+    File not found: /builddir/build/BUILDROOT/llvm-19.0.0~pre20240528.g1de1ee9cbabd64-1.fc39.i386/usr/lib/clang/19/lib/i686-redhat-linux-gnu/libclang_rt.*
+    File not found: /builddir/build/BUILDROOT/llvm-19.0.0~pre20240528.g1de1ee9cbabd64-1.fc39.i386/usr/lib/clang/19/lib/i686-redhat-linux-gnu/clang_rt.crtbegin.o
+    File not found: /builddir/build/BUILDROOT/llvm-19.0.0~pre20240528.g1de1ee9cbabd64-1.fc39.i386/usr/lib/clang/19/lib/i686-redhat-linux-gnu/clang_rt.crtend.o
+Finish: rpmbuild llvm-19.0.0~pre20240528.g1de1ee9cbabd64-1.fc39.src.rpm
+Finish: build phase for llvm-19.0.0~pre20240528.g1de1ee9cbabd64-1.fc39.src.rpm
+INFO: chroot_scan: 3 files copied to /var/lib/copr-rpmbuild/results/chroot_scan
+INFO: /var/lib/mock/fedora-39-i686-1716860810.346640/root/var/log/dnf.log
+/var/lib/mock/fedora-39-i686-1716860810.346640/root/var/log/dnf.librepo.log
+/var/lib/mock/fedora-39-i686-1716860810.346640/root/var/log/dnf.rpm.log
+ERROR: Exception(/var/lib/copr-rpmbuild/results/llvm-19.0.0~pre20240528.g1de1ee9cbabd64-1.fc39.src.rpm) Config(fedora-39-i686) 398 minutes 25 seconds
+INFO: Results and/or logs in: /var/lib/copr-rpmbuild/results
+INFO: Cleaning up build root ('cleanup_on_failure=True')
+Start: clean chroot
+INFO: unmounting tmpfs.
+Finish
+```
diff --git a/snapshot_manager/tests/test_logs/cause_rpm__file_not_found.log.gz b/snapshot_manager/tests/test_logs/cause_rpm__file_not_found.log.gz
new file mode 100644
index 00000000..258172ec
Binary files /dev/null and b/snapshot_manager/tests/test_logs/cause_rpm__file_not_found.log.gz differ
diff --git a/snapshot_manager/tests/test_logs/cause_installed_but_unpackaged_files_found.golden.txt b/snapshot_manager/tests/test_logs/cause_rpm__installed_but_unpackaged_files_found.golden.txt
similarity index 100%
rename from snapshot_manager/tests/test_logs/cause_installed_but_unpackaged_files_found.golden.txt
rename to snapshot_manager/tests/test_logs/cause_rpm__installed_but_unpackaged_files_found.golden.txt
diff --git a/snapshot_manager/tests/test_logs/cause_installed_but_unpackaged_files_found.log.gz b/snapshot_manager/tests/test_logs/cause_rpm__installed_but_unpackaged_files_found.log.gz
similarity index 100%
rename from snapshot_manager/tests/test_logs/cause_installed_but_unpackaged_files_found.log.gz
rename to snapshot_manager/tests/test_logs/cause_rpm__installed_but_unpackaged_files_found.log.gz
diff --git a/snapshot_manager/tests/test_logs/cause_unknown.golden.txt b/snapshot_manager/tests/test_logs/cause_unknown.golden.txt
deleted file mode 100644
index 4dbdd3f9..00000000
--- a/snapshot_manager/tests/test_logs/cause_unknown.golden.txt
+++ /dev/null
@@ -1,50 +0,0 @@
-
-### Build log tail
-
-Sometimes the end of the build log contains useful information.
-
-```
-ERROR: Exception(/var/lib/copr-rpmbuild/results/llvm-19.0.0~pre20240222.gd17eade22ab9a6-1.fc41.src.rpm) Config(fedora-rawhide-x86_64) 70 minutes 7 seconds
-INFO: Results and/or logs in: /var/lib/copr-rpmbuild/results
-INFO: Cleaning up build root ('cleanup_on_failure=True')
-Start: clean chroot
-INFO: unmounting tmpfs.
-Finish: clean chroot
-ERROR: Command failed:
- # /usr/bin/systemd-nspawn -q -M b4d70a36ac5d427d981d509ae869c488 -D /var/lib/mock/fedora-rawhide-x86_64-1708566228.210033/root -a -u mockbuild --capability=cap_ipc_lock --rlimit=RLIMIT_NOFILE=10240 --capability=cap_ipc_lock --bind=/tmp/mock-resolv.jdo0dr4s:/etc/resolv.conf --bind=/dev/btrfs-control --bind=/dev/mapper/control --bind=/dev/fuse --bind=/dev/loop-control --bind=/dev/loop0 --bind=/dev/loop1 --bind=/dev/loop2 --bind=/dev/loop3 --bind=/dev/loop4 --bind=/dev/loop5 --bind=/dev/loop6 --bind=/dev/loop7 --bind=/dev/loop8 --bind=/dev/loop9 --bind=/dev/loop10 --bind=/dev/loop11 --console=pipe --setenv=TERM=vt100 --setenv=SHELL=/bin/bash --setenv=HOME=/builddir --setenv=HOSTNAME=mock --setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin '--setenv=PROMPT_COMMAND=printf "\033]0;\007"' '--setenv=PS1= \s-\v\$ ' --setenv=LANG=C.UTF-8 --resolv-conf=off bash --login -c '/usr/bin/rpmbuild -bb --target x86_64 --nodeps /builddir/build/originals/llvm.spec'
-
-Copr build error: Build failed
-
-```
-
-### RPM build errors
-
-If we have found RPM build errors in the log file, you'll find them here.
-
-```
-
-```
-
-### Errors to look into
-
-If we have found the term error: (case insentitive) in the build log,
-you'll find all occurrences here together with the preceding lines.
-
-```
-Feb 22 02:00:59 [2640/3735] cd /builddir/build/BUILD/llvm-project-19.0.0.src/llvm/redhat-linux-build/docs && /usr/bin/cmake -E env /usr/bin/sphinx-build-3 -b man -d /builddir/build/BUILD/llvm-project-19.0.0.src/llvm/redhat-linux-build/docs/_doctrees-dsymutil-man -q -t builder-man -D version=19 -D release=19.0.0pre20240222.gd17eade22ab9a6 /builddir/build/BUILD/llvm-project-19.0.0.src/llvm/docs /builddir/build/BUILD/llvm-project-19.0.0.src/llvm/redhat-linux-build/docs/man
-Feb 22 02:00:59 /builddir/build/BUILD/llvm-project-19.0.0.src/llvm/docs/AMDGPUDwarfExtensionAllowLocationDescriptionOnTheDwarfExpressionStack/AMDGPUDwarfExtensionAllowLocationDescriptionOnTheDwarfExpressionStack.md:396: ERROR: Unexpected indentation.
---
-Feb 22 02:00:59 /builddir/build/BUILD/llvm-project-19.0.0.src/llvm/docs/AMDGPUDwarfExtensionAllowLocationDescriptionOnTheDwarfExpressionStack/AMDGPUDwarfExtensionAllowLocationDescriptionOnTheDwarfExpressionStack.md:398: WARNING: Inline interpreted text or phrase reference start-string without end-string.
-Feb 22 02:00:59 /builddir/build/BUILD/llvm-project-19.0.0.src/llvm/docs/AMDGPUDwarfExtensionAllowLocationDescriptionOnTheDwarfExpressionStack/AMDGPUDwarfExtensionAllowLocationDescriptionOnTheDwarfExpressionStack.md:408: ERROR: Unexpected indentation.
---
-Feb 22 02:00:59 /builddir/build/BUILD/llvm-project-19.0.0.src/llvm/docs/AMDGPUDwarfExtensionAllowLocationDescriptionOnTheDwarfExpressionStack/AMDGPUDwarfExtensionAllowLocationDescriptionOnTheDwarfExpressionStack.md:432: WARNING: Inline interpreted text or phrase reference start-string without end-string.
-Feb 22 02:00:59 /builddir/build/BUILD/llvm-project-19.0.0.src/llvm/docs/AMDGPUDwarfExtensionAllowLocationDescriptionOnTheDwarfExpressionStack/AMDGPUDwarfExtensionAllowLocationDescriptionOnTheDwarfExpressionStack.md:441: ERROR: Unexpected indentation.
---
-Feb 22 02:00:59 /builddir/build/BUILD/llvm-project-19.0.0.src/llvm/docs/AMDGPUDwarfExtensionAllowLocationDescriptionOnTheDwarfExpressionStack/AMDGPUDwarfExtensionAllowLocationDescriptionOnTheDwarfExpressionStack.md:447: WARNING: Inline interpreted text or phrase reference start-string without end-string.
-Feb 22 02:00:59 /builddir/build/BUILD/llvm-project-19.0.0.src/llvm/docs/AMDGPUDwarfExtensionAllowLocationDescriptionOnTheDwarfExpressionStack/AMDGPUDwarfExtensionAllowLocationDescriptionOnTheDwarfExpressionStack.md:476: ERROR: Unexpected indentation.
---
-Feb 22 02:00:59 /builddir/build/BUILD/llvm-project-19.0.0.src/llvm/docs/AMDGPUDwarfExtensionAllowLocationDescriptionOnTheDwarfExpressionStack/AMDGPUDwarfExtensionAllowLocationDescriptionOnTheDwarfExpressionStack.md:478: WARNING: Inline interpreted text or phrase reference start-string without end-string.
-Feb 22 02:00:59 /builddir/build/BUILD/llvm-project-19.0.0.src/llvm/docs/AMDGPUDwarfExtensionAllowLocationDescriptionOnTheDwarfExpressionStack/AMDGPUDwarfExtensionAllowLocationDescriptionOnTheDwarfExpressionStack.md:488: ERROR: Unexpected indentation.
---
-Feb 22 02:00:59 /builddir/build
-```
diff --git a/snapshot_manager/tests/test_logs/cause_unknown.log.gz b/snapshot_manager/tests/test_logs/cause_unknown.log.gz
deleted file mode 100644
index e0f8fd55..00000000
Binary files a/snapshot_manager/tests/test_logs/cause_unknown.log.gz and /dev/null differ
diff --git a/snapshot_manager/tests/util_test.py b/snapshot_manager/tests/util_test.py
index cde34699..cc97e97e 100644
--- a/snapshot_manager/tests/util_test.py
+++ b/snapshot_manager/tests/util_test.py
@@ -43,6 +43,7 @@ def load_tests(loader, tests, ignore):
     See https://stackoverflow.com/a/27171468
     """
     import doctest
+
     import snapshot_manager.util
 
     tests.addTests(doctest.DocTestSuite(snapshot_manager.util))
diff --git a/tests/snapshot-gating.fmf b/tests/snapshot-gating.fmf
index c18f03cc..1179190c 100644
--- a/tests/snapshot-gating.fmf
+++ b/tests/snapshot-gating.fmf
@@ -11,7 +11,7 @@ summary: LLVM Tests for snapshot gating
 
 prepare:
   - how: install
-    copr: "@fedora-llvm-team/$COPR_PROJECT"
+    copr: "@fedora-llvm-team/$COPR_PROJECT $COPR_CHROOT"
     # Lower the priority of the testing-farm-tag-repository so that our copr repo is picked up.
     # See: https://docs.testing-farm.io/Testing%20Farm/0.1/test-environment.html#_tag_repository
   - how: shell
@@ -46,7 +46,7 @@ discover:
       filter: "tag:-spoils-installation & tag:-not-in-default"
     - name: llvm-tests
       how: fmf
-      url: https://src.fedoraproject.org/forks/kkleine/tests/llvm.git
+      url: https://src.fedoraproject.org/tests/llvm.git
       ref: main
       filter: "tag:-spoils-installation & tag:-not-in-default"
    - name: python-lit
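
The `util.py` hunks above switch the per-day version lookups to the `snapshot-version-sync` release tag: for a given YYYYMMDD the git revision and the LLVM release string are each fetched from a small text asset on that release. For anyone consuming those assets outside of `snapshot_manager`, here is a minimal standalone sketch of the same lookup; the helper name `fetch_snapshot_info`, its timeout, and its error handling are illustrative assumptions, not part of this repository:

```
import requests


def fetch_snapshot_info(yyyymmdd: str) -> dict[str, str]:
    """Resolve the LLVM git revision and release string for a given day (YYYYMMDD).

    Mirrors the behavior of get_git_revision_for_yyyymmdd()/get_release_for_yyyymmdd()
    in snapshot_manager/util.py, but as a self-contained helper.
    """
    base = (
        "https://github.com/fedora-llvm-team/llvm-snapshots/"
        "releases/download/snapshot-version-sync"
    )
    info: dict[str, str] = {}
    for key, asset in (("git_revision", "llvm-git-revision"), ("release", "llvm-release")):
        url = f"{base}/{asset}-{yyyymmdd}.txt"
        response = requests.get(url, timeout=30)
        # Fail loudly if no asset was published for that day.
        response.raise_for_status()
        info[key] = response.text.strip()
    return info


# Hypothetical usage:
# fetch_snapshot_info("20240602") -> {"git_revision": "<sha>", "release": "19.0.0"}
```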