boot-utils: Add '--gh-json-file'
In moving the rootfs images to GitHub releases, we risk hitting GitHub's
API rate limit with GITHUB_TOKEN, which is 1,000 requests per hour per
repository, because each boot test within a workflow will be a separate
call. It is totally possible for us to run 1,000 boots an hour during a
busy workflow period, so this needs special consideration.

To make it easier for CI to cache the results of a GitHub release API
query, add '--gh-json-file' to both boot-qemu.py and boot-uml.py to
allow the tuxsuite parent job to generate boot-utils.json and pass that
along to each child job, so that at worst, each workflow will query the
API three times (once each for defconfigs, allconfigs, and distro configs).

Signed-off-by: Nathan Chancellor <[email protected]>
nathanchance committed Jun 2, 2023
Parent: 9290444 · Commit: 98f47f1
Showing 3 changed files with 38 additions and 8 deletions.
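
To illustrate the intended CI usage, here is a rough sketch (not code from this commit) of how a parent job could run the release query once, cache it as boot-utils.json, and hand the result to each child boot via '--gh-json-file'. The repository slug, architecture, and kernel location below are placeholders, and a real workflow would authenticate the API call with GITHUB_TOKEN.

#!/usr/bin/env python3
# Hypothetical parent-job sketch: perform the GitHub release API query once,
# cache it as boot-utils.json, and let each child boot reuse the cached file.
import json
import subprocess
import urllib.request

ROOTFS_REPO = 'example-org/example-rootfs'  # placeholder repository slug

# One API call for the whole workflow instead of one per boot test.
url = f'https://api.github.com/repos/{ROOTFS_REPO}/releases/latest'
with urllib.request.urlopen(url) as response:
    release = json.load(response)

with open('boot-utils.json', 'w', encoding='utf-8') as file:
    json.dump(release, file)

# A child job then boots against the cached JSON; flags other than
# '--gh-json-file' and '-k' are assumptions for the sake of the example.
subprocess.run([
    './boot-qemu.py', '-a', 'x86_64', '-k', 'path/to/kernel/build',
    '--gh-json-file', 'boot-utils.json'
], check=True)
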
12 changes: 11 additions & 1 deletion boot-qemu.py
@@ -44,6 +44,7 @@ def __init__(self):
         self.efi = False
         self.gdb = False
         self.gdb_bin = ''
+        self.gh_json_file = None
         self.interactive = False
         self.kernel = None
         self.kernel_dir = None
@@ -162,7 +163,8 @@ def _have_dev_kvm_access(self):
     def _prepare_initrd(self):
         if not self._initrd_arch:
             raise RuntimeError('No initrd architecture specified?')
-        return utils.prepare_initrd(self._initrd_arch)
+        return utils.prepare_initrd(self._initrd_arch,
+                                    gh_json_file=self.gh_json_file)

     def _run_fg(self):
         # Pretty print and run QEMU command
@@ -740,6 +742,11 @@ def parse_arguments():
         '--gdb-bin',
         default='gdb-multiarch',
         help='gdb binary to use for debugging (default: gdb-multiarch)')
+    parser.add_argument(
+        '--gh-json-file',
+        help=
+        'Use file for downloading rootfs images, instead of querying GitHub API directly'
+    )
     parser.add_argument(
         '-k',
         '--kernel-location',
@@ -827,6 +834,9 @@ def parse_arguments():
         runner.gdb = True
         runner.gdb_bin = args.gdb_bin

+    if args.gh_json_file:
+        runner.gh_json_file = Path(args.gh_json_file).resolve()
+
     if args.no_kvm:
         runner.use_kvm = False

14 changes: 13 additions & 1 deletion boot-uml.py
@@ -2,6 +2,7 @@
 # pylint: disable=invalid-name

 import argparse
+from pathlib import Path
 import subprocess

 import utils
@@ -16,6 +17,12 @@ def parse_arguments():
     """
     parser = argparse.ArgumentParser()

+    parser.add_argument(
+        '-g',
+        '--gh-json-file',
+        help=
+        'Use file for downloading rootfs images, instead of querying GitHub API directly'
+    )
     parser.add_argument(
         "-i",
         "--interactive",
@@ -54,7 +61,12 @@ def run_kernel(kernel_image, rootfs, interactive):

 if __name__ == '__main__':
     args = parse_arguments()
+
     kernel = utils.get_full_kernel_path(args.kernel_location, "linux")
-    initrd = utils.prepare_initrd('x86_64', rootfs_format='ext4')
+
+    initrd_args = {'rootfs_format': 'ext4'}
+    if args.gh_json_file:
+        initrd_args['gh_json_file'] = Path(args.gh_json_file).resolve()
+    initrd = utils.prepare_initrd('x86_64', **initrd_args)

     run_kernel(kernel, initrd, args.interactive)
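
The initrd_args dictionary in the hunk above only sets gh_json_file when the flag was actually given, so utils.prepare_initrd() falls back to its default of None otherwise. An equivalent, more explicit sketch of the same logic (assuming boot-uml.py's existing imports and parsed args; not code from the commit):

# Same behaviour as the initrd_args dictionary: resolve the flag to an
# absolute Path when present, otherwise pass None so the default applies.
gh_json_file = Path(args.gh_json_file).resolve() if args.gh_json_file else None
initrd = utils.prepare_initrd('x86_64',
                              rootfs_format='ext4',
                              gh_json_file=gh_json_file)
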
20 changes: 14 additions & 6 deletions utils.py
@@ -172,7 +172,7 @@ def green(string):
     print(f"\n\033[01;32m{string}\033[0m", flush=True)


-def prepare_initrd(architecture, rootfs_format='cpio'):
+def prepare_initrd(architecture, rootfs_format='cpio', gh_json_file=None):
     """
     Returns a decompressed initial ramdisk.
@@ -189,11 +189,19 @@ def prepare_initrd(architecture, rootfs_format='cpio'):
     gh_json_rl = get_gh_json('https://api.github.com/rate_limit')
     remaining = gh_json_rl['resources']['core']['remaining']

-    # If we have API calls remaining, we can query for the latest release to
-    # make sure that we are up to date.
-    if remaining > 0:
-        gh_json_rel = get_gh_json(
-            f"https://api.github.com/repos/{REPO}/releases/latest")
+    # If we have API calls remaining or have already queried the API previously
+    # and cached the result, we can query for the latest release to make sure
+    # that we are up to date.
+    if remaining > 0 or gh_json_file:
+        if gh_json_file:
+            if not gh_json_file.exists():
+                raise FileNotFoundError(
+                    f"Provided GitHub JSON file ('{gh_json_file}') does not exist!"
+                )
+            gh_json_rel = json.loads(gh_json_file.read_text(encoding='utf-8'))
+        else:
+            gh_json_rel = get_gh_json(
+                f"https://api.github.com/repos/{REPO}/releases/latest")
         # Download the ramdisk if it is not already downloaded
         if not src.exists():
             download_initrd(gh_json_rel, src)
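
As a usage reference, a minimal sketch of calling the updated helper directly, assuming utils.py is importable from the current directory and boot-utils.json holds a previously saved 'releases/latest' response (the file name and location are assumptions, not part of the commit):

# Minimal sketch: reuse a cached release description instead of querying
# the GitHub API.
from pathlib import Path

import utils

cached = Path('boot-utils.json').resolve()
initrd = utils.prepare_initrd('x86_64', gh_json_file=cached)
print(f'Decompressed initrd: {initrd}')
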
