From ba4c0fbf6fce5db6f1e963fd5b7cb0a17a6dcf23 Mon Sep 17 00:00:00 2001
From: Mike Hommey <mh@glandium.org>
Date: Sat, 25 Jan 2025 08:59:50 +0900
Subject: [PATCH] [CI] Duplicate the taskcluster workflow to github

---
 .github/actions/decision/action.yml |  48 ++++++
 .github/actions/mounts/action.yml   |  35 ++++
 .github/actions/task/action.yml     |  40 +++++
 .github/workflows/env-tools.yml     | 242 ++++++++++++++++++++++++++++
 CI/decision.py                      |  80 ++++++++-
 CI/docker.py                        |   5 +-
 CI/run.py                           | 144 +++++++++++++++++
 CI/tasks.py                         |  19 ++-
 CI/variables.py                     |  20 ++-
 9 files changed, 621 insertions(+), 12 deletions(-)
 create mode 100644 .github/actions/decision/action.yml
 create mode 100644 .github/actions/mounts/action.yml
 create mode 100644 .github/actions/task/action.yml
 create mode 100644 .github/workflows/env-tools.yml
 create mode 100644 CI/run.py

diff --git a/.github/actions/decision/action.yml b/.github/actions/decision/action.yml
new file mode 100644
index 000000000..9370d1a28
--- /dev/null
+++ b/.github/actions/decision/action.yml
@@ -0,0 +1,48 @@
+name: Decision
+description: Decision
+outputs:
+  matrix:
+    description: Decision matrix
+    value: ${{ steps.matrix.outputs.result }}
+  artifacts:
+    description: Artifacts data
+    value: ${{ steps.decision.outputs.artifacts }}
+  mounts:
+    description: Mounts data
+    value: ${{ steps.decision.outputs.mounts }}
+runs:
+  using: "composite"
+  steps:
+    - name: Decision
+      id: decision
+      shell: bash
+      run: |
+        python3 CI/decision.py >> $GITHUB_OUTPUT
+    - uses: actions/setup-node@v3
+      if: ${{ steps.decision.outputs.matrix }}
+      with:
+        node-version: '20.x'
+    - shell: bash
+      if: ${{ steps.decision.outputs.matrix }}
+      run: npm install @actions/cache
+    - name: Check dependencies
+      id: matrix
+      if: ${{ steps.decision.outputs.matrix }}
+      uses: actions/github-script@v7
+      with:
+        script: |
+          const cache = require('@actions/cache');
+          const matrix = ${{ toJSON(fromJSON(steps.decision.outputs.matrix)) }};
+          const artifacts = ${{ toJSON(fromJSON(steps.decision.outputs.artifacts)) }};
+          const filtered = await Promise.all(
+            Object.entries(matrix).map(async ([name, items]) => {
+              const filtered_items = [];
+              for (const item of items) {
+                if (!(item.task in artifacts) || !await cache.restoreCache(artifacts[item.task].paths, artifacts[item.task].key, [], { lookupOnly: true }, true)) {
+                  filtered_items.push(item);
+                }
+              }
+              return [name, filtered_items];
+            })
+          );
+          return Object.fromEntries(filtered.filter(([_, items]) => items.length > 0));
diff --git a/.github/actions/mounts/action.yml b/.github/actions/mounts/action.yml
new file mode 100644
index 000000000..41e9133cc
--- /dev/null
+++ b/.github/actions/mounts/action.yml
@@ -0,0 +1,35 @@
+name: Mount dependencies
+description: Mount dependencies
+inputs:
+  mounts:
+    description: mounts
+    required: true
+runs:
+  using: "composite"
+  steps:
+    - uses: actions/setup-node@v3
+      with:
+        node-version: '20.x'
+    - shell: bash
+      run: npm install @actions/cache
+    - name: Mount dependencies
+      uses: actions/github-script@v7
+      with:
+        script: |
+          const fs = require('fs');
+          const path = require('path');
+          const cache = require('@actions/cache');
+          const mounts = ${{ toJSON(fromJSON(inputs.mounts)) }};
+          const pwd = process.cwd();
+          // Make actions/cache's getWorkingDirectory() return process.cwd.
+          delete process.env.GITHUB_WORKSPACE;
+          for (const mount of mounts) {
+            const dir = path.join(pwd, 'cache', mount.key, path.dirname(mount.artifact));
+            fs.mkdirSync(dir, { recursive: true });
+            process.chdir(dir);
+            if (await cache.restoreCache([path.basename(mount.artifact)], mount.key, [], {}, true)) {
+              console.log(`Cache restored from key: ${mount.key}`);
+            } else {
+              core.setFailed(`Failed to restore cache from key: ${mount.key}`);
+            }
+          }
diff --git a/.github/actions/task/action.yml b/.github/actions/task/action.yml
new file mode 100644
index 000000000..1d207659c
--- /dev/null
+++ b/.github/actions/task/action.yml
@@ -0,0 +1,40 @@
+name: Task
+description: Run task
+inputs:
+  name:
+    description: task name
+    required: true
+  mounts:
+    description: mounts for the task
+    required: false
+  artifacts:
+    description: artifacts from the task
+    required: false
+runs:
+  using: "composite"
+  steps:
+    - uses: actions/setup-python@v5
+      if: ${{ runner.os == 'macOS' }}
+      with:
+        python-version: ${{ runner.arch == 'ARM64' && '3.11.7' || '3.9.14' }}
+    - name: Finish python setup
+      if: ${{ runner.os == 'macOS' }}
+      shell: bash
+      run: |
+        python3 -m pip install pip==20.3.4 wheel==0.37.0 --upgrade
+    - uses: actions/cache@v4
+      if: ${{ fromJSON(inputs.artifacts) }}
+      id: cache
+      with:
+        path: ${{ join(fromJSON(inputs.artifacts).paths, '\n') }}
+        key: ${{ fromJSON(inputs.artifacts).key }}
+        enableCrossOsArchive: true
+    - uses: ./.github/actions/mounts
+      if: ${{ steps.cache.outputs.cache-hit != 'true' && fromJSON(inputs.mounts) }}
+      with:
+        mounts: ${{ inputs.mounts }}
+    - name: ${{ inputs.name }}
+      shell: bash
+      if: ${{ steps.cache.outputs.cache-hit != 'true' }}
+      run: |
+        python3 CI/run.py --cache cache --no-recurse --out . "${{ inputs.name }}"
diff --git a/.github/workflows/env-tools.yml b/.github/workflows/env-tools.yml
new file mode 100644
index 000000000..c8e50a4e8
--- /dev/null
+++ b/.github/workflows/env-tools.yml
@@ -0,0 +1,242 @@
+name: environment and tools
+on:
+  push: []
+  pull_request: []
+jobs:
+  decision:
+    runs-on: ubuntu-latest
+    outputs:
+      matrix: ${{ steps.decision.outputs.matrix }}
+      artifacts: ${{ steps.decision.outputs.artifacts }}
+      mounts: ${{ steps.decision.outputs.mounts }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./.github/actions/decision
+        id: decision
+
+  docker-base:
+    if: ${{ fromJSON(needs.decision.outputs.matrix)['docker-base'] && !cancelled() }}
+    needs: decision
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(needs.decision.outputs.matrix)['docker-base'] }}
+    runs-on: ${{ matrix.runner }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./.github/actions/task
+        with:
+          name: ${{ matrix.task }}
+          mounts: ${{ toJSON(fromJSON(needs.decision.outputs.mounts)[matrix.task]) }}
+          artifacts: ${{ toJSON(fromJSON(needs.decision.outputs.artifacts)[matrix.task]) }}
+
+  docker:
+    if: ${{ fromJSON(needs.decision.outputs.matrix)['docker'] && !cancelled() }}
+    needs: [decision, docker-base]
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(needs.decision.outputs.matrix)['docker'] }}
+    runs-on: ${{ matrix.runner }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./.github/actions/task
+        with:
+          name: ${{ matrix.task }}
+          mounts: ${{ toJSON(fromJSON(needs.decision.outputs.mounts)[matrix.task]) }}
+          artifacts: ${{ toJSON(fromJSON(needs.decision.outputs.artifacts)[matrix.task]) }}
+
+  msys2-base:
+    if: ${{ fromJSON(needs.decision.outputs.matrix)['msys2-base'] && !cancelled() }}
+    needs: [decision, docker-base]
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(needs.decision.outputs.matrix)['msys2-base'] }}
+    runs-on: ${{ matrix.runner }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./.github/actions/task
+        with:
+          name: ${{ matrix.task }}
+          mounts: ${{ toJSON(fromJSON(needs.decision.outputs.mounts)[matrix.task]) }}
+          artifacts: ${{ toJSON(fromJSON(needs.decision.outputs.artifacts)[matrix.task]) }}
+
+  msys2:
+    if: ${{ fromJSON(needs.decision.outputs.matrix)['msys2'] && !cancelled() }}
+    needs: [decision, msys2-base]
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(needs.decision.outputs.matrix)['msys2'] }}
+    runs-on: ${{ matrix.runner }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./.github/actions/task
+        with:
+          name: ${{ matrix.task }}
+          mounts: ${{ toJSON(fromJSON(needs.decision.outputs.mounts)[matrix.task]) }}
+          artifacts: ${{ toJSON(fromJSON(needs.decision.outputs.artifacts)[matrix.task]) }}
+
+  git:
+    if: ${{ fromJSON(needs.decision.outputs.matrix)['git'] && !cancelled() }}
+    needs: [decision, docker]
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(needs.decision.outputs.matrix)['git'] }}
+    runs-on: ${{ matrix.runner }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./.github/actions/task
+        with:
+          name: ${{ matrix.task }}
+          mounts: ${{ toJSON(fromJSON(needs.decision.outputs.mounts)[matrix.task]) }}
+          artifacts: ${{ toJSON(fromJSON(needs.decision.outputs.artifacts)[matrix.task]) }}
+
+  hg:
+    if: ${{ fromJSON(needs.decision.outputs.matrix)['hg'] && !cancelled() }}
+    needs: [decision, docker, msys2]
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(needs.decision.outputs.matrix)['hg'] }}
+    runs-on: ${{ matrix.runner }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./.github/actions/task
+        with:
+          name: ${{ matrix.task }}
+          mounts: ${{ toJSON(fromJSON(needs.decision.outputs.mounts)[matrix.task]) }}
+          artifacts: ${{ toJSON(fromJSON(needs.decision.outputs.artifacts)[matrix.task]) }}
+
+  build:
+    if: ${{ fromJSON(needs.decision.outputs.matrix)['build'] && !cancelled() }}
+    needs: [decision, docker]
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(needs.decision.outputs.matrix)['build'] }}
+    runs-on: ${{ matrix.runner }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./.github/actions/task
+        with:
+          name: ${{ matrix.task }}
+          mounts: ${{ toJSON(fromJSON(needs.decision.outputs.mounts)[matrix.task]) }}
+          artifacts: ${{ toJSON(fromJSON(needs.decision.outputs.artifacts)[matrix.task]) }}
+
+  cram:
+    if: ${{ fromJSON(needs.decision.outputs.matrix)['cram'] && !cancelled() }}
+    needs: [decision, docker, git, hg, build]
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(needs.decision.outputs.matrix)['cram'] }}
+    runs-on: ${{ matrix.runner }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./.github/actions/task
+        with:
+          name: ${{ matrix.task }}
+          mounts: ${{ toJSON(fromJSON(needs.decision.outputs.mounts)[matrix.task]) }}
+          artifacts: ${{ toJSON(fromJSON(needs.decision.outputs.artifacts)[matrix.task]) }}
+
+  download:
+    if: ${{ fromJSON(needs.decision.outputs.matrix)['download'] && !cancelled() }}
+    needs: [decision, docker, git, build, msys2]
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(needs.decision.outputs.matrix)['download'] }}
+    runs-on: ${{ matrix.runner }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./.github/actions/task
+        with:
+          name: ${{ matrix.task }}
+          mounts: ${{ toJSON(fromJSON(needs.decision.outputs.mounts)[matrix.task]) }}
+          artifacts: ${{ toJSON(fromJSON(needs.decision.outputs.artifacts)[matrix.task]) }}
+
+  hg-clone:
+    if: ${{ fromJSON(needs.decision.outputs.matrix)['hg-clone'] && !cancelled() }}
+    needs: [decision, docker, hg]
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(needs.decision.outputs.matrix)['hg-clone'] }}
+    runs-on: ${{ matrix.runner }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./.github/actions/task
+        with:
+          name: ${{ matrix.task }}
+          mounts: ${{ toJSON(fromJSON(needs.decision.outputs.mounts)[matrix.task]) }}
+          artifacts: ${{ toJSON(fromJSON(needs.decision.outputs.artifacts)[matrix.task]) }}
+
+  clone:
+    if: ${{ fromJSON(needs.decision.outputs.matrix)['clone'] && !cancelled() }}
+    needs: [decision, git, build, hg-clone]
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(needs.decision.outputs.matrix)['clone'] }}
+    runs-on: ${{ matrix.runner }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./.github/actions/task
+        with:
+          name: ${{ matrix.task }}
+          mounts: ${{ toJSON(fromJSON(needs.decision.outputs.mounts)[matrix.task]) }}
+          artifacts: ${{ toJSON(fromJSON(needs.decision.outputs.artifacts)[matrix.task]) }}
+
+  graft:
+    if: ${{ fromJSON(needs.decision.outputs.matrix)['graft'] && !cancelled() }}
+    needs: [decision, docker, git, build, hg, clone, hg-clone]
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(needs.decision.outputs.matrix)['graft'] }}
+    runs-on: ${{ matrix.runner }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./.github/actions/task
+        with:
+          name: ${{ matrix.task }}
+          mounts: ${{ toJSON(fromJSON(needs.decision.outputs.mounts)[matrix.task]) }}
+          artifacts: ${{ toJSON(fromJSON(needs.decision.outputs.artifacts)[matrix.task]) }}
+
+  test:
+    if: ${{ fromJSON(needs.decision.outputs.matrix)['test'] && !cancelled() }}
+    needs: [decision, docker, git, build, hg, clone, hg-clone]
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(needs.decision.outputs.matrix)['test'] }}
+    runs-on: ${{ matrix.runner }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./.github/actions/task
+        with:
+          name: ${{ matrix.task }}
+          mounts: ${{ toJSON(fromJSON(needs.decision.outputs.mounts)[matrix.task]) }}
+          artifacts: ${{ toJSON(fromJSON(needs.decision.outputs.artifacts)[matrix.task]) }}
+
+  upload:
+    # Disabled until we turn it off on taskcluster.
+    if: ${{ false && fromJSON(needs.decision.outputs.matrix)['upload'] && !failure() }}
+    needs: [decision, docker, build, cram, graft, test]
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(needs.decision.outputs.matrix)['upload'] }}
+    runs-on: ${{ matrix.runner }}
+    env:
+      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./.github/actions/task
+        with:
+          name: ${{ matrix.task }}
+          mounts: ${{ toJSON(fromJSON(needs.decision.outputs.mounts)[matrix.task]) }}
+          artifacts: ${{ toJSON(fromJSON(needs.decision.outputs.artifacts)[matrix.task]) }}
diff --git a/CI/decision.py b/CI/decision.py
index c37b19216..03248ee6a 100644
--- a/CI/decision.py
+++ b/CI/decision.py
@@ -404,7 +404,7 @@ def hg_trunk():
     do_hg_version(trunk)
 
 
-def main():
+def tasks():
     try:
         func = action.by_name[TC_ACTION or "decision"].func
     except AttributeError:
@@ -429,6 +429,11 @@ def main():
         )
 
     if merge_coverage:
+        kwargs = {}
+        if IS_GH:
+            kwargs["env"] = {
+                "CODECOV_TOKEN": "$CODECOV_TOKEN",
+            }
         Task(
             task_env=TaskEnvironment.by_name("linux.codecov"),
             description="upload coverage",
@@ -447,7 +452,9 @@ def main():
                         '[\\"secret\\"][\\"token\\"])")'
                     ),
                     "set -x",
-                ],
+                ]
+                if IS_TC
+                else [],
                 merge_coverage,
                 [
                     "cd repo",
@@ -457,7 +464,71 @@ def main():
                 ],
             )
         ),
+            **kwargs,
+        )
+
+
+def print_output(name, value):
+    if not isinstance(value, str):
+        value = json.dumps(value, separators=(",", ":"))
+    print(f"{name}={value}")
+
+
+def main_gh():
+    tasks()
+
+    RUNNER = {
+        "linux": "ubuntu-latest",
+        "osx": "macos-13",
+        "macos": "macos-14",
+        "windows": "windows-latest",
+    }
+    matrix = {}
+    artifacts = {}
+    mounts = {}
+    for t in Task.by_id.values():
+        key = t.key
+        task = t.task
+        payload = task.get("payload", {})
+        name = task.get("metadata", {})["name"]
+        job_name = name.split()[0]
+        if job_name == "hg" and name.startswith("hg clone"):
+            job_name = "hg-clone"
+        if job_name in ("docker", "msys2") and "base" in name:
+            job_name = f"{job_name}-base"
+        matrix.setdefault(job_name, []).append(
+            {
+                "task": name,
+                "runner": RUNNER[task["workerType"]],
+            }
         )
+        for mount in payload.get("mounts", []):
+            content = mount["content"]
+            mounts.setdefault(name, []).append(
+                {
+                    "artifact": content["artifact"],
+                    "key": Task.by_id[content["taskId"]].key,
+                }
+            )
+
+        if payload.get("artifacts"):
+            assert name not in artifacts
+            artifacts[name] = {
+                "paths": [
+                    os.path.basename(artifact["name"])
+                    for artifact in payload.get("artifacts", [])
+                ],
+                "key": key,
+            }
+    for m in matrix.values():
+        m.sort(key=lambda x: x["task"])
+    print_output("matrix", matrix)
+    print_output("artifacts", artifacts)
+    print_output("mounts", mounts)
+
+
+def main_tc():
+    tasks()
 
     for t in Task.by_id.values():
         t.submit()
@@ -489,4 +560,7 @@
 
 
 if __name__ == "__main__":
-    main()
+    if IS_GH:
+        main_gh()
+    else:
+        main_tc()
diff --git a/CI/docker.py b/CI/docker.py
index c516aac37..dc70579d1 100644
--- a/CI/docker.py
+++ b/CI/docker.py
@@ -256,7 +256,10 @@ def prepare_params(self, params):
         for v in volumes:
             run_cmd.append(f"--volume=./{v}:/{v}")
         for k, v in params.pop("env", {}).items():
-            run_cmd.append(f"--env={k}={v}")
+            if v == f"${k}":
+                run_cmd.append(f"--env={k}")
+            else:
+                run_cmd.append(f"--env={k}={v}")
         for cap in params.pop("caps", []):
             run_cmd.append(f"--cap-add={cap}")
         run_cmd.append(image)
diff --git a/CI/run.py b/CI/run.py
new file mode 100644
index 000000000..a4c6d022c
--- /dev/null
+++ b/CI/run.py
@@ -0,0 +1,144 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import errno
+import io
+import os
+import platform
+import shutil
+import subprocess
+import sys
+import tarfile
+import tempfile
+import time
+
+import decision
+from tasks import Task
+
+
+def link_or_copy(src, dest):
+    try:
+        os.link(src, dest)
+    except OSError as e:
+        if e.errno == errno.EXDEV:
+            shutil.copy2(src, dest)
+        else:
+            raise
+
+
+def run_task(task, cwd, out=None, cache=None, recurse=True):
+    id = task.id
+    key = task.key
+    task = task.task
+
+    expected_system = {
+        "windows": ("Windows", "AMD64"),
+        "linux": ("Linux", "x86_64"),
+        "osx": ("Darwin", "x86_64"),
+        "macos": ("Darwin", "arm64"),
+    }.get(task["workerType"], (None, None))
+    if (platform.system(), platform.machine()) != expected_system:
+        name = task.get("metadata", {}).get("name")
+        raise RuntimeError(
+            f"Cannot run '{name}' on {platform.system()} {platform.machine()}"
+        )
+
+    payload = task.get("payload", {})
+    with tempfile.TemporaryDirectory(prefix="task", dir=cwd) as task_dir:
+        for mount in payload.get("mounts", []):
+            content = mount.get("content", {})
+            task_id = content["taskId"]
+            dep_task = Task.by_id[task_id]
+            if cache:
+                artifacts_base_dir = os.path.join(cache, dep_task.key)
+            else:
+                artifacts_base_dir = os.path.join(cwd, task_id)
+            artifact = os.path.join(artifacts_base_dir, content["artifact"])
+            if not os.path.exists(artifact):
+                if recurse:
+                    run_task(dep_task, cwd, cache=cache)
+                else:
+                    raise RuntimeError(f"Missing dependency {artifact}")
+
+            if directory := mount.get("directory"):
+                assert "file" not in mount
+                directory = os.path.join(task_dir, directory)
+                assert mount.get("format", "tar.zst")
+                print(f"Extracting {os.path.basename(artifact)}", file=sys.stderr)
+                start = time.monotonic()
+                with subprocess.Popen(
+                    ["zstd", "-cd", artifact], stdout=subprocess.PIPE
+                ) as proc:
+                    stdout = io.BufferedReader(proc.stdout, 1024 * 1024)
+                    with tarfile.open(fileobj=stdout, mode="r|") as tar:
+                        for tarinfo in tar:
+                            # We want to preserve file mode, but not timestamps. Owner would only
+                            # matter when running as Admin/root, but we're not expecting to be.
+                            tar.extract(tarinfo, path=directory, set_attrs=False)
+                            if tarinfo.type == tarfile.REGTYPE:
+                                os.chmod(
+                                    os.path.join(directory, tarinfo.name), tarinfo.mode
+                                )
+                end = time.monotonic()
+                print(f"Took {end - start:.2f}s", file=sys.stderr)
+            elif file := mount.get("file"):
+                assert "directory" not in mount
+                link_or_copy(artifact, os.path.join(task_dir, file))
+            else:
+                assert False
+
+        env = os.environ.copy()
+        env.update(payload.get("env", {}))
+        if task["workerType"] == "windows":
+            task_cmd = os.path.join(task_dir, "task.cmd")
+            with open(task_cmd, "w") as fh:
+                fh.write("\n".join(payload.get("command", [])))
+            subprocess.check_call([os.path.abspath(task_cmd)], cwd=task_dir, env=env)
+        else:
+            for command in payload.get("command", []):
+                subprocess.check_call(command, cwd=task_dir, env=env)
+        if cache:
+            artifacts_base_dir = os.path.join(cache, key)
+        else:
+            artifacts_base_dir = os.path.join(cwd, id)
+        if out:
+            os.makedirs(out, exist_ok=True)
+        for artifact in payload.get("artifacts", []):
+            assert artifact.get("type") == "file"
+            dest = os.path.join(artifacts_base_dir, artifact["name"])
+            os.makedirs(os.path.dirname(dest), exist_ok=True)
+            assert not artifact["name"].startswith("/")
+            link_or_copy(os.path.join(task_dir, artifact["path"]), dest)
+            if out:
+                link_or_copy(dest, os.path.join(out, os.path.basename(dest)))
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--cache", type=str, metavar="PATH")
+    parser.add_argument("--out", type=str, metavar="PATH")
+    parser.add_argument("--no-recurse", action="store_true")
+    parser.add_argument("task")
+    args = parser.parse_args()
+    decision.tasks()
+
+    with tempfile.TemporaryDirectory(prefix="run_task") as tmpdir:
+        for t in Task.by_id.values():
+            if t.task.get("metadata", {}).get("name") == args.task:
+                run_task(
+                    t,
+                    cwd=tmpdir,
+                    out=args.out,
+                    cache=args.cache,
+                    recurse=not args.no_recurse,
+                )
+                break
+        else:
+            print(f"Unknown task: {args.task}", file=sys.stderr)
+            return 1
+
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/CI/tasks.py b/CI/tasks.py
index 3d3be8fe4..508838a8b 100644
--- a/CI/tasks.py
+++ b/CI/tasks.py
@@ -14,12 +14,13 @@
 from pkg_resources import parse_version  # noqa: F401
 from variables import *  # noqa: F403
 
-if os.environ.get("DETERMINISTIC"):
+if DETERMINISTIC:
+    import hashlib
     import random
     from uuid import UUID
 
     rand = random.Random()
-    rand.seed(0)
+    rand.seed(int(hashlib.sha256(TC_COMMIT.encode()).hexdigest(), 16))
 
     def uuid4():  # noqa: F811
         return UUID(int=rand.getrandbits(128), version=4)
@@ -53,7 +54,7 @@ def __add__(self, other):
 
 
 task_group_id = os.environ.get("TC_GROUP_ID") or os.environ.get("TASK_ID") or slugid()
-if os.environ.get("DETERMINISTIC"):
+if DETERMINISTIC:
     now = datetime.fromtimestamp(0)
 else:
     now = datetime.utcnow()
@@ -87,7 +88,7 @@ class Existing(str):
 
     def __init__(self):
         super(Index, self).__init__()
-        if os.environ.get("NO_INDEX"):
+        if NO_INDEX:
            self.session = None
        else:
            import requests
@@ -189,6 +190,7 @@ def checkout(repo=None, commit=None, dest="repo"):
     commit = commit or TC_COMMIT
     return [
         "git clone -n {} {}".format(repo, dest),
+        "git -C {} fetch origin {}".format(dest, commit),
         "git -c core.autocrlf=input -c advice.detachedHead=false"
         " -C {} checkout {}".format(dest, commit),
     ]
@@ -226,7 +228,7 @@ def __init__(self, **kwargs):
         elif k == "description":
             task["metadata"][k] = task["metadata"]["name"] = v
         elif k == "index":
-            if TC_IS_PUSH and TC_BRANCH != "try":
+            if IS_GH or (TC_IS_PUSH and TC_BRANCH != "try"):
                 task["routes"] = ["index.project.git-cinnabar.{}".format(v)]
         elif k == "expireIn":
             value = v.split()
@@ -372,6 +374,13 @@ def file_format(url):
         self.task = task
         Task.by_id.setdefault(self.id, self)
 
+    @property
+    def key(self):
+        if routes := self.task.get("routes"):
+            assert len(routes) == 1
+            return routes[0]
+        return self.id
+
     def __str__(self):
         return self.id
 
diff --git a/CI/variables.py b/CI/variables.py
index 350e4ccbf..68da30457 100644
--- a/CI/variables.py
+++ b/CI/variables.py
@@ -20,8 +20,8 @@
 DEFAULT_DATA = {
     "repo_name": "git-cinnabar",
     "login": "glandium",
-    "commit": "HEAD",
-    "branch": "",
+    "commit": os.environ.get("GITHUB_SHA", "HEAD"),
+    "branch": os.environ.get("GITHUB_REF_NAME", ""),
     "decision_id": "",
 }
 DEFAULT_DATA["repo_url"] = "https://github.com/{}/{}".format(
@@ -46,7 +46,21 @@ def get(k):
 TC_BASE_REPO_NAME = get("base_repo_name")
 
 TC_ACTION = os.environ.get("TC_ACTION")
-TC_IS_PUSH = os.environ.get("TC_IS_PUSH") == "1"
 
 DEFAULT_REPO = "https://hg.mozilla.org/users/mh_glandium.org/jqplot"
 REPO = os.environ.get("REPO", DEFAULT_REPO)
+
+IS_GH = "GITHUB_RUN_ID" in os.environ
+IS_TC = "TC_GROUP_ID" in os.environ
+if IS_TC:
+    NO_INDEX = os.environ.get("NO_INDEX")
+    TC_IS_PUSH = os.environ.get("TC_IS_PUSH") == "1"
+    DETERMINISTIC = os.environ.get("DETERMINISTIC")
+elif IS_GH:
+    NO_INDEX = True
+    TC_IS_PUSH = os.environ.get("GITHUB_EVENT_NAME") == "push"
+    DETERMINISTIC = True
+else:
+    NO_INDEX = True
+    TC_IS_PUSH = True
+    DETERMINISTIC = True