diff --git a/scripts/build-openstack-image.sh b/scripts/build-openstack-image.sh
index 25f177d07..3f7cc8d0d 100755
--- a/scripts/build-openstack-image.sh
+++ b/scripts/build-openstack-image.sh
@@ -12,6 +12,8 @@ RUNNER_TAR_URL="$1"
HTTP_PROXY="$2"
HTTPS_PROXY="$3"
NO_PROXY="$4"
+DOCKER_PROXY_SERVICE_CONF="$5"
+DOCKER_PROXY_CONF="$6"
# cleanup any existing mounts
cleanup() {
@@ -94,21 +96,35 @@ df -h # print disk free space
DEBIAN_FRONTEND=noninteractive /usr/bin/apt-get update -yq
DEBIAN_FRONTEND=noninteractive /usr/bin/apt-get upgrade -yq
DEBIAN_FRONTEND=noninteractive /usr/bin/apt-get install docker.io npm python3-pip shellcheck jq wget unzip gh -yq
+ln -s /usr/bin/python3 /usr/bin/python
# Uninstall unattended-upgrades, to avoid lock errors when unattended-upgrades is active in the runner
DEBIAN_FRONTEND=noninteractive /usr/bin/apt-get purge unattended-upgrades -yq
-if [[ -n "$HTTP_PROXY" ]]; then
- /snap/bin/lxc exec builder -- /usr/bin/npm config set proxy "$HTTP_PROXY"
-fi
-if [[ -n "$HTTPS_PROXY" ]]; then
- /snap/bin/lxc exec builder -- /usr/bin/npm config set https-proxy "$HTTPS_PROXY"
-fi
/usr/sbin/useradd -m ubuntu
/usr/bin/npm install --global yarn
/usr/sbin/groupadd microk8s
/usr/sbin/usermod -aG microk8s ubuntu
/usr/sbin/usermod -aG docker ubuntu
+/usr/bin/chmod 777 /usr/local/bin
+
+# Proxy configs
+if [[ -n "$HTTP_PROXY" ]]; then
+ /usr/bin/npm config set proxy "$HTTP_PROXY"
+fi
+if [[ -n "$HTTPS_PROXY" ]]; then
+ /usr/bin/npm config set https-proxy "$HTTPS_PROXY"
+fi
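+# DOCKER_PROXY_SERVICE_CONF is a systemd drop-in rendered from templates/systemd-docker-proxy.j2.
+# It typically looks like (example values only):
+#   [Service]
+#   Environment="HTTP_PROXY=http://proxy.example.com:3128"
+#   Environment="HTTPS_PROXY=http://proxy.example.com:3128"
+#   Environment="NO_PROXY=localhost,127.0.0.1"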
+if [[ -n "$DOCKER_PROXY_SERVICE_CONF" ]]; then
+ mkdir -p /etc/systemd/system/docker.service.d
+ echo "$DOCKER_PROXY_SERVICE_CONF" > /etc/systemd/system/docker.service.d/http-proxy.conf
+fi
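+# DOCKER_PROXY_CONF is the Docker client proxy configuration built in
+# openstack_manager._build_image_command, e.g. (illustrative values):
+#   {"proxies": {"default": {"httpProxy": "http://proxy.example.com:3128",
+#    "httpsProxy": "http://proxy.example.com:3128", "noProxy": "localhost,127.0.0.1"}}}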
+if [[ -n "$DOCKER_PROXY_CONF" ]]; then
+ mkdir -p /root/.docker
+ echo "$DOCKER_PROXY_CONF" > /root/.docker/config.json
+ mkdir -p /home/ubuntu/.docker
+ echo "$DOCKER_PROXY_CONF" > /home/ubuntu/.docker/config.json
+fi
# Reduce image size
/usr/bin/npm cache clean --force
@@ -121,8 +137,8 @@ DEBIAN_FRONTEND=noninteractive /usr/bin/apt-get clean
/usr/bin/wget https://github.com/mikefarah/yq/releases/latest/download/extract-checksum.sh -O extract-checksum.sh
/usr/bin/bash extract-checksum.sh SHA-256 "yq_linux_$BIN_ARCH" | /usr/bin/awk '{print \$2,\$1}' | /usr/bin/sha256sum -c | /usr/bin/grep OK
rm checksums checksums_hashes_order extract-checksum.sh
-chmod 755 yq_linux_$BIN_ARCH
-mv yq_linux_$BIN_ARCH /usr/bin/yq
+/usr/bin/chmod 755 yq_linux_$BIN_ARCH
+/usr/bin/mv yq_linux_$BIN_ARCH /usr/bin/yq
# Download runner bin and verify checksum
mkdir -p /home/ubuntu/actions-runner && cd /home/ubuntu/actions-runner
diff --git a/src-docs/openstack_manager.py.md b/src-docs/openstack_manager.py.md
index cad6898a2..c33d52f30 100644
--- a/src-docs/openstack_manager.py.md
+++ b/src-docs/openstack_manager.py.md
@@ -13,7 +13,7 @@ Module for handling interactions with OpenStack.
---
-
+
## function `list_projects`
@@ -35,7 +35,7 @@ It currently returns objects directly from the sdk, which may not be ideal (mapp
---
-
+
## function `build_image`
@@ -74,7 +74,7 @@ Build and upload an image to OpenStack.
---
-
+
## function `create_instance_config`
@@ -101,14 +101,17 @@ Create an instance config from charm data.
---
-
+
## function `create_instance`
```python
create_instance(
    cloud_config: dict[str, dict],
-    instance_config: InstanceConfig
+    instance_config: InstanceConfig,
+    proxies: Optional[ProxyConfig] = None,
+    dockerhub_mirror: Optional[str] = None,
+    ssh_debug_connections: list[SSHDebugConnection] | None = None
) → Server
```
diff --git a/src/charm.py b/src/charm.py
index cbc04fe21..363630470 100755
--- a/src/charm.py
+++ b/src/charm.py
@@ -367,6 +367,9 @@ def _on_install(self, _event: InstallEvent) -> None:
            instance = openstack_manager.create_instance(
                cloud_config=state.charm_config.openstack_clouds_yaml,
                instance_config=instance_config,
+                proxies=state.proxy_config,
+                dockerhub_mirror=state.charm_config.dockerhub_mirror,
+                ssh_debug_connections=state.ssh_debug_connections
            )
            logger.info("OpenStack instance: %s", instance)
            # Test out openstack integration and then go
diff --git a/src/openstack_manager.py b/src/openstack_manager.py
index 0634c0a50..7a6d6c6c0 100644
--- a/src/openstack_manager.py
+++ b/src/openstack_manager.py
@@ -2,6 +2,7 @@
# See LICENSE file for licensing details.
"""Module for handling interactions with OpenStack."""
+import json
import logging
import secrets
from dataclasses import dataclass
@@ -20,7 +21,7 @@
from openstack.exceptions import OpenStackCloudException
from openstack.identity.v3.project import Project
-from charm_state import Arch, ProxyConfig
+from charm_state import Arch, ProxyConfig, SSHDebugConnection
from errors import OpenStackUnauthorizedError, RunnerBinaryError
from github_client import GithubClient
from github_type import RunnerApplication
@@ -100,6 +101,25 @@ def _build_image_command(runner_info: RunnerApplication, proxies: ProxyConfig) -
    https_proxy = proxies.https or ""
    no_proxy = proxies.no_proxy or ""

+    environment = jinja2.Environment(loader=jinja2.FileSystemLoader("templates"), autoescape=True)
+    docker_proxy_service_conf_content = environment.get_template("systemd-docker-proxy.j2").render(
+        proxies=proxies
+    )
+    docker_proxy = {
+        "proxies": {
+            "default": {
+                key: value
+                for key, value in (
+                    ("httpProxy", proxies.http),
+                    ("httpsProxy", proxies.https),
+                    ("noProxy", proxies.no_proxy),
+                )
+                if value
+            }
+        }
+    }
+    docker_client_proxy_content = json.dumps(docker_proxy)
+
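+    # The rendered systemd drop-in and Docker client config are passed to
+    # scripts/build-openstack-image.sh as positional arguments "$5" and "$6"
+    # (DOCKER_PROXY_SERVICE_CONF and DOCKER_PROXY_CONF).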
    cmd = [
        "/usr/bin/bash",
        BUILD_OPENSTACK_IMAGE_SCRIPT_FILENAME,
@@ -107,6 +127,8 @@ def _build_image_command(runner_info: RunnerApplication, proxies: ProxyConfig) -
        http_proxy,
        https_proxy,
        no_proxy,
+        docker_proxy_service_conf_content,
+        docker_client_proxy_content,
    ]
    return cmd
@@ -211,6 +233,9 @@ class InstanceLaunchError(Exception):
def create_instance(
    cloud_config: dict[str, dict],
    instance_config: InstanceConfig,
+    proxies: Optional[ProxyConfig] = None,
+    dockerhub_mirror: Optional[str] = None,
+    ssh_debug_connections: list[SSHDebugConnection] | None = None,
) -> openstack.compute.v2.server.Server:
    """Create an OpenStack instance.
@@ -225,11 +250,18 @@ def create_instance(
        The created server.
    """
    environment = jinja2.Environment(loader=jinja2.FileSystemLoader("templates"), autoescape=True)
+    env_contents = environment.get_template("env.j2").render(
+        proxies=proxies,
+        pre_job_script="",
+        dockerhub_mirror=dockerhub_mirror,
+        ssh_debug_info=(secrets.choice(ssh_debug_connections) if ssh_debug_connections else None),
+    )
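+    # env_contents is written to ~/actions-runner/.env on the instance by
+    # templates/openstack-userdata.sh.j2, giving the runner its proxy, Docker Hub mirror
+    # and SSH debug settings.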
    cloud_userdata = environment.get_template("openstack-userdata.sh.j2").render(
        github_url=f"https://github.com/{instance_config.github_path.path()}",
        token=instance_config.registration_token,
        instance_labels=",".join(instance_config.labels),
        instance_name=instance_config.name,
+        env_contents=env_contents,
    )
    try:
diff --git a/templates/openstack-userdata.sh.j2 b/templates/openstack-userdata.sh.j2
index 23902519b..082fa9e8a 100644
--- a/templates/openstack-userdata.sh.j2
+++ b/templates/openstack-userdata.sh.j2
@@ -2,6 +2,9 @@
set -e
+# Write .env contents
+su - ubuntu -c 'cd ~/actions-runner && echo "{{ env_contents }}" > .env'
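+# (env_contents is rendered from templates/env.j2 by openstack_manager.create_instance and
+# carries the proxy, Docker Hub mirror and SSH debug settings for the runner)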
+
# Create the runner and start the configuration experience
su - ubuntu -c "cd ~/actions-runner && ./config.sh \
--url {{ github_url }} \
diff --git a/tests/integration/helpers.py b/tests/integration/helpers.py
index 473977bb2..c08d57953 100644
--- a/tests/integration/helpers.py
+++ b/tests/integration/helpers.py
@@ -10,7 +10,8 @@
import typing
from asyncio import sleep
from datetime import datetime, timezone
-from typing import Any, Awaitable, Callable, Union
+from functools import partial
+from typing import Any, Awaitable, Callable, ParamSpec, TypeVar, Union
import github
import juju.version
@@ -357,15 +358,15 @@ async def deploy_github_runner_charm(
storage["runner"] = {"pool": "rootfs", "size": 11}
default_config = {
- "path": path,
- "token": token,
- "virtual-machines": 0,
- "denylist": "10.10.0.0/16",
- "test-mode": "insecure",
- "reconcile-interval": reconcile_interval,
- "runner-storage": runner_storage,
- }
-
+ "path": path,
+ "token": token,
+ "virtual-machines": 0,
+ "denylist": "10.10.0.0/16",
+ "test-mode": "insecure",
+ "reconcile-interval": reconcile_interval,
+ "runner-storage": runner_storage,
+ }
+
if config:
default_config.update(config)
@@ -465,25 +466,35 @@ async def _assert_workflow_run_conclusion(
)
-async def _wait_for_workflow_to_complete(
-    unit: Unit, workflow: Workflow, conclusion: str, start_time: datetime
-):
-    """Wait for the workflow to complete.
+def _get_latest_run(
+    workflow: Workflow, start_time: datetime, branch: Branch | None = None
+) -> WorkflowRun | None:
+    """Get the latest run after start_time.

    Args:
-        unit: The unit which contains the runner.
-        workflow: The workflow to wait for.
-        conclusion: The workflow conclusion to wait for.
-        start_time: The start time of the workflow.
+        workflow: The workflow to get the latest run for.
+        start_time: The minimum start time of the run.
+
+    Returns:
+        The latest workflow run if the workflow has started. None otherwise.
    """
-    runner_name = await get_runner_name(unit)
-    await _wait_until_runner_is_used_up(runner_name, unit)
-    # Wait for the workflow log to contain the conclusion
-    await sleep(120)
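+    # The `created` filter uses GitHub's search syntax; ">=<timestamp>" limits the results
+    # to runs created at or after start_time.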
+    try:
+        return workflow.get_runs(
+            branch=branch, created=f">={start_time.isoformat(timespec='seconds')}"
+        )[0]
+    except IndexError:
+        return None
-    await _assert_workflow_run_conclusion(
-        runner_name=runner_name, conclusion=conclusion, workflow=workflow, start_time=start_time
-    )
+
+def _is_workflow_run_complete(run: WorkflowRun) -> bool:
+    """Check whether the workflow run has completed.
+
+    Args:
+        run: The workflow run to check status for.
+
+    Returns:
+        True if the run status is "completed", False otherwise.
+    """
+    if run.update():
+        return run.status == "completed"
+    return False
async def dispatch_workflow(
@@ -492,6 +503,7 @@ async def dispatch_workflow(
    github_repository: Repository,
    conclusion: str,
    workflow_id_or_name: str,
+    dispatch_input: dict | None = None,
):
"""Dispatch a workflow on a branch for the runner to run.
@@ -513,18 +525,33 @@ async def dispatch_workflow(
    workflow = github_repository.get_workflow(id_or_file_name=workflow_id_or_name)
    # The `create_dispatch` returns True on success.
-    assert workflow.create_dispatch(branch, {"runner": app.name})
-    await _wait_for_workflow_to_complete(
-        unit=app.units[0], workflow=workflow, conclusion=conclusion, start_time=start_time
+    assert workflow.create_dispatch(
+        branch, dispatch_input or {"runner": app.name}
+    ), "Failed to create workflow dispatch"
+
+    # There is a very small chance of selecting a run not created by the dispatch above.
+    run = await wait_for(
+        partial(_get_latest_run, workflow=workflow, start_time=start_time, branch=branch)
    )
+    await wait_for(partial(_is_workflow_run_complete, run=run), timeout=60 * 30, check_interval=60)
+
+    # The run object is updated by the _is_workflow_run_complete function above.
+    assert (
+        run.conclusion == conclusion
+    ), f"Unexpected run conclusion, expected: {conclusion}, got: {run.conclusion}"
+
    return workflow
+P = ParamSpec("P")
+R = TypeVar("R")
+
+
async def wait_for(
-    func: Callable[[], Union[Awaitable, Any]],
+    func: Callable[P, R],
    timeout: int = 300,
    check_interval: int = 10,
-) -> Any:
+) -> R:
    """Wait for function execution to become truthy.

    Args:
diff --git a/tests/integration/test_openstack.py b/tests/integration/test_openstack.py
index 9515f53c0..9a78139b8 100644
--- a/tests/integration/test_openstack.py
+++ b/tests/integration/test_openstack.py
@@ -41,6 +41,7 @@ async def test_openstack_integration(
        github_repository=github_repository,
        conclusion="success",
        workflow_id_or_name=DISPATCH_E2E_TEST_RUN_WORKFLOW_FILENAME,
+        dispatch_input={"runner-tag": app_openstack_runner.name},
    )
    # 1. the workflow run completes successfully.
    workflow_run: WorkflowRun = workflow.get_runs()[0]