diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 48a3f294..c895efe4 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -50,10 +50,10 @@ jobs:
     runs-on: ubuntu-latest
     env:
       PYTHONIOENCODING: utf-8
-      NEURO_STAGING_URL: ${{ secrets.NEURO_STAGING_URL }}
-      APOLO_TOKEN: ${{ secrets.NEURO_TOKEN }}
+      NEURO_STAGING_URL: "https://api.dev.apolo.us/api/v1"
+      APOLO_TOKEN: ${{ secrets.CLIENT_TEST_E2E_USER_NAME }}
       APOLO_CLUSTER: default
-      APOLO_PROJECT: e2e-tests
+      APOLO_PROJECT: apolo-extras
       APOLO_EXTRAS_PRESET: cpu-small
       # Note: ${{ github.sha }} not working, see https://github.com/actions/checkout/issues/299
       SHA: ${{ github.event.pull_request.head.sha || github.sha }}
@@ -97,6 +97,11 @@ jobs:
       run: |
        docker push ghcr.io/neuro-inc/apolo-extras:$SHA

+    - name: Prepare volumes and disks
+      shell: bash
+      run: |
+        make prepare-e2e-test
+
   test:
     name: Run tests
     needs: [pretest]
@@ -130,7 +135,7 @@ jobs:
     - name: Authorize GCP
       uses: google-github-actions/auth@v2
       with:
-        credentials_json: ${{ secrets.E2E_COOKIECUTTER_GCP_SA_KEY }}
+        credentials_json: ${{ secrets.E2E_TESTS_GCP_KEY }} # e2e-tests@development-421920.iam.gserviceaccount.com
     - name: Setup gcloud
       uses: google-github-actions/setup-gcloud@v2

@@ -171,10 +176,10 @@ jobs:

     - name: Configure environment
       env:
-        NEURO_STAGING_URL: ${{ secrets.NEURO_STAGING_URL }}
-        APOLO_TOKEN: ${{ secrets.NEURO_TOKEN }}
+        NEURO_STAGING_URL: "https://api.dev.apolo.us/api/v1"
+        APOLO_TOKEN: ${{ secrets.CLIENT_TEST_E2E_USER_NAME }}
         APOLO_CLUSTER: default
-        APOLO_PROJECT: e2e-tests
+        APOLO_PROJECT: apolo-extras
         APOLO_EXTRAS_PRESET: cpu-small
         AZURE_SAS_TOKEN: ${{ secrets.AZURE_SAS_TOKEN }}
       run: |
diff --git a/Makefile b/Makefile
index a299442a..1fdcc9dd 100644
--- a/Makefile
+++ b/Makefile
@@ -15,6 +15,10 @@ lint: format
 format:
 	pre-commit run --all-files --show-diff-on-failure

+.PHONY: prepare-e2e-test
+prepare-e2e-test:
+	. tests/e2e/data/prepare.sh
+
 .PHONY: test_e2e
 test_e2e:
 	pytest -n $(PYTEST_PARALLEL) $(PYTEST_FLAGS) -m "(not serial) and (not smoke_only)" \
diff --git a/apolo_extras/assets/merge_docker_auths.sh b/apolo_extras/assets/merge_docker_auths.sh
index cdbba156..c3898d28 100755
--- a/apolo_extras/assets/merge_docker_auths.sh
+++ b/apolo_extras/assets/merge_docker_auths.sh
@@ -14,7 +14,7 @@ then
 fi
 for auth in ${extra_auths}; do
     key="${auth%%=*}"
-    value="${auth#*=}"
+    value=$(printenv $key)
     if [ -f "${value}" ]; then # ENV var points to the file
        jq < ${value} > /dev/null 2>&1 && res=`echo $res | cat - ${value} | jq -sc 'reduce .[] as $item ({}; . * $item)'`

diff --git a/apolo_extras/image.py b/apolo_extras/image.py
index 1b3d8f19..b740fab9 100644
--- a/apolo_extras/image.py
+++ b/apolo_extras/image.py
@@ -362,7 +362,12 @@ async def _build_image(
         client, image_uri_str, project_name, scheme="image"
     )
     async with get_platform_client(cluster=cluster) as client:
-        image_uri = client.parse.str_to_uri(image_uri_str, project_name=project_name)
+        try:
+            image_uri = str(
+                client.parse.str_to_uri(image_uri_str, project_name=project_name)
+            )
+        except ValueError:
+            image_uri = image_uri_str
         image = await _parse_platform_image(str(image_uri))
         context_uri = client.parse.str_to_uri(
             context,
diff --git a/requirements/base.txt b/requirements/base.txt
index 4cf8cc62..501c76f5 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -1,5 +1,5 @@
-apolo-cli==24.10.1
-apolo-sdk==24.11.0
+apolo-cli==24.12.3
+apolo-sdk==24.12.3
 click==8.1.7
 pyyaml==6.0.2
 toml==0.10.2
diff --git a/requirements/test.txt b/requirements/test.txt
index 35130628..536da8e6 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -4,7 +4,6 @@ deepdiff==8.0.1
 pytest==8.3.3
 pytest-asyncio==0.24.0
 pytest-clarity==1.0.1
-pytest-lazy-fixture==0.6.3
 pytest-xdist==3.6.1
 tenacity==9.0.0
 towncrier==24.8.0
diff --git a/setup.py b/setup.py
index abdb88ce..a47989c0 100644
--- a/setup.py
+++ b/setup.py
@@ -20,7 +20,7 @@
     url="https://github.com/neuro-inc/neuro-extras",
     packages=find_packages(),
     install_requires=[
-        "apolo-cli>=24.10.1",
+        "apolo-cli>=24.12.3",
         "click>=8.0",
         "toml>=0.10.0",
         "pyyaml>=3.0",
diff --git a/tests/e2e/conftest.py b/tests/e2e/conftest.py
index 1a1c8ea2..dca873e6 100644
--- a/tests/e2e/conftest.py
+++ b/tests/e2e/conftest.py
@@ -46,7 +46,7 @@
 TEST_DATA_COPY_PLATFORM_TO_CLOUD = True

 CLOUD_SOURCE_PREFIXES: Dict[str, str] = {
-    "gs": "gs://mlops-ci-e2e/assets/data",
+    "gs": "gs://mlops-ci-e2e-tests/assets/data",
     # "s3": "s3://because-clear-taken-cotton/assets/data",
     # "azure+https": "azure+https://neuromlops.blob.core.windows.net/cookiecutter-e2e/assets/data",  # noqa: E501
     "http": "http://because-clear-taken-cotton.s3.amazonaws.com/assets/data",
@@ -55,7 +55,7 @@

 CLOUD_DESTINATION_PREFIXES: Dict[str, str] = {
     # "s3": "s3://because-clear-taken-cotton/data_cp",
-    "gs": "gs://mlops-ci-e2e/data_cp",
+    "gs": "gs://mlops-ci-e2e-tests/data_cp",
     # "azure+https": "azure+https://neuromlops.blob.core.windows.net/cookiecutter-e2e/data_cp",  # noqa: E501
     "http": "http://because-clear-taken-cotton.s3.amazonaws.com/data_cp",
     "https": "https://because-clear-taken-cotton.s3.amazonaws.com/data_cp",
@@ -66,7 +66,7 @@
     # apolo cp -rT tests/assets/data storage:e2e/assets/data
     "storage": "storage:e2e/assets/data",
     # apolo disk create --name extras-e2e --timeout-unused 1000d 100M
-    # apolo run -v storage:e2e/assets/data:/storage -v disk:extras-e2e:/disk alpine -- cp -rT /storage /disk/assets/data  # noqa: E501
+    # apolo run -v storage:e2e/assets/data:/storage -v disk:extras-e2e:/disk alpine -- sh -c "mkdir -p /disk/assets && cp -rT /storage /disk/assets/data"  # noqa: E501
     "disk": f"disk:extras-e2e/assets/data",
 }

@@ -260,7 +260,7 @@ def _f(
         args.extend(
             [
                 "-v",
-                "secret:neuro-extras-gcp:/gcp-creds.txt",
+                "secret:apolo-extras-gcp:/gcp-creds.txt",
                 "-e",
                 "GOOGLE_APPLICATION_CREDENTIALS=/gcp-creds.txt",
             ]
diff --git a/tests/e2e/data/prepare.sh b/tests/e2e/data/prepare.sh
new file mode 100644
index 00000000..d851a57e
--- /dev/null
+++ b/tests/e2e/data/prepare.sh
@@ -0,0 +1,11 @@
+#! /bin/sh
+# see tests/e2e/conftest.py line 64 comments
+
+apolo mkdir -p storage:e2e/assets/data
+apolo cp -rT tests/assets/data storage:e2e/assets/data
+
+apolo disk get extras-e2e || exit_status=$?
+if [ "${exit_status:-0}" -ne 0 ]; then
+  apolo disk create --name extras-e2e --timeout-unused 1000d 100M
+fi
+apolo run -v storage:e2e/assets/data:/storage -v disk:extras-e2e:/disk alpine -- sh -c "mkdir -p /disk/assets && cp -rT /storage /disk/assets/data"
diff --git a/tests/e2e/data/resources.py b/tests/e2e/data/resources.py
index be27096a..5c0e89b9 100644
--- a/tests/e2e/data/resources.py
+++ b/tests/e2e/data/resources.py
@@ -255,7 +255,7 @@ def get_extra_args(self) -> List[str]:
         if "gs" in schemas:
             extra_args += [
                 "-v",
-                "secret:neuro-extras-gcp:/gcp-creds.txt",
+                "secret:apolo-extras-gcp:/gcp-creds.txt",
                 "-e",
                 "GOOGLE_APPLICATION_CREDENTIALS=/gcp-creds.txt",
             ]
diff --git a/tests/e2e/data/test_data_cp.py b/tests/e2e/data/test_data_cp.py
index a28566db..1d3dbd73 100644
--- a/tests/e2e/data/test_data_cp.py
+++ b/tests/e2e/data/test_data_cp.py
@@ -10,7 +10,6 @@
 import apolo_sdk
 import pytest
 from apolo_sdk import Client
-from pytest_lazyfixture import lazy_fixture  # type: ignore
 from tenacity import retry, stop_after_attempt, wait_random_exponential

 from ..conftest import (
@@ -135,33 +134,29 @@ def tempdir_fixture() -> Iterator[str]:


 @pytest.mark.xfail(strict=False)  # TODO: remove when platform stabilizes
-@pytest.mark.parametrize(
-    argnames="config", argvalues=[lazy_fixture("data_copy_config")]
-)
 @pytest.mark.skipif(sys.platform == "win32", reason="tools don't work on Windows")
-def test_data_copy(config: CopyTestConfig, tempdir_fixture: str, disk: str) -> None:
-    config.source.patch_tempdir(tempdir_fixture)
-    config.source.patch_disk(disk)
-    config.destination.patch_tempdir(tempdir_fixture)
-    config.destination.patch_disk(disk)
-    _run_data_copy_test_from_config(config=config)
+def test_data_copy(
+    data_copy_config: CopyTestConfig, tempdir_fixture: str, disk: str
+) -> None:
+    data_copy_config.source.patch_tempdir(tempdir_fixture)
+    data_copy_config.source.patch_disk(disk)
+    data_copy_config.destination.patch_tempdir(tempdir_fixture)
+    data_copy_config.destination.patch_disk(disk)
+    _run_data_copy_test_from_config(config=data_copy_config)


 @pytest.mark.smoke
 @pytest.mark.smoke_only
 @pytest.mark.xfail(strict=False)  # TODO: remove when platform stabilizes
-@pytest.mark.parametrize(
-    argnames="config", argvalues=[lazy_fixture("data_copy_config_smoke")]
-)
 @pytest.mark.skipif(sys.platform == "win32", reason="tools don't work on Windows")
 def test_data_copy_smoke(
-    config: CopyTestConfig, tempdir_fixture: str, disk: str
+    data_copy_config_smoke: CopyTestConfig, tempdir_fixture: str, disk: str
 ) -> None:
-    config.source.patch_tempdir(tempdir_fixture)
-    config.source.patch_disk(disk)
-    config.destination.patch_tempdir(tempdir_fixture)
-    config.destination.patch_disk(disk)
-    _run_data_copy_test_from_config(config=config)
+    data_copy_config_smoke.source.patch_tempdir(tempdir_fixture)
+    data_copy_config_smoke.source.patch_disk(disk)
+    data_copy_config_smoke.destination.patch_tempdir(tempdir_fixture)
+    data_copy_config_smoke.destination.patch_disk(disk)
+    _run_data_copy_test_from_config(config=data_copy_config_smoke)


 @retry(stop=stop_after_attempt(5), wait=wait_random_exponential(min=10, max=60))
diff --git a/tests/unit/image/conftest.py b/tests/unit/image/conftest.py
index 6b9f42ab..4a0a0035 100644
--- a/tests/unit/image/conftest.py
+++ b/tests/unit/image/conftest.py
@@ -41,6 +41,7 @@ def _get_mock_clusters() -> t.Dict[str, apolo_sdk.Cluster]:
             resource_pools={},
             presets=_get_mock_presets(),
             orgs=[],
+            apps=apolo_sdk.AppsConfig(),
         ),
     }

diff --git a/tests/unit/image/test_remote_image_builder.py b/tests/unit/image/test_remote_image_builder.py
index d133725b..18e4c49a 100644
--- a/tests/unit/image/test_remote_image_builder.py
+++ b/tests/unit/image/test_remote_image_builder.py
@@ -26,9 +26,10 @@ async def test_image_builder__min_parameters(
     )

     expected_storage_build_root = URL(
-        "storage://mycluster/myproject/.builds/mocked-uuid-4"
+        "storage://mycluster/NO_ORG/myproject/.builds/mocked-uuid-4"
     )
     storage_mkdir_mock: mock.AsyncMock = remote_image_builder._client.storage.mkdir  # type: ignore  # noqa: E501
+    storage_mkdir_mock.assert_awaited()
     storage_mkdir_mock.assert_awaited_once_with(
         expected_storage_build_root, parents=True
     )
@@ -60,18 +61,18 @@ async def test_image_builder__min_parameters(
         "--life-span=4h",
         "--schedule-timeout=20m",
         "--project=myproject",
-        "--tag=kaniko-builds-image:image://mycluster/myproject/targetimage:latest",
-        "--volume=storage://mycluster/myproject/.builds/mocked-uuid-4/.docker.config.json:/kaniko/.docker/config.json:ro",  # noqa: E501
-        "--volume=storage://mycluster/myproject/.builds/mocked-uuid-4/context:/kaniko_context:rw",  # noqa: E501
+        "--tag=kaniko-builds-image:image://mycluster/NO_ORG/myproject/targetimage:latest",  # noqa: E501
+        "--volume=storage://mycluster/NO_ORG/myproject/.builds/mocked-uuid-4/.docker.config.json:/kaniko/.docker/config.json:ro",  # noqa: E501
+        "--volume=storage://mycluster/NO_ORG/myproject/.builds/mocked-uuid-4/context:/kaniko_context:rw",  # noqa: E501
         "--env=container=docker",
         "gcr.io/kaniko-project/executor:v1.20.0-debug",
     ]
     assert start_build_kaniko_args == [
         "--context=/kaniko_context",
         "--dockerfile=/kaniko_context/path/to/Dockerfile",
-        "--destination=registry.mycluster.noexists/myproject/targetimage:latest",
+        "--destination=registry.mycluster.noexists/NO_ORG/myproject/targetimage:latest",
         "--cache=true",
-        "--cache-repo=registry.mycluster.noexists/myproject/layer-cache/cache",
+        "--cache-repo=registry.mycluster.noexists/NO_ORG/myproject/layer-cache/cache",
         "--verbosity=info",
         "--image-fs-extract-retry=1",
         "--push-retry=3",
@@ -104,7 +105,7 @@ async def test_image_builder__full_parameters(
     )

     expected_storage_build_root = URL(
-        "storage://mycluster/myproject/.builds/mocked-uuid-4"
+        "storage://mycluster/NO_ORG/myproject/.builds/mocked-uuid-4"
     )
     storage_mkdir_mock: mock.AsyncMock = remote_image_builder._client.storage.mkdir  # type: ignore  # noqa: E501
     storage_mkdir_mock.assert_awaited_once_with(
@@ -141,11 +142,11 @@ async def test_image_builder__full_parameters(
         "--preset=custom-preset",
         "--tag=tag1",
         "--tag=tag2",
-        "--tag=kaniko-builds-image:image://mycluster/myproject/targetimage:latest",
+        "--tag=kaniko-builds-image:image://mycluster/NO_ORG/myproject/targetimage:latest",  # noqa: E501
         "--volume=storage:somevol:/mnt/vol1",
         "--volume=storage:/someproject2/somevol2:/mnt/vol2",
-        "--volume=storage://mycluster/myproject/.builds/mocked-uuid-4/.docker.config.json:/kaniko/.docker/config.json:ro",  # noqa: E501
-        "--volume=storage://mycluster/myproject/.builds/mocked-uuid-4/context:/kaniko_context:rw",  # noqa: E501
+        "--volume=storage://mycluster/NO_ORG/myproject/.builds/mocked-uuid-4/.docker.config.json:/kaniko/.docker/config.json:ro",  # noqa: E501
+        "--volume=storage://mycluster/NO_ORG/myproject/.builds/mocked-uuid-4/context:/kaniko_context:rw",  # noqa: E501
         "--env=ENV1=VAL1",
         "--env=ENV2=VAL2",
         "--env=container=docker",
@@ -154,9 +155,9 @@ async def test_image_builder__full_parameters(
     assert start_build_kaniko_args == [
         "--context=/kaniko_context",
         "--dockerfile=/kaniko_context/path/to/Dockerfile",
-        "--destination=registry.mycluster.noexists/myproject/targetimage:latest",
+        "--destination=registry.mycluster.noexists/NO_ORG/myproject/targetimage:latest",
         "--cache=true",
-        "--cache-repo=registry.mycluster.noexists/myproject/layer-cache/cache",
+        "--cache-repo=registry.mycluster.noexists/NO_ORG/myproject/layer-cache/cache",
         "--verbosity=info",
         "--image-fs-extract-retry=1",
         "--push-retry=3",
@@ -220,7 +221,7 @@ async def test_image_builder__custom_project(
     )

     expected_storage_build_root = URL(
-        "storage://mycluster/otherproject/.builds/mocked-uuid-4"
+        "storage://mycluster/NO_ORG/otherproject/.builds/mocked-uuid-4"
     )
     storage_mkdir_mock: mock.AsyncMock = remote_image_builder._client.storage.mkdir  # type: ignore  # noqa: E501
     storage_mkdir_mock.assert_awaited_once_with(
@@ -254,18 +255,18 @@ async def test_image_builder__custom_project(
         "--life-span=4h",
         "--schedule-timeout=20m",
         "--project=otherproject",
-        "--tag=kaniko-builds-image:image://mycluster/otherproject/targetimage:latest",
-        "--volume=storage://mycluster/otherproject/.builds/mocked-uuid-4/.docker.config.json:/kaniko/.docker/config.json:ro",  # noqa: E501
-        "--volume=storage://mycluster/otherproject/.builds/mocked-uuid-4/context:/kaniko_context:rw",  # noqa: E501
+        "--tag=kaniko-builds-image:image://mycluster/NO_ORG/otherproject/targetimage:latest",  # noqa: E501
+        "--volume=storage://mycluster/NO_ORG/otherproject/.builds/mocked-uuid-4/.docker.config.json:/kaniko/.docker/config.json:ro",  # noqa: E501
+        "--volume=storage://mycluster/NO_ORG/otherproject/.builds/mocked-uuid-4/context:/kaniko_context:rw",  # noqa: E501
         "--env=container=docker",
         "gcr.io/kaniko-project/executor:v1.20.0-debug",
     ]
     assert start_build_kaniko_args == [
         "--context=/kaniko_context",
         "--dockerfile=/kaniko_context/path/to/Dockerfile",
-        "--destination=registry.mycluster.noexists/otherproject/targetimage:latest",
+        "--destination=registry.mycluster.noexists/NO_ORG/otherproject/targetimage:latest",  # noqa: E501
         "--cache=true",
-        "--cache-repo=registry.mycluster.noexists/otherproject/layer-cache/cache",
+        "--cache-repo=registry.mycluster.noexists/NO_ORG/otherproject/layer-cache/cache",  # noqa: E501
         "--verbosity=info",
         "--image-fs-extract-retry=1",
         "--push-retry=3",
@@ -291,7 +292,7 @@ async def test_image_builder__storage_context(
     )

     expected_storage_build_root = URL(
-        "storage://mycluster/myproject/.builds/mocked-uuid-4"
+        "storage://mycluster/NO_ORG/myproject/.builds/mocked-uuid-4"
     )
     storage_mkdir_mock: mock.AsyncMock = remote_image_builder._client.storage.mkdir  # type: ignore  # noqa: E501
     storage_mkdir_mock.assert_awaited_once_with(
@@ -316,18 +317,18 @@ async def test_image_builder__storage_context(
         "--life-span=4h",
         "--schedule-timeout=20m",
         "--project=myproject",
-        "--tag=kaniko-builds-image:image://mycluster/myproject/targetimage:latest",
-        "--volume=storage://mycluster/myproject/.builds/mocked-uuid-4/.docker.config.json:/kaniko/.docker/config.json:ro",  # noqa: E501
-        "--volume=storage://mycluster/myproject/context:/kaniko_context:rw",
+        "--tag=kaniko-builds-image:image://mycluster/NO_ORG/myproject/targetimage:latest",  # noqa: E501
+        "--volume=storage://mycluster/NO_ORG/myproject/.builds/mocked-uuid-4/.docker.config.json:/kaniko/.docker/config.json:ro",  # noqa: E501
+        "--volume=storage://mycluster/NO_ORG/myproject/context:/kaniko_context:rw",
         "--env=container=docker",
         "gcr.io/kaniko-project/executor:v1.20.0-debug",
     ]
     assert start_build_kaniko_args == [
         "--context=/kaniko_context",
         "--dockerfile=/kaniko_context/path/to/Dockerfile",
-        "--destination=registry.mycluster.noexists/myproject/targetimage:latest",
+        "--destination=registry.mycluster.noexists/NO_ORG/myproject/targetimage:latest",
         "--cache=true",
-        "--cache-repo=registry.mycluster.noexists/myproject/layer-cache/cache",
+        "--cache-repo=registry.mycluster.noexists/NO_ORG/myproject/layer-cache/cache",
         "--verbosity=info",
         "--image-fs-extract-retry=1",
         "--push-retry=3",
diff --git a/tests/unit/test_select_job_preset.py b/tests/unit/test_select_job_preset.py
index 603e7b37..8bc1347c 100644
--- a/tests/unit/test_select_job_preset.py
+++ b/tests/unit/test_select_job_preset.py
@@ -1,4 +1,5 @@
 from decimal import Decimal
+from typing import Any

 import pytest
 from apolo_sdk import Preset
@@ -29,7 +30,11 @@ def test_cheapest_preset_is_selected(mock_client: MockApoloClient) -> None:
     assert selected_preset == "cheap"


-@pytest.mark.parametrize("preset", ["bad", "cheap_scheduled"])
+@pytest.fixture(params=["bad", "cheap_scheduled"])
+def preset(request: Any) -> str:
+    return request.param
+
+
 def test_user_selection_is_respected(mock_client: MockApoloClient, preset: str) -> None:
     mock_client.presets.update(FAKE_PRESETS)
     selected_preset = select_job_preset(