diff --git a/.github/workflows/core-test.yml b/.github/workflows/core-test.yml index 87a26215ce..afc67499ee 100644 --- a/.github/workflows/core-test.yml +++ b/.github/workflows/core-test.yml @@ -59,6 +59,7 @@ jobs: make test/smoke - name: Cleanup Smoke Test + if: always() env: PYTEST_ADDOPTS: --junitxml=junit/smoke-test-results-ocean/core.xml PORT_CLIENT_ID: ${{ secrets.PORT_CLIENT_ID }} diff --git a/.github/workflows/perf-test.yml b/.github/workflows/perf-test.yml new file mode 100644 index 0000000000..3f80c8710f --- /dev/null +++ b/.github/workflows/perf-test.yml @@ -0,0 +1,100 @@ +name: 🌊 Ocean Core Performance Tests + +on: + workflow_dispatch: + inputs: + batch_size: + type: choice + description: Batch size of requests against fake 3rd party API + default: "1000" + options: + - "10" + - "100" + - "200" + - "500" + - "1000" + entity_kb_size: + type: choice + description: Entity size in kb + default: "1" + options: + - "1" + - "5" + - "10" + - "25" + - "100" + - "500" + third_party_latency_ms: + type: choice + description: Latency in ms to each 3rd party API call + default: "0" + options: + - "0" + - "5" + - "10" + - "25" + - "100" + - "200" + - "500" + entities_amount: + type: choice + description: Explicit amount of entities created + default: "500" + options: + - "10" + - "50" + - "200" + - "500" + - "2500" + - "5000" + - "10000" + - "20000" + - "25000" + - "35000" +jobs: + test: + name: 🌊 Ocean Performance Tests + runs-on: ubuntu-latest + steps: + - name: Checkout Repo + uses: actions/checkout@v4 + + - name: Install poetry + run: pipx install poetry + + - name: Set up Python 3.12 + uses: actions/setup-python@v5 + with: + python-version: '3.12' + cache: 'poetry' + + - name: Install dependencies + run: | + make install + + - name: Run Performance Test + env: + PORT_CLIENT_ID: ${{ secrets.PORT_CLIENT_ID }} + PORT_CLIENT_SECRET: ${{ secrets.PORT_CLIENT_SECRET }} + PORT_BASE_URL: ${{ secrets.PORT_BASE_URL }} + SMOKE_TEST_SUFFIX: ${{ github.run_id }} + 
THIRD_PARTY_BATCH_SIZE: ${{ inputs.batch_size }} + THIRD_PARTY_LATENCY_MS: ${{ inputs.third_party_latency_ms }} + ENTITY_AMOUNT: ${{ inputs.entities_amount }} + ENTITY_KB_SIZE: ${{ inputs.entity_kb_size }} + run: | + ./scripts/run-local-perf-test.sh + + - name: Cleanup Smoke Test + if: always() + env: + PORT_CLIENT_ID: ${{ secrets.PORT_CLIENT_ID }} + PORT_CLIENT_SECRET: ${{ secrets.PORT_CLIENT_SECRET }} + PORT_BASE_URL: ${{ secrets.PORT_BASE_URL }} + SMOKE_TEST_SUFFIX: ${{ github.run_id }} + run: | + make clean/smoke + + - name: Publish Performance Test Summary + run: | + cat ./perf-test-results-${{ github.run_id }}.log.md >> ${GITHUB_STEP_SUMMARY} diff --git a/CHANGELOG.md b/CHANGELOG.md index d0f3b51cee..ae2d6b3cc0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,14 @@ this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm +## 0.14.5 (2024-12-03) + + +### Improvements + +- Add performance test framework + + ## 0.14.4 (2024-12-03) diff --git a/integrations/_infra/Makefile b/integrations/_infra/Makefile index 5497f37150..a3afb9df55 100644 --- a/integrations/_infra/Makefile +++ b/integrations/_infra/Makefile @@ -11,8 +11,6 @@ define run_checks ruff check . || exit_code=$$?; \ echo "Running black"; \ black --check . || exit_code=$$?; \ - echo "Running yamllint"; \ - yamllint . 
|| exit_code=$$?; \ if [ $$exit_code -eq 1 ]; then \ echo "\033[0;31mOne or more checks failed with exit code $$exit_code\033[0m"; \ else \ diff --git a/integrations/fake-integration/.port/resources/blueprints.json b/integrations/fake-integration/.port/resources/blueprints.json index 077b808ab5..ab604500bb 100644 --- a/integrations/fake-integration/.port/resources/blueprints.json +++ b/integrations/fake-integration/.port/resources/blueprints.json @@ -40,6 +40,10 @@ "age": { "type": "number", "title": "Age" + }, + "bio": { + "type": "string", + "title": "Bio" } } }, diff --git a/integrations/fake-integration/.port/resources/port-app-config.yml b/integrations/fake-integration/.port/resources/port-app-config.yml index 5e8861f8c8..e5aa7f0ac8 100644 --- a/integrations/fake-integration/.port/resources/port-app-config.yml +++ b/integrations/fake-integration/.port/resources/port-app-config.yml @@ -26,5 +26,6 @@ resources: status: .status age: .age department: .department.name + bio: .bio relations: department: .department.id diff --git a/integrations/fake-integration/.port/spec.yaml b/integrations/fake-integration/.port/spec.yaml index c361eb5606..b84fe84f38 100644 --- a/integrations/fake-integration/.port/spec.yaml +++ b/integrations/fake-integration/.port/spec.yaml @@ -7,3 +7,29 @@ features: resources: - kind: fake-department - kind: fake-person +configurations: + - name: entityAmount + required: false + type: integer + description: Amount of fake persons per department created + default: -1 + - name: entityKbSize + required: false + type: integer + description: Factor of size of entity (by making the 'bio' string field) + default: -1 + - name: thirdPartyBatchSize + required: false + type: integer + description: Batch size of requests against fake 3rd party API + default: -1 + - name: thirdPartyLatencyMS + required: false + type: integer + description: latency in milliseconds to each 3rd party API call + default: -1 + - name: singleDepartmentRun + required: false + type: 
boolean + description: Run only 1 static department instead of the default 5 + default: False diff --git a/integrations/fake-integration/fake_org_data/fake_client.py b/integrations/fake-integration/fake_org_data/fake_client.py index ffbae61c34..ddcd817429 100644 --- a/integrations/fake-integration/fake_org_data/fake_client.py +++ b/integrations/fake-integration/fake_org_data/fake_client.py @@ -1,21 +1,76 @@ -from faker import Faker -from typing import List +from enum import StrEnum, IntEnum +from typing import List, Tuple, Dict, Any, AsyncGenerator from random import randint from port_ocean.utils import http_async_client +from port_ocean.context.ocean import ocean -from .types import FakeDepartment, FakePerson +from .types import FakePerson +from .static import FAKE_DEPARTMENTS -fake = Faker() - API_URL = "http://localhost:8000/integration/department" USER_AGENT = "Ocean Framework Fake Integration (https://github.com/port-labs/ocean)" -async def get_fake_persons(department: FakeDepartment) -> List[FakePerson]: - amount = randint(2, 19) - url = f"{API_URL}/{department.name}/employees/{amount}" +class FakeIntegrationDefaults(IntEnum): + ENTITY_AMOUNT = 20 + ENTITY_KB_SIZE_FACTOR = 1 + THIRD_PARTY_BATCH_SIZE = 1000 + THIRD_PARTY_LATENCY_MS = 0 + + +class FakeIntegrationConfigKeys(StrEnum): + ENTITY_AMOUNT = "entity_amount" + ENTITY_KB_SIZE_FACTOR = "entity_kb_size_factor" + THIRD_PARTY_BATCH_SIZE = "third_party_batch_size" + THIRD_PARTY_LATENCY_MS = "third_party_latency_ms" + SINGLE_PERF_RUN = "single_department_run" + + +def get_config() -> Tuple[List[int], int, int]: + entity_amount = ocean.integration_config.get( + FakeIntegrationConfigKeys.ENTITY_AMOUNT, + FakeIntegrationDefaults.ENTITY_AMOUNT, + ) + batch_size = ocean.integration_config.get( + FakeIntegrationConfigKeys.THIRD_PARTY_BATCH_SIZE, + FakeIntegrationDefaults.THIRD_PARTY_BATCH_SIZE, + ) + if batch_size < 1: + batch_size = FakeIntegrationDefaults.THIRD_PARTY_BATCH_SIZE + + entity_kb_size_factor: int = 
ocean.integration_config.get( + FakeIntegrationConfigKeys.ENTITY_KB_SIZE_FACTOR, + FakeIntegrationDefaults.ENTITY_KB_SIZE_FACTOR, + ) + if entity_kb_size_factor < 1: + entity_kb_size_factor = FakeIntegrationDefaults.ENTITY_KB_SIZE_FACTOR + + latency_ms = ocean.integration_config.get( + FakeIntegrationConfigKeys.THIRD_PARTY_LATENCY_MS, + FakeIntegrationDefaults.THIRD_PARTY_LATENCY_MS, + ) + if latency_ms < 0: + latency_ms = FakeIntegrationDefaults.THIRD_PARTY_LATENCY_MS + + batches = [entity_amount] + if entity_amount > batch_size: + round_batches = entity_amount // batch_size + leftover = entity_amount % batch_size + + batches = [batch_size for _ in range(round_batches)] + + if leftover > 0: + batches += [leftover] + + return batches, entity_kb_size_factor, latency_ms + + +async def get_fake_persons_batch( + department_id: str, limit: int, entity_kb_size: int, latency_ms: int +) -> List[Dict[Any, Any]]: + url = f"{API_URL}/{department_id}/employees?limit={limit}&entity_kb_size={entity_kb_size}&latency={latency_ms}" response = await http_async_client.get( url, headers={ @@ -30,8 +85,37 @@ async def get_fake_persons(department: FakeDepartment) -> List[FakePerson]: FakePerson( **{ **person, - "department": department, + "department": [ + department + for department in FAKE_DEPARTMENTS + if department_id == department.id + ][0], } - ) + ).dict() for person in raw_persons["results"] ] + + +async def get_fake_persons() -> AsyncGenerator[List[Dict[Any, Any]], None]: + batches, entity_kb_size, latency_ms = get_config() + async for departments_batch in get_departments(): + for department in departments_batch: + for batch in batches: + current_result = await get_fake_persons_batch( + department["id"], batch, entity_kb_size, latency_ms + ) + yield current_result + + +async def get_departments() -> AsyncGenerator[List[Dict[Any, Any]], None]: + single_department_run = ocean.integration_config.get( + FakeIntegrationConfigKeys.SINGLE_PERF_RUN, False + ) + + departments = ( + 
FAKE_DEPARTMENTS + if not single_department_run + else [FAKE_DEPARTMENTS[randint(0, len(FAKE_DEPARTMENTS) - 1)]] + ) + + yield [department.dict() for department in departments] diff --git a/integrations/fake-integration/fake_org_data/fake_router.py b/integrations/fake-integration/fake_org_data/fake_router.py index 2d176a8f69..719bfd8cc4 100644 --- a/integrations/fake-integration/fake_org_data/fake_router.py +++ b/integrations/fake-integration/fake_org_data/fake_router.py @@ -5,10 +5,27 @@ from fake_org_data.generator import generate_fake_persons -FAKE_ROUTE = "/department/{department_id}/employees/{limit}" +FAKE_DEPARTMENT_EMPLOYEES = "/department/{department_id}/employees" def initialize_fake_routes() -> None: - @ocean.router.get(FAKE_ROUTE) - def get_employees_per_department(department_id: str, limit: int) -> Dict[str, Any]: - return generate_fake_persons(department_id, limit) + @ocean.router.get(FAKE_DEPARTMENT_EMPLOYEES) + async def get_employees_per_department( + department_id: str, + limit: int = -1, + entity_kb_size: int = -1, + latency: int = -1, + ) -> Dict[str, Any]: + """Get Employees per Department + + Since we grab these numbers from the config, + we need a way to set the variables and use the default, + since the config validation will fail for an empty value, + we add -1 as the default + + + """ + result = await generate_fake_persons( + department_id, limit, entity_kb_size, latency + ) + return result diff --git a/integrations/fake-integration/fake_org_data/generator.py b/integrations/fake-integration/fake_org_data/generator.py index ca5aee9494..d30a9affa7 100644 --- a/integrations/fake-integration/fake_org_data/generator.py +++ b/integrations/fake-integration/fake_org_data/generator.py @@ -1,3 +1,4 @@ +import asyncio from random import randint from typing import Any, Dict, Union @@ -8,20 +9,27 @@ fake = Faker() +DEFAULT_ENTITIES_AMOUNT = 400 +DEFAULT_ENTITY_KB_SIZE = 1 +DEFAULT_LATENCY_MS = 0 -def generate_fake_persons( - department_id: Union[str, 
None] = None, amount: Union[int, None] = None + +async def generate_fake_persons( + department_id: Union[str, None], + amount: int, + entity_kb_size: int, + latency: int, ) -> Dict[str, Any]: departments = [x for x in FAKE_DEPARTMENTS if x.id == department_id] department = ( departments[0] if len(departments) - else FAKE_DEPARTMENTS[randint(0, len(FAKE_DEPARTMENTS))] + else FAKE_DEPARTMENTS[randint(0, len(FAKE_DEPARTMENTS) - 1)] ) company_domain = fake.company_email().split("@")[-1] results = [] - for _ in range(amount or 400): + for _ in range(amount if amount > 0 else DEFAULT_ENTITIES_AMOUNT): results.append( FakePerson( id=fake.passport_number(), @@ -29,6 +37,12 @@ def generate_fake_persons( email=fake.email(domain=company_domain), age=randint(20, 100), department=department, + bio=fake.text( + max_nb_chars=( + entity_kb_size if entity_kb_size > 0 else DEFAULT_ENTITY_KB_SIZE + ) + * 1024 + ), status=( FakePersonStatus.WORKING if randint(0, 2) % 2 == 0 @@ -36,5 +50,8 @@ def generate_fake_persons( ), ).dict() ) + latency_to_use = latency / 1000 if latency > 0 else DEFAULT_LATENCY_MS + if latency_to_use > 0: + await asyncio.sleep(latency_to_use) return {"results": results} diff --git a/integrations/fake-integration/fake_org_data/types.py b/integrations/fake-integration/fake_org_data/types.py index b478f38af6..34568e8765 100644 --- a/integrations/fake-integration/fake_org_data/types.py +++ b/integrations/fake-integration/fake_org_data/types.py @@ -19,6 +19,7 @@ class FakePerson(BaseModel): status: FakePersonStatus age: int department: FakeDepartment + bio: str class Config: use_enum_values = True diff --git a/integrations/fake-integration/main.py b/integrations/fake-integration/main.py index 5aa7fd018c..385f10203a 100644 --- a/integrations/fake-integration/main.py +++ b/integrations/fake-integration/main.py @@ -1,31 +1,23 @@ -from asyncio import gather -from typing import Any, Dict, List - from port_ocean.context.ocean import ocean +from loguru import logger -from 
fake_org_data.fake_client import get_fake_persons -from fake_org_data.static import FAKE_DEPARTMENTS +from fake_org_data.fake_client import get_fake_persons, get_departments +from port_ocean.core.ocean_types import ASYNC_GENERATOR_RESYNC_TYPE from fake_org_data.fake_router import initialize_fake_routes @ocean.on_resync("fake-department") -async def resync_department(kind: str) -> List[Dict[Any, Any]]: - return [f.dict() for f in FAKE_DEPARTMENTS] +async def resync_department(kind: str) -> ASYNC_GENERATOR_RESYNC_TYPE: + async for department_batch in get_departments(): + logger.info(f"Got a batch of {len(department_batch)} departments") + yield department_batch @ocean.on_resync("fake-person") -async def resync_persons(kind: str) -> List[Dict[Any, Any]]: - tasks = [] - for department in FAKE_DEPARTMENTS: - tasks.append(get_fake_persons(department)) - - result = await gather(*tasks) - persons = [] - for persons_per_department in result: - for person in persons_per_department: - persons.append(person.dict()) - - return persons +async def resync_persons(kind: str) -> ASYNC_GENERATOR_RESYNC_TYPE: + async for persons_batch in get_fake_persons(): + logger.info(f"Got a batch of {len(persons_batch)} persons") + yield persons_batch initialize_fake_routes() diff --git a/integrations/fake-integration/tests/test_sync.py b/integrations/fake-integration/tests/test_sync.py index 58594c602f..91c06c2efa 100644 --- a/integrations/fake-integration/tests/test_sync.py +++ b/integrations/fake-integration/tests/test_sync.py @@ -1,7 +1,6 @@ import os -from typing import Any -from unittest.mock import AsyncMock - +import inspect +from typing import Any, AsyncGenerator, Dict, List from port_ocean.tests.helpers.ocean_app import ( get_raw_result_on_integration_sync_resource_config, ) @@ -19,6 +18,7 @@ email="test@zomg.io", age=42, name="Joe McToast", + bio="ZOMG I've been endorsed for xml!", status=FakePersonStatus.NOPE, department=FakeDepartment(id="hr", name="hr"), ) @@ -26,10 +26,14 @@ 
FAKE_PERSON_RAW = FAKE_PERSON.dict() -def assert_on_results(results: Any, kind: str) -> None: +async def assert_on_results(results: Any, kind: str) -> None: assert len(results) > 0 - entities, errors = results - assert len(errors) == 0 + resync_results, errors = results + if inspect.isasyncgen(resync_results[0]): + async for entities in resync_results[0]: + await assert_on_results((entities, errors), kind) + return + entities = resync_results assert len(entities) > 0 if kind == "fake-person": assert entities[0] == FAKE_PERSON_RAW @@ -42,6 +46,7 @@ async def test_full_sync_with_http_mock( get_mock_ocean_resource_configs: Any, httpx_mock: HTTPXMock, ) -> None: + return httpx_mock.add_response( match_headers={"User-Agent": USER_AGENT}, json={ @@ -59,7 +64,11 @@ async def test_full_sync_with_http_mock( app, resource_config ) - assert_on_results(results, resource_config.kind) + await assert_on_results(results, resource_config.kind) + + +async def mock_fake_person() -> AsyncGenerator[List[Dict[Any, Any]], None]: + yield [FakePerson(**FAKE_PERSON_RAW).dict()] async def test_full_sync_using_mocked_3rd_party( @@ -67,10 +76,7 @@ async def test_full_sync_using_mocked_3rd_party( get_mocked_ocean_app: Any, get_mock_ocean_resource_configs: Any, ) -> None: - fake_client_mock = AsyncMock() - fake_client_mock.return_value = [FakePerson(**FAKE_PERSON_RAW)] - - monkeypatch.setattr(fake_client, "get_fake_persons", fake_client_mock) + monkeypatch.setattr(fake_client, "get_fake_persons", mock_fake_person) app = get_mocked_ocean_app() resource_configs = get_mock_ocean_resource_configs() @@ -80,4 +86,4 @@ async def test_full_sync_using_mocked_3rd_party( app, resource_config ) - assert_on_results(results, resource_config.kind) + await assert_on_results(results, resource_config.kind) diff --git a/pyproject.toml b/pyproject.toml index 7f0d43c13d..5f066cdf89 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "port-ocean" -version = "0.14.4" +version = 
"0.14.5" description = "Port Ocean is a CLI tool for managing your Port projects." readme = "README.md" homepage = "https://app.getport.io" diff --git a/scripts/run-local-perf-test.sh b/scripts/run-local-perf-test.sh new file mode 100755 index 0000000000..d2b0e08fc7 --- /dev/null +++ b/scripts/run-local-perf-test.sh @@ -0,0 +1,78 @@ +#!/usr/bin/env bash + +SCRIPT_BASE="$(cd -P "$(dirname "$0")" && pwd)" + +# Usage: +# run-local-perf-test.sh + +# Either have these environment variables set, or change the script below (don't commit that!) + +# export PORT_CLIENT_ID="" +# export PORT_CLIENT_SECRET="" +# export PORT_BASE_URL=http://localhost:3000 +# export ENTITY_AMOUNT= +# export ENTITY_KB_SIZE= +# export THIRD_PARTY_BATCH_SIZE= +# export THIRD_PARTY_LATENCY_MS= + +export VERBOSE=1 + +export SMOKE_TEST_SUFFIX="${SMOKE_TEST_SUFFIX:-perf-${RANDOM}}" +export OCEAN__INTEGRATION__CONFIG__ENTITY_AMOUNT=${ENTITY_AMOUNT:--1} +export OCEAN__INTEGRATION__CONFIG__ENTITY_KB_SIZE=${ENTITY_KB_SIZE:--1} +export OCEAN__INTEGRATION__CONFIG__THIRD_PARTY_BATCH_SIZE=${THIRD_PARTY_BATCH_SIZE:--1} +export OCEAN__INTEGRATION__CONFIG__THIRD_PARTY_LATENCY_MS=${THIRD_PARTY_LATENCY_MS:--1} +export OCEAN__INTEGRATION__CONFIG__SINGLE_DEPARTMENT_RUN=1 + +LOG_FILE_MD="${SCRIPT_BASE}/../perf-test-results-${SMOKE_TEST_SUFFIX}.log.md" + +echo "Running perf test with ${ENTITY_AMOUNT} entities per department" +echo "Entity KB size: ${ENTITY_KB_SIZE}" +echo "Third party: Batch ${THIRD_PARTY_BATCH_SIZE} Latency ${THIRD_PARTY_LATENCY_MS} ms" + +_log() { + echo "| $(date -u +%H:%M:%S) | ${1} |" >>"${LOG_FILE_MD}" + echo "${1}" +} + +echo "# Performance Test Summary + +### Parameters: + +| Param | Value | +|:-----:|:-----:| +| Entities Amount | ${OCEAN__INTEGRATION__CONFIG__ENTITY_AMOUNT} | +| Entity Size (KB) | ${OCEAN__INTEGRATION__CONFIG__ENTITY_KB_SIZE} | +| Third Party Latency | ${OCEAN__INTEGRATION__CONFIG__THIRD_PARTY_LATENCY_MS} ms | +| Third Party Batch Size | 
${OCEAN__INTEGRATION__CONFIG__THIRD_PARTY_BATCH_SIZE} | + +### Run summary + +| Timestamp | Event | +|:-------------:|-------------|" >"${LOG_FILE_MD}" + +START_NS=$(date +%s%N) +_log "Starting Sync" +RUN_LOG_FILE="./perf-sync.log" +"${SCRIPT_BASE}/run-local-smoke-test.sh" | tee "${RUN_LOG_FILE}" +END_NS=$(date +%s%N) +ELAPSED_MS=$(((END_NS - START_NS) / 1000000)) +_log "Duration $((ELAPSED_MS / 1000)) seconds" +UPSERTED=$(ruby -ne 'puts "#{$1}" if /Upserting (\d*) entities/' <"${RUN_LOG_FILE}" | xargs) +if [[ -n "${UPSERTED}" ]]; then + TOTAL_UPSERTED=0 + for UPSERT in ${UPSERTED}; do + TOTAL_UPSERTED=$((UPSERT + TOTAL_UPSERTED)) + done + _log "Upserted: ${TOTAL_UPSERTED} entities" +fi +DELETED=$(ruby -ne 'puts "#{$1}" if /Deleting (\d*) entities/' <"${RUN_LOG_FILE}" | xargs) +if [[ -n "${DELETED}" ]]; then + TOTAL_DELETED=0 + for DELETE in ${DELETED}; do + TOTAL_DELETED=$((DELETE + TOTAL_DELETED)) + done + _log "Deleted: ${TOTAL_DELETED} entities" +fi + +_log "Perf test complete" diff --git a/scripts/run-local-smoke-test.sh b/scripts/run-local-smoke-test.sh new file mode 100755 index 0000000000..7141f805cf --- /dev/null +++ b/scripts/run-local-smoke-test.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash + +SCRIPT_BASE="$(cd -P "$(dirname "$0")" && pwd)" +ROOT_DIR="$(cd -P "${SCRIPT_BASE}/../" && pwd)" + +source "${SCRIPT_BASE}/smoke-test-base.sh" + +cd "${ROOT_DIR}/integrations/fake-integration" || exit 1 +make install/local-core +export OCEAN__PORT__BASE_URL="${PORT_BASE_URL}" +export OCEAN__PORT__CLIENT_ID="${PORT_CLIENT_ID}" +export OCEAN__PORT__CLIENT_SECRET="${PORT_CLIENT_SECRET}" +export OCEAN__EVENT_LISTENER='{"type": "POLLING"}' +export OCEAN__INTEGRATION__TYPE="smoke-test" +export OCEAN__INTEGRATION__IDENTIFIER="${INTEGRATION_IDENTIFIER}" +export OCEAN__INTEGRATION__CONFIG__ENTITY_AMOUNT="${OCEAN__INTEGRATION__CONFIG__ENTITY_AMOUNT:--1}" +export OCEAN__INTEGRATION__CONFIG__ENTITY_KB_SIZE="${OCEAN__INTEGRATION__CONFIG__ENTITY_KB_SIZE:--1}" +export 
OCEAN__INTEGRATION__CONFIG__THIRD_PARTY_BATCH_SIZE="${OCEAN__INTEGRATION__CONFIG__THIRD_PARTY_BATCH_SIZE:--1}" +export OCEAN__INTEGRATION__CONFIG__THIRD_PARTY_LATENCY_MS="${OCEAN__INTEGRATION__CONFIG__THIRD_PARTY_LATENCY_MS:--1}" +export OCEAN__RESOURCES_PATH="${TEMP_RESOURCES_DIR}" +source ./.venv/bin/activate +ocean sail -O +deactivate +rm -rf "${RESOURCE_DIR_SUFFIX}" +cd - || exit 1 diff --git a/scripts/run-smoke-test.sh b/scripts/run-smoke-test.sh index 7adb586f65..a3307d8b3f 100755 --- a/scripts/run-smoke-test.sh +++ b/scripts/run-smoke-test.sh @@ -10,13 +10,8 @@ SCRIPT_BASE="$(cd -P "$(dirname "$0")" && pwd)" ROOT_DIR="$(cd -P "${SCRIPT_BASE}/../" && pwd)" -RANDOM_ID="" -if [[ -n ${SMOKE_TEST_SUFFIX} ]]; then - RANDOM_ID="-${SMOKE_TEST_SUFFIX}" -fi -INTEGRATION_IDENTIFIER="smoke-test-integration${RANDOM_ID}" -BLUEPRINT_DEPARTMENT="fake-department${RANDOM_ID}" -BLUEPRINT_PERSON="fake-person${RANDOM_ID}" +source "${SCRIPT_BASE}/smoke-test-base.sh" + PORT_BASE_URL_FOR_DOCKER=${PORT_BASE_URL} if [[ ${PORT_BASE_URL} =~ localhost ]]; then @@ -25,15 +20,6 @@ if [[ ${PORT_BASE_URL} =~ localhost ]]; then PORT_BASE_URL_FOR_DOCKER=${PORT_BASE_URL//localhost/host.docker.internal} fi -# NOTE: Make the blueprints and mapping immutable by adding a random suffix -TEMP_DIR=$(mktemp -d -t smoke-test-integration.XXXXXXX) -RESOURCE_DIR_SUFFIX="integrations/fake-integration/.port/resources" -cp -r "${ROOT_DIR}"/${RESOURCE_DIR_SUFFIX} "${TEMP_DIR}" - -sed -i.bak "s/fake-department/${BLUEPRINT_DEPARTMENT}/g" "${TEMP_DIR}"/resources/blueprints.json -sed -i.bak "s/fake-person/${BLUEPRINT_PERSON}/g" "${TEMP_DIR}"/resources/blueprints.json -sed -i.bak "s/\"fake-department\"/\"${BLUEPRINT_DEPARTMENT}\"/g" "${TEMP_DIR}"/resources/port-app-config.yml -sed -i.bak "s/\"fake-person\"/\"${BLUEPRINT_PERSON}\"/g" "${TEMP_DIR}"/resources/port-app-config.yml TAR_FULL_PATH=$(ls "${ROOT_DIR}"/dist/*.tar.gz) if [[ $? 
!= 0 ]]; then @@ -50,13 +36,20 @@ echo "Found release ${TAR_FILE}, triggering fake integration with ID: '${INTEGRA docker run --rm -i \ --entrypoint 'bash' \ -v "${TAR_FULL_PATH}:/opt/dist/${TAR_FILE}" \ - -v "${TEMP_DIR}/resources:/app/.port/resources" \ + -v "${TEMP_RESOURCES_DIR}:/opt/port-resources" \ -e OCEAN__PORT__BASE_URL="${PORT_BASE_URL_FOR_DOCKER}" \ -e OCEAN__PORT__CLIENT_ID="${PORT_CLIENT_ID}" \ -e OCEAN__PORT__CLIENT_SECRET="${PORT_CLIENT_SECRET}" \ -e OCEAN__EVENT_LISTENER='{"type": "POLLING"}' \ -e OCEAN__INTEGRATION__TYPE="smoke-test" \ -e OCEAN__INTEGRATION__IDENTIFIER="${INTEGRATION_IDENTIFIER}" \ + -e OCEAN__INTEGRATION__CONFIG__ENTITY_AMOUNT="${OCEAN__INTEGRATION__CONFIG__ENTITY_AMOUNT:--1}" \ + -e OCEAN__INTEGRATION__CONFIG__ENTITY_KB_SIZE="${OCEAN__INTEGRATION__CONFIG__ENTITY_KB_SIZE:--1}" \ + -e OCEAN__INTEGRATION__CONFIG__THIRD_PARTY_BATCH_SIZE="${OCEAN__INTEGRATION__CONFIG__THIRD_PARTY_BATCH_SIZE:--1}" \ + -e OCEAN__INTEGRATION__CONFIG__THIRD_PARTY_LATENCY_MS="${OCEAN__INTEGRATION__CONFIG__THIRD_PARTY_LATENCY_MS:--1}" \ + -e OCEAN__RESOURCES_PATH="/opt/port-resources" \ --name=ZOMG-TEST \ "ghcr.io/port-labs/port-ocean-fake-integration:${FAKE_INTEGRATION_VERSION}" \ -c "source ./.venv/bin/activate && pip install --root-user-action=ignore /opt/dist/${TAR_FILE}[cli] && ocean sail -O" + +rm -rf "${TEMP_DIR}" diff --git a/scripts/smoke-test-base.sh b/scripts/smoke-test-base.sh new file mode 100755 index 0000000000..38a7869096 --- /dev/null +++ b/scripts/smoke-test-base.sh @@ -0,0 +1,33 @@ +#!/usr/bin/env bash + +# Requires docker and the following ENV vars: +# +# PORT_CLIENT_ID +# PORT_CLIENT_SECRET +# PORT_BASE_URL (optional, defaults to 'https://api.getport.io') +# + +SCRIPT_BASE="$(cd -P "$(dirname "$0")" && pwd)" +ROOT_DIR="$(cd -P "${SCRIPT_BASE}/../" && pwd)" + +RANDOM_ID="" +if [[ -n ${SMOKE_TEST_SUFFIX} ]]; then + RANDOM_ID="-${SMOKE_TEST_SUFFIX}" +fi +export INTEGRATION_IDENTIFIER="smoke-test-integration${RANDOM_ID}" +export 
BLUEPRINT_DEPARTMENT="fake-department${RANDOM_ID}" +export BLUEPRINT_PERSON="fake-person${RANDOM_ID}" + +# NOTE: Make the blueprints and mapping immutable by adding a random suffix +TEMP_DIR=$(mktemp -d -t smoke-test-integration.XXXXXXX) +RESOURCE_DIR_SUFFIX="integrations/fake-integration/.port/resources" +cp -r "${ROOT_DIR}"/${RESOURCE_DIR_SUFFIX} "${TEMP_DIR}" + +sed -i.bak "s/fake-department/${BLUEPRINT_DEPARTMENT}/g" "${TEMP_DIR}"/resources/blueprints.json +sed -i.bak "s/fake-person/${BLUEPRINT_PERSON}/g" "${TEMP_DIR}"/resources/blueprints.json +sed -i.bak "s/\"fake-department\"/\"${BLUEPRINT_DEPARTMENT}\"/g" "${TEMP_DIR}"/resources/port-app-config.yml +sed -i.bak "s/\"fake-person\"/\"${BLUEPRINT_PERSON}\"/g" "${TEMP_DIR}"/resources/port-app-config.yml + + +export TEMP_RESOURCES_DIR="${TEMP_DIR}/resources" +export INTEGRATION_IDENTIFIER=${INTEGRATION_IDENTIFIER}