From 64e67b73c41acb53540a5beecc5d9a2fd21bd381 Mon Sep 17 00:00:00 2001 From: emileten Date: Mon, 19 Feb 2024 19:52:58 +0300 Subject: [PATCH] move integration tests to step before release, improve naming of workflows --- .github/workflows/build.yaml | 81 +++++- .github/workflows/build_and_release.yaml | 17 ++ .github/workflows/distribute.yaml | 4 - .github/workflows/integration-test.yaml | 104 -------- .github/workflows/test.yaml | 13 - integration_tests/cdk/config.py | 142 +---------- .../cdk/eoapi_template/pgStacInfra.py | 239 +----------------- integration_tests/cdk/eoapi_template/vpc.py | 25 +- integration_tests/cdk/package-lock.json | 8 +- integration_tests/cdk/package.json | 2 +- integration_tests/cdk/requirements.txt | 6 +- integration_tests/tests/README.md | 16 -- .../tests/eoapi_tests/conftest.py | 22 -- .../eoapi_tests/fixtures/test_collection.json | 45 ---- .../tests/eoapi_tests/fixtures/test_item.json | 12 - .../fixtures/test_titiler_search_request.json | 3 - .../tests/eoapi_tests/ingestion.py | 166 ------------ .../tests/eoapi_tests/settings.py | 12 - .../tests/eoapi_tests/test_stac_ingestion.py | 123 --------- integration_tests/tests/pyproject.toml | 21 -- lib/database/bootstrapper_runtime/handler.py | 2 +- lib/database/index.ts | 3 +- 22 files changed, 136 insertions(+), 930 deletions(-) create mode 100644 .github/workflows/build_and_release.yaml delete mode 100644 .github/workflows/integration-test.yaml delete mode 100644 .github/workflows/test.yaml delete mode 100644 integration_tests/tests/README.md delete mode 100644 integration_tests/tests/eoapi_tests/conftest.py delete mode 100644 integration_tests/tests/eoapi_tests/fixtures/test_collection.json delete mode 100644 integration_tests/tests/eoapi_tests/fixtures/test_item.json delete mode 100644 integration_tests/tests/eoapi_tests/fixtures/test_titiler_search_request.json delete mode 100644 integration_tests/tests/eoapi_tests/ingestion.py delete mode 100644 
integration_tests/tests/eoapi_tests/settings.py delete mode 100644 integration_tests/tests/eoapi_tests/test_stac_ingestion.py delete mode 100644 integration_tests/tests/pyproject.toml diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml index fee9392..d9d635e 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yaml @@ -12,17 +12,30 @@ on: required: false DS_RELEASE_BOT_PRIVATE_KEY: required: false - + AWS_DEFAULT_REGION_DEPLOY: + required: false + AWS_ACCESS_KEY_ID_DEPLOY: + required: false + AWS_SECRET_ACCESS_KEY_DEPLOY: + required: false + AWS_ACCOUNT_ID: + required: false jobs: build_and_package: name: Build and package runs-on: ubuntu-latest + timeout-minutes: 60 + env: + AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION_DEPLOY }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_DEPLOY }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_DEPLOY }} + AWS_DEFAULT_ACCOUNT: ${{ secrets.AWS_ACCOUNT_ID }} steps: - uses: actions/checkout@v3 - uses: actions/setup-node@v3 with: - node-version: 16 + node-version: 18 cache: "npm" - name: Install Dependencies @@ -65,8 +78,70 @@ jobs: app_id: ${{ secrets.DS_RELEASE_BOT_ID }} private_key: ${{ secrets.DS_RELEASE_BOT_PRIVATE_KEY }} - - name: Maybe Release 🚀 + - name: Check release + id: check_release if: ${{ inputs.release }} + run: | + SHOULD_RELEASE=false + npm run semantic-release -- --dry-run > check_release_output.txt + if grep -q "Published release" check_release_output.txt; then + echo "SHOULD_RELEASE=true" >> $GITHUB_OUTPUT + else + echo "SHOULD_RELEASE=false" >> $GITHUB_OUTPUT + fi + + - name: Install deployment environment + if: "${{ inputs.release && steps.check_release.outputs.SHOULD_RELEASE == 'true' }}" + id: install_deploy_env + run: | + # install deployment environment with eoapi-cdk from build + python -m venv .deployment_venv + source .deployment_venv/bin/activate + pip install dist/python/*.gz + cd integration_tests/cdk + pip install -r requirements.txt + npm install + 
deactivate + cd - + + + - name: Deploy test stack + if: "${{ inputs.release && steps.check_release.outputs.SHOULD_RELEASE == 'true' }}" + id: deploy_step + run: | + source .deployment_venv/bin/activate + + # synthesize the stack + cd integration_tests/cdk + npx cdk synth --debug --all --require-approval never + + # deploy the stack and grab URLs for testing + npx cdk deploy --ci --all --require-approval never + echo "ingestor_url=$(aws cloudformation describe-stacks --stack-name eoapi-cdk-integration-test-pgSTAC-infra --query "Stacks[0].Outputs[?starts_with(OutputKey, 'stacingestor')].OutputValue | [0]" --output text)" >> $GITHUB_OUTPUT + echo "stac_api_url=$(aws cloudformation describe-stacks --stack-name eoapi-cdk-integration-test-pgSTAC-infra --query "Stacks[0].Outputs[?starts_with(OutputKey, 'pgstacapi')].OutputValue | [0]" --output text)" >> $GITHUB_OUTPUT + echo "titiler_pgstac_api_url=$(aws cloudformation describe-stacks --stack-name eoapi-cdk-integration-test-pgSTAC-infra --query "Stacks[0].Outputs[?starts_with(OutputKey, 'titilerpgstac')].OutputValue | [0]" --output text)" >> $GITHUB_OUTPUT + deactivate + cd - + + - name: Tear down any infrastructure + if: always() + run: | + cd integration_tests/cdk + # run this only if we find a 'cdk.out' directory, which means there might be things to tear down + if [ -d "cdk.out" ]; then + cd - + source .deployment_venv/bin/activate + cd integration_tests/cdk + # see https://github.com/aws/aws-cdk/issues/24946 + rm -f cdk.out/synth.lock + npx cdk destroy --ci --all --force + fi + + + # run if the previous step set SHOULD_RELEASE to true + - name: Maybe Release 🚀 + # only run if the previous step set SHOULD_RELEASE to true + if: "${{ inputs.release && steps.check_release.outputs.SHOULD_RELEASE == 'true' }}" run: | npm run semantic-release env: diff --git a/.github/workflows/build_and_release.yaml b/.github/workflows/build_and_release.yaml new file mode 100644 index 0000000..e252d72 --- /dev/null +++ 
b/.github/workflows/build_and_release.yaml @@ -0,0 +1,17 @@ +name: Build & try to release + +on: + push: + +jobs: + package: + uses: ./.github/workflows/build.yaml + with: + release: true + secrets: + DS_RELEASE_BOT_ID: ${{ secrets.DS_RELEASE_BOT_ID }} + DS_RELEASE_BOT_PRIVATE_KEY: ${{ secrets.DS_RELEASE_BOT_PRIVATE_KEY }} + AWS_DEFAULT_REGION_DEPLOY: ${{ secrets.AWS_DEFAULT_REGION_DEPLOY }} + AWS_ACCESS_KEY_ID_DEPLOY: ${{ secrets.AWS_ACCESS_KEY_ID_DEPLOY }} + AWS_SECRET_ACCESS_KEY_DEPLOY: ${{ secrets.AWS_SECRET_ACCESS_KEY_DEPLOY }} + AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }} diff --git a/.github/workflows/distribute.yaml b/.github/workflows/distribute.yaml index e484b8f..fd7fe8a 100644 --- a/.github/workflows/distribute.yaml +++ b/.github/workflows/distribute.yaml @@ -9,10 +9,6 @@ jobs: package: uses: ./.github/workflows/build.yaml - integration-test: - uses: ./.github/workflows/integration-test.yaml - needs: package - distribute-python: runs-on: ubuntu-latest needs: package diff --git a/.github/workflows/integration-test.yaml b/.github/workflows/integration-test.yaml deleted file mode 100644 index eb9fe79..0000000 --- a/.github/workflows/integration-test.yaml +++ /dev/null @@ -1,104 +0,0 @@ -name: Deploy & Integration Test - -permissions: - id-token: write # required for requesting the JWT - contents: read # required for actions/checkout - -on: - workflow_call: - - workflow_dispatch: - - # remove later - # push: - # branches: - # - "feat/add-integration-tests" - -jobs: - deploy-and-integration-test: - name: Deploy & Integration Test - runs-on: ubuntu-latest - env: - AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION_DEPLOY }} - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_DEPLOY }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_DEPLOY }} - - - steps: - - - name: Checkout repository # for runs after `Distribute`, pulling from main will always give the latest eoapi-cdk. 
- uses: actions/checkout@v3 - - - name: Set up python - uses: actions/setup-python@v2 - with: - cache: pip - - - name: Set up node - uses: actions/setup-node@v3 - with: - node-version: '18' - - - name: Download compiled eoapi-cdk artifact - uses: actions/download-artifact@v3 - with: - name: python - path: dist - - - name: Install eoapi-cdk from artifact - run: pip install dist/python/*.gz - - - name: Install deployment environment - working-directory: integration_tests/cdk - run: | - python -m venv .deployment_venv - source .deployment_venv/bin/activate - pip install -r requirements.txt - npm install - deactivate - - - name: Synthesize the stack - working-directory: integration_tests/cdk - run: | - source .deployment_venv/bin/activate - npx cdk synth --debug --all --require-approval never - deactivate - - # deploys and grabs URLs from the output for later tests - - name: Deploy the stack - id: deploy_step - working-directory: integration_tests/cdk - run: | - source .deployment_venv/bin/activate - npx cdk deploy --ci --all --require-approval never - echo "ingestor_url=$(aws cloudformation describe-stacks --stack-name eoapi-template-demo-test-pgSTAC-infra --query "Stacks[0].Outputs[?starts_with(OutputKey, 'stacingestor')].OutputValue | [0]" --output text)" >> $GITHUB_OUTPUT - echo "stac_api_url=$(aws cloudformation describe-stacks --stack-name eoapi-template-demo-test-pgSTAC-infra --query "Stacks[0].Outputs[?starts_with(OutputKey, 'pgstacapi')].OutputValue | [0]" --output text)" >> $GITHUB_OUTPUT - echo "titiler_pgstac_api_url=$(aws cloudformation describe-stacks --stack-name eoapi-template-demo-test-pgSTAC-infra --query "Stacks[0].Outputs[?starts_with(OutputKey, 'titilerpgstac')].OutputValue | [0]" --output text)" >> $GITHUB_OUTPUT - deactivate - - - name: Install test environment - working-directory: integration_tests/tests - run: | - python -m venv .tests_venv - source .tests_venv/bin/activate - pip install -e . 
- deactivate - - - name: Test the stack - working-directory: integration_tests/tests - env: - ingestor_url: ${{ steps.deploy_step.outputs.ingestor_url }} - stac_api_url: ${{ steps.deploy_step.outputs.stac_api_url }} - titiler_pgstac_api_url: ${{ steps.deploy_step.outputs.titiler_pgstac_api_url }} - run: | - source .tests_venv/bin/activate - pytest eoapi_tests - deactivate - - - name: Always tear down the stack - if: always() - working-directory: integration_tests/cdk - run: | - source .deployment_venv/bin/activate - npx cdk destroy --ci --all --force - deactivate diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml deleted file mode 100644 index e543929..0000000 --- a/.github/workflows/test.yaml +++ /dev/null @@ -1,13 +0,0 @@ -name: Test & Build - -on: - push: - -jobs: - package: - uses: ./.github/workflows/build.yaml - with: - release: true - secrets: - DS_RELEASE_BOT_ID: ${{ secrets.DS_RELEASE_BOT_ID }} - DS_RELEASE_BOT_PRIVATE_KEY: ${{ secrets.DS_RELEASE_BOT_PRIVATE_KEY }} diff --git a/integration_tests/cdk/config.py b/integration_tests/cdk/config.py index 70457c5..0bc6c46 100644 --- a/integration_tests/cdk/config.py +++ b/integration_tests/cdk/config.py @@ -1,15 +1,19 @@ -from typing import Any, Dict, List, Union +from typing import Dict import pydantic import yaml -from aws_cdk import aws_ec2 from pydantic_core.core_schema import FieldValidationInfo -from pydantic_settings import BaseSettings - +from pydantic_settings import BaseSettings, SettingsConfigDict class AppConfig(BaseSettings): + model_config = SettingsConfigDict( + env_file=".env" + ) + aws_default_account: str = pydantic.Field( + description="AWS account ID" + ) project_id: str = pydantic.Field( - description="Project ID", default="eoapi-template-demo" + description="Project ID", default="eoapi-cdk-integration" ) stage: str = pydantic.Field(description="Stage of deployment", default="test") # because of its validator, `tags` should always come after `project_id` and `stage` @@ 
-20,145 +24,17 @@ class AppConfig(BaseSettings): they will override any tags defined here.""", default=None, ) - auth_provider_jwks_url: str | None = pydantic.Field( - description="""Auth Provider JSON Web Key Set URL for - ingestion authentication. If not provided, - no authentication will be required.""", - default=None, - ) - data_access_role_arn: str | None = pydantic.Field( - description="""Role ARN for data access, that will be - used by the STAC ingestor for validation of assets - located in S3 and for the tiler application to access - assets located in S3. If none, the role will be - created at runtime with full S3 read access. If - provided, the existing role must be configured to - allow the tiler and STAC ingestor lambda roles to - assume it. See https://github.com/developmentseed/eoapi-cdk""", - default=None, - ) db_instance_type: str = pydantic.Field( description="Database instance type", default="t3.micro" ) db_allocated_storage: int = pydantic.Field( description="Allocated storage for the database", default=5 ) - public_db_subnet: bool = pydantic.Field( - description="Whether to put the database in a public subnet", default=True - ) - nat_gateway_count: int = pydantic.Field( - description="Number of NAT gateways to create", - default=0, - ) - bastion_host: bool = pydantic.Field( - description="""Whether to create a bastion host. It can typically - be used to make administrative connections to the database if - `public_db_subnet` is False""", - default=False, - ) - bastion_host_create_elastic_ip: bool = pydantic.Field( - description="""Whether to create an elastic IP for the bastion host. - Ignored if `bastion_host` equals `False`""", - default=False, - ) - bastion_host_allow_ip_list: List[str] = pydantic.Field( - description="""YAML file containing list of IP addresses to - allow SSH access to the bastion host. 
Ignored if `bastion_host` - equals `False`.""", - default=[], - ) - bastion_host_user_data: Union[Dict[str, Any], aws_ec2.UserData] = pydantic.Field( - description="""Path to file containing user data for the bastion host. - Ignored if `bastion_host` equals `False`.""", - default=aws_ec2.UserData.for_linux(), - ) - titiler_buckets: List[str] = pydantic.Field( - description="""Path to YAML file containing list of - buckets to grant access to the titiler API""", - default=[], - ) - acm_certificate_arn: str | None = pydantic.Field( - description="""ARN of ACM certificate to use for - custom domain names. If provided, - CDNs are created for all the APIs""", - default=None, - ) - stac_api_custom_domain: str | None = pydantic.Field( - description="""Custom domain name for the STAC API. - Must provide `acm_certificate_arn`""", - default=None, - ) - titiler_pgstac_api_custom_domain: str | None = pydantic.Field( - description="""Custom domain name for the titiler pgstac API. - Must provide `acm_certificate_arn`""", - default=None, - ) - stac_ingestor_api_custom_domain: str | None = pydantic.Field( - description="""Custom domain name for the STAC ingestor API. - Must provide `acm_certificate_arn`""", - default=None, - ) - tipg_api_custom_domain: str | None = pydantic.Field( - description="""Custom domain name for the tipg API. - Must provide `acm_certificate_arn`""", - default=None, - ) - stac_browser_version: str | None = pydantic.Field( - description="""Version of the Radiant Earth STAC browser to deploy. - If none provided, no STAC browser will be deployed. 
- If provided, `stac_api_custom_domain` must be provided - as it will be used as a backend.""", - default=None, - ) @pydantic.field_validator("tags") def default_tags(cls, v, info: FieldValidationInfo): return v or {"project_id": info.data["project_id"], "stage": info.data["stage"]} - @pydantic.model_validator(mode="after") - def validate_nat_gateway_count(self) -> "AppConfig": - if not self.public_db_subnet and ( - self.nat_gateway_count is not None and self.nat_gateway_count <= 0 - ): - raise ValueError( - """if the database and its associated services instances - are to be located in the private subnet of the VPC, NAT - gateways are needed to allow egress from the services - and therefore `nat_gateway_count` has to be > 0.""" - ) - else: - return self - - @pydantic.model_validator(mode="after") - def validate_stac_browser_version(self) -> "AppConfig": - if ( - self.stac_browser_version is not None - and self.stac_api_custom_domain is None - ): - raise ValueError( - """If a STAC browser version is provided, - a custom domain must be provided for the STAC API""" - ) - else: - return self - - @pydantic.model_validator(mode="after") - def validate_acm_certificate_arn(self) -> "AppConfig": - if self.acm_certificate_arn is None and any( - [ - self.stac_api_custom_domain, - self.titiler_pgstac_api_custom_domain, - self.stac_ingestor_api_custom_domain, - self.tipg_api_custom_domain, - ] - ): - raise ValueError( - """If any custom domain is provided, - an ACM certificate ARN must be provided""" - ) - else: - return self - def build_service_name(self, service_id: str) -> str: return f"{self.project_id}-{self.stage}-{service_id}" diff --git a/integration_tests/cdk/eoapi_template/pgStacInfra.py b/integration_tests/cdk/eoapi_template/pgStacInfra.py index 0f12e8d..f4d70b0 100644 --- a/integration_tests/cdk/eoapi_template/pgStacInfra.py +++ b/integration_tests/cdk/eoapi_template/pgStacInfra.py @@ -1,24 +1,12 @@ -import boto3 -import yaml from aws_cdk import ( - RemovalPolicy, 
Stack, - aws_certificatemanager, aws_ec2, - aws_iam, aws_rds, - aws_s3, ) -from aws_cdk.aws_apigateway import DomainNameOptions -from aws_cdk.aws_apigatewayv2_alpha import DomainName from constructs import Construct from eoapi_cdk import ( - BastionHost, PgStacApiLambda, PgStacDatabase, - StacBrowser, - StacIngestor, - TiPgApiLambda, TitilerPgstacApiLambda, ) @@ -48,41 +36,27 @@ def __init__( version=aws_rds.PostgresEngineVersion.VER_14 ), vpc_subnets=aws_ec2.SubnetSelection( - subnet_type=aws_ec2.SubnetType.PUBLIC - if app_config.public_db_subnet - else aws_ec2.SubnetType.PRIVATE_ISOLATED + subnet_type=aws_ec2.SubnetType.PUBLIC ), allocated_storage=app_config.db_allocated_storage, instance_type=aws_ec2.InstanceType(app_config.db_instance_type), + bootstrapper_lambda_function_options={ + "allow_public_subnet": True, + } ) + + pgstac_db.db.connections.allow_default_port_from_any_ipv4() - stac_api_lambda = PgStacApiLambda( + + PgStacApiLambda( self, "pgstac-api", api_env={ "NAME": app_config.build_service_name("STAC API"), "description": f"{app_config.stage} STAC API", }, - vpc=vpc, db=pgstac_db.db, - db_secret=pgstac_db.pgstac_secret, - subnet_selection=aws_ec2.SubnetSelection( - subnet_type=aws_ec2.SubnetType.PUBLIC - if app_config.public_db_subnet - else aws_ec2.SubnetType.PRIVATE_WITH_EGRESS - ), - stac_api_domain_name=DomainName( - self, - "stac-api-domain-name", - domain_name=app_config.stac_api_custom_domain, - certificate=aws_certificatemanager.Certificate.from_certificate_arn( - self, - "stac-api-cdn-certificate", - certificate_arn=app_config.acm_certificate_arn, - ), - ) - if app_config.stac_api_custom_domain - else None, + db_secret=pgstac_db.pgstac_secret ) TitilerPgstacApiLambda( @@ -92,196 +66,11 @@ def __init__( "NAME": app_config.build_service_name("titiler pgSTAC API"), "description": f"{app_config.stage} titiler pgstac API", }, - vpc=vpc, db=pgstac_db.db, db_secret=pgstac_db.pgstac_secret, - subnet_selection=aws_ec2.SubnetSelection( - 
subnet_type=aws_ec2.SubnetType.PUBLIC - if app_config.public_db_subnet - else aws_ec2.SubnetType.PRIVATE_WITH_EGRESS - ), - buckets=app_config.titiler_buckets, - titiler_pgstac_api_domain_name=DomainName( - self, - "titiler-pgstac-api-domain-name", - domain_name=app_config.titiler_pgstac_api_custom_domain, - certificate=aws_certificatemanager.Certificate.from_certificate_arn( - self, - "titiler-pgstac-api-cdn-certificate", - certificate_arn=app_config.acm_certificate_arn, - ), - ) - if app_config.titiler_pgstac_api_custom_domain - else None, + buckets=[], + lambda_function_options={ + "allow_public_subnet": True, + }, ) - - TiPgApiLambda( - self, - "tipg-api", - api_env={ - "NAME": app_config.build_service_name("tipg API"), - "description": f"{app_config.stage} tipg API", - }, - vpc=vpc, - db=pgstac_db.db, - db_secret=pgstac_db.pgstac_secret, - subnet_selection=aws_ec2.SubnetSelection( - subnet_type=aws_ec2.SubnetType.PUBLIC - if app_config.public_db_subnet - else aws_ec2.SubnetType.PRIVATE_WITH_EGRESS - ), - tipg_api_domain_name=DomainName( - self, - "tipg-api-domain-name", - domain_name=app_config.tipg_api_custom_domain, - certificate=aws_certificatemanager.Certificate.from_certificate_arn( - self, - "tipg-api-cdn-certificate", - certificate_arn=app_config.acm_certificate_arn, - ), - ) - if app_config.tipg_api_custom_domain - else None, - ) - - if app_config.bastion_host: - BastionHost( - self, - "bastion-host", - vpc=vpc, - db=pgstac_db.db, - ipv4_allowlist=app_config.bastion_host_allow_ip_list, - user_data=aws_ec2.UserData.custom( - yaml.dump(app_config.bastion_host_user_data) - ) - if app_config.bastion_host_user_data is not None - else aws_ec2.UserData.for_linux(), - create_elastic_ip=app_config.bastion_host_create_elastic_ip, - ) - - if app_config.data_access_role_arn: - # importing provided role from arn. - # the stac ingestor will try to assume it when called, - # so it must be listed in the data access role trust policy. 
- data_access_role = aws_iam.Role.from_role_arn( - self, - "data-access-role", - role_arn=app_config.data_access_role_arn, - ) - else: - data_access_role = self._create_data_access_role() - - stac_ingestor_env = {"REQUESTER_PAYS": "True"} - - if app_config.auth_provider_jwks_url: - stac_ingestor_env["JWKS_URL"] = app_config.auth_provider_jwks_url - - stac_ingestor = StacIngestor( - self, - "stac-ingestor", - stac_url=stac_api_lambda.url, - stage=app_config.stage, - vpc=vpc, - data_access_role=data_access_role, - stac_db_secret=pgstac_db.pgstac_secret, - stac_db_security_group=pgstac_db.db.connections.security_groups[0], - subnet_selection=aws_ec2.SubnetSelection( - subnet_type=aws_ec2.SubnetType.PRIVATE_WITH_EGRESS - ), - api_env=stac_ingestor_env, - ingestor_domain_name_options=DomainNameOptions( - domain_name=app_config.stac_ingestor_api_custom_domain, - certificate=aws_certificatemanager.Certificate.from_certificate_arn( - self, - "stac-ingestor-api-cdn-certificate", - certificate_arn=app_config.acm_certificate_arn, - ), - ) - if app_config.stac_ingestor_api_custom_domain - else None, - ) - - if app_config.stac_browser_version: - stac_browser_bucket = aws_s3.Bucket( - self, - "stac-browser-bucket", - bucket_name=app_config.build_service_name("stac-browser"), - removal_policy=RemovalPolicy.DESTROY, - auto_delete_objects=True, - website_index_document="index.html", - public_read_access=True, - block_public_access=aws_s3.BlockPublicAccess( - block_public_acls=False, - block_public_policy=False, - ignore_public_acls=False, - restrict_public_buckets=False, - ), - object_ownership=aws_s3.ObjectOwnership.OBJECT_WRITER, - ) - StacBrowser( - self, - "stac-browser", - github_repo_tag=app_config.stac_browser_version, - stac_catalog_url=f"https://{app_config.stac_api_custom_domain}", - website_index_document="index.html", - bucket_arn=stac_browser_bucket.bucket_arn, - ) - - # we can only do that if the role is created here. 
- # If injecting a role, that role's trust relationship - # must be already set up, or set up after this deployment. - if not app_config.data_access_role_arn: - data_access_role = self._grant_assume_role_with_principal_pattern( - data_access_role, stac_ingestor.handler_role.role_name - ) - - def _create_data_access_role(self) -> aws_iam.Role: - """ - Creates an IAM role with full S3 read access. - """ - - data_access_role = aws_iam.Role( - self, - "data-access-role", - assumed_by=aws_iam.ServicePrincipal("lambda.amazonaws.com"), - ) - - data_access_role.add_to_policy( - aws_iam.PolicyStatement( - actions=[ - "s3:Get*", - ], - resources=["*"], - effect=aws_iam.Effect.ALLOW, - ) - ) - return data_access_role - - def _grant_assume_role_with_principal_pattern( - self, - role_to_assume: aws_iam.Role, - principal_pattern: str, - account_id: str = boto3.client("sts").get_caller_identity().get("Account"), - ) -> aws_iam.Role: - """ - Grants assume role permissions to the role of the given - account with the given name pattern. Default account - is the current account. 
- """ - - role_to_assume.assume_role_policy.add_statements( - aws_iam.PolicyStatement( - effect=aws_iam.Effect.ALLOW, - principals=[aws_iam.AnyPrincipal()], - actions=["sts:AssumeRole"], - conditions={ - "StringLike": { - "aws:PrincipalArn": [ - f"arn:aws:iam::{account_id}:role/{principal_pattern}" - ] - } - }, - ) - ) - - return role_to_assume + \ No newline at end of file diff --git a/integration_tests/cdk/eoapi_template/vpc.py b/integration_tests/cdk/eoapi_template/vpc.py index bc723f4..b6bd3ba 100644 --- a/integration_tests/cdk/eoapi_template/vpc.py +++ b/integration_tests/cdk/eoapi_template/vpc.py @@ -20,29 +20,20 @@ def __init__(self, scope: Construct, app_config: AppConfig, **kwargs) -> None: aws_ec2.SubnetConfiguration( name="ingress", subnet_type=aws_ec2.SubnetType.PUBLIC, cidr_mask=24 ), - aws_ec2.SubnetConfiguration( - name="application", - subnet_type=aws_ec2.SubnetType.PRIVATE_WITH_EGRESS, - cidr_mask=24, - ), - aws_ec2.SubnetConfiguration( - name="rds", - subnet_type=aws_ec2.SubnetType.PRIVATE_ISOLATED, - cidr_mask=24, - ), - ], - nat_gateways=app_config.nat_gateway_count, - ) - - self.vpc.add_gateway_endpoint( - "DynamoDbEndpoint", service=aws_ec2.GatewayVpcEndpointAwsService.DYNAMODB + ] ) + self.vpc.add_interface_endpoint( "SecretsManagerEndpoint", service=aws_ec2.InterfaceVpcEndpointAwsService.SECRETS_MANAGER, ) - + + self.vpc.add_interface_endpoint( + "CloudWatchEndpoint", + service=aws_ec2.InterfaceVpcEndpointAwsService.CLOUDWATCH_LOGS, + ) + self.export_value( self.vpc.select_subnets(subnet_type=aws_ec2.SubnetType.PUBLIC) .subnets[0] diff --git a/integration_tests/cdk/package-lock.json b/integration_tests/cdk/package-lock.json index 9428ef3..5fa908a 100644 --- a/integration_tests/cdk/package-lock.json +++ b/integration_tests/cdk/package-lock.json @@ -8,13 +8,13 @@ "name": "eoapi-template", "version": "0.1.0", "dependencies": { - "aws-cdk": "^2.81.0" + "aws-cdk": "^2.99.1" } }, "node_modules/aws-cdk": { - "version": "2.96.2", - "resolved": 
"https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.96.2.tgz", - "integrity": "sha512-13ERpPV99OFAD75PLOtl0rRMXTWn6bCrmUPwYKkLwIMkj2xWCBiwo2Y9Qg+UzEszm5NMHA1N4ichSvuZ0mt2IQ==", + "version": "2.128.0", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.128.0.tgz", + "integrity": "sha512-epOAr/0WKqmyaKqBc7N0Ky5++93pu+v6yVN9jNOa4JYkAkGbeTS3vR9bj/W0o94jnlgWevG3HNHr83jtRvw/4A==", "bin": { "cdk": "bin/cdk" }, diff --git a/integration_tests/cdk/package.json b/integration_tests/cdk/package.json index 411cfc3..aaaa2d9 100644 --- a/integration_tests/cdk/package.json +++ b/integration_tests/cdk/package.json @@ -2,7 +2,7 @@ "name": "eoapi-template", "version": "0.1.0", "dependencies": { - "aws-cdk": "^2.81.0" + "aws-cdk": "^2.99.1" } } \ No newline at end of file diff --git a/integration_tests/cdk/requirements.txt b/integration_tests/cdk/requirements.txt index 53f55e7..3761dc1 100644 --- a/integration_tests/cdk/requirements.txt +++ b/integration_tests/cdk/requirements.txt @@ -1,6 +1,6 @@ -aws-cdk-lib>=2.75.0 -aws_cdk.aws_cognito_identitypool_alpha>=2.75.0a0 -aws-cdk.aws-apigatewayv2-alpha==2.95.1a0 +aws-cdk-lib>=2.99.1 +aws_cdk.aws_cognito_identitypool_alpha>=2.99.0a0 +aws-cdk.aws-apigatewayv2-alpha>=2.99.0a0 constructs>=10.0.0,<11.0.0 pydantic==2.0.2 pydantic-settings==2.0.1 diff --git a/integration_tests/tests/README.md b/integration_tests/tests/README.md deleted file mode 100644 index 0dcdaf5..0000000 --- a/integration_tests/tests/README.md +++ /dev/null @@ -1,16 +0,0 @@ -# Integration tests - -Standard integration tests for a suite of deployed [eoAPI](https://github.com/developmentseed/eoAPI) services. - -## Environment - -See `eoapi_tests/settings.py` that defines the required and optional environment variables. - -## Installation & Usage - -``` -python -m venv .testing_environment -source .testing_environment/bin/activate -pip install -e . 
-pytest eoapi_tests -``` \ No newline at end of file diff --git a/integration_tests/tests/eoapi_tests/conftest.py b/integration_tests/tests/eoapi_tests/conftest.py deleted file mode 100644 index ebf9e48..0000000 --- a/integration_tests/tests/eoapi_tests/conftest.py +++ /dev/null @@ -1,22 +0,0 @@ -import pytest -from ingestion import StacIngestion - - -@pytest.fixture(scope="module") -def stac_ingestion_instance(): - return StacIngestion() - - -@pytest.fixture(scope="module") -def test_collection(stac_ingestion_instance): - return stac_ingestion_instance.get_test_collection() - - -@pytest.fixture(scope="module") -def test_item(stac_ingestion_instance): - return stac_ingestion_instance.get_test_item() - - -@pytest.fixture(scope="module") -def test_titiler_search_request(stac_ingestion_instance): - return stac_ingestion_instance.get_test_titiler_search_request() diff --git a/integration_tests/tests/eoapi_tests/fixtures/test_collection.json b/integration_tests/tests/eoapi_tests/fixtures/test_collection.json deleted file mode 100644 index 4d34153..0000000 --- a/integration_tests/tests/eoapi_tests/fixtures/test_collection.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "id":"test_collection", - "type":"Collection", - "links": [], - "title":"Test Collection", - "extent": { - "spatial": { - "bbox": [ - [ - -180, 51.6, 180, 78 - ] - ] - }, - "temporal": { - "interval": [ - [ - "2019-01-01T00:00:00.000Z", - "2021-01-01T00:00:00.000Z" - ] - ] - } - }, - "license":"CC-BY", - "description":"Test collection", - "item_assets": { - "tif": { - "type":"image/tiff; application=geotiff; profile=cloud-optimized", - "roles": [ - "data", - "layer" - ], - "title":"Test collection", - "description":"Test collection" - }, - "csv": { - "type":"text/csv", - "roles": [ - "data" - ], - "title":"CSV", - "description":"Test collection" - } - }, - "stac_version":"1.0.0" -} \ No newline at end of file diff --git a/integration_tests/tests/eoapi_tests/fixtures/test_item.json 
b/integration_tests/tests/eoapi_tests/fixtures/test_item.json deleted file mode 100644 index bff8ea9..0000000 --- a/integration_tests/tests/eoapi_tests/fixtures/test_item.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "type": "Feature", - "id": "test_item", - "stac_version":"1.0.0", - "collection": "test_collection", - "links":[{"rel":"collection","type":"application/json","href":"https://stac.dit.maap-project.org/collections/test_collection"},{"rel":"parent","type":"application/json","href":"https://stac.dit.maap-project.org/collections/test_collection"},{"rel":"root","type":"application/json","href":"https://stac.dit.maap-project.org/"},{"rel":"self","type":"application/geo+json","href":"https://stac.dit.maap-project.org/collections/test_collection/items/test_item1"}], - "bbox":[-78.40290984426046,51.07724585591961,-77.04127077089376,51.92130718597872], - "assets":{"csv":{"href":"s3://nasa-maap-data-store/file-staging/nasa-map/icesat2-boreal/boreal_agb_202302031675450345_0177_train_data.csv","type":"text/csv","roles":["data"],"title":"CSV","description":"CSV of training data"},"tif":{"href":"s3://nasa-maap-data-store/file-staging/nasa-map/icesat2-boreal/boreal_agb_202302031675450345_0177.tif","type":"image/tiff; application=geotiff; profile=cloud-optimized","roles":["data"],"title":"Cloud Optimized GeoTIFF of boreal data","description":"Cloud Optimized GeoTIFF of boreal 
data","raster:bands":[{"scale":1.0,"nodata":-9999.0,"offset":0.0,"sampling":"area","data_type":"float32","histogram":{"max":105.78067016601562,"min":7.345508575439453,"count":11,"buckets":[2194,2380,972,469,298,184,85,30,8,4]},"statistics":{"mean":25.281058933423914,"stddev":13.868902983070951,"maximum":105.78067016601562,"minimum":7.345508575439453,"valid_percent":0.6317138671875}},{"scale":1.0,"nodata":-9999.0,"offset":0.0,"sampling":"area","data_type":"float32","histogram":{"max":60.75077819824219,"min":1.4587666988372803,"count":11,"buckets":[5140,1167,250,42,14,4,3,1,2,1]},"statistics":{"mean":5.982097533589976,"stddev":3.7930746502586974,"maximum":60.75077819824219,"minimum":1.4587666988372803,"valid_percent":0.6317138671875}}]}}, - "properties":{"datetime":"2023-02-15T00:00:00+00:00","proj:bbox":[4598521.999999994,5643304.000000009,4688521.999999994,5733304.000000009],"proj:shape":[3000,3000],"proj:geometry":{"type":"Polygon","coordinates":[[[4598521.999999994,5643304.000000009],[4688521.999999994,5643304.000000009],[4688521.999999994,5733304.000000009],[4598521.999999994,5733304.000000009],[4598521.999999994,5643304.000000009]]]},"proj:transform":[30.0,0.0,4598521.999999994,0.0,-30.0,5733304.000000009,0.0,0.0,1.0]}, - "stac_extensions":["https://stac-extensions.github.io/projection/v1.1.0/schema.json","https://stac-extensions.github.io/raster/v1.1.0/schema.json"], - "geometry":{"type":"Polygon","coordinates":[[[14.114837413118664,67.218607039971],[12.438229073696998,67.70894310918132],[11.17860397724852,67.06631312684836],[12.837754767891637,66.58867761064732],[14.114837413118664,67.218607039971]]]} -} \ No newline at end of file diff --git a/integration_tests/tests/eoapi_tests/fixtures/test_titiler_search_request.json b/integration_tests/tests/eoapi_tests/fixtures/test_titiler_search_request.json deleted file mode 100644 index 62aa226..0000000 --- a/integration_tests/tests/eoapi_tests/fixtures/test_titiler_search_request.json +++ /dev/null @@ -1,3 +0,0 @@ 
-{ - "collections": ["test_collection"] -} \ No newline at end of file diff --git a/integration_tests/tests/eoapi_tests/ingestion.py b/integration_tests/tests/eoapi_tests/ingestion.py deleted file mode 100644 index 62f3bc9..0000000 --- a/integration_tests/tests/eoapi_tests/ingestion.py +++ /dev/null @@ -1,166 +0,0 @@ -import json -import os - -import boto3 -import pystac -import requests -from pystac import STACValidationError -from settings import eoapiDeploymentSettings - - -class StacIngestion: - - """Class representing various test operations""" - - def __init__(self): - self.eoapi_deployment_settings = eoapiDeploymentSettings() - self.current_file_path = os.path.dirname(os.path.abspath(__file__)) - self.headers = self.get_headers() - - def validate_collection(self, collection): - try: - pystac.validation.validate_dict(collection) - except STACValidationError: - raise STACValidationError("Validation failed for the collection") - - def validate_item(self, item): - try: - pystac.validation.validate_dict(item) - except STACValidationError: - raise STACValidationError("Validation failed for the item") - - def get_authentication_token(self) -> str: - if not self.eoapi_deployment_settings.secret_id: - raise ValueError("You should provide a secret id") - - client = boto3.client("secretsmanager", region_name="us-west-2") - - try: - res_secret = client.get_secret_value( - SecretId=self.eoapi_deployment_settings.secret_id - ) - except client.exceptions.ResourceNotFoundException: - raise Exception( - "Unable to find a secret for " - "{self.eoapi_deployment_settings.secret_id}. " - "\n\nHint: Check your stage and service id." - "Also, verify that the correct " - "AWS_PROFILE is set on your environment." 
- ) - - # Authentication - Get TOKEN - secret = json.loads(res_secret["SecretString"]) - client_secret = secret["client_secret"] - client_id = secret["client_id"] - cognito_domain = secret["cognito_domain"] - scope = secret["scope"] - - res_token = requests.post( - f"{cognito_domain}/oauth2/token", - headers={ - "Content-Type": "application/x-www-form-urlencoded", - }, - auth=(client_id, client_secret), - data={ - "grant_type": "client_credentials", - # A space-separated list of scopes - # to request for the generated access token. - "scope": scope, - }, - ) - - token = res_token.json()["access_token"] - return token - - def get_headers(self) -> dict: - if self.eoapi_deployment_settings.secret_id: - return { - "headers": { - "Authorization": f"bearer {self.get_authentication_token()}" - } - } - else: - return {"params": {"provided_by": "eoapi-tests"}} - - def insert_collection(self, collection): - response = requests.post( - self.eoapi_deployment_settings.ingestor_url - + self.eoapi_deployment_settings.collections_endpoint, - json=collection, - **self.headers, - ) - return response - - def insert_item(self, item): - response = requests.post( - self.eoapi_deployment_settings.ingestor_url - + self.eoapi_deployment_settings.items_endpoint, - json=item, - **self.headers, - ) - return response - - def query_collection(self, collection_id): - response = requests.get( - self.eoapi_deployment_settings.stac_api_url - + self.eoapi_deployment_settings.collections_endpoint - + f"/{collection_id}" - ) - return response - - def query_items(self, collection_id): - response = requests.get( - self.eoapi_deployment_settings.stac_api_url - + self.eoapi_deployment_settings.collections_endpoint - + f"/{collection_id}/items" - ) - return response - - def register_mosaic(self, search_request): - response = requests.post( - self.eoapi_deployment_settings.titiler_pgstac_api_url + "/mosaic/register", - json=search_request, - ) - return response - - def list_mosaic_assets(self, search_id): - 
"""list the assets of the first tile""" - response = requests.get( - self.eoapi_deployment_settings.titiler_pgstac_api_url - + f"/mosaic/{search_id}/tiles/0/0/0/assets" - ) - return response - - def get_test_collection(self): - with open( - os.path.join(self.current_file_path, "fixtures", "test_collection.json"), - "r", - ) as f: - test_collection = json.load(f) - return test_collection - - def get_test_item(self): - with open( - os.path.join(self.current_file_path, "fixtures", "test_item.json"), "r" - ) as f: - test_item = json.load(f) - return test_item - - def get_test_titiler_search_request(self): - with open( - os.path.join( - self.current_file_path, "fixtures", "test_titiler_search_request.json" - ), - "r", - ) as f: - test_titiler_search_request = json.load(f) - return test_titiler_search_request - - def delete_collection(self, collection_id): - response = requests.delete( - self.eoapi_deployment_settings.ingestor_url - + self.eoapi_deployment_settings.collections_endpoint - + f"/{collection_id}", - **self.headers, - ) - return response diff --git a/integration_tests/tests/eoapi_tests/settings.py b/integration_tests/tests/eoapi_tests/settings.py deleted file mode 100644 index de5d60e..0000000 --- a/integration_tests/tests/eoapi_tests/settings.py +++ /dev/null @@ -1,12 +0,0 @@ -from typing import Optional - -from pydantic_settings import BaseSettings - - -class eoapiDeploymentSettings(BaseSettings): - ingestor_url: str - stac_api_url: str - titiler_pgstac_api_url: str - secret_id: Optional[str] = None - collections_endpoint: Optional[str] = "/collections" - items_endpoint: Optional[str] = "/ingestions" diff --git a/integration_tests/tests/eoapi_tests/test_stac_ingestion.py b/integration_tests/tests/eoapi_tests/test_stac_ingestion.py deleted file mode 100644 index d09344a..0000000 --- a/integration_tests/tests/eoapi_tests/test_stac_ingestion.py +++ /dev/null @@ -1,123 +0,0 @@ -import time - -import pystac -import pytest - -INSERTION_WAIT_TIME = 10 - -# Test 
validating the collection -pytest.mark.order(0) - - -def test_validate_collection(test_collection): - pystac.validation.validate_dict(test_collection) - - -# Test validating the item -pytest.mark.order(1) - - -def test_validate_item(test_item): - pystac.validation.validate_dict(test_item) - - -# Test inserting collection -pytest.mark.order(2) - - -def test_insert_collection(stac_ingestion_instance, test_collection): - response = stac_ingestion_instance.insert_collection(test_collection) - assert response.status_code in [ - 200, - 201, - ], f"Failed to insert the test_collection :\n{response.text}" - # Wait for the collection to be inserted - time.sleep(INSERTION_WAIT_TIME) - - -# Test inserting item -pytest.mark.order(3) - - -def test_insert_item(stac_ingestion_instance, test_item): - response = stac_ingestion_instance.insert_item(test_item) - assert response.status_code in [ - 200, - 201, - ], f"Failed to insert the test_item :\n{response.text}" - # Wait for the item to be inserted - time.sleep(INSERTION_WAIT_TIME) - - -# Test querying collection and verifying inserted collection -pytest.mark.order(4) - - -def test_query_collection(stac_ingestion_instance, test_collection): - response = stac_ingestion_instance.query_collection(test_collection["id"]) - assert response.status_code in [ - 200, - 201, - ], f"Failed to query the test_collection :\n{response.text}" - - -# Test registering a mosaic and querying its assets -pytest.mark.order(5) - - -def test_titiler_pgstac( - stac_ingestion_instance, test_titiler_search_request, test_item -): - register_response = stac_ingestion_instance.register_mosaic( - test_titiler_search_request - ) - assert register_response.status_code in [ - 200, - 201, - ], f"Failed to register the mosaic :\n{register_response.text}" - search_id = register_response.json()["searchid"] - # allow for some time for the mosaic to be inserted - time.sleep(INSERTION_WAIT_TIME) - asset_query_response = 
stac_ingestion_instance.list_mosaic_assets(search_id) - assert asset_query_response.status_code in [ - 200, - 201, - ], "Failed to query the mosaic's assets" - "for mosaic {search_id} :\n{asset_query_response.text}" - assets_json = asset_query_response.json() - # expects a single item in the collection - assert len(assets_json) == 1 - assert all([k in assets_json[0]["assets"] for k in test_item["assets"].keys()]) - - -# Test querying items and verifying inserted items -pytest.mark.order(6) - - -def test_query_items(stac_ingestion_instance, test_collection, test_item): - response = stac_ingestion_instance.query_items(test_collection["id"]) - assert response.status_code in [ - 200, - 201, - ], f"Failed to query the items :\n{response.text}" - item = response.json()["features"][0] - assert ( - item["id"] == test_item["id"] - ), f"Inserted item - {test_item} \n not found in the queried items {item}" - - -# Test querying collection and verifying inserted collection -pytest.mark.order(7) - - -def test_delete_collection(stac_ingestion_instance, test_collection): - response = stac_ingestion_instance.delete_collection(test_collection["id"]) - assert response.status_code in [ - 200, - 201, - ], f"Failed to delete the test_collection :\n{response.text}" - - -# Run the tests -if __name__ == "__main__": - pytest.main() diff --git a/integration_tests/tests/pyproject.toml b/integration_tests/tests/pyproject.toml deleted file mode 100644 index 06213cc..0000000 --- a/integration_tests/tests/pyproject.toml +++ /dev/null @@ -1,21 +0,0 @@ -[build-system] -requires = ["setuptools", "wheel"] - -[project] -name = "eoapi_tests" -version = "0.1.0" -description = "test suite for eoAPI deployments" -authors = [ - {name = "Emile Tenezakis", email = "emile@developmentseed.org"} -] -license = {file = "LICENSE"} - -dependencies = [ - "pytest==7.4.0", - "boto3==1.28.39", - "pystac==1.8.3", - "pystac[validation]==1.8.3", - "requests==2.31.0", - "pydantic-settings==2.0.3", - "pytest-order==1.1.0" 
-] \ No newline at end of file diff --git a/lib/database/bootstrapper_runtime/handler.py b/lib/database/bootstrapper_runtime/handler.py index 0334729..9313d3c 100644 --- a/lib/database/bootstrapper_runtime/handler.py +++ b/lib/database/bootstrapper_runtime/handler.py @@ -58,7 +58,7 @@ def send( headers = {"content-type": "", "content-length": str(len(json_responseBody))} try: - response = httpx.put(responseUrl, data=json_responseBody, headers=headers) + response = httpx.put(responseUrl, data=json_responseBody, headers=headers, timeout=30) print("Status code: " + response.status_code) except Exception as e: print("send(..) failed executing httpx.put(..): " + str(e)) diff --git a/lib/database/index.ts b/lib/database/index.ts index 481f322..99ff510 100644 --- a/lib/database/index.ts +++ b/lib/database/index.ts @@ -73,8 +73,7 @@ export class PgStacDatabase extends Construct { // overwrites defaults with user-provided configurable properties ...props.bootstrapperLambdaFunctionOptions, // Non configurable properties that are going to be overwritten even if provided by the user - vpc: hasVpc(this.db) ? this.db.vpc : props.vpc, - allowPublicSubnet: true + vpc: hasVpc(this.db) ? this.db.vpc : props.vpc }); this.pgstacSecret = new secretsmanager.Secret(this, "bootstrappersecret", {