diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml
index 232d1498a..030906523 100644
--- a/.github/workflows/changelog.yml
+++ b/.github/workflows/changelog.yml
@@ -13,4 +13,4 @@ on:
 
 jobs:
   call-changelog-check-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-changelog-check.yml@v0.13.2
+    uses: ASFHyP3/actions/.github/workflows/reusable-changelog-check.yml@v0.15.0
diff --git a/.github/workflows/create-jira-issue.yml b/.github/workflows/create-jira-issue.yml
index 7646baa5f..b7ffba87b 100644
--- a/.github/workflows/create-jira-issue.yml
+++ b/.github/workflows/create-jira-issue.yml
@@ -6,7 +6,7 @@ on:
 
 jobs:
   call-create-jira-issue-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-create-jira-issue.yml@v0.13.2
+    uses: ASFHyP3/actions/.github/workflows/reusable-create-jira-issue.yml@v0.15.0
     secrets:
       JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
       JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
diff --git a/.github/workflows/deploy-daac.yml b/.github/workflows/deploy-daac.yml
index 1b9206ca0..160055119 100644
--- a/.github/workflows/deploy-daac.yml
+++ b/.github/workflows/deploy-daac.yml
@@ -112,6 +112,6 @@ jobs:
   call-bump-version-workflow:
     if: github.ref == 'refs/heads/main'
     needs: deploy
-    uses: ASFHyP3/actions/.github/workflows/reusable-bump-version.yml@v0.13.2
+    uses: ASFHyP3/actions/.github/workflows/reusable-bump-version.yml@v0.15.0
     secrets:
       USER_TOKEN: ${{ secrets.TOOLS_BOT_PAK }}
diff --git a/.github/workflows/deploy-opera-disp-sandbox.yml b/.github/workflows/deploy-opera-disp-sandbox.yml
new file mode 100644
index 000000000..7c590798a
--- /dev/null
+++ b/.github/workflows/deploy-opera-disp-sandbox.yml
@@ -0,0 +1,80 @@
+name: Deploy OPERA-DISP Sandbox Stack to AWS
+
+on:
+  push:
+    branches:
+      - develop
+
+concurrency: ${{ github.workflow }}-${{ github.ref }}
+
+jobs:
+  deploy:
+    runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - environment: hyp3-opera-disp-sandbox
+            domain: hyp3-opera-disp-sandbox.asf.alaska.edu
+            template_bucket: cf-templates-1hz9ldhhl4ahu-us-west-2
+            image_tag: test
+            product_lifetime_in_days: 14
+            default_credits_per_user: 0
+            default_application_status: APPROVED
+            cost_profile: DEFAULT
+            deploy_ref: refs/heads/develop
+            job_files: >-
+              job_spec/OPERA_DISP_TMS.yml
+            instance_types: r6id.xlarge,r6id.2xlarge,r6id.4xlarge,r6id.8xlarge,r6idn.xlarge,r6idn.2xlarge,r6idn.4xlarge,r6idn.8xlarge
+            default_max_vcpus: 640
+            expanded_max_vcpus: 640
+            required_surplus: 0
+            security_environment: ASF
+            ami_id: /aws/service/ecs/optimized-ami/amazon-linux-2023/recommended/image_id
+            distribution_url: ''
+
+    environment:
+      name: ${{ matrix.environment }}
+      url: https://${{ matrix.domain }}
+
+    steps:
+      - uses: actions/checkout@v4.2.2
+
+      - uses: aws-actions/configure-aws-credentials@v4
+        with:
+          aws-access-key-id: ${{ secrets.V2_AWS_ACCESS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.V2_AWS_SECRET_ACCESS_KEY }}
+          aws-session-token: ${{ secrets.V2_AWS_SESSION_TOKEN }}
+          aws-region: ${{ secrets.AWS_REGION }}
+
+      - uses: actions/setup-python@v5
+        with:
+          python-version: 3.13
+
+      - uses: ./.github/actions/deploy-hyp3
+        with:
+          TEMPLATE_BUCKET: ${{ matrix.template_bucket }}
+          STACK_NAME: ${{ matrix.environment }}
+          DOMAIN_NAME: ${{ matrix.domain }}
+          API_NAME: ${{ matrix.environment }}
+          CERTIFICATE_ARN: ${{ secrets.CERTIFICATE_ARN }}
+          IMAGE_TAG: ${{ matrix.image_tag }}
+          PRODUCT_LIFETIME: ${{ matrix.product_lifetime_in_days }}
+          VPC_ID: ${{ secrets.VPC_ID }}
+          SUBNET_IDS: ${{ secrets.SUBNET_IDS }}
+          SECRET_ARN: ${{ secrets.SECRET_ARN }}
+          CLOUDFORMATION_ROLE_ARN: ${{ secrets.CLOUDFORMATION_ROLE_ARN }}
+          DEFAULT_CREDITS_PER_USER: ${{ matrix.default_credits_per_user }}
+          DEFAULT_APPLICATION_STATUS: ${{ matrix.default_application_status }}
+          COST_PROFILE: ${{ matrix.cost_profile }}
+          JOB_FILES: ${{ matrix.job_files }}
+          DEFAULT_MAX_VCPUS: ${{ matrix.default_max_vcpus }}
+          EXPANDED_MAX_VCPUS: ${{ matrix.expanded_max_vcpus }}
+          MONTHLY_BUDGET: ${{ secrets.MONTHLY_BUDGET }}
+          REQUIRED_SURPLUS: ${{ matrix.required_surplus }}
+          ORIGIN_ACCESS_IDENTITY_ID: ${{ secrets.ORIGIN_ACCESS_IDENTITY_ID }}
+          SECURITY_ENVIRONMENT: ${{ matrix.security_environment }}
+          AMI_ID: ${{ matrix.ami_id }}
+          INSTANCE_TYPES: ${{ matrix.instance_types }}
+          DISTRIBUTION_URL: ${{ matrix.distribution_url }}
+          AUTH_PUBLIC_KEY: ${{ secrets.AUTH_PUBLIC_KEY }}
diff --git a/.github/workflows/labeled-pr.yml b/.github/workflows/labeled-pr.yml
index 465aaa825..2d4eb5bf3 100644
--- a/.github/workflows/labeled-pr.yml
+++ b/.github/workflows/labeled-pr.yml
@@ -12,4 +12,4 @@ on:
 
 jobs:
   call-labeled-pr-check-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-labeled-pr-check.yml@v0.13.2
+    uses: ASFHyP3/actions/.github/workflows/reusable-labeled-pr-check.yml@v0.15.0
diff --git a/.github/workflows/release-template-comment.yml b/.github/workflows/release-template-comment.yml
index 1dcb24a30..ba1a9c4ad 100644
--- a/.github/workflows/release-template-comment.yml
+++ b/.github/workflows/release-template-comment.yml
@@ -7,7 +7,7 @@ on:
 
 jobs:
   call-release-checklist-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-release-checklist-comment.yml@v0.13.2
+    uses: ASFHyP3/actions/.github/workflows/reusable-release-checklist-comment.yml@v0.15.0
     permissions:
       pull-requests: write
     with:
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index b4852be3f..e01360300 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -7,7 +7,7 @@ on:
 
 jobs:
   call-release-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-release.yml@v0.13.2
+    uses: ASFHyP3/actions/.github/workflows/reusable-release.yml@v0.15.0
     with:
       release_prefix: HyP3
     secrets:
diff --git a/.github/workflows/static-analysis.yml b/.github/workflows/static-analysis.yml
index 6a80718b4..3a4b6bfb3 100644
--- a/.github/workflows/static-analysis.yml
+++ b/.github/workflows/static-analysis.yml
@@ -8,10 +8,10 @@ on: push
 jobs:
   call-ruff-workflow:
     # Docs: https://github.com/ASFHyP3/actions
-    uses: ASFHyP3/actions/.github/workflows/reusable-ruff.yml@v0.13.2
+    uses: ASFHyP3/actions/.github/workflows/reusable-ruff.yml@v0.15.0
 
   call-mypy-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-mypy.yml@v0.14.0
+    uses: ASFHyP3/actions/.github/workflows/reusable-mypy.yml@v0.15.0
 
   cfn-lint:
     runs-on: ubuntu-latest
@@ -84,4 +84,4 @@ jobs:
           snyk iac test --severity-threshold=high
 
   call-secrets-analysis-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-secrets-analysis.yml@v0.13.2
+    uses: ASFHyP3/actions/.github/workflows/reusable-secrets-analysis.yml@v0.15.0
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b19ae904d..1dc611399 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,16 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [9.3.0]
+
+### Added
+- Added `velocity` option for the `tile_type` parameter of `OPERA_DISP_TMS` jobs
+- Restored previously deleted `hyp3-opera-disp-sandbox` deployment
+- Added validator to check that bounds provided do not exceed maximum size for SRG jobs
+
+### Removed
+- Removed default bounds option for SRG jobs
+
 ## [9.2.0]
 
 ### Added
@@ -345,7 +355,7 @@ HyP3's monthly quota system has been replaced by a credits system. Previously, H
 ### Changed
 - Update `INSAR_ISCE` and `INSAR_ISCE_TEST` job spec for GUNW version 3+ standard and custom products
   - `frame_id` is now a required parameter and has no default
-  - `compute_solid_earth_tide` and `estimate_ionosphere_delay` now default to `true` 
+  - `compute_solid_earth_tide` and `estimate_ionosphere_delay` now default to `true`
 - `INSAR_ISCE_TEST` exposes custom `goldstein_filter_power`, `output_resolution`, `dense_offsets`, and `unfiltered_coherence` parameters
 
 ## [4.4.1]
@@ -427,7 +437,7 @@ HyP3's monthly quota system has been replaced by a credits system. Previously, H
 ## [3.10.8]
 ### Changed
 - HyP3 deployments at JPL now use On Demand instances instead of Spot instances to prevent `INSAR_ISCE` jobs from being interrupted.
-  This *should* be a temporary change. 
+  This *should* be a temporary change.
 
 ## [3.10.7]
 ### Changed
@@ -571,7 +581,7 @@ HyP3's monthly quota system has been replaced by a credits system. Previously, H
 ## [3.2.0]
 ### Added
 - [`job_spec`s](job_spec/) can now specify a required set of secrets and an AWS Secrets Manager Secret ARN to pull the
-  secret values from. Notably, secrets are now externally managed and not part of the HyP3 stack. 
+  secret values from. Notably, secrets are now externally managed and not part of the HyP3 stack.
 
 ## [3.1.2]
 ### Added
@@ -596,13 +606,13 @@ HyP3's monthly quota system has been replaced by a credits system. Previously, H
 - The `flood_depth_estimator` parameter for `WATER_MAP` jobs is now restricted to a set of possible values.
 - Changed the default value for the `flood_depth_estimator` parameter for `WATER_MAP` jobs from `iterative` to
   `None`. A value of `None` indicates that a flood map will not be included.
-- Reduced `ITS_LIVE` product lifetime cycle from 180 days to 45 days. 
+- Reduced `ITS_LIVE` product lifetime cycle from 180 days to 45 days.
 
 ### Removed
 - Removed the `include_flood_depth` parameter for `WATER_MAP` jobs.
 
 ## [2.25.0]
 ### Added
-- `INSAR_ISCE` and `INSAR_ISCE_TEST` jobs now accept a `weather_model` parameter to specify which weather model to use 
+- `INSAR_ISCE` and `INSAR_ISCE_TEST` jobs now accept a `weather_model` parameter to specify which weather model to use
   when estimating tropospheric delay data.
 - Increases the memory available to `AUTORIFT` jobs for Landsat pairs
@@ -642,7 +652,7 @@ HyP3's monthly quota system has been replaced by a credits system. Previously, H
 
 ## [2.21.8]
 ### Changed
-- AUTORIFT jobs for Sentinel-2 scenes can now only be submitted using ESA naming convention. 
+- AUTORIFT jobs for Sentinel-2 scenes can now only be submitted using ESA naming convention.
 
 ## [2.21.7]
 ### Changed
@@ -701,7 +711,7 @@ HyP3's monthly quota system has been replaced by a credits system. Previously, H
 
 ## [2.19.4]
 ### Changed
-- `scale-cluster` now adjusts the compute environment size based on total month-to-date spending, rather than only EC2 
+- `scale-cluster` now adjusts the compute environment size based on total month-to-date spending, rather than only EC2
   spending.
 
 ## [2.19.3]
@@ -865,7 +875,7 @@ HyP3's monthly quota system has been replaced by a credits system. Previously, H
   - `ASF` (default) -- AWS accounts managed by the Alaska Satellite Facility
   - `EDC` -- AWS accounts managed by the NASA Earthdata Cloud
   - `JPL` -- AWS accounts managed by the NASA Jet Propulsion Laboratory
-- A `security_environment` Make variable used by the `render` target (and any target that depends on `render`). 
+- A `security_environment` Make variable used by the `render` target (and any target that depends on `render`).
   Use like `make security_environment=ASF build`
 
 ### Changed
@@ -936,10 +946,10 @@ HyP3's monthly quota system has been replaced by a credits system. Previously, H
 
 ## [2.6.2](https://github.com/ASFHyP3/hyp3/compare/v2.6.1...v2.6.2)
 ### Added
-- New `AmiId` stack parameter to specify a specific AMI for the AWS Batch compute environment 
+- New `AmiId` stack parameter to specify a specific AMI for the AWS Batch compute environment
 
 ### Changed
-- `job_spec/*.yml` files are now explicitly selected allowing per-deployment job customization 
+- `job_spec/*.yml` files are now explicitly selected allowing per-deployment job customization
 
 ### Removed
 - `AutoriftImage`, `AutoriftNamingScheme`, and `AutoriftParameterFile` CloudFormation stack parameters
@@ -994,7 +1004,7 @@ to the database but still validate it.
 
   - `name` gets only subscriptions with the given name
   - `job_type` gets only subscriptions with the given job type
   - `enabled` gets only subscriptions where `enabled` matches
-- subscriptions now include `creation_date` which indicates date and time of subscription creation, responses from 
+- subscriptions now include `creation_date` which indicates date and time of subscription creation, responses from
   `GET /subscriptions` are sorted by `creation_date` descending
 
@@ -1033,7 +1043,7 @@ to the database but still validate it.
 
 - `lib/dynamo` library to allow sharing common code among different apps.
 
 ## Changed
-- `POST /jobs` responses no longer include the `job_id`, `request_time`, `status_code`, or `user_id` fields when `validate_only=true` 
+- `POST /jobs` responses no longer include the `job_id`, `request_time`, `status_code`, or `user_id` fields when `validate_only=true`
 - moved dynamodb functionality from `hyp3_api/dynamo` to `lib/dynamo`
 - moved job creation business logic from `hyp3_api/handlers` to `lib/dynamo`
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index 4547fa8c7..091d0be46 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -3,14 +3,10 @@
 ## Our Pledge
 
 We as members, contributors, and leaders pledge to make participation in our
-community a harassment-free experience for everyone, regardless of age, body
-size, visible or invisible disability, ethnicity, sex characteristics, gender
-identity and expression, level of experience, education, socio-economic status,
-nationality, personal appearance, race, religion, or sexual identity
-and orientation.
+community a harassment-free experience for everyone.
 
 We pledge to act and interact in ways that contribute to an open, welcoming,
-diverse, inclusive, and healthy community.
+and healthy community.
 
 ## Our Standards
 
diff --git a/apps/api/src/hyp3_api/validation.py b/apps/api/src/hyp3_api/validation.py
index 1358b1feb..7a882bea4 100644
--- a/apps/api/src/hyp3_api/validation.py
+++ b/apps/api/src/hyp3_api/validation.py
@@ -23,19 +23,19 @@ class BoundsValidationError(Exception):
     pass
 
 
-with open(Path(__file__).parent / 'job_validation_map.yml') as f:
-    JOB_VALIDATION_MAP = yaml.safe_load(f.read())
+with open(Path(__file__).parent / 'job_validation_map.yml') as job_validation_map_file:
+    JOB_VALIDATION_MAP = yaml.safe_load(job_validation_map_file.read())
 
 
-def has_sufficient_coverage(granule: Polygon):
+def _has_sufficient_coverage(granule: Polygon):
     global DEM_COVERAGE
     if DEM_COVERAGE is None:
-        DEM_COVERAGE = get_multipolygon_from_geojson('dem_coverage_map_cop30.geojson')
+        DEM_COVERAGE = _get_multipolygon_from_geojson('dem_coverage_map_cop30.geojson')
 
     return granule.intersects(DEM_COVERAGE)
 
 
-def get_cmr_metadata(granules):
+def _get_cmr_metadata(granules):
     cmr_parameters = {
         'granule_ur': [f'{granule}*' for granule in granules],
         'options[granule_ur][pattern]': 'true',
@@ -58,27 +58,27 @@ def get_cmr_metadata(granules):
     granules = [
         {
             'name': entry.get('producer_granule_id', entry.get('title')),
-            'polygon': Polygon(format_points(entry['polygons'][0][0])),
+            'polygon': Polygon(_format_points(entry['polygons'][0][0])),
         }
         for entry in response.json()['feed']['entry']
     ]
     return granules
 
 
-def is_third_party_granule(granule):
+def _is_third_party_granule(granule):
     return granule.startswith('S2') or granule.startswith('L')
 
 
-def check_granules_exist(granules, granule_metadata):
+def _make_sure_granules_exist(granules, granule_metadata):
     found_granules = [granule['name'] for granule in granule_metadata]
     not_found_granules = set(granules) - set(found_granules)
-    not_found_granules = {granule for granule in not_found_granules if not is_third_party_granule(granule)}
+    not_found_granules = {granule for granule in not_found_granules if not _is_third_party_granule(granule)}
    if not_found_granules:
        raise GranuleValidationError(f'Some requested scenes could not be found: {", ".join(not_found_granules)}')
 
 
 def check_dem_coverage(_, granule_metadata):
-    bad_granules = [g['name'] for g in granule_metadata if not has_sufficient_coverage(g['polygon'])]
+    bad_granules = [g['name'] for g in granule_metadata if not _has_sufficient_coverage(g['polygon'])]
     if bad_granules:
         raise GranuleValidationError(f'Some requested scenes do not have DEM coverage: {", ".join(bad_granules)}')
 
@@ -126,13 +126,13 @@ def check_not_antimeridian(_, granule_metadata):
         raise GranuleValidationError(msg)
 
 
-def format_points(point_string):
+def _format_points(point_string):
     converted_to_float = [float(x) for x in point_string.split(' ')]
     points = [list(t) for t in zip(converted_to_float[1::2], converted_to_float[::2])]
     return points
 
 
-def get_multipolygon_from_geojson(input_file):
+def _get_multipolygon_from_geojson(input_file):
     dem_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), input_file)
     with open(dem_file) as f:
         shp = json.load(f)['features'][0]['geometry']
@@ -143,7 +143,7 @@ def check_bounds_formatting(job, _):
     bounds = job['job_parameters']['bounds']
     if bounds == [0.0, 0.0, 0.0, 0.0]:
-        return
+        raise BoundsValidationError('Invalid bounds. Bounds cannot be [0, 0, 0, 0].')
 
     if bounds[0] >= bounds[2] or bounds[1] >= bounds[3]:
         raise BoundsValidationError(
@@ -165,9 +165,9 @@ def bad_lon(lon):
 def check_granules_intersecting_bounds(job, granule_metadata):
     bounds = job['job_parameters']['bounds']
     if bounds == [0.0, 0.0, 0.0, 0.0]:
-        bounds = granule_metadata[0]['polygon']
-    else:
-        bounds = Polygon.from_bounds(*bounds)
+        raise BoundsValidationError('Invalid bounds. Bounds cannot be [0, 0, 0, 0].')
+
+    bounds = Polygon.from_bounds(*bounds)
     bad_granules = []
     for granule in granule_metadata:
         bbox = granule['polygon']
@@ -177,7 +177,7 @@ def check_granules_intersecting_bounds(job, granule_metadata):
         raise GranuleValidationError(f'The following granules do not intersect the provided bounds: {bad_granules}.')
 
 
-def check_same_relative_orbits(job, granule_metadata):
+def check_same_relative_orbits(_, granule_metadata):
     previous_relative_orbit = None
     for granule in granule_metadata:
         name_split = granule['name'].split('_')
@@ -194,7 +194,7 @@
     )
 
 
-def convert_single_burst_jobs(jobs: list[dict]) -> list[dict]:
+def _convert_single_burst_jobs(jobs: list[dict]) -> list[dict]:
     jobs = deepcopy(jobs)
     for job in jobs:
         if job['job_type'] == 'INSAR_ISCE_BURST':
@@ -204,13 +204,24 @@
     return jobs
 
 
+def check_bounding_box_size(job: dict, _, max_bounds_area: float = 4.5):
+    bounds = job['job_parameters']['bounds']
+
+    bounds_area = (bounds[3] - bounds[1]) * (bounds[2] - bounds[0])
+
+    if bounds_area > max_bounds_area:
+        raise BoundsValidationError(
+            f'Bounds must be smaller than {max_bounds_area} degrees squared. Box provided was {bounds_area:.2f}'
+        )
+
+
 def validate_jobs(jobs: list[dict]) -> None:
-    jobs = convert_single_burst_jobs(jobs)
+    jobs = _convert_single_burst_jobs(jobs)
 
     granules = get_granules(jobs)
-    granule_metadata = get_cmr_metadata(granules)
+    granule_metadata = _get_cmr_metadata(granules)
 
-    check_granules_exist(granules, granule_metadata)
+    _make_sure_granules_exist(granules, granule_metadata)
     for job in jobs:
         for validator_name in JOB_VALIDATION_MAP[job['job_type']]:
             job_granule_metadata = [granule for granule in granule_metadata if granule['name'] in get_granules([job])]
diff --git a/apps/disable-private-dns/src/disable_private_dns.py b/apps/disable-private-dns/src/disable_private_dns.py
index 53c205744..4f4e302aa 100644
--- a/apps/disable-private-dns/src/disable_private_dns.py
+++ b/apps/disable-private-dns/src/disable_private_dns.py
@@ -34,10 +34,10 @@ def set_private_dns_disabled(endpoint_id):
 def disable_private_dns(vpc_id, endpoint_name):
     endpoint = get_endpoint(vpc_id, endpoint_name)
     if endpoint['PrivateDnsEnabled']:
-        print(f"Private DNS enabled for VPC Endpoint: {endpoint['VpcEndpointId']}, changing...")
+        print(f'Private DNS enabled for VPC Endpoint: {endpoint["VpcEndpointId"]}, changing...')
         set_private_dns_disabled(endpoint['VpcEndpointId'])
     else:
-        print(f"Private DNS already disabled for VPC Endpoint: {endpoint['VpcEndpointId']}, doing nothing.")
+        print(f'Private DNS already disabled for VPC Endpoint: {endpoint["VpcEndpointId"]}, doing nothing.')
 
 
 def lambda_handler(event, context):
diff --git a/job_spec/OPERA_DISP_TMS.yml b/job_spec/OPERA_DISP_TMS.yml
index 40294504c..9a5f953a8 100644
--- a/job_spec/OPERA_DISP_TMS.yml
+++ b/job_spec/OPERA_DISP_TMS.yml
@@ -13,6 +13,7 @@ OPERA_DISP_TMS:
         enum:
           - displacement
           - secant_velocity
+          - velocity
         example: displacement
     bounds:
       api_schema:
diff --git a/job_spec/SRG_GSLC.yml b/job_spec/SRG_GSLC.yml
index 92421982e..d420dc6fd 100644
--- a/job_spec/SRG_GSLC.yml
+++ b/job_spec/SRG_GSLC.yml
@@ -1,6 +1,7 @@
 SRG_GSLC:
   required_parameters:
     - granules
+    - bounds
   parameters:
     granules:
       api_schema:
@@ -20,22 +21,22 @@ SRG_GSLC:
     bounds:
       api_schema:
         type: array
-        description: Bounds for extent of processing, formatted like [min lon, min lat, max lon, max lat] in EPSG:4326. Setting to [0, 0, 0, 0] will use the extent of the first granule.
-        default: [0.0, 0.0, 0.0, 0.0]
+        description: Bounds for extent of processing, formatted like [min lon, min lat, max lon, max lat] in EPSG:4326.
         minItems: 4
         maxItems: 4
         example:
-          - -116.583
-          - 35.714
-          - -113.209
-          - 38.138
+          - -114.87
+          - 36.00
+          - -114.66
+          - 36.16
         items:
           type: number
-          example: -116.583
+          example: -114.87
     validators: [
       check_bounds_formatting,
       check_granules_intersecting_bounds,
-      check_same_relative_orbits
+      check_same_relative_orbits,
+      check_bounding_box_size
     ]
     cost_profiles:
       DEFAULT:
diff --git a/job_spec/SRG_TIME_SERIES.yml b/job_spec/SRG_TIME_SERIES.yml
index f073a338c..4a8929080 100644
--- a/job_spec/SRG_TIME_SERIES.yml
+++ b/job_spec/SRG_TIME_SERIES.yml
@@ -1,6 +1,7 @@
 SRG_TIME_SERIES:
   required_parameters:
     - granules
+    - bounds
   parameters:
     granules:
       api_schema:
@@ -21,22 +22,22 @@ SRG_TIME_SERIES:
     bounds:
       api_schema:
         type: array
-        description: Bounds for extent of processing, formatted like [min lon, min lat, max lon, max lat] in EPSG:4326. Setting to [0, 0, 0, 0] will use the extent of the first granule.
-        default: [0.0, 0.0, 0.0, 0.0]
+        description: Bounds for extent of processing, formatted like [min lon, min lat, max lon, max lat] in EPSG:4326.
         minItems: 4
         maxItems: 4
         example:
-          - -124.41473278572731
-          - 37.098700238673814
-          - -120.9825007499895
-          - 39.52359974376425
+          - -122.53
+          - 37.78
+          - -122.44
+          - 37.85
         items:
           type: number
-          example: -124.41473278572731
+          example: -122.53
     validators: [
       check_bounds_formatting,
      check_granules_intersecting_bounds,
-      check_same_relative_orbits
+      check_same_relative_orbits,
+      check_bounding_box_size
     ]
     cost_profiles:
       DEFAULT:
diff --git a/pyproject.toml b/pyproject.toml
index 17429969b..f4235043a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -39,6 +39,10 @@ warn_unused_ignores = true
 warn_unreachable = true
 strict_equality = true
 check_untyped_defs = true
+install_types = true
+non_interactive = true
+pretty = true
+disable_error_code = ["import-untyped"]
 exclude = [
     "/build/",
     "/setup\\.py$",
diff --git a/requirements-all.txt b/requirements-all.txt
index 5cfeef66c..bb1b9e051 100644
--- a/requirements-all.txt
+++ b/requirements-all.txt
@@ -5,14 +5,14 @@
 -r requirements-apps-start-execution-worker.txt
 -r requirements-apps-disable-private-dns.txt
 -r requirements-apps-update-db.txt
-boto3==1.35.93
+boto3==1.36.2
 jinja2==3.1.5
-moto[dynamodb]==5.0.26
+moto[dynamodb]==5.0.27
 pytest==8.3.4
 PyYAML==6.0.2
-responses==0.25.3
+responses==0.25.6
 ruff
 mypy
-setuptools==75.7.0
+setuptools==75.8.0
 openapi-spec-validator==0.7.1
-cfn-lint==1.22.3
+cfn-lint==1.22.5
diff --git a/requirements-apps-disable-private-dns.txt b/requirements-apps-disable-private-dns.txt
index 288abcb01..a174c07ee 100644
--- a/requirements-apps-disable-private-dns.txt
+++ b/requirements-apps-disable-private-dns.txt
@@ -1 +1 @@
-boto3==1.35.93
+boto3==1.36.2
diff --git a/requirements-apps-start-execution-manager.txt b/requirements-apps-start-execution-manager.txt
index 6aed09652..01b4b5af6 100644
--- a/requirements-apps-start-execution-manager.txt
+++ b/requirements-apps-start-execution-manager.txt
@@ -1,3 +1,3 @@
-boto3==1.35.93
+boto3==1.36.2
 ./lib/dynamo/
 ./lib/lambda_logging/
diff --git a/requirements-apps-start-execution-worker.txt b/requirements-apps-start-execution-worker.txt
index 59eab2e78..39b079b07 100644
--- a/requirements-apps-start-execution-worker.txt
+++ b/requirements-apps-start-execution-worker.txt
@@ -1,2 +1,2 @@
-boto3==1.35.93
+boto3==1.36.2
 ./lib/lambda_logging/
diff --git a/tests/test_api/test_list_jobs.py b/tests/test_api/test_list_jobs.py
index 021d1bce9..3761af27f 100644
--- a/tests/test_api/test_list_jobs.py
+++ b/tests/test_api/test_list_jobs.py
@@ -188,7 +188,8 @@ def test_bad_date_formats(client):
         '2020-13-01T00:00:00Z',
         '01-JAN-2020',
         '01/01/2020',
-        '2020-01-01' '2020-01-01T00:00Z',
+        '2020-01-01',
+        '2020-01-01T00:00Z',
         '2020-01-01T00:00:00',
         '2020-01-01T00:00:00+01',
         '2020-01-01T00:00:00+0100',
diff --git a/tests/test_api/test_validation.py b/tests/test_api/test_validation.py
index 556c4d707..424699645 100644
--- a/tests/test_api/test_validation.py
+++ b/tests/test_api/test_validation.py
@@ -1,3 +1,5 @@
+import inspect
+
 import responses
 from pytest import raises
 from shapely.geometry import Polygon
@@ -28,49 +30,49 @@ def test_not_antimeridian():
 def test_has_sufficient_coverage():
     # Wyoming
     poly = rectangle(45, 41, -104, -111)
-    assert validation.has_sufficient_coverage(poly)
+    assert validation._has_sufficient_coverage(poly)
 
     # completely covered Aleutian Islands over antimeridian; should pass with fixed antimeridian
     poly = rectangle(51.7, 51.3, 179.7, -179.3)
-    assert validation.has_sufficient_coverage(poly)
+    assert validation._has_sufficient_coverage(poly)
 
     # not enough coverage of Aleutian Islands over antimeridian
     # NOTE: Passes today but should FAIL legacy with antimeridian feature fix
     poly = rectangle(51.7, 41.3, 179.7, -179.3)
-    assert validation.has_sufficient_coverage(poly)
+    assert validation._has_sufficient_coverage(poly)
 
     # completely encloses tile over Ascension Island in the Atlantic
     poly = rectangle(-6, -9, -15, -14)
-    assert validation.has_sufficient_coverage(poly)
+    assert validation._has_sufficient_coverage(poly)
 
     # # minimum sufficient coverage off the coast of Eureka, CA
     poly = rectangle(40.1, 40, -126, -125.000138)
-    assert validation.has_sufficient_coverage(poly)
+    assert validation._has_sufficient_coverage(poly)
 
     # almost minimum sufficient coverage off the coast of Eureka, CA
     poly = rectangle(40.1, 40, -126, -125.000140)
-    assert not validation.has_sufficient_coverage(poly)
+    assert not validation._has_sufficient_coverage(poly)
 
     # polygon in missing tile over Gulf of California
     poly = rectangle(26.9, 26.1, -110.1, -110.9)
-    assert not validation.has_sufficient_coverage(poly)
+    assert not validation._has_sufficient_coverage(poly)
 
     # southern Greenland
     poly = rectangle(62, 61, -44, -45)
-    assert validation.has_sufficient_coverage(poly)
+    assert validation._has_sufficient_coverage(poly)
 
     # Antarctica
     poly = rectangle(-62, -90, 180, -180)
-    assert validation.has_sufficient_coverage(poly)
+    assert validation._has_sufficient_coverage(poly)
 
     # ocean over antimeridian; this case incorrectly passes, see https://github.com/ASFHyP3/hyp3/issues/1989
     poly = rectangle(-40, -41, 179.7, -179.3)
-    assert validation.has_sufficient_coverage(poly)
+    assert validation._has_sufficient_coverage(poly)
 
 
 def test_format_points():
     point_string = '-31.43 25.04 -29.76 25.54 -29.56 24.66 -31.23 24.15 -31.43 25.04'
-    assert validation.format_points(point_string) == [
+    assert validation._format_points(point_string) == [
         [25.04, -31.43],
         [25.54, -29.76],
         [24.66, -29.56],
@@ -295,7 +297,7 @@ def test_check_valid_polarizations():
         validation.check_valid_polarizations(invalid_job, {})
 
 
-def test_check_granules_exist():
+def test_make_sure_granules_exist():
     granule_metadata = [
         {
             'name': 'scene1',
@@ -305,12 +307,12 @@
         },
     ]
 
-    validation.check_granules_exist([], granule_metadata)
-    validation.check_granules_exist(['scene1'], granule_metadata)
-    validation.check_granules_exist(['scene1', 'scene2'], granule_metadata)
+    validation._make_sure_granules_exist([], granule_metadata)
+    validation._make_sure_granules_exist(['scene1'], granule_metadata)
+    validation._make_sure_granules_exist(['scene1', 'scene2'], granule_metadata)
 
     with raises(validation.GranuleValidationError) as e:
-        validation.check_granules_exist(
+        validation._make_sure_granules_exist(
             ['scene1', 'scene2', 'scene3', 'scene4', 'S2_foo', 'LC08_bar', 'LC09_bar'], granule_metadata
         )
     assert 'S2_foo' not in str(e)
@@ -323,17 +325,17 @@
 
 
 def test_is_third_party_granule():
-    assert validation.is_third_party_granule('S2A_MSIL1C_20200627T150921_N0209_R025_T22WEB_20200627T170912')
-    assert validation.is_third_party_granule('S2B_22WEB_20200612_0_L1C')
-    assert validation.is_third_party_granule('LC08_L1TP_009011_20200820_20200905_02_T1')
-    assert validation.is_third_party_granule('LO08_L1GT_043001_20201106_20201110_02_T2')
-    assert validation.is_third_party_granule('LT08_L1GT_041001_20200125_20200925_02_T2')
-    assert validation.is_third_party_granule('LC09_L1GT_215109_20220125_20220125_02_T2')
-    assert validation.is_third_party_granule('LO09_L1GT_215109_20220210_20220210_02_T2')
-    assert validation.is_third_party_granule('LT09_L1GT_215109_20220210_20220210_02_T2')
-    assert not validation.is_third_party_granule('S1_249434_IW1_20230523T170733_VV_8850-BURST')
-    assert not validation.is_third_party_granule('S1A_IW_SLC__1SSH_20150608T205059_20150608T205126_006287_0083E8_C4F0')
-    assert not validation.is_third_party_granule('foo')
+    assert validation._is_third_party_granule('S2A_MSIL1C_20200627T150921_N0209_R025_T22WEB_20200627T170912')
+    assert validation._is_third_party_granule('S2B_22WEB_20200612_0_L1C')
+    assert validation._is_third_party_granule('LC08_L1TP_009011_20200820_20200905_02_T1')
+    assert validation._is_third_party_granule('LO08_L1GT_043001_20201106_20201110_02_T2')
+    assert validation._is_third_party_granule('LT08_L1GT_041001_20200125_20200925_02_T2')
+    assert validation._is_third_party_granule('LC09_L1GT_215109_20220125_20220125_02_T2')
+    assert validation._is_third_party_granule('LO09_L1GT_215109_20220210_20220210_02_T2')
+    assert validation._is_third_party_granule('LT09_L1GT_215109_20220210_20220210_02_T2')
+    assert not validation._is_third_party_granule('S1_249434_IW1_20230523T170733_VV_8850-BURST')
+    assert not validation._is_third_party_granule('S1A_IW_SLC__1SSH_20150608T205059_20150608T205126_006287_0083E8_C4F0')
+    assert not validation._is_third_party_granule('foo')
 
 
 @responses.activate
@@ -354,7 +356,7 @@ def test_get_cmr_metadata():
     }
     responses.post(CMR_URL, json=response_payload)
 
-    assert validation.get_cmr_metadata(['foo', 'bar', 'hello']) == [
+    assert validation._get_cmr_metadata(['foo', 'bar', 'hello']) == [
         {
             'name': 'foo',
             'polygon': Polygon([[25.0, -31.4], [25.5, -29.7], [24.6, -29.5], [24.1, -31.2]]),
@@ -470,12 +472,22 @@ def test_validate_jobs():
     validation.validate_jobs(jobs)
 
 
+def test_all_validators_have_correct_signature():
+    validators = [getattr(validation, attr) for attr in dir(validation) if attr.startswith('check_')]
+
+    for validator in validators:
+        function_params = list(inspect.signature(validator).parameters)
+
+        assert len(function_params) >= 2
+        assert function_params[0] in ('job', '_')
+        assert function_params[1] in ('granule_metadata', '_')
+
+
 def test_check_bounds_formatting():
     valid_jobs = [
         {'job_parameters': {'bounds': [-10, 0, 10, 10]}},
         {'job_parameters': {'bounds': [-180, -90, -170, -80]}},
         {'job_parameters': {'bounds': [170, 75, 180, 90]}},
-        {'job_parameters': {'bounds': [0, 0, 0, 0]}},
     ]
     invalid_jobs_bad_order = [
         {'job_parameters': {'bounds': [10, 0, -10, 10]}},
@@ -489,6 +501,9 @@
         {'job_parameters': {'bounds': [-10, -100, 10, 80]}},
         {'job_parameters': {'bounds': [-100, 0, 200, 10]}},
     ]
+
+    job_with_bad_bounds = {'job_parameters': {'bounds': [0, 0, 0, 0]}}
+
     for valid_job in valid_jobs:
         validation.check_bounds_formatting(valid_job, {})
     for invalid_job in invalid_jobs_bad_order:
@@ -498,10 +513,13 @@
         with raises(validation.BoundsValidationError, match=r'.*Invalid lon/lat value(s)*'):
             validation.check_bounds_formatting(invalid_job, {})
 
+    with raises(validation.BoundsValidationError, match=r'.*Bounds cannot be.*'):
+        validation.check_bounds_formatting(job_with_bad_bounds, {})
+
 
 def test_check_granules_intersecting_bounds():
     job_with_specified_bounds = {'job_parameters': {'bounds': [-10, 0, 10, 10]}}
-    job_with_default_bounds = {'job_parameters': {'bounds': [0, 0, 0, 0]}}
+    job_with_bad_bounds = {'job_parameters': {'bounds': [0, 0, 0, 0]}}
     valid_granule_metadata = [
         {'name': 'intersects1', 'polygon': Polygon.from_bounds(-10.0, 0.0, 10.0, 10.0)},
         {'name': 'intersects2', 'polygon': Polygon.from_bounds(-9.0, -1.0, 20.0, 11.0)},
@@ -515,12 +533,18 @@
         {'name': 'does_not_intersect3', 'polygon': Polygon.from_bounds(100.0, -50.0, 120.0, -0.1)},
     ]
     validation.check_granules_intersecting_bounds(job_with_specified_bounds, valid_granule_metadata)
-    validation.check_granules_intersecting_bounds(job_with_default_bounds, valid_granule_metadata)
+
+    error_pattern = r'.*Bounds cannot be.*'
+    with raises(validation.BoundsValidationError, match=error_pattern):
+        validation.check_granules_intersecting_bounds(job_with_bad_bounds, valid_granule_metadata)
+
+    with raises(validation.BoundsValidationError, match=error_pattern):
+        validation.check_granules_intersecting_bounds(job_with_bad_bounds, invalid_granule_metadata)
+
     error_pattern = r".*bounds: \['does_not_intersect1', 'does_not_intersect2', 'does_not_intersect3'\]*"
+
     with raises(validation.GranuleValidationError, match=error_pattern):
         validation.check_granules_intersecting_bounds(job_with_specified_bounds, invalid_granule_metadata)
-    with raises(validation.GranuleValidationError, match=error_pattern):
-        validation.check_granules_intersecting_bounds(job_with_default_bounds, invalid_granule_metadata)
 
 
 def test_check_same_relative_orbits():
@@ -536,3 +560,13 @@ def test_check_same_relative_orbits():
     error_pattern = r'.*69 is not 87.*'
     with raises(validation.GranuleValidationError, match=error_pattern):
         validation.check_same_relative_orbits({}, invalid_granule_metadata)
+
+
+def test_check_bounding_box_size():
+    job = {'job_parameters': {'bounds': [0, 0, 10, 10]}}
+
+    validation.check_bounding_box_size(job, None, max_bounds_area=100)
+
+    error_pattern = r'.*Bounds must be smaller.*'
+    with raises(validation.BoundsValidationError, match=error_pattern):
+        validation.check_bounding_box_size(job, None, max_bounds_area=99.9)
diff --git a/tests/test_upload_log.py b/tests/test_upload_log.py
index c5a154bf0..8d1d14a9c 100644
--- a/tests/test_upload_log.py
+++ b/tests/test_upload_log.py
@@ -133,7 +133,7 @@ def test_lambda_handler_no_log_stream(mock_write_log_to_s3: MagicMock):
         'processing_results': {
             'step_0': {
                 'Error': '',
-                'Cause': '{"Container": {},' '"Status": "FAILED",' '"StatusReason": "foo reason",' '"Attempts": []}',
+                'Cause': '{"Container": {},"Status": "FAILED","StatusReason": "foo reason","Attempts": []}',
             }
         },
     }