From 162a1a2c324b4c2bfe3451f7ae19d7840a0e0452 Mon Sep 17 00:00:00 2001 From: Matt McFarland Date: Fri, 13 May 2022 20:26:16 -0400 Subject: [PATCH] PgSTAC: API hydration of search result items (#397) * Upgrade to pgstac 0.5.1 Initial changes to get most tests passing. * Add option to hydrate pgstac search results in API * Support fields extension in nohydrate mode * Updates to hydrate and filter functionality. This was done in a pairing session with @mmcfarland * Fix fields extensions and reduce number of loops * Tolerate missing required attributes with fields extension Use of the fields extension can result in the return of invalid stac items if excludes is used on required attributes. When injecting item links, don't attempt to build links for which needed attributes aren't available. When API Hydrate is enabled, the required attributes are preserved prior to filtering and are used in the link generation. * Run pgstac tests in db and api hydrate mode * Merge dicts within lists during hydration In practice, an asset on a base_item and an item may have mergable dicts (ie, raster bands). * Add note on settings in readme * Pass request to base_item_cache This will be used by implementors who need app state which is stored on request. * Upgrade pypgstac and use included hydrate function The hydrate function was improved and moved to pypgstac so it could be used in other projects outside of stac-fastapi. It was developed with a corresponding dehydrate function to ensure parity between the two. The version of pypgstac is unpublished and pinned to a draft commit at the point and will be upgraded subsequently. * Improve fields extension implementation Correctly supports deeply nested property keys in both include and exclude, as well as improves variable naming, comments, and test cases. 
* Remove unused error type * adjust tests for changes in api * remove print statements * add bbox back to items in tests * Upgrade pgstac * Fix conformance test fixtures * Fix sqlalchemy test with new status for FK error * Align fields ext behavior for invalid includes * Lint * Changelog * Remove psycopg install dependency * Relax dependency version of pgstac to 0.6.* series * Update dev environment to pgstac 0.6.2 * Changelog fix Co-authored-by: Rob Emanuele Co-authored-by: Ubuntu --- CHANGES.md | 4 +- docker-compose.yml | 3 +- stac_fastapi/api/stac_fastapi/api/errors.py | 2 +- stac_fastapi/pgstac/README.md | 5 + stac_fastapi/pgstac/setup.py | 3 +- .../pgstac/stac_fastapi/pgstac/config.py | 10 + .../pgstac/stac_fastapi/pgstac/core.py | 104 +++++-- .../pgstac/types/base_item_cache.py | 55 ++++ .../pgstac/stac_fastapi/pgstac/utils.py | 115 ++++++++ .../pgstac/tests/clients/test_postgres.py | 2 +- stac_fastapi/pgstac/tests/conftest.py | 69 ++++- .../pgstac/tests/data/test2_collection.json | 271 ++++++++++++++++++ .../pgstac/tests/data/test2_item.json | 258 +++++++++++++++++ .../tests/resources/test_conformance.py | 4 +- .../pgstac/tests/resources/test_item.py | 145 +++++++--- .../sqlalchemy/tests/resources/test_item.py | 2 +- 16 files changed, 975 insertions(+), 77 deletions(-) create mode 100644 stac_fastapi/pgstac/stac_fastapi/pgstac/types/base_item_cache.py create mode 100644 stac_fastapi/pgstac/stac_fastapi/pgstac/utils.py create mode 100644 stac_fastapi/pgstac/tests/data/test2_collection.json create mode 100644 stac_fastapi/pgstac/tests/data/test2_item.json diff --git a/CHANGES.md b/CHANGES.md index 2355d3534..43a437221 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -17,6 +17,7 @@ * Bulk Transactions object Items iterator now returns the Item objects rather than the string IDs of the Item objects ([#355](https://github.com/stac-utils/stac-fastapi/issues/355)) * docker-compose now runs uvicorn with hot-reloading enabled +* Bump version of PGStac to 0.6.2 that 
includes support for hydrating results in the API backed ([#397](https://github.com/stac-utils/stac-fastapi/pull/397)) ### Removed @@ -27,7 +28,8 @@ * Fixes issues (and adds tests) for issues caused by regression in pgstac ([#345](https://github.com/stac-utils/stac-fastapi/issues/345) * Update error response payloads to match the API spec. ([#361](https://github.com/stac-utils/stac-fastapi/pull/361)) * Fixed stray `/` before the `#` in several extension conformance class strings ([383](https://github.com/stac-utils/stac-fastapi/pull/383)) -* SQLAlchemy backend bulk item insert now works ([#356]https://github.com/stac-utils/stac-fastapi/issues/356)) +* SQLAlchemy backend bulk item insert now works ([#356](https://github.com/stac-utils/stac-fastapi/issues/356)) +* PGStac Backend has stricter implementation of Fields Extension syntax ([#397](https://github.com/stac-utils/stac-fastapi/pull/397)) ## [2.3.0] diff --git a/docker-compose.yml b/docker-compose.yml index 996bb6593..c9337a593 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -50,6 +50,7 @@ services: - GDAL_DISABLE_READDIR_ON_OPEN=EMPTY_DIR - DB_MIN_CONN_SIZE=1 - DB_MAX_CONN_SIZE=1 + - USE_API_HYDRATE=${USE_API_HYDRATE:-false} ports: - "8082:8082" volumes: @@ -62,7 +63,7 @@ services: database: container_name: stac-db - image: ghcr.io/stac-utils/pgstac:v0.4.5 + image: ghcr.io/stac-utils/pgstac:v0.6.2 environment: - POSTGRES_USER=username - POSTGRES_PASSWORD=password diff --git a/stac_fastapi/api/stac_fastapi/api/errors.py b/stac_fastapi/api/stac_fastapi/api/errors.py index 95058d4c9..29df3b9ab 100644 --- a/stac_fastapi/api/stac_fastapi/api/errors.py +++ b/stac_fastapi/api/stac_fastapi/api/errors.py @@ -23,7 +23,7 @@ DEFAULT_STATUS_CODES = { NotFoundError: status.HTTP_404_NOT_FOUND, ConflictError: status.HTTP_409_CONFLICT, - ForeignKeyError: status.HTTP_422_UNPROCESSABLE_ENTITY, + ForeignKeyError: status.HTTP_424_FAILED_DEPENDENCY, DatabaseError: status.HTTP_424_FAILED_DEPENDENCY, Exception: 
status.HTTP_500_INTERNAL_SERVER_ERROR, InvalidQueryParameter: status.HTTP_400_BAD_REQUEST, diff --git a/stac_fastapi/pgstac/README.md b/stac_fastapi/pgstac/README.md index 787ff4fd2..7961ad260 100644 --- a/stac_fastapi/pgstac/README.md +++ b/stac_fastapi/pgstac/README.md @@ -46,7 +46,12 @@ pip install -e \ stac_fastapi/pgstac[dev,server] ``` +### Settings + +To configure PGStac stac-fastapi to [hydrate search result items in the API](https://github.com/stac-utils/pgstac#runtime-configurations), set the `USE_API_HYDRATE` environment variable to `true` or explicitly set the option in the PGStac Settings object. + ### Migrations + PGStac is an external project and the may be used by multiple front ends. For Stac FastAPI development, a docker image (which is pulled as part of the docker-compose) is available at bitner/pgstac:[version] that has the full database already set up for PGStac. diff --git a/stac_fastapi/pgstac/setup.py b/stac_fastapi/pgstac/setup.py index 726257435..a5d2eee41 100644 --- a/stac_fastapi/pgstac/setup.py +++ b/stac_fastapi/pgstac/setup.py @@ -17,16 +17,17 @@ "buildpg", "brotli_asgi", "pygeofilter @ git+https://github.com/geopython/pygeofilter@v0.1.1#egg=pygeofilter", + "pypgstac==0.6.*", ] extra_reqs = { "dev": [ + "pypgstac[psycopg]==0.6.*", "pytest", "pytest-cov", "pytest-asyncio>=0.17", "pre-commit", "requests", - "pypgstac==0.4.5", "httpx", ], "docs": ["mkdocs", "mkdocs-material", "pdocs"], diff --git a/stac_fastapi/pgstac/stac_fastapi/pgstac/config.py b/stac_fastapi/pgstac/stac_fastapi/pgstac/config.py index ce60bfc65..5312fcd08 100644 --- a/stac_fastapi/pgstac/stac_fastapi/pgstac/config.py +++ b/stac_fastapi/pgstac/stac_fastapi/pgstac/config.py @@ -1,5 +1,11 @@ """Postgres API configuration.""" +from typing import Type + +from stac_fastapi.pgstac.types.base_item_cache import ( + BaseItemCache, + DefaultBaseItemCache, +) from stac_fastapi.types.config import ApiSettings @@ -13,6 +19,7 @@ class Settings(ApiSettings): postgres_host_writer: 
hostname for the writer connection. postgres_port: database port. postgres_dbname: database name. + use_api_hydrate: perform hydration of stac items within stac-fastapi. """ postgres_user: str @@ -27,6 +34,9 @@ class Settings(ApiSettings): db_max_queries: int = 50000 db_max_inactive_conn_lifetime: float = 300 + use_api_hydrate: bool = False + base_item_cache: Type[BaseItemCache] = DefaultBaseItemCache + testing: bool = False @property diff --git a/stac_fastapi/pgstac/stac_fastapi/pgstac/core.py b/stac_fastapi/pgstac/stac_fastapi/pgstac/core.py index 4de102255..811afbba6 100644 --- a/stac_fastapi/pgstac/stac_fastapi/pgstac/core.py +++ b/stac_fastapi/pgstac/stac_fastapi/pgstac/core.py @@ -12,12 +12,15 @@ from pydantic import ValidationError from pygeofilter.backends.cql2_json import to_cql2 from pygeofilter.parsers.cql2_text import parse as parse_cql2_text +from pypgstac.hydration import hydrate from stac_pydantic.links import Relations from stac_pydantic.shared import MimeTypes from starlette.requests import Request +from stac_fastapi.pgstac.config import Settings from stac_fastapi.pgstac.models.links import CollectionLinks, ItemLinks, PagingLinks from stac_fastapi.pgstac.types.search import PgstacSearch +from stac_fastapi.pgstac.utils import filter_fields from stac_fastapi.types.core import AsyncBaseCoreClient from stac_fastapi.types.errors import InvalidQueryParameter, NotFoundError from stac_fastapi.types.stac import Collection, Collections, Item, ItemCollection @@ -103,8 +106,38 @@ async def get_collection(self, collection_id: str, **kwargs) -> Collection: return Collection(**collection) + async def _get_base_item( + self, collection_id: str, request: Request + ) -> Dict[str, Any]: + """Get the base item of a collection for use in rehydrating full item collection properties. + + Args: + collection: ID of the collection. + + Returns: + Item. 
+ """ + item: Optional[Dict[str, Any]] + + pool = request.app.state.readpool + async with pool.acquire() as conn: + q, p = render( + """ + SELECT * FROM collection_base_item(:collection_id::text); + """, + collection_id=collection_id, + ) + item = await conn.fetchval(q, *p) + + if item is None: + raise NotFoundError(f"A base item for {collection_id} does not exist.") + + return item + async def _search_base( - self, search_request: PgstacSearch, **kwargs: Any + self, + search_request: PgstacSearch, + **kwargs: Any, ) -> ItemCollection: """Cross catalog search (POST). @@ -119,9 +152,11 @@ async def _search_base( items: Dict[str, Any] request: Request = kwargs["request"] + settings: Settings = request.app.state.settings pool = request.app.state.readpool - # pool = kwargs["request"].app.state.readpool + search_request.conf = search_request.conf or {} + search_request.conf["nohydrate"] = settings.use_api_hydrate req = search_request.json(exclude_none=True, by_alias=True) try: @@ -141,30 +176,65 @@ async def _search_base( next: Optional[str] = items.pop("next", None) prev: Optional[str] = items.pop("prev", None) collection = ItemCollection(**items) - cleaned_features: List[Item] = [] - for feature in collection.get("features") or []: - feature = Item(**feature) + exclude = search_request.fields.exclude + if exclude and len(exclude) == 0: + exclude = None + include = search_request.fields.include + if include and len(include) == 0: + include = None + + async def _add_item_links( + feature: Item, + collection_id: Optional[str] = None, + item_id: Optional[str] = None, + ) -> None: + """Add ItemLinks to the Item. + + If the fields extension is excluding links, then don't add them. + Also skip links if the item doesn't provide collection and item ids. 
+ """ + collection_id = feature.get("collection") or collection_id + item_id = feature.get("id") or item_id + if ( search_request.fields.exclude is None or "links" not in search_request.fields.exclude + and all([collection_id, item_id]) ): - # TODO: feature.collection is not always included - # This code fails if it's left outside of the fields expression - # I've fields extension updated test cases to always include feature.collection feature["links"] = await ItemLinks( - collection_id=feature["collection"], - item_id=feature["id"], + collection_id=collection_id, + item_id=item_id, request=request, ).get_links(extra_links=feature.get("links")) - exclude = search_request.fields.exclude - if exclude and len(exclude) == 0: - exclude = None - include = search_request.fields.include - if include and len(include) == 0: - include = None - cleaned_features.append(feature) + cleaned_features: List[Item] = [] + + if settings.use_api_hydrate: + + async def _get_base_item(collection_id: str) -> Dict[str, Any]: + return await self._get_base_item(collection_id, request) + + base_item_cache = settings.base_item_cache( + fetch_base_item=_get_base_item, request=request + ) + + for feature in collection.get("features") or []: + base_item = await base_item_cache.get(feature.get("collection")) + feature = hydrate(base_item, feature) + + # Grab ids needed for links that may be removed by the fields extension. 
+ collection_id = feature.get("collection") + item_id = feature.get("id") + + feature = filter_fields(feature, include, exclude) + await _add_item_links(feature, collection_id, item_id) + + cleaned_features.append(feature) + else: + for feature in collection.get("features") or []: + await _add_item_links(feature) + cleaned_features.append(feature) collection["features"] = cleaned_features collection["links"] = await PagingLinks( diff --git a/stac_fastapi/pgstac/stac_fastapi/pgstac/types/base_item_cache.py b/stac_fastapi/pgstac/stac_fastapi/pgstac/types/base_item_cache.py new file mode 100644 index 000000000..9b92e759d --- /dev/null +++ b/stac_fastapi/pgstac/stac_fastapi/pgstac/types/base_item_cache.py @@ -0,0 +1,55 @@ +"""base_item_cache classes for pgstac fastapi.""" +import abc +from typing import Any, Callable, Coroutine, Dict + +from starlette.requests import Request + + +class BaseItemCache(abc.ABC): + """ + A cache that returns a base item for a collection. + + If no base item is found in the cache, use the fetch_base_item function + to fetch the base item from pgstac. + """ + + def __init__( + self, + fetch_base_item: Callable[[str], Coroutine[Any, Any, Dict[str, Any]]], + request: Request, + ): + """ + Initialize the base item cache. + + Args: + fetch_base_item: A function that fetches the base item for a collection. + request: The request object containing app state that may be used by caches. 
+ """ + self._fetch_base_item = fetch_base_item + self._request = request + + @abc.abstractmethod + async def get(self, collection_id: str) -> Dict[str, Any]: + """Return the base item for the collection and cache by collection id.""" + pass + + +class DefaultBaseItemCache(BaseItemCache): + """Implementation of the BaseItemCache that holds base items in a dict.""" + + def __init__( + self, + fetch_base_item: Callable[[str], Coroutine[Any, Any, Dict[str, Any]]], + request: Request, + ): + """Initialize the base item cache.""" + self._base_items = {} + super().__init__(fetch_base_item, request) + + async def get(self, collection_id: str): + """Return the base item for the collection and cache by collection id.""" + if collection_id not in self._base_items: + self._base_items[collection_id] = await self._fetch_base_item( + collection_id, + ) + return self._base_items[collection_id] diff --git a/stac_fastapi/pgstac/stac_fastapi/pgstac/utils.py b/stac_fastapi/pgstac/stac_fastapi/pgstac/utils.py new file mode 100644 index 000000000..4a0ce4c72 --- /dev/null +++ b/stac_fastapi/pgstac/stac_fastapi/pgstac/utils.py @@ -0,0 +1,115 @@ +"""stac-fastapi utility methods.""" +from typing import Any, Dict, Optional, Set, Union + +from stac_fastapi.types.stac import Item + + +def filter_fields( + item: Union[Item, Dict[str, Any]], + include: Optional[Set[str]] = None, + exclude: Optional[Set[str]] = None, +) -> Item: + """Preserve and remove fields as indicated by the fields extension include/exclude sets. + + Returns a shallow copy of the Item with the fields filtered. + + This will not perform a deep copy; values of the original item will be referenced + in the return item. 
+ """ + if not include and not exclude: + return item + + # Build a shallow copy of included fields on an item, or a sub-tree of an item + def include_fields( + source: Dict[str, Any], fields: Optional[Set[str]] + ) -> Dict[str, Any]: + if not fields: + return source + + clean_item: Dict[str, Any] = {} + for key_path in fields or []: + key_path_parts = key_path.split(".") + key_root = key_path_parts[0] + if key_root in source: + if isinstance(source[key_root], dict) and len(key_path_parts) > 1: + # The root of this key path on the item is a dict, and the + # key path indicates a sub-key to be included. Walk the dict + # from the root key and get the full nested value to include. + value = include_fields( + source[key_root], fields=set([".".join(key_path_parts[1:])]) + ) + + if isinstance(clean_item.get(key_root), dict): + # A previously specified key and sub-keys may have been included + # already, so do a deep merge update if the root key already exists. + dict_deep_update(clean_item[key_root], value) + else: + # The root key does not exist, so add it. Fields + # extension only allows nested referencing on dicts, so + # this won't overwrite anything. + clean_item[key_root] = value + else: + # The item value to include is not a dict, or, it is a dict but the + # key path is for the whole value, not a sub-key. Include the entire + # value in the cleaned item. + clean_item[key_root] = source[key_root] + else: + # The key, or root key of a multi-part key, is not present in the item, + # so it is ignored + pass + return clean_item + + # For an item built up for included fields, remove excluded fields. This + # modifies `source` in place. 
+ def exclude_fields(source: Dict[str, Any], fields: Optional[Set[str]]) -> None: + for key_path in fields or []: + key_path_part = key_path.split(".") + key_root = key_path_part[0] + if key_root in source: + if isinstance(source[key_root], dict) and len(key_path_part) > 1: + # Walk the nested path of this key to remove the leaf-key + exclude_fields( + source[key_root], fields=set([".".join(key_path_part[1:])]) + ) + # If, after removing the leaf-key, the root is now an empty + # dict, remove it entirely + if not source[key_root]: + del source[key_root] + else: + # The key's value is not a dict, or there is no sub-key to remove. The + # entire key can be removed from the source. + source.pop(key_root, None) + else: + # The key to remove does not exist on the source, so it is ignored + pass + + # Coalesce incoming type to a dict + item = dict(item) + + clean_item = include_fields(item, include) + + # If, after including all the specified fields, there are no included properties, + # return just id and collection. + if not clean_item: + return Item({"id": item.get("id"), "collection": item.get("collection")}) + + exclude_fields(clean_item, exclude) + + return Item(**clean_item) + + +def dict_deep_update(merge_to: Dict[str, Any], merge_from: Dict[str, Any]) -> None: + """Perform a deep update of two dicts. + + merge_to is updated in-place with the values from merge_from. + merge_from values take precedence over existing values in merge_to. 
+ """ + for k, v in merge_from.items(): + if ( + k in merge_to + and isinstance(merge_to[k], dict) + and isinstance(merge_from[k], dict) + ): + dict_deep_update(merge_to[k], merge_from[k]) + else: + merge_to[k] = v diff --git a/stac_fastapi/pgstac/tests/clients/test_postgres.py b/stac_fastapi/pgstac/tests/clients/test_postgres.py index f08952e64..b337566b7 100644 --- a/stac_fastapi/pgstac/tests/clients/test_postgres.py +++ b/stac_fastapi/pgstac/tests/clients/test_postgres.py @@ -64,6 +64,7 @@ async def test_create_item(app_client, load_test_data: Callable, load_test_colle resp = await app_client.get(f"/collections/{coll.id}/items/{post_item.id}") assert resp.status_code == 200 + get_item = Item.parse_obj(resp.json()) assert in_item.dict(exclude={"links"}) == get_item.dict(exclude={"links"}) @@ -79,7 +80,6 @@ async def test_update_item(app_client, load_test_collection, load_test_item): resp = await app_client.get(f"/collections/{coll.id}/items/{item.id}") assert resp.status_code == 200 - get_item = Item.parse_obj(resp.json()) assert item.dict(exclude={"links"}) == get_item.dict(exclude={"links"}) assert get_item.properties.description == "Update Test" diff --git a/stac_fastapi/pgstac/tests/conftest.py b/stac_fastapi/pgstac/tests/conftest.py index 170877a7d..1a88d979d 100644 --- a/stac_fastapi/pgstac/tests/conftest.py +++ b/stac_fastapi/pgstac/tests/conftest.py @@ -8,7 +8,8 @@ import pytest from fastapi.responses import ORJSONResponse from httpx import AsyncClient -from pypgstac import pypgstac +from pypgstac.db import PgstacDB +from pypgstac.migrate import Migrate from stac_pydantic import Collection, Item from stac_fastapi.api.app import StacApi @@ -30,6 +31,7 @@ DATA_DIR = os.path.join(os.path.dirname(__file__), "data") settings = Settings(testing=True) +pgstac_api_hydrate_settings = Settings(testing=True, use_api_hydrate=True) @pytest.fixture(scope="session") @@ -63,7 +65,10 @@ async def pg(): val = await conn.fetchval("SELECT true") print(val) await conn.close() 
- version = await pypgstac.run_migration(dsn=settings.testing_connection_string) + db = PgstacDB(dsn=settings.testing_connection_string) + migrator = Migrate(db) + version = migrator.run_migration() + db.close() print(f"PGStac Migrated to {version}") yield settings.testing_connection_string @@ -71,8 +76,15 @@ async def pg(): print("Getting rid of test database") os.environ["postgres_dbname"] = os.environ["orig_postgres_dbname"] conn = await asyncpg.connect(dsn=settings.writer_connection_string) - await conn.execute("DROP DATABASE pgstactestdb;") - await conn.close() + try: + await conn.execute("DROP DATABASE pgstactestdb;") + await conn.close() + except Exception: + try: + await conn.execute("DROP DATABASE pgstactestdb WITH (force);") + await conn.close() + except Exception: + pass @pytest.fixture(autouse=True) @@ -83,18 +95,20 @@ async def pgstac(pg): conn = await asyncpg.connect(dsn=settings.testing_connection_string) await conn.execute( """ - TRUNCATE pgstac.items CASCADE; - TRUNCATE pgstac.collections CASCADE; - TRUNCATE pgstac.searches CASCADE; - TRUNCATE pgstac.search_wheres CASCADE; + DROP SCHEMA IF EXISTS pgstac CASCADE; """ ) await conn.close() + with PgstacDB(dsn=settings.testing_connection_string) as db: + migrator = Migrate(db) + version = migrator.run_migration() + print(f"PGStac Migrated to {version}") -@pytest.fixture(scope="session") -def api_client(pg): - print("creating client with settings") +# Run all the tests that use the api_client in both db hydrate and api hydrate mode +@pytest.fixture(params=[settings, pgstac_api_hydrate_settings], scope="session") +def api_client(request, pg): + print("creating client with settings, hydrate:", request.param.use_api_hydrate) extensions = [ TransactionExtension(client=TransactionsClient(), settings=settings), @@ -105,9 +119,8 @@ def api_client(pg): TokenPaginationExtension(), ] post_request_model = create_post_request_model(extensions, base_model=PgstacSearch) - api = StacApi( - settings=settings, + 
settings=request.param, extensions=extensions, client=CoreCrudClient(post_request_model=post_request_model), search_get_request_model=create_get_request_model(extensions), @@ -118,8 +131,9 @@ def api_client(pg): return api -@pytest.fixture(scope="session") +@pytest.fixture(scope="function") async def app(api_client): + print("Creating app Fixture") time.time() app = api_client.app await connect_to_db(app) @@ -128,9 +142,12 @@ async def app(api_client): await close_db_connection(app) + print("Closed Pools.") -@pytest.fixture(scope="session") + + +@pytest.fixture(scope="function") async def app_client(app): + print("creating app_client") async with AsyncClient(app=app, base_url="http://test") as c: yield c @@ -164,3 +181,25 @@ async def load_test_item(app_client, load_test_data, load_test_collection): ) assert resp.status_code == 200 return Item.parse_obj(resp.json()) + + +@pytest.fixture +async def load_test2_collection(app_client, load_test_data): + data = load_test_data("test2_collection.json") + resp = await app_client.post( + "/collections", + json=data, + ) + assert resp.status_code == 200 + return Collection.parse_obj(resp.json()) + + +@pytest.fixture +async def load_test2_item(app_client, load_test_data, load_test2_collection): + data = load_test_data("test2_item.json") + resp = await app_client.post( + f"/collections/{load_test2_collection.id}/items", + json=data, + ) + assert resp.status_code == 200 + return Item.parse_obj(resp.json()) diff --git a/stac_fastapi/pgstac/tests/data/test2_collection.json b/stac_fastapi/pgstac/tests/data/test2_collection.json new file mode 100644 index 000000000..32502a360 --- /dev/null +++ b/stac_fastapi/pgstac/tests/data/test2_collection.json @@ -0,0 +1,271 @@ +{ + "id": "test2-collection", + "type": "Collection", + "links": [ + { + "rel": "items", + "type": "application/geo+json", + "href": "https://pct-apis-staging.westeurope.cloudapp.azure.com/stac/collections/landsat-c2-l1/items" + }, + { + "rel": "parent", + "type": "application/json", +
"href": "https://pct-apis-staging.westeurope.cloudapp.azure.com/stac/" + }, + { + "rel": "root", + "type": "application/json", + "href": "https://pct-apis-staging.westeurope.cloudapp.azure.com/stac/" + }, + { + "rel": "self", + "type": "application/json", + "href": "https://pct-apis-staging.westeurope.cloudapp.azure.com/stac/collections/landsat-c2-l1" + }, + { + "rel": "cite-as", + "href": "https://doi.org/10.5066/P9AF14YV", + "title": "Landsat 1-5 MSS Collection 2 Level-1" + }, + { + "rel": "license", + "href": "https://www.usgs.gov/core-science-systems/hdds/data-policy", + "title": "Public Domain" + }, + { + "rel": "describedby", + "href": "https://planetarycomputer.microsoft.com/dataset/landsat-c2-l1", + "title": "Human readable dataset overview and reference", + "type": "text/html" + } + ], + "title": "Landsat Collection 2 Level-1", + "assets": { + "thumbnail": { + "href": "https://ai4edatasetspublicassets.blob.core.windows.net/assets/pc_thumbnails/landsat-c2-l1-thumb.png", + "type": "image/png", + "roles": ["thumbnail"], + "title": "Landsat Collection 2 Level-1 thumbnail" + } + }, + "extent": { + "spatial": { + "bbox": [[-180, -90, 180, 90]] + }, + "temporal": { + "interval": [["1972-07-25T00:00:00Z", "2013-01-07T23:23:59Z"]] + } + }, + "license": "proprietary", + "keywords": ["Landsat", "USGS", "NASA", "Satellite", "Global", "Imagery"], + "providers": [ + { + "url": "https://landsat.gsfc.nasa.gov/", + "name": "NASA", + "roles": ["producer", "licensor"] + }, + { + "url": "https://www.usgs.gov/landsat-missions/landsat-collection-2-level-1-data", + "name": "USGS", + "roles": ["producer", "processor", "licensor"] + }, + { + "url": "https://planetarycomputer.microsoft.com", + "name": "Microsoft", + "roles": ["host"] + } + ], + "summaries": { + "gsd": [79], + "sci:doi": ["10.5066/P9AF14YV"], + "eo:bands": [ + { + "name": "B4", + "common_name": "green", + "description": "Visible green (Landsat 1-3 Band B4)", + "center_wavelength": 0.55, + "full_width_half_max": 0.1 
+ }, + { + "name": "B5", + "common_name": "red", + "description": "Visible red (Landsat 1-3 Band B5)", + "center_wavelength": 0.65, + "full_width_half_max": 0.1 + }, + { + "name": "B6", + "common_name": "nir08", + "description": "Near infrared (Landsat 1-3 Band B6)", + "center_wavelength": 0.75, + "full_width_half_max": 0.1 + }, + { + "name": "B7", + "common_name": "nir09", + "description": "Near infrared (Landsat 1-3 Band B7)", + "center_wavelength": 0.95, + "full_width_half_max": 0.3 + }, + { + "name": "B1", + "common_name": "green", + "description": "Visible green (Landsat 4-5 Band B1)", + "center_wavelength": 0.55, + "full_width_half_max": 0.1 + }, + { + "name": "B2", + "common_name": "red", + "description": "Visible red (Landsat 4-5 Band B2)", + "center_wavelength": 0.65, + "full_width_half_max": 0.1 + }, + { + "name": "B3", + "common_name": "nir08", + "description": "Near infrared (Landsat 4-5 Band B3)", + "center_wavelength": 0.75, + "full_width_half_max": 0.1 + }, + { + "name": "B4", + "common_name": "nir09", + "description": "Near infrared (Landsat 4-5 Band B4)", + "center_wavelength": 0.95, + "full_width_half_max": 0.3 + } + ], + "platform": [ + "landsat-1", + "landsat-2", + "landsat-3", + "landsat-4", + "landsat-5" + ], + "instruments": ["mss"], + "view:off_nadir": [0] + }, + "description": "Landsat Collection 2 Level-1 data, consisting of quantized and calibrated scaled Digital Numbers (DN) representing the multispectral image data. These [Level-1](https://www.usgs.gov/landsat-missions/landsat-collection-2-level-1-data) data can be [rescaled](https://www.usgs.gov/landsat-missions/using-usgs-landsat-level-1-data-product) to top of atmosphere (TOA) reflectance and/or radiance. 
Thermal band data can be rescaled to TOA brightness temperature.\\n\\nThis dataset represents the global archive of Level-1 data from [Landsat Collection 2](https://www.usgs.gov/core-science-systems/nli/landsat/landsat-collection-2) acquired by the [Multispectral Scanner System](https://landsat.gsfc.nasa.gov/multispectral-scanner-system/) onboard Landsat 1 through Landsat 5 from July 7, 1972 to January 7, 2013. Images are stored in [cloud-optimized GeoTIFF](https://www.cogeo.org/) format.\\n", + "item_assets": { + "red": { + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "roles": ["data"], + "title": "Red Band", + "description": "Collection 2 Level-1 Red Band Top of Atmosphere Radiance", + "raster:bands": [ + { + "unit": "watt/steradian/square_meter/micrometer", + "nodata": 0, + "data_type": "uint8", + "spatial_resolution": 60 + } + ] + }, + "green": { + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "roles": ["data"], + "title": "Green Band", + "description": "Collection 2 Level-1 Green Band Top of Atmosphere Radiance", + "raster:bands": [ + { + "unit": "watt/steradian/square_meter/micrometer", + "nodata": 0, + "data_type": "uint8", + "spatial_resolution": 60 + } + ] + }, + "nir08": { + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "roles": ["data"], + "title": "Near Infrared Band 0.8", + "description": "Collection 2 Level-1 Near Infrared Band 0.8 Top of Atmosphere Radiance", + "raster:bands": [ + { + "unit": "watt/steradian/square_meter/micrometer", + "nodata": 0, + "data_type": "uint8", + "spatial_resolution": 60 + } + ] + }, + "nir09": { + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "roles": ["data"], + "title": "Near Infrared Band 0.9", + "description": "Collection 2 Level-1 Near Infrared Band 0.9 Top of Atmosphere Radiance", + "raster:bands": [ + { + "unit": "watt/steradian/square_meter/micrometer", + "nodata": 0, + "data_type": "uint8", + "spatial_resolution": 60 
+ } + ] + }, + "mtl.txt": { + "type": "text/plain", + "roles": ["metadata"], + "title": "Product Metadata File (txt)", + "description": "Collection 2 Level-1 Product Metadata File (txt)" + }, + "mtl.xml": { + "type": "application/xml", + "roles": ["metadata"], + "title": "Product Metadata File (xml)", + "description": "Collection 2 Level-1 Product Metadata File (xml)" + }, + "mtl.json": { + "type": "application/json", + "roles": ["metadata"], + "title": "Product Metadata File (json)", + "description": "Collection 2 Level-1 Product Metadata File (json)" + }, + "qa_pixel": { + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "roles": ["cloud"], + "title": "Pixel Quality Assessment Band", + "description": "Collection 2 Level-1 Pixel Quality Assessment Band", + "raster:bands": [ + { + "unit": "bit index", + "nodata": 1, + "data_type": "uint16", + "spatial_resolution": 60 + } + ] + }, + "qa_radsat": { + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "roles": ["saturation"], + "title": "Radiometric Saturation and Dropped Pixel Quality Assessment Band", + "description": "Collection 2 Level-1 Radiometric Saturation and Dropped Pixel Quality Assessment Band", + "raster:bands": [ + { + "unit": "bit index", + "data_type": "uint16", + "spatial_resolution": 60 + } + ] + }, + "thumbnail": { + "type": "image/jpeg", + "roles": ["thumbnail"], + "title": "Thumbnail image" + }, + "reduced_resolution_browse": { + "type": "image/jpeg", + "roles": ["overview"], + "title": "Reduced resolution browse image" + } + }, + "stac_version": "1.0.0", + "stac_extensions": [ + "https://stac-extensions.github.io/item-assets/v1.0.0/schema.json", + "https://stac-extensions.github.io/view/v1.0.0/schema.json", + "https://stac-extensions.github.io/scientific/v1.0.0/schema.json", + "https://stac-extensions.github.io/raster/v1.0.0/schema.json", + "https://stac-extensions.github.io/eo/v1.0.0/schema.json" + ] +} diff --git 
a/stac_fastapi/pgstac/tests/data/test2_item.json b/stac_fastapi/pgstac/tests/data/test2_item.json new file mode 100644 index 000000000..62fa2521a --- /dev/null +++ b/stac_fastapi/pgstac/tests/data/test2_item.json @@ -0,0 +1,258 @@ +{ + "id": "test2-item", + "bbox": [-84.7340712, 30.8344014, -82.3892149, 32.6891482], + "type": "Feature", + "links": [ + { + "rel": "collection", + "type": "application/json", + "href": "https://pct-apis-staging.westeurope.cloudapp.azure.com/stac/collections/landsat-c2-l1" + }, + { + "rel": "parent", + "type": "application/json", + "href": "https://pct-apis-staging.westeurope.cloudapp.azure.com/stac/collections/landsat-c2-l1" + }, + { + "rel": "root", + "type": "application/json", + "href": "https://pct-apis-staging.westeurope.cloudapp.azure.com/stac/" + }, + { + "rel": "self", + "type": "application/geo+json", + "href": "https://pct-apis-staging.westeurope.cloudapp.azure.com/stac/collections/landsat-c2-l1/items/LM05_L1GS_018038_19901223_02_T2" + }, + { + "rel": "cite-as", + "href": "https://doi.org/10.5066/P9AF14YV", + "title": "Landsat 1-5 MSS Collection 2 Level-1" + }, + { + "rel": "via", + "href": "https://landsatlook.usgs.gov/stac-server/collections/landsat-c2l1/items/LM05_L1GS_018038_19901223_20200827_02_T2", + "type": "application/json", + "title": "USGS STAC Item" + }, + { + "rel": "preview", + "href": "https://pct-apis-staging.westeurope.cloudapp.azure.com/data/item/map?collection=landsat-c2-l1&item=LM05_L1GS_018038_19901223_02_T2", + "title": "Map of item", + "type": "text/html" + } + ], + "assets": { + "red": { + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-1/standard/mss/1990/018/038/LM05_L1GS_018038_19901223_20200827_02_T2/LM05_L1GS_018038_19901223_20200827_02_T2_B2.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "roles": ["data"], + "title": "Red Band (B2)", + "eo:bands": [ + { + "name": "B2", + "common_name": "red", + "description": "Landsat 4-5 Band B2", + 
"center_wavelength": 0.65, + "full_width_half_max": 0.1 + } + ], + "description": "Collection 2 Level-1 Red Band Top of Atmosphere Radiance", + "raster:bands": [ + { + "unit": "watt/steradian/square_meter/micrometer", + "scale": 0.66024, + "nodata": 0, + "offset": 2.03976, + "data_type": "uint8", + "spatial_resolution": 60 + } + ] + }, + "green": { + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-1/standard/mss/1990/018/038/LM05_L1GS_018038_19901223_20200827_02_T2/LM05_L1GS_018038_19901223_20200827_02_T2_B1.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "roles": ["data"], + "title": "Green Band (B1)", + "eo:bands": [ + { + "name": "B1", + "common_name": "green", + "description": "Landsat 4-5 Band B1", + "center_wavelength": 0.55, + "full_width_half_max": 0.1 + } + ], + "description": "Collection 2 Level-1 Green Band Top of Atmosphere Radiance", + "raster:bands": [ + { + "unit": "watt/steradian/square_meter/micrometer", + "scale": 0.88504, + "nodata": 0, + "offset": 1.51496, + "data_type": "uint8", + "spatial_resolution": 60 + } + ] + }, + "nir08": { + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-1/standard/mss/1990/018/038/LM05_L1GS_018038_19901223_20200827_02_T2/LM05_L1GS_018038_19901223_20200827_02_T2_B3.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "roles": ["data"], + "title": "Near Infrared Band 0.8 (B3)", + "eo:bands": [ + { + "name": "B3", + "common_name": "nir08", + "description": "Landsat 4-5 Band B3", + "center_wavelength": 0.75, + "full_width_half_max": 0.1 + } + ], + "description": "Collection 2 Level-1 Near Infrared Band 0.8 Top of Atmosphere Radiance", + "raster:bands": [ + { + "unit": "watt/steradian/square_meter/micrometer", + "scale": 0.55866, + "nodata": 0, + "offset": 4.34134, + "data_type": "uint8", + "spatial_resolution": 60 + } + ] + }, + "nir09": { + "href": 
"https://landsateuwest.blob.core.windows.net/landsat-c2/level-1/standard/mss/1990/018/038/LM05_L1GS_018038_19901223_20200827_02_T2/LM05_L1GS_018038_19901223_20200827_02_T2_B4.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "roles": ["data"], + "title": "Near Infrared Band 0.9 (B4)", + "eo:bands": [ + { + "name": "B4", + "common_name": "nir09", + "description": "Landsat 4-5 Band B4", + "center_wavelength": 0.95, + "full_width_half_max": 0.3 + } + ], + "description": "Collection 2 Level-1 Near Infrared Band 0.9 Top of Atmosphere Radiance", + "raster:bands": [ + { + "unit": "watt/steradian/square_meter/micrometer", + "scale": 0.46654, + "nodata": 0, + "offset": 1.03346, + "data_type": "uint8", + "spatial_resolution": 60 + } + ] + }, + "mtl.txt": { + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-1/standard/mss/1990/018/038/LM05_L1GS_018038_19901223_20200827_02_T2/LM05_L1GS_018038_19901223_20200827_02_T2_MTL.txt", + "type": "text/plain", + "roles": ["metadata"], + "title": "Product Metadata File (txt)", + "description": "Collection 2 Level-1 Product Metadata File (txt)" + }, + "mtl.xml": { + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-1/standard/mss/1990/018/038/LM05_L1GS_018038_19901223_20200827_02_T2/LM05_L1GS_018038_19901223_20200827_02_T2_MTL.xml", + "type": "application/xml", + "roles": ["metadata"], + "title": "Product Metadata File (xml)", + "description": "Collection 2 Level-1 Product Metadata File (xml)" + }, + "mtl.json": { + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-1/standard/mss/1990/018/038/LM05_L1GS_018038_19901223_20200827_02_T2/LM05_L1GS_018038_19901223_20200827_02_T2_MTL.json", + "type": "application/json", + "roles": ["metadata"], + "title": "Product Metadata File (json)", + "description": "Collection 2 Level-1 Product Metadata File (json)" + }, + "qa_pixel": { + "href": 
"https://landsateuwest.blob.core.windows.net/landsat-c2/level-1/standard/mss/1990/018/038/LM05_L1GS_018038_19901223_20200827_02_T2/LM05_L1GS_018038_19901223_20200827_02_T2_QA_PIXEL.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "roles": ["cloud"], + "title": "Pixel Quality Assessment Band (QA_PIXEL)", + "description": "Collection 2 Level-1 Pixel Quality Assessment Band", + "raster:bands": [ + { + "unit": "bit index", + "nodata": 1, + "data_type": "uint16", + "spatial_resolution": 60 + } + ] + }, + "qa_radsat": { + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-1/standard/mss/1990/018/038/LM05_L1GS_018038_19901223_20200827_02_T2/LM05_L1GS_018038_19901223_20200827_02_T2_QA_RADSAT.TIF", + "type": "image/tiff; application=geotiff; profile=cloud-optimized", + "roles": ["saturation"], + "title": "Radiometric Saturation and Dropped Pixel Quality Assessment Band (QA_RADSAT)", + "description": "Collection 2 Level-1 Radiometric Saturation and Dropped Pixel Quality Assessment Band", + "raster:bands": [ + { + "unit": "bit index", + "data_type": "uint16", + "spatial_resolution": 60 + } + ] + }, + "thumbnail": { + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-1/standard/mss/1990/018/038/LM05_L1GS_018038_19901223_20200827_02_T2/LM05_L1GS_018038_19901223_20200827_02_T2_thumb_small.jpeg", + "type": "image/jpeg", + "roles": ["thumbnail"], + "title": "Thumbnail image" + }, + "reduced_resolution_browse": { + "href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-1/standard/mss/1990/018/038/LM05_L1GS_018038_19901223_20200827_02_T2/LM05_L1GS_018038_19901223_20200827_02_T2_thumb_large.jpeg", + "type": "image/jpeg", + "roles": ["overview"], + "title": "Reduced resolution browse image" + } + }, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [-84.3264316, 32.6891482], + [-84.7340712, 31.1114869], + [-82.8283452, 30.8344014], + [-82.3892149, 32.4079117], + [-84.3264316, 32.6891482] + ] 
+ ] + }, + "collection": "test2-collection", + "properties": { + "gsd": 79, + "created": "2022-03-31T16:51:57.476085Z", + "sci:doi": "10.5066/P9AF14YV", + "datetime": "1990-12-23T15:26:35.581000Z", + "platform": "landsat-5", + "proj:epsg": 32617, + "proj:shape": [3525, 3946], + "description": "Landsat Collection 2 Level-1", + "instruments": ["mss"], + "eo:cloud_cover": 23, + "proj:transform": [60, 0, 140790, 0, -60, 3622110], + "view:off_nadir": 0, + "landsat:wrs_row": "038", + "landsat:scene_id": "LM50180381990357AAA03", + "landsat:wrs_path": "018", + "landsat:wrs_type": "2", + "view:sun_azimuth": 147.23255058, + "landsat:correction": "L1GS", + "view:sun_elevation": 27.04507311, + "landsat:cloud_cover_land": 28, + "landsat:collection_number": "02", + "landsat:collection_category": "T2" + }, + "stac_version": "1.0.0", + "stac_extensions": [ + "https://stac-extensions.github.io/raster/v1.0.0/schema.json", + "https://stac-extensions.github.io/eo/v1.0.0/schema.json", + "https://stac-extensions.github.io/view/v1.0.0/schema.json", + "https://stac-extensions.github.io/projection/v1.0.0/schema.json", + "https://landsat.usgs.gov/stac/landsat-extension/v1.1.1/schema.json", + "https://stac-extensions.github.io/scientific/v1.0.0/schema.json" + ] +} diff --git a/stac_fastapi/pgstac/tests/resources/test_conformance.py b/stac_fastapi/pgstac/tests/resources/test_conformance.py index 2b7cd1d36..b080c4b8a 100644 --- a/stac_fastapi/pgstac/tests/resources/test_conformance.py +++ b/stac_fastapi/pgstac/tests/resources/test_conformance.py @@ -4,12 +4,12 @@ import pytest -@pytest.fixture(scope="module") +@pytest.fixture(scope="function") async def response(app_client): return await app_client.get("/") -@pytest.fixture(scope="module") +@pytest.fixture(scope="function") async def response_json(response) -> Dict: return response.json() diff --git a/stac_fastapi/pgstac/tests/resources/test_item.py b/stac_fastapi/pgstac/tests/resources/test_item.py index 8e9e7de1e..40b4b514a 100644 --- 
a/stac_fastapi/pgstac/tests/resources/test_item.py +++ b/stac_fastapi/pgstac/tests/resources/test_item.py @@ -133,7 +133,6 @@ async def test_delete_item( item = load_test_item resp = await app_client.delete(f"/collections/{coll.id}/items/{item.id}") - print(resp.content) assert resp.status_code == 200 resp = await app_client.get(f"/collections/{coll.id}/items/{item.id}") @@ -188,11 +187,9 @@ async def test_delete_missing_item( item = load_test_item resp = await app_client.delete(f"/collections/{coll.id}/items/{item.id}") - print(resp.content) assert resp.status_code == 200 resp = await app_client.delete(f"/collections/{coll.id}/items/{item.id}") - print(resp.content) assert resp.status_code == 404 @@ -245,9 +242,6 @@ async def test_pagination(app_client, load_test_data, load_test_collection): resp = await app_client.get(f"/collections/{coll.id}/items", params={"limit": 3}) assert resp.status_code == 200 first_page = resp.json() - for feature in first_page["features"]: - print(feature["id"], feature["properties"]["datetime"]) - print(f"first page links {first_page['links']}") assert len(first_page["features"]) == 3 nextlink = [ @@ -262,14 +256,9 @@ async def test_pagination(app_client, load_test_data, load_test_collection): "test-item18", ] - print(f"Next {nextlink}") - resp = await app_client.get(nextlink) assert resp.status_code == 200 second_page = resp.json() - for feature in second_page["features"]: - print(feature["id"], feature["properties"]["datetime"]) - print(f"second page links {second_page['links']}") assert len(first_page["features"]) == 3 nextlink = [ @@ -283,7 +272,6 @@ async def test_pagination(app_client, load_test_data, load_test_collection): ].pop() assert prevlink is not None - print(nextlink, prevlink) assert [f["id"] for f in second_page["features"]] == [ "test-item17", @@ -294,9 +282,6 @@ async def test_pagination(app_client, load_test_data, load_test_collection): resp = await app_client.get(prevlink) assert resp.status_code == 200 back_page = 
resp.json() - for feature in back_page["features"]: - print(feature["id"], feature["properties"]["datetime"]) - print(back_page["links"]) assert len(back_page["features"]) == 3 assert [f["id"] for f in back_page["features"]] == [ "test-item20", @@ -385,8 +370,6 @@ async def test_item_search_temporal_query_post( assert resp.status_code == 200 item_date = rfc3339_str_to_datetime(test_item["properties"]["datetime"]) - print(item_date) - item_date = item_date + timedelta(seconds=1) params = { "collections": [test_item["collection"]], @@ -395,7 +378,6 @@ async def test_item_search_temporal_query_post( } resp = await app_client.post("/search", json=params) - print(resp.content) resp_json = resp.json() assert len(resp_json["features"]) == 1 assert resp_json["features"][0]["id"] == test_item["id"] @@ -641,7 +623,6 @@ async def test_item_search_properties_jsonb( # EPSG is a JSONB key params = {"query": {"proj:epsg": {"gt": test_item["properties"]["proj:epsg"] - 1}}} - print(params) resp = await app_client.post("/search", json=params) assert resp.status_code == 200 resp_json = resp.json() @@ -659,13 +640,13 @@ async def test_item_search_properties_field( assert resp.status_code == 200 second_test_item = load_test_data("test_item2.json") + second_test_item["properties"]["eo:cloud_cover"] = 5 resp = await app_client.post( f"/collections/{test_item['collection']}/items", json=second_test_item ) assert resp.status_code == 200 params = {"query": {"eo:cloud_cover": {"eq": 0}}} - print(params) resp = await app_client.post("/search", json=params) assert resp.status_code == 200 resp_json = resp.json() @@ -790,7 +771,6 @@ async def test_item_search_get_filter_extension_cql2( ], }, } - print(params) resp = await app_client.post("/search", json=params) resp_json = resp.json() @@ -846,9 +826,7 @@ async def test_item_search_get_filter_extension_cql2_with_query_fails( }, "query": {"eo:cloud_cover": {"eq": 0}}, } - print(params) resp = await app_client.post("/search", json=params) - 
print(resp.content) assert resp.status_code == 400 @@ -881,8 +859,6 @@ async def test_pagination_item_collection( assert resp.status_code == 200 ids.append(uid) - print(ids) - # Paginate through all 5 items with a limit of 1 (expecting 5 requests) page = await app_client.get( f"/collections/{test_item['collection']}/items", params={"limit": 1} @@ -893,7 +869,6 @@ async def test_pagination_item_collection( idx += 1 page_data = page.json() item_ids.append(page_data["features"][0]["id"]) - print(idx, item_ids) nextlink = [ link["href"] for link in page_data["links"] if link["rel"] == "next" ] @@ -931,7 +906,6 @@ async def test_pagination_post(app_client, load_test_data, load_test_collection) "filter": {"op": "in", "args": [{"property": "id"}, ids]}, "limit": 1, } - print(f"REQUEST BODY: {request_body}") page = await app_client.post("/search", json=request_body) idx = 0 item_ids = [] @@ -939,7 +913,6 @@ async def test_pagination_post(app_client, load_test_data, load_test_collection) idx += 1 page_data = page.json() item_ids.append(page_data["features"][0]["id"]) - print(f"PAGING: {page_data['links']}") next_link = list(filter(lambda l: l["rel"] == "next", page_data["links"])) if not next_link: break @@ -951,7 +924,6 @@ async def test_pagination_post(app_client, load_test_data, load_test_collection) assert False # Our limit is 1 so we expect len(ids) number of requests before we run out of pages - print(idx, ids) assert idx == len(ids) # Confirm we have paginated through all items @@ -984,7 +956,6 @@ async def test_pagination_token_idempotent( }, ) page_data = page.json() - print(f"LINKS: {page_data['links']}") next_link = list(filter(lambda l: l["rel"] == "next", page_data["links"])) # Confirm token is idempotent @@ -1039,7 +1010,6 @@ async def test_field_extension_post(app_client, load_test_data, load_test_collec resp = await app_client.post("/search", json=body) resp_json = resp.json() - print(resp_json) assert "B1" not in resp_json["features"][0]["assets"].keys() 
assert not set(resp_json["features"][0]["properties"]) - { "orientation", @@ -1087,6 +1057,97 @@ async def test_field_extension_exclude_default_includes( assert "geometry" not in resp_json["features"][0] +async def test_field_extension_include_multiple_subkeys( + app_client, load_test_item, load_test_collection +): + """Test that multiple subkeys of an object field are included""" + body = {"fields": {"include": ["properties.width", "properties.height"]}} + + resp = await app_client.post("/search", json=body) + assert resp.status_code == 200 + resp_json = resp.json() + + resp_prop_keys = resp_json["features"][0]["properties"].keys() + assert set(resp_prop_keys) == set(["width", "height"]) + + +async def test_field_extension_include_multiple_deeply_nested_subkeys( + app_client, load_test_item, load_test_collection +): + """Test that multiple deeply nested subkeys of an object field are included""" + body = {"fields": {"include": ["assets.ANG.type", "assets.ANG.href"]}} + + resp = await app_client.post("/search", json=body) + assert resp.status_code == 200 + resp_json = resp.json() + + resp_assets = resp_json["features"][0]["assets"] + assert set(resp_assets.keys()) == set(["ANG"]) + assert set(resp_assets["ANG"].keys()) == set(["type", "href"]) + + +async def test_field_extension_exclude_multiple_deeply_nested_subkeys( + app_client, load_test_item, load_test_collection +): + """Test that multiple deeply nested subkeys of an object field are excluded""" + body = {"fields": {"exclude": ["assets.ANG.type", "assets.ANG.href"]}} + + resp = await app_client.post("/search", json=body) + assert resp.status_code == 200 + resp_json = resp.json() + + resp_assets = resp_json["features"][0]["assets"] + assert len(resp_assets.keys()) > 0 + assert "type" not in resp_assets["ANG"] + assert "href" not in resp_assets["ANG"] + + +async def test_field_extension_exclude_deeply_nested_included_subkeys( + app_client, load_test_item, load_test_collection +): + """Test that deeply nested 
keys of a nested object that was included are excluded""" + body = { + "fields": { + "include": ["assets.ANG.type", "assets.ANG.href"], + "exclude": ["assets.ANG.href"], + } + } + + resp = await app_client.post("/search", json=body) + assert resp.status_code == 200 + resp_json = resp.json() + + resp_assets = resp_json["features"][0]["assets"] + assert "type" in resp_assets["ANG"] + assert "href" not in resp_assets["ANG"] + + +async def test_field_extension_exclude_links( + app_client, load_test_item, load_test_collection +): + """Links have special injection behavior, ensure they can be excluded with the fields extension""" + body = {"fields": {"exclude": ["links"]}} + + resp = await app_client.post("/search", json=body) + assert resp.status_code == 200 + resp_json = resp.json() + + assert "links" not in resp_json["features"][0] + + +async def test_field_extension_include_only_non_existant_field( + app_client, load_test_item, load_test_collection +): + """Including only a non-existant field should return the full item""" + body = {"fields": {"include": ["non_existant_field"]}} + + resp = await app_client.post("/search", json=body) + assert resp.status_code == 200 + resp_json = resp.json() + + assert list(resp_json["features"][0].keys()) == ["id", "collection", "links"] + + async def test_search_intersects_and_bbox(app_client): """Test POST search intersects and bbox are mutually exclusive (core)""" bbox = [-118, 34, -117, 35] @@ -1150,7 +1211,6 @@ async def test_preserves_extra_link( ) assert response_item.status_code == 200 item = response_item.json() - extra_link = [link for link in item["links"] if link["rel"] == "preview"] assert extra_link assert extra_link[0]["href"] == expected_href @@ -1235,10 +1295,8 @@ async def test_item_search_get_filter_extension_cql2_2( ], }, } - print(json.dumps(params)) resp = await app_client.post("/search", json=params) resp_json = resp.json() - print(resp_json) assert resp.status_code == 200 assert len(resp_json.get("features")) 
== 0 @@ -1267,7 +1325,6 @@ async def test_item_search_get_filter_extension_cql2_2( } resp = await app_client.post("/search", json=params) resp_json = resp.json() - print(resp_json) assert len(resp.json()["features"]) == 1 assert ( resp_json["features"][0]["properties"]["proj:epsg"] @@ -1310,7 +1367,6 @@ async def test_filter_cql2text(app_client, load_test_data, load_test_collection) params = {"filter": filter, "filter-lang": "cql2-text"} resp = await app_client.get("/search", params=params) resp_json = resp.json() - print(resp_json) assert len(resp.json()["features"]) == 1 assert ( resp_json["features"][0]["properties"]["proj:epsg"] @@ -1321,5 +1377,20 @@ async def test_filter_cql2text(app_client, load_test_data, load_test_collection) params = {"filter": filter, "filter-lang": "cql2-text"} resp = await app_client.get("/search", params=params) resp_json = resp.json() - print(resp_json) assert len(resp.json()["features"]) == 0 + + +async def test_item_merge_raster_bands( + app_client, load_test2_item, load_test2_collection +): + resp = await app_client.get("/collections/test2-collection/items/test2-item") + resp_json = resp.json() + red_bands = resp_json["assets"]["red"]["raster:bands"] + + # The merged item should have merged the band dicts from base and item + # into a single dict + assert len(red_bands) == 1 + # The merged item should have the full 6 bands + assert len(red_bands[0].keys()) == 6 + # The merged item should have kept the item value rather than the base value + assert red_bands[0]["offset"] == 2.03976 diff --git a/stac_fastapi/sqlalchemy/tests/resources/test_item.py b/stac_fastapi/sqlalchemy/tests/resources/test_item.py index f75a802bf..2f671de68 100644 --- a/stac_fastapi/sqlalchemy/tests/resources/test_item.py +++ b/stac_fastapi/sqlalchemy/tests/resources/test_item.py @@ -194,7 +194,7 @@ def test_create_item_missing_collection(app_client, load_test_data): resp = app_client.post( f"/collections/{test_item['collection']}/items", json=test_item ) - 
assert resp.status_code == 422 + assert resp.status_code == 424 def test_update_item_already_exists(app_client, load_test_data):