diff --git a/openeo_driver/ProcessGraphDeserializer.py b/openeo_driver/ProcessGraphDeserializer.py index 7439cb7e..51fdfdb9 100644 --- a/openeo_driver/ProcessGraphDeserializer.py +++ b/openeo_driver/ProcessGraphDeserializer.py @@ -677,13 +677,13 @@ def apply_neighborhood(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: @process def apply_dimension(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: - data_cube = args.get_required("data", expected_type=DriverDataCube) + data_cube = args.get_required("data", expected_type=(DriverDataCube, DriverVectorCube)) process = args.get_deep("process", "process_graph", expected_type=dict) - dimension = args.get_required("dimension", expected_type=str) + dimension = args.get_required( + "dimension", expected_type=str, validator=ProcessArgs.validator_one_of(data_cube.get_dimension_names()) + ) target_dimension = args.get_optional("target_dimension", default=None, expected_type=str) context = args.get_optional("context", default=None) - # do check_dimension here for error handling - dimension, band_dim, temporal_dim = _check_dimension(cube=data_cube, dim=dimension, process="apply_dimension") cube = data_cube.apply_dimension( process=process, dimension=dimension, target_dimension=target_dimension, context=context, env=env @@ -747,10 +747,10 @@ def apply(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: def reduce_dimension(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: data_cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube) reduce_pg = args.get_deep("reducer", "process_graph", expected_type=dict) - dimension = args.get_required("dimension", expected_type=str) + dimension = args.get_required( + "dimension", expected_type=str, validator=ProcessArgs.validator_one_of(data_cube.get_dimension_names()) + ) context = args.get_optional("context", default=None) - # do check_dimension here for error handling - dimension, band_dim, temporal_dim = _check_dimension(cube=data_cube, dim=dimension, process="reduce_dimension") return data_cube.reduce_dimension(reducer=reduce_pg, dimension=dimension, context=context, env=env) @@ -915,60 +915,35 @@ def rename_labels(args: dict, env: EvalEnv) -> DriverDataCube: ) -def _check_dimension(cube: DriverDataCube, dim: str, process: str): - """ - Helper to check/validate the requested and available dimensions of a cube. 
- - :return: tuple (requested dimension, name of band dimension, name of temporal dimension) - """ - # Note: large part of this is support/adapting for old client - # (pre https://github.com/Open-EO/openeo-python-client/issues/93) - # TODO remove this legacy support when not necessary anymore - metadata = cube.metadata - try: - band_dim = metadata.band_dimension.name - except MetadataException: - band_dim = None - try: - temporal_dim = metadata.temporal_dimension.name - except MetadataException: - temporal_dim = None - - if dim not in metadata.dimension_names(): - if dim in ["spectral_bands", "bands"] and band_dim: - _log.warning("Probably old client requesting band dimension {d!r}," - " but actual band dimension name is {n!r}".format(d=dim, n=band_dim)) - dim = band_dim - elif dim == "temporal" and temporal_dim: - _log.warning("Probably old client requesting temporal dimension {d!r}," - " but actual temporal dimension name is {n!r}".format(d=dim, n=temporal_dim)) - dim = temporal_dim - else: - raise ProcessParameterInvalidException( - parameter="dimension", process=process, - reason="got {d!r}, but should be one of {n!r}".format(d=dim, n=metadata.dimension_names())) - - return dim, band_dim, temporal_dim - - @process def aggregate_temporal(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: data_cube = args.get_required("data", expected_type=DriverDataCube) - reduce_pg = args.get_deep("reducer", "process_graph", expected_type=dict) - context = args.get_optional("context", default=None) intervals = args.get_required("intervals") + reduce_pg = args.get_deep("reducer", "process_graph", expected_type=dict) labels = args.get_optional("labels", default=None) - dimension = _get_time_dim_or_default(args, data_cube) - return data_cube.aggregate_temporal(intervals=intervals,labels=labels,reducer=reduce_pg, dimension=dimension, context=context) + dimension = args.get_optional( + "dimension", + default=lambda: data_cube.metadata.temporal_dimension.name, + validator=ProcessArgs.validator_one_of(data_cube.get_dimension_names()), + ) + context = args.get_optional("context", default=None) + + return data_cube.aggregate_temporal( + intervals=intervals, labels=labels, reducer=reduce_pg, dimension=dimension, context=context + ) @process_registry_100.add_function def aggregate_temporal_period(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: data_cube = args.get_required("data", expected_type=DriverDataCube) + period = args.get_required("period") reduce_pg = args.get_deep("reducer", "process_graph", expected_type=dict) + dimension = args.get_optional( + "dimension", + default=lambda: data_cube.metadata.temporal_dimension.name, + validator=ProcessArgs.validator_one_of(data_cube.get_dimension_names()), + ) context = args.get_optional("context", default=None) - period = args.get_required("period") - dimension = _get_time_dim_or_default(args, data_cube, "aggregate_temporal_period") dry_run_tracer: DryRunDataTracer = env.get(ENV_DRY_RUN_TRACER) if dry_run_tracer: @@ -1045,24 +1020,6 @@ def _period_to_intervals(start, end, period) -> List[Tuple[pd.Timestamp, pd.Time return intervals -def _get_time_dim_or_default(args: ProcessArgs, data_cube, process_id="aggregate_temporal"): - dimension = args.get_optional("dimension", None) - if dimension is not None: - dimension, _, _ = _check_dimension(cube=data_cube, dim=dimension, process=process_id) - else: - # default: there is a single temporal dimension - try: - dimension = data_cube.metadata.temporal_dimension.name - except MetadataException: - raise 
ProcessParameterInvalidException( - parameter="dimension", process=process_id, - reason="No dimension was set, and no temporal dimension could be found. Available dimensions: {n!r}".format( - n=data_cube.metadata.dimension_names())) - # do check_dimension here for error handling - dimension, band_dim, temporal_dim = _check_dimension(cube=data_cube, dim=dimension, process=process_id) - return dimension - - @process_registry_100.add_function def aggregate_spatial(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: cube = args.get_required("data", expected_type=DriverDataCube) @@ -1624,14 +1581,28 @@ def load_uploaded_files(args: dict, env: EvalEnv) -> Union[DriverVectorCube,Driv .returns("vector-cube", schema={"type": "object", "subtype": "vector-cube"}) ) def to_vector_cube(args: Dict, env: EvalEnv): - # TODO: standardization of something like this? https://github.com/Open-EO/openeo-processes/issues/346 + _log.warning("Experimental process `to_vector_cube` is deprecated, use `load_geojson` instead") + # TODO: remove this experimental/deprecated process data = extract_arg(args, "data", process_id="to_vector_cube") if isinstance(data, dict) and data.get("type") in {"Polygon", "MultiPolygon", "Feature", "FeatureCollection"}: return env.backend_implementation.vector_cube_cls.from_geojson(data) - # TODO: support more inputs: string with geojson, string with WKT, list of WKT, string with URL to GeoJSON, ... raise FeatureUnsupportedException(f"Converting {type(data)} to vector cube is not supported") +@process_registry_100.add_function(spec=read_spec("openeo-processes/2.x/proposals/load_geojson.json")) +def load_geojson(args: ProcessArgs, env: EvalEnv) -> DriverVectorCube: + data = args.get_required( + "data", + validator=ProcessArgs.validator_geojson_dict( + # TODO: also allow LineString and MultiLineString? 
+ allowed_types=["Point", "MultiPoint", "Polygon", "MultiPolygon", "Feature", "FeatureCollection"] + ), + ) + properties = args.get_optional("properties", default=[], expected_type=(list, tuple)) + vector_cube = env.backend_implementation.vector_cube_cls.from_geojson(data, columns_for_cube=properties) + return vector_cube + + @non_standard_process( ProcessSpec("get_geometries", description="Reads vector data from a file or a URL or get geometries from a FeatureCollection") .param('filename', description="filename or http url of a vector file", schema={"type": "string"}, required=False) diff --git a/openeo_driver/_version.py b/openeo_driver/_version.py index 39e8de23..1db47040 100644 --- a/openeo_driver/_version.py +++ b/openeo_driver/_version.py @@ -1 +1 @@ -__version__ = "0.60.0a1" +__version__ = "0.61.0a1" diff --git a/openeo_driver/datacube.py b/openeo_driver/datacube.py index e33cd9b1..6bed712f 100644 --- a/openeo_driver/datacube.py +++ b/openeo_driver/datacube.py @@ -1,25 +1,26 @@ import abc import inspect +import io import logging import zipfile from pathlib import Path -from typing import List, Union, Optional, Dict, Any, Tuple, Sequence -import io +from typing import Any, Dict, List, Optional, Sequence, Tuple, Union import geopandas as gpd import numpy +import openeo.udf +import pandas import pyproj +import requests import shapely.geometry import shapely.geometry.base import shapely.ops import xarray -from pyproj import CRS -import requests - from openeo.metadata import CollectionMetadata from openeo.util import ensure_dir, str_truncate -import openeo.udf -from openeo_driver.datastructs import SarBackscatterArgs, ResolutionMergeArgs, StacAsset +from pyproj import CRS + +from openeo_driver.datastructs import ResolutionMergeArgs, SarBackscatterArgs, StacAsset from openeo_driver.errors import FeatureUnsupportedException, InternalException from openeo_driver.util.geometry import GeometryBufferer, validate_geojson_coordinates from openeo_driver.util.ioformats import IOFORMATS @@ -61,6 +62,9 @@ def __eq__(self, o: object) -> bool: return True return False + def get_dimension_names(self) -> List[str]: + return self.metadata.dimension_names() + def _not_implemented(self): """Helper to raise a NotImplemented exception containing method name""" raise NotImplementedError("DataCube method not implemented: {m!r}".format(m=inspect.stack()[1].function)) @@ -221,6 +225,9 @@ class DriverVectorCube: COLUMN_SELECTION_ALL = "all" COLUMN_SELECTION_NUMERICAL = "numerical" + # Xarray cube attribute to indicate that it is a dummy cube + CUBE_ATTR_VECTOR_CUBE_DUMMY = "vector_cube_dummy" + def __init__( self, geometries: gpd.GeoDataFrame, @@ -281,14 +288,21 @@ def from_geodataframe( elif columns_for_cube == cls.COLUMN_SELECTION_ALL: columns_for_cube = available_columns elif isinstance(columns_for_cube, list): - # TODO #114 limit to subset with available columns (and automatically fill in missing columns with nodata)? columns_for_cube = columns_for_cube else: raise ValueError(columns_for_cube) assert isinstance(columns_for_cube, list) if columns_for_cube: - cube_df = data[columns_for_cube] + existing = [c for c in columns_for_cube if c in available_columns] + to_add = [c for c in columns_for_cube if c not in available_columns] + if existing: + cube_df = data[existing] + if to_add: + cube_df.loc[:, to_add] = numpy.nan + else: + cube_df = pandas.DataFrame(index=data.index, columns=to_add) + # TODO: remove `columns_for_cube` from geopandas data frame? 
# Enabling that triggers failure of some existing tests that use `aggregate_spatial` # to "enrich" a vector cube with pre-existing properties @@ -308,7 +322,14 @@ def from_geodataframe( return cls(geometries=geometries_df, cube=cube) else: - return cls(geometries=data) + # Use a 1D dummy cube of NaN values + cube: xarray.DataArray = xarray.DataArray( + data=numpy.full(shape=[data.shape[0]], fill_value=numpy.nan), + dims=[cls.DIM_GEOMETRIES], + coords={cls.DIM_GEOMETRIES: data.geometry.index.to_list()}, + attrs={cls.CUBE_ATTR_VECTOR_CUBE_DUMMY: True}, + ) + return cls(geometries=data, cube=cube) @classmethod def from_fiona( @@ -400,7 +421,7 @@ def _as_geopandas_df( """Join geometries and cube as a geopandas dataframe""" # TODO: avoid copy? df = self._geometries.copy(deep=True) - if self._cube is not None: + if self._cube is not None and not self._cube.attrs.get(self.CUBE_ATTR_VECTOR_CUBE_DUMMY): assert self._cube.dims[0] == self.DIM_GEOMETRIES # TODO: better way to combine cube with geometries # Flatten multiple (non-geometry) dimensions from cube to new properties in geopandas dataframe @@ -426,6 +447,16 @@ def to_wkt(self) -> List[str]: wkts = [str(g) for g in self._geometries.geometry] return wkts + def to_internal_json(self) -> dict: + """ + Export to an internal JSON-style representation. + Subject to change at any time: not intended for public consumption, just for (unit) test purposes. + """ + return { + "geometries": shapely.geometry.mapping(self._geometries), + "cube": self._cube.to_dict(data="array") if self._cube is not None else None, + } + def get_crs(self) -> pyproj.CRS: return self._geometries.crs or pyproj.CRS.from_epsg(4326) @@ -485,7 +516,7 @@ def to_legacy_save_result(self) -> Union["AggregatePolygonResult", "JSONResult"] # TODO: eliminate these legacy, non-standard formats? from openeo_driver.save_result import AggregatePolygonResult, JSONResult - if self._cube is None: + if self._cube is None or self._cube.attrs.get(self.CUBE_ATTR_VECTOR_CUBE_DUMMY): # No cube: no real data to return (in legacy style), so let's just return a `null` per geometry. return JSONResult(data=[None] * self.geometry_count()) @@ -511,6 +542,12 @@ def to_legacy_save_result(self) -> Union["AggregatePolygonResult", "JSONResult"] f"Unsupported cube configuration {cube.dims} for _write_legacy_aggregate_polygon_result_json" ) + def get_dimension_names(self) -> List[str]: + if self._cube is None: + return [self.DIM_GEOMETRIES] + else: + return list(self._cube.dims) + def get_bounding_box(self) -> Tuple[float, float, float, float]: # TODO: cache bounding box? # TODO #114 #141 Open-EO/openeo-geopyspark-driver#239: option to buffer point geometries (if any) @@ -596,18 +633,23 @@ def apply_dimension( context: Optional[dict] = None, env: EvalEnv, ) -> "DriverVectorCube": + # Is the callback a single run_udf node process? single_run_udf = SingleRunUDFProcessGraph.parse_or_none(process) if single_run_udf: # Process with single "run_udf" node - # TODO: check provided dimension with actual dimension of the cube - if dimension in (self.DIM_BANDS, self.DIM_PROPERTIES) and target_dimension is None: + if ( + dimension == self.DIM_GEOMETRIES + or (dimension in {self.DIM_BANDS, self.DIM_PROPERTIES}.intersection(self.get_dimension_names())) + and target_dimension is None + ): log.warning( f"Using experimental feature: DriverVectorCube.apply_dimension along dim {dimension} and empty cube" ) - # TODO: this is non-standard special case: vector cube with only geometries, but no "cube" data + # TODO: data chunking (e.g. large feature collections) gdf = self._as_geopandas_df() feature_collection = openeo.udf.FeatureCollection(id="_", data=gdf) + # TODO: dedicated UDF signature to indicate working on a vector cube through a feature-collection-based API udf_data = openeo.udf.UdfData( proj={"EPSG": self._geometries.crs.to_epsg()}, feature_collection_list=[feature_collection],
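For illustration, a minimal sketch (hypothetical snippet, not part of the patch) of the dummy-cube behavior introduced above; the inline GeoDataFrame and the printed expectations are assumptions for demonstration, and `DIM_GEOMETRIES` is assumed to resolve to "geometries":

import geopandas as gpd
import shapely.geometry

from openeo_driver.datacube import DriverVectorCube

# A plain GeoDataFrame with two polygons and one numerical property.
df = gpd.GeoDataFrame(
    data={"pop": [1234, 5678]},
    geometry=[shapely.geometry.box(1, 1, 3, 3), shapely.geometry.box(4, 2, 5, 4)],
    crs="EPSG:4326",
)

# With an empty cube-column selection, `from_geodataframe` used to return a
# cube-less vector cube (cube=None); it now attaches a 1D dummy cube of NaN
# values, flagged through the `vector_cube_dummy` attribute.
vc = DriverVectorCube.from_geodataframe(df, columns_for_cube=[])
print(vc.get_dimension_names())                # expected: ['geometries']
print(vc.to_internal_json()["cube"]["attrs"])  # expected: {'vector_cube_dummy': True}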
diff --git a/openeo_driver/dummy/dummy_backend.py b/openeo_driver/dummy/dummy_backend.py index d2d29004..cbdd35b9 100644 --- a/openeo_driver/dummy/dummy_backend.py +++ b/openeo_driver/dummy/dummy_backend.py @@ -181,10 +181,12 @@ def __init__(self, metadata: CollectionMetadata = None): self.apply_tiles = Mock(name="apply_tiles", return_value=self) self.apply_tiles_spatiotemporal = Mock(name="apply_tiles_spatiotemporal", return_value=self) - # Create mock methods for remaining data cube methods that are not yet defined - already_defined = set(DummyDataCube.__dict__.keys()).union(self.__dict__.keys()) + # Create mock methods for remaining DriverDataCube methods that are not yet defined directly by DummyDataCube + to_keep = set(DummyDataCube.__dict__.keys()).union(self.__dict__.keys()) + to_keep.update(m for m in DriverDataCube.__dict__.keys() if m.startswith("_")) + to_keep.update(["get_dimension_names"]) for name, method in DriverDataCube.__dict__.items(): - if not name.startswith('_') and name not in already_defined and callable(method): + if name not in to_keep and callable(method): setattr(self, name, Mock(name=name, return_value=self)) for name in [n for n, m in DummyDataCube.__dict__.items() if getattr(m, '_mock_side_effect', False)]: diff --git a/openeo_driver/processes.py b/openeo_driver/processes.py index 2c4c6c7c..e77d8211 100644 --- a/openeo_driver/processes.py +++ b/openeo_driver/processes.py @@ -1,18 +1,18 @@ import functools import inspect -import typing import warnings from collections import namedtuple from pathlib import Path -from typing import Callable, Dict, List, Tuple, Optional, Any, Union +from typing import Any, Callable, Collection, Dict, List, Optional, Tuple, Union from openeo_driver.errors import ( - ProcessUnsupportedException, - ProcessParameterRequiredException, ProcessParameterInvalidException, + ProcessParameterRequiredException, + ProcessUnsupportedException, ) from openeo_driver.specs import SPECS_ROOT -from openeo_driver.utils import read_json, EvalEnv +from openeo_driver.util.geometry import validate_geojson_basic +from openeo_driver.utils import EvalEnv, read_json class ProcessParameter: @@ -289,7 +289,11 @@ def cast(cls, args: Union[dict, "ProcessArgs"], process_id: Optional[str] = None return args def get_required( - self, name: str, *, expected_type: Optional[Union[type, Tuple[type, ...]]] = None + self, + name: str, + *, + expected_type: Optional[Union[type, Tuple[type, ...]]] = None, + validator: Optional[Callable[[Any], bool]] = None, ) -> ArgumentValue: """ Get a required argument by name. 
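As a quick illustration of the new `validator` hook on `get_required`, a minimal sketch with hypothetical argument values, mirroring the dimension validation that `reduce_dimension` now performs above:

from openeo_driver.processes import ProcessArgs

args = ProcessArgs({"dimension": "t"}, process_id="reduce_dimension")
# Validation passes: "t" is one of the allowed dimension names.
dimension = args.get_required(
    "dimension",
    expected_type=str,
    validator=ProcessArgs.validator_one_of(["x", "y", "t", "bands"]),
)
assert dimension == "t"
# A value outside that list would raise ProcessParameterInvalidException,
# with reason "Must be one of ['x', 'y', 't', 'bands'] but got ...".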
@@ -301,33 +305,69 @@ def get_required( value = self[name] except KeyError: raise ProcessParameterRequiredException(process=self.process_id, parameter=name) from None - self._check_type(name=name, value=value, expected_type=expected_type) + self._check_value(name=name, value=value, expected_type=expected_type, validator=validator) return value - def _check_type(self, *, name: str, value: Any, expected_type: Optional[Union[type, Tuple[type, ...]]] = None): + def _check_value( + self, + *, + name: str, + value: Any, + expected_type: Optional[Union[type, Tuple[type, ...]]] = None, + validator: Optional[Callable[[Any], bool]] = None, + ): if expected_type: if not isinstance(value, expected_type): raise ProcessParameterInvalidException( parameter=name, process=self.process_id, reason=f"Expected {expected_type} but got {type(value)}." ) + if validator: + try: + valid = validator(value) + reason = "Failed validation." + except Exception as e: + valid = False + reason = str(e) + if not valid: + raise ProcessParameterInvalidException(parameter=name, process=self.process_id, reason=reason) def get_optional( - self, name: str, default: Any = None, *, expected_type: Optional[Union[type, Tuple[type, ...]]] = None + self, + name: str, + default: Union[Any, Callable[[], Any]] = None, + *, + expected_type: Optional[Union[type, Tuple[type, ...]]] = None, + validator: Optional[Callable[[Any], bool]] = None, ) -> ArgumentValue: """ Get an optional argument with default + + :param name: argument name + :param default: default value or a function/factory to generate the default value + :param expected_type: expected class (or tuple of classes) the value should be an instance of (unless it is None) + :param validator: optional validation callable """ - value = self.get(name, default) + if name in self: + value = self.get(name) + else: + value = default() if callable(default) else default if value is not None: - self._check_type(name=name, value=value, expected_type=expected_type) + self._check_value(name=name, value=value, expected_type=expected_type, validator=validator) + return value - def get_deep(self, *steps: str, expected_type: Optional[Union[type, Tuple[type, ...]]] = None) -> ArgumentValue: + def get_deep( + self, + *steps: str, + expected_type: Optional[Union[type, Tuple[type, ...]]] = None, + validator: Optional[Callable[[Any], bool]] = None, + ) -> ArgumentValue: """ Walk recursively through a dictionary to get to a value. Originally: `extract_deep` """ + # TODO: current implementation requires the argument. Allow it to be optional too? value = self for step in steps: keys = [step] if not isinstance(step, list) else step @@ -338,7 +378,7 @@ def get_deep(self, *steps: str, expected_type: Optional[Union[type, Tuple[type, else: raise ProcessParameterInvalidException(process=self.process_id, parameter=steps[0], reason=f"{step=}") - self._check_type(name=steps[0], value=value, expected_type=expected_type) + self._check_value(name=steps[0], value=value, expected_type=expected_type, validator=validator) return value def get_aliased(self, names: List[str]) -> ArgumentValue: @@ -371,7 +411,7 @@ def get_subset(self, names: List[str], aliases: Optional[Dict[str, str]] = None) kwargs[key] = self[alias] return kwargs - def get_enum(self, name: str, options: typing.Container[ArgumentValue]) -> ArgumentValue: + def get_enum(self, name: str, options: Collection[ArgumentValue]) -> ArgumentValue: """ Get argument by name and check if it belongs to given set of (enum) values. 
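The callable `default` of `get_optional` is evaluated lazily: the factory only runs when the argument is absent, which is what lets `aggregate_temporal` above postpone the (possibly failing) `metadata.temporal_dimension` lookup. A minimal sketch with hypothetical values:

from openeo_driver.processes import ProcessArgs

args = ProcessArgs({"intervals": [["2021-01-01", "2021-02-01"]]}, process_id="aggregate_temporal")
# "dimension" is absent, so the factory is called to produce the default;
# had the client provided "dimension", the lambda would never be invoked.
dimension = args.get_optional(
    "dimension",
    default=lambda: "t",  # hypothetical stand-in for data_cube.metadata.temporal_dimension.name
)
assert dimension == "t"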
@@ -385,3 +425,32 @@ def get_enum(self, name: str, options: typing.Container[ArgumentValue]) -> Argum reason=f"Invalid enum value {value!r}. Expected one of {options}.", ) return value + + @staticmethod + def validator_one_of(options: list, show_value: bool = True): + """Build a validator function that checks that the value is in the given list""" + + def validator(value): + if value not in options: + if show_value: + message = f"Must be one of {options!r} but got {value!r}." + else: + message = f"Must be one of {options!r}." + raise ValueError(message) + return True + + return validator + + @staticmethod + def validator_geojson_dict( + allowed_types: Optional[Collection[str]] = None, + ): + """Build a validator to verify that the provided structure looks like a GeoJSON-style object""" + + def validator(value): + issues = validate_geojson_basic(value=value, allowed_types=allowed_types, raise_exception=False) + if issues: + raise ValueError(f"Invalid GeoJSON: {', '.join(issues)}.") + return True + + return validator
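And a sketch of `validator_geojson_dict` guarding a GeoJSON argument, in the style of the new `load_geojson` process above (the inline data is hypothetical):

from openeo_driver.processes import ProcessArgs

args = ProcessArgs(
    {"data": {"type": "Point", "coordinates": [5.1, 51.2]}},
    process_id="load_geojson",
)
# Passes: a dict with a supported "type" and a "coordinates" field.
data = args.get_required(
    "data",
    validator=ProcessArgs.validator_geojson_dict(allowed_types=["Point", "Polygon"]),
)
# A non-dict value, a missing "type"/"coordinates" field, or a type outside
# `allowed_types` would raise ProcessParameterInvalidException with a reason
# like "Invalid GeoJSON: ...".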
diff --git a/openeo_driver/testing.py b/openeo_driver/testing.py index 95a90b99..24d18d66 100644 --- a/openeo_driver/testing.py +++ b/openeo_driver/testing.py @@ -6,28 +6,26 @@ import http.server import json import logging +import math import multiprocessing import re import urllib.request from pathlib import Path -from typing import Any, Callable, Dict, Optional, Pattern, Tuple, Union +from typing import Any, Callable, Collection, Dict, Optional, Pattern, Tuple, Union from unittest import mock +import openeo +import openeo.processes import pytest import shapely.geometry.base import shapely.wkt from flask import Response from flask.testing import FlaskClient +from openeo.capabilities import ComparableVersion from werkzeug.datastructures import Headers -import openeo -import openeo.processes -from openeo.capabilities import ComparableVersion from openeo_driver.users.auth import HttpAuthHandler -from openeo_driver.util.geometry import ( - as_geojson_feature, - as_geojson_feature_collection, -) +from openeo_driver.util.geometry import as_geojson_feature, as_geojson_feature_collection from openeo_driver.utils import generate_unique_id _log = logging.getLogger(__name__) @@ -494,6 +492,11 @@ def approxify(x: Any, rel: Optional = None, abs: Optional[float] = None) -> Any: raise ValueError(x) +class IsNan: + def __eq__(self, other): + return isinstance(other, float) and math.isnan(other) + + class ApproxGeometry: """Helper to compactly and approximately compare geometries.""" @@ -532,9 +535,57 @@ def to_geojson_feature_collection(self) -> dict: return approxify(result, rel=self.rel, abs=self.abs) -def caplog_with_custom_formatter( - caplog: pytest.LogCaptureFixture, format: Union[str, logging.Formatter] -): +class ApproxGeoJSONByBounds: + """ + pytest assert helper to build a matcher that checks whether a GeoJSON construct has the expected bounds + + Usage example: + + >>> geometry = {"type": "Polygon", "coordinates": [...]} + # Check that this geometry has bounds (1, 2, 6, 5) with some absolute tolerance + >>> assert geometry == ApproxGeoJSONByBounds(1, 2, 6, 5, abs=0.1) + """ + + def __init__( + self, + *args, + types: Collection[str] = ("Polygon", "MultiPolygon"), + rel: Optional[float] = None, + abs: Optional[float] = None, + ): + bounds = args[0] if len(args) == 1 else args + bounds = [float(b) for b in bounds] + assert len(bounds) == 4 + self.expected_bounds = bounds + self.rel = rel + self.abs = abs + self.expected_types = set(types) + self.actual_info = [] + + def __eq__(self, other): + try: + assert isinstance(other, dict), "Not a dict" + assert "type" in other, "No 'type' field" + assert other["type"] in self.expected_types, f"Wrong type {other['type']!r}" + assert "coordinates" in other, "No 'coordinates' field" + + actual_bounds = shapely.geometry.shape(other).bounds + matching = actual_bounds == pytest.approx(self.expected_bounds, rel=self.rel, abs=self.abs) + if not matching: + self.actual_info.append(f"expected bounds {self.expected_bounds} != actual bounds: {actual_bounds}") + return matching + except Exception as e: + self.actual_info.append(str(e)) + return False + + def __repr__(self): + msg = f"<{type(self).__name__} types={self.expected_types} bounds={self.expected_bounds} rel={self.rel}, abs={self.abs}>" + if self.actual_info: + msg += "\n" + "\n".join(f" # {i}" for i in self.actual_info) + return msg + + +def caplog_with_custom_formatter(caplog: pytest.LogCaptureFixture, format: Union[str, logging.Formatter]): """ Context manager to set a custom formatter on the caplog fixture. diff --git a/openeo_driver/util/geometry.py b/openeo_driver/util/geometry.py index ccee107f..167d3adb 100644 --- a/openeo_driver/util/geometry.py +++ b/openeo_driver/util/geometry.py @@ -3,7 +3,7 @@ import logging import re from pathlib import Path -from typing import Union, Tuple, Optional, List, Mapping, Sequence +from typing import Any, Collection, List, Mapping, Optional, Sequence, Tuple, Union import pyproj import shapely.geometry @@ -17,6 +17,67 @@ _log = logging.getLogger(__name__) +GEOJSON_GEOMETRY_TYPES_BASIC = frozenset( + {"Point", "MultiPoint", "LineString", "MultiLineString", "Polygon", "MultiPolygon"} +) +GEOJSON_GEOMETRY_TYPES_EXTENDED = GEOJSON_GEOMETRY_TYPES_BASIC | {"GeometryCollection"} + + +def validate_geojson_basic( + value: Any, + *, + allowed_types: Optional[Collection[str]] = None, + raise_exception: bool = True, + recurse: bool = True, +) -> List[str]: + """ + Validate whether the given value looks like a valid GeoJSON construct. + + Note: this is just for basic inspection to catch simple/obvious structural issues. + It is not intended for full-blown, deep GeoJSON validation or coordinate inspection. + + :param value: the value to inspect + :param allowed_types: optional collection of GeoJSON types to accept + :param raise_exception: whether to raise an exception when issues are found (default), + or to just return the list of issues + :param recurse: whether to recursively validate a Feature's geometry and a FeatureCollection's features + :returns: list of issues found (when `raise_exception` is off) + """ + try: + if not isinstance(value, dict): + raise ValueError(f"JSON object (mapping/dictionary) expected, but got {type(value).__name__}") + assert "type" in value, "No 'type' field" + geojson_type = value["type"] + assert isinstance(geojson_type, str), f"Invalid 'type' type: {type(geojson_type).__name__}" + if allowed_types and geojson_type not in allowed_types: + raise ValueError(f"Found type {geojson_type!r}, but expects one of {sorted(allowed_types)}") + if geojson_type in GEOJSON_GEOMETRY_TYPES_BASIC: + assert "coordinates" in value, f"No 'coordinates' field (type {geojson_type!r})" + elif geojson_type in {"GeometryCollection"}: + assert "geometries" in value, f"No 'geometries' field (type {geojson_type!r})" + # TODO: recursively check sub-geometries? 
+ elif geojson_type in {"Feature"}: + assert "geometry" in value, f"No 'geometry' field (type {geojson_type!r})" + assert "properties" in value, f"No 'properties' field (type {geojson_type!r})" + if recurse: + validate_geojson_basic( + value["geometry"], recurse=True, allowed_types=GEOJSON_GEOMETRY_TYPES_EXTENDED, raise_exception=True + ) + elif geojson_type in {"FeatureCollection"}: + assert "features" in value, f"No 'features' field (type {geojson_type!r})" + if recurse: + for f in value["features"]: + validate_geojson_basic(f, recurse=True, allowed_types=["Feature"], raise_exception=True) + else: + raise ValueError(f"Invalid type {geojson_type!r}") + + except Exception as e: + if raise_exception: + raise + return [str(e)] + return [] + + def validate_geojson_coordinates(geojson): def _validate_coordinates(coordinates, initial_run=True): max_evaluations = 20 diff --git a/tests/data/geojson/Feature02-MultiPolygon.json b/tests/data/geojson/Feature02-MultiPolygon.json index 7a4ca69c..a4bdc24a 100644 --- a/tests/data/geojson/Feature02-MultiPolygon.json +++ b/tests/data/geojson/Feature02-MultiPolygon.json @@ -1,57 +1,15 @@ { "type": "Feature", - "properties": { - "id": "f1", - "name": "first", - "pop": 1234 - }, + "properties": {"id": "f1", "name": "first", "pop": 1234}, "geometry": { "type": "MultiPolygon", "coordinates": [ [ - [ - [ - 5.1, - 51.22 - ], - [ - 5.11, - 51.23 - ], - [ - 5.14, - 51.21 - ], - [ - 5.12, - 51.2 - ], - [ - 5.1, - 51.22 - ] - ] + [[5.1, 51.22], [5.11, 51.23], [5.14, 51.21], [5.12, 51.2], [5.1, 51.22]] ], [ - [ - [ - 5.12, - 51.23 - ], - [ - 5.13, - 51.24 - ], - [ - 5.14, - 51.22 - ], - [ - 5.12, - 51.23 - ] - ] + [[5.12, 51.23], [5.13, 51.24], [5.14, 51.22], [5.12, 51.23]] ] ] } -} \ No newline at end of file +} diff --git a/tests/data/geojson/FeatureCollection01.json b/tests/data/geojson/FeatureCollection01.json index 247d03ee..dcf64044 100644 --- a/tests/data/geojson/FeatureCollection01.json +++ b/tests/data/geojson/FeatureCollection01.json @@ -8,26 +8,11 @@ "type": "Polygon", "coordinates": [ [ - [ - 4.47, - 51.1 - ], - [ - 4.52, - 51.1 - ], - [ - 4.52, - 51.15 - ], - [ - 4.47, - 51.15 - ], - [ - 4.47, - 51.1 - ] + [4.47, 51.1], + [4.52, 51.1], + [4.52, 51.15], + [4.47, 51.15], + [4.47, 51.1] ] ] } @@ -39,26 +24,11 @@ "type": "Polygon", "coordinates": [ [ - [ - 4.45, - 51.17 - ], - [ - 4.5, - 51.17 - ], - [ - 4.5, - 51.2 - ], - [ - 4.45, - 51.2 - ], - [ - 4.45, - 51.17 - ] + [4.45, 51.17], + [4.5, 51.17], + [4.5, 51.2], + [4.45, 51.2], + [4.45, 51.17] ] ] } diff --git a/tests/data/geojson/FeatureCollection03.json b/tests/data/geojson/FeatureCollection03.json index a40d6537..adbc6a77 100644 --- a/tests/data/geojson/FeatureCollection03.json +++ b/tests/data/geojson/FeatureCollection03.json @@ -1,175 +1,89 @@ { - "type": "FeatureCollection", - "name": "2016 narrow TEST Fields Belgium", - "crs": { - "type": "name", - "properties": { - "name": "urn:ogc:def:crs:EPSG::31370" - } + "type": "FeatureCollection", + "name": "2016 narrow TEST Fields Belgium", + "crs": { + "type": "name", + "properties": {"name": "urn:ogc:def:crs:EPSG::31370"} + }, + "features": [ + { + "type": "Feature", + "properties": { + "fid": 1734, + "CODE_OBJ": "000028025D981557", + "CMP_COD": "2016" + }, + "geometry": { + "type": "MultiPolygon", + "coordinates": [ + [ + [ + [236946.3, 171498.69], + [236967.97, 171515.54], + [236973.44, 171519.42], + [236980.3, 171524.28], + [236986.37, 171529.15], + [236995.55, 171536.08], + [237042.98, 171457.76], + [237022, 171445], + [237014.5, 171439.75], + [236996, 
171428.75], + [236944.82, 171497.48], + [236946.3, 171498.69] + ] + ] + ] + } }, - "features": [ - { - "type": "Feature", - "properties": { - "fid": 1734, - "CODE_OBJ": "000028025D981557", - "CMP_COD": "2016" - }, - "geometry": { - "type": "MultiPolygon", - "coordinates": [ - [ - [ - [ - 236946.3, - 171498.69 - ], - [ - 236967.97, - 171515.54 - ], - [ - 236973.44, - 171519.42 - ], - [ - 236980.3, - 171524.28 - ], - [ - 236986.37, - 171529.15 - ], - [ - 236995.55, - 171536.08 - ], - [ - 237042.98, - 171457.76 - ], - [ - 237022.0, - 171445.0 - ], - [ - 237014.5, - 171439.75 - ], - [ - 236996.0, - 171428.75 - ], - [ - 236944.82, - 171497.48 - ], - [ - 236946.3, - 171498.69 - ] - ] - ] - ] - } - }, - { - "type": "Feature", - "properties": { - "fid": 33591, - "CODE_OBJ": "000028025EF051D8", - "CMP_COD": "2016" - }, - "geometry": { - "type": "MultiPolygon", - "coordinates": [ - [ - [ - [ - 236497.33, - 171983.0 - ], - [ - 236738.0, - 172064.66 - ], - [ - 236751.5, - 172027.5 - ], - [ - 236507.85, - 171945.94 - ], - [ - 236506.04, - 171952.44 - ], - [ - 236503.97, - 171959.87 - ], - [ - 236500.03, - 171973.26 - ], - [ - 236498.43, - 171978.72 - ], - [ - 236497.33, - 171983.0 - ] - ] - ] - ] - } - }, - { - "type": "Feature", - "properties": { - "fid": 33594, - "CODE_OBJ": "0000280260D5544B", - "CMP_COD": "2016" - }, - "geometry": { - "type": "MultiPolygon", - "coordinates": [ - [ - [ - [ - 236676.5, - 171730.75 - ], - [ - 236835.5, - 171784.75 - ], - [ - 236837.5, - 171783.0 - ], - [ - 236866.46, - 171750.58 - ], - [ - 236705.6, - 171695.95 - ], - [ - 236694.61, - 171710.12 - ], - [ - 236676.5, - 171730.75 - ] - ] - ] - ] - } - } - ] + { + "type": "Feature", + "properties": { + "fid": 33591, + "CODE_OBJ": "000028025EF051D8", + "CMP_COD": "2016" + }, + "geometry": { + "type": "MultiPolygon", + "coordinates": [ + [ + [ + [236497.33, 171983], + [236738, 172064.66], + [236751.5, 172027.5], + [236507.85, 171945.94], + [236506.04, 171952.44], + [236503.97, 171959.87], + [236500.03, 171973.26], + [236498.43, 171978.72], + [236497.33, 171983] + ] + ] + ] + } + }, + { + "type": "Feature", + "properties": { + "fid": 33594, + "CODE_OBJ": "0000280260D5544B", + "CMP_COD": "2016" + }, + "geometry": { + "type": "MultiPolygon", + "coordinates": [ + [ + [ + [236676.5, 171730.75], + [236835.5, 171784.75], + [236837.5, 171783], + [236866.46, 171750.58], + [236705.6, 171695.95], + [236694.61, 171710.12], + [236676.5, 171730.75] + ] + ] + ] + } + } + ] } diff --git a/tests/data/geojson/FeatureCollection04.json b/tests/data/geojson/FeatureCollection04.json index 45890eeb..326ede8b 100644 --- a/tests/data/geojson/FeatureCollection04.json +++ b/tests/data/geojson/FeatureCollection04.json @@ -1,297 +1,106 @@ { - "type": "FeatureCollection", - "name": "ref_fields_harvest_detector", - "crs": { - "type": "name", - "properties": { - "name": "urn:ogc:def:crs:OGC:1.3:CRS84" - } + "type": "FeatureCollection", + "name": "ref_fields_harvest_detector", + "crs": { + "type": "name", + "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"} + }, + "features": [ + { + "type": "Feature", + "properties": {"CODE_OBJ": "00002808654583BD"}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [5.015410659755014, 51.1749941804753], + [5.016929803651372, 51.17561722437745], + [5.017476812379938, 51.174900410668265], + [5.01695370426541, 51.17469294559212], + [5.017138866976332, 51.17450657733387], + [5.016304886575674, 51.17418539995633], + [5.015410659755014, 51.1749941804753] + ] + ] + } }, - "features": [ - { - "type": "Feature", 
- "properties": { - "CODE_OBJ": "00002808654583BD" - }, - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - 5.015410659755014, - 51.174994180475302 - ], - [ - 5.016929803651372, - 51.175617224377447 - ], - [ - 5.017476812379938, - 51.174900410668265 - ], - [ - 5.01695370426541, - 51.17469294559212 - ], - [ - 5.017138866976332, - 51.174506577333872 - ], - [ - 5.016304886575674, - 51.174185399956329 - ], - [ - 5.015410659755014, - 51.174994180475302 - ] - ] - ] - } - }, - { - "type": "Feature", - "properties": { - "CODE_OBJ": "0000280862FE3856" - }, - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - 5.014235318126361, - 51.17550731951949 - ], - [ - 5.015387559842375, - 51.176978513036197 - ], - [ - 5.01563666308942, - 51.177039710262655 - ], - [ - 5.015642689990128, - 51.177041205331285 - ], - [ - 5.015647308982032, - 51.17703398890518 - ], - [ - 5.016280819993641, - 51.176365067234684 - ], - [ - 5.016297330095693, - 51.176369740738025 - ], - [ - 5.016939007396039, - 51.175620949149859 - ], - [ - 5.015410659755014, - 51.174994180475302 - ], - [ - 5.015389752047644, - 51.175012632386427 - ], - [ - 5.015368453271792, - 51.175013198553714 - ], - [ - 5.015281256350187, - 51.175015474153355 - ], - [ - 5.014836281127843, - 51.174832111262226 - ], - [ - 5.014721082331196, - 51.174867169432176 - ], - [ - 5.014646104849949, - 51.174905063541786 - ], - [ - 5.014554240722759, - 51.175055590871537 - ], - [ - 5.014447256108486, - 51.175250156507772 - ], - [ - 5.01434658113913, - 51.17538410243003 - ], - [ - 5.014267866790361, - 51.175472354478764 - ], - [ - 5.014235318126361, - 51.17550731951949 - ] - ], - [ - [ - 5.01441223307144, - 51.175343922443311 - ], - [ - 5.01448242756633, - 51.175342639426802 - ], - [ - 5.014486952993698, - 51.175390615300394 - ], - [ - 5.014459500579303, - 51.175390855424041 - ], - [ - 5.014414175933029, - 51.175391283229324 - ], - [ - 5.01441223307144, - 51.175343922443311 - ] - ] - ] - } - }, - { - "type": "Feature", - "properties": { - "CODE_OBJ": "000028084F9B8C1A" - }, - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - 5.01486654560115, - 51.174828439873814 - ], - [ - 5.015368453271792, - 51.175013198553714 - ], - [ - 5.016057124268833, - 51.174394852250636 - ], - [ - 5.01668139945403, - 51.173866924023201 - ], - [ - 5.016596524878367, - 51.173830355843783 - ], - [ - 5.016596380646481, - 51.173830266746791 - ], - [ - 5.016595949199724, - 51.173830089337905 - ], - [ - 5.016595374769976, - 51.173829912714119 - ], - [ - 5.01658803767789, - 51.173826716998846 - ], - [ - 5.016152163744225, - 51.173670185829295 - ], - [ - 5.016154666291308, - 51.17366504842979 - ], - [ - 5.016108966987549, - 51.173648669723825 - ], - [ - 5.016010506352376, - 51.173766425037606 - ], - [ - 5.015789630310915, - 51.174048179766764 - ], - [ - 5.015695615938525, - 51.174177326297809 - ], - [ - 5.015519458801282, - 51.174126695821251 - ], - [ - 5.015360272605526, - 51.174083073302249 - ], - [ - 5.015341182419055, - 51.174119133475273 - ], - [ - 5.015289229869719, - 51.174239599608761 - ], - [ - 5.015200888401407, - 51.17444799663253 - ], - [ - 5.015117916045672, - 51.174651779824778 - ], - [ - 5.015100541267102, - 51.17468783055169 - ], - [ - 5.015083073011596, - 51.174717140122468 - ], - [ - 5.015044219156725, - 51.174751061274257 - ], - [ - 5.015017761126847, - 51.174771431188731 - ], - [ - 5.014973380276639, - 51.174798640923839 - ], - [ - 5.014943285250912, - 51.174814536297099 - ], - [ - 5.01486654560115, - 51.174828439873814 - ] - ] - ] - } - } - ] + { 
+ "type": "Feature", + "properties": {"CODE_OBJ": "0000280862FE3856"}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [5.014235318126361, 51.17550731951949], + [5.015387559842375, 51.1769785130362], + [5.01563666308942, 51.177039710262655], + [5.015642689990128, 51.177041205331285], + [5.015647308982032, 51.17703398890518], + [5.016280819993641, 51.176365067234684], + [5.016297330095693, 51.176369740738025], + [5.016939007396039, 51.17562094914986], + [5.015410659755014, 51.1749941804753], + [5.015389752047644, 51.17501263238643], + [5.015368453271792, 51.175013198553714], + [5.015281256350187, 51.175015474153355], + [5.014836281127843, 51.174832111262226], + [5.014721082331196, 51.174867169432176], + [5.014646104849949, 51.174905063541786], + [5.014554240722759, 51.17505559087154], + [5.014447256108486, 51.17525015650777], + [5.01434658113913, 51.17538410243003], + [5.014267866790361, 51.175472354478764], + [5.014235318126361, 51.17550731951949] + ], + [ + [5.01441223307144, 51.17534392244331], + [5.01448242756633, 51.1753426394268], + [5.014486952993698, 51.175390615300394], + [5.014459500579303, 51.17539085542404], + [5.014414175933029, 51.175391283229324], + [5.01441223307144, 51.17534392244331] + ] + ] + } + }, + { + "type": "Feature", + "properties": {"CODE_OBJ": "000028084F9B8C1A"}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [5.01486654560115, 51.174828439873814], + [5.015368453271792, 51.175013198553714], + [5.016057124268833, 51.174394852250636], + [5.01668139945403, 51.1738669240232], + [5.016596524878367, 51.17383035584378], + [5.016596380646481, 51.17383026674679], + [5.016595949199724, 51.173830089337905], + [5.016595374769976, 51.17382991271412], + [5.01658803767789, 51.173826716998846], + [5.016152163744225, 51.173670185829295], + [5.016154666291308, 51.17366504842979], + [5.016108966987549, 51.173648669723825], + [5.016010506352376, 51.173766425037606], + [5.015789630310915, 51.174048179766764], + [5.015695615938525, 51.17417732629781], + [5.015519458801282, 51.17412669582125], + [5.015360272605526, 51.17408307330225], + [5.015341182419055, 51.17411913347527], + [5.015289229869719, 51.17423959960876], + [5.015200888401407, 51.17444799663253], + [5.015117916045672, 51.17465177982478], + [5.015100541267102, 51.17468783055169], + [5.015083073011596, 51.17471714012247], + [5.015044219156725, 51.17475106127426], + [5.015017761126847, 51.17477143118873], + [5.014973380276639, 51.17479864092384], + [5.014943285250912, 51.1748145362971], + [5.01486654560115, 51.174828439873814] + ] + ] + } + } + ] } diff --git a/tests/data/geojson/FeatureCollection06.json b/tests/data/geojson/FeatureCollection06.json index 0e9d8a6c..47be5395 100644 --- a/tests/data/geojson/FeatureCollection06.json +++ b/tests/data/geojson/FeatureCollection06.json @@ -1,7 +1,6 @@ { "type": "FeatureCollection", "features": [ - { "type": "Feature", "properties": {}, @@ -9,29 +8,14 @@ "type": "Polygon", "coordinates": [ [ - [ - 3.67466926574707, - 51.037804967049205 - ], - [ - 3.70737075805664, - 51.037804967049205 - ], - [ - 3.70737075805664, - 51.05793176907366 - ], - [ - 3.67466926574707, - 51.05793176907366 - ], - [ - 3.67466926574707, - 51.037804967049205 - ] + [3.67466926574707, 51.037804967049205], + [3.70737075805664, 51.037804967049205], + [3.70737075805664, 51.05793176907366], + [3.67466926574707, 51.05793176907366], + [3.67466926574707, 51.037804967049205] ] ] } } ] -} \ No newline at end of file +} diff --git a/tests/data/geojson/FeatureCollection07.json 
b/tests/data/geojson/FeatureCollection07.json index aa7ed9f6..4ab35f95 100644 --- a/tests/data/geojson/FeatureCollection07.json +++ b/tests/data/geojson/FeatureCollection07.json @@ -8,26 +8,11 @@ "type": "Polygon", "coordinates": [ [ - [ - 3.77466926574707, - 51.037804967049205 - ], - [ - 3.80737075805664, - 51.037804967049205 - ], - [ - 3.80737075805664, - 51.05793176907366 - ], - [ - 3.77466926574707, - 51.05793176907366 - ], - [ - 3.77466926574707, - 51.037804967049205 - ] + [3.77466926574707, 51.037804967049205], + [3.80737075805664, 51.037804967049205], + [3.80737075805664, 51.05793176907366], + [3.77466926574707, 51.05793176907366], + [3.77466926574707, 51.037804967049205] ] ] } @@ -39,26 +24,11 @@ "type": "Polygon", "coordinates": [ [ - [ - 3.67466926574707, - 51.037804967049205 - ], - [ - 3.70737075805664, - 51.037804967049205 - ], - [ - 3.70737075805664, - 51.05793176907366 - ], - [ - 3.67466926574707, - 51.05793176907366 - ], - [ - 3.67466926574707, - 51.037804967049205 - ] + [3.67466926574707, 51.037804967049205], + [3.70737075805664, 51.037804967049205], + [3.70737075805664, 51.05793176907366], + [3.67466926574707, 51.05793176907366], + [3.67466926574707, 51.037804967049205] ] ] } diff --git a/tests/data/geojson/FeatureCollection08.json b/tests/data/geojson/FeatureCollection08.json index 4e46c303..4274dbd7 100644 --- a/tests/data/geojson/FeatureCollection08.json +++ b/tests/data/geojson/FeatureCollection08.json @@ -1,14 +1,25 @@ { "type": "FeatureCollection", "name": "FeatureCollections08", - "crs": {"type": "name", "properties": {"name": "urn:ogc:def:crs:EPSG::32631"}}, + "crs": { + "type": "name", + "properties": {"name": "urn:ogc:def:crs:EPSG::32631"} + }, "features": [ { "type": "Feature", "properties": {}, "geometry": { "type": "Polygon", - "coordinates": [[[547302.56209084216971, 5654245.511211921460927], [549595.312723760260269, 5654267.013290034607053], [549573.822855299571529, 5656505.172573580406606], [547282.065756102674641, 5656483.673708858899772], [547302.56209084216971, 5654245.511211921460927]]] + "coordinates": [ + [ + [547302.5620908422, 5654245.511211921], + [549595.3127237603, 5654267.013290035], + [549573.8228552996, 5656505.17257358], + [547282.0657561027, 5656483.673708859], + [547302.5620908422, 5654245.511211921] + ] + ] } } ] diff --git a/tests/data/geojson/FeatureCollection09.json b/tests/data/geojson/FeatureCollection09.json index 1f90fb9d..dd784312 100644 --- a/tests/data/geojson/FeatureCollection09.json +++ b/tests/data/geojson/FeatureCollection09.json @@ -1,8 +1,26 @@ { -"type": "FeatureCollection", -"name": "FeatureCollections09", -"crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:EPSG::32634" } }, -"features": [ -{ "type": "Feature", "properties": { }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 546500.734324475633912, 8998496.532106472179294 ], [ 547068.869181835558265, 8998522.909413514658809 ], [ 546964.038438386865892, 9000767.116470934823155 ], [ 546397.169007838820107, 9000740.796458588913083 ], [ 546500.734324475633912, 8998496.532106472179294 ] ] ] } } -] + "type": "FeatureCollection", + "name": "FeatureCollections09", + "crs": { + "type": "name", + "properties": {"name": "urn:ogc:def:crs:EPSG::32634"} + }, + "features": [ + { + "type": "Feature", + "properties": {}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [546500.7343244756, 8998496.532106472], + [547068.8691818356, 8998522.909413515], + [546964.0384383869, 9000767.116470935], + [546397.1690078388, 9000740.796458589], + [546500.7343244756, 
8998496.532106472] + ] + ] + } + } + ] } diff --git a/tests/data/geojson/FeatureCollection10.json b/tests/data/geojson/FeatureCollection10.json new file mode 100644 index 00000000..b4de44e0 --- /dev/null +++ b/tests/data/geojson/FeatureCollection10.json @@ -0,0 +1,37 @@ +{ + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "properties": {"id": "first", "pop": 123}, + "geometry": { + "type": "Polygon", + "coordinates": [[[1, 1], [3, 1], [2, 3], [1, 1]]] + } + }, + { + "type": "Feature", + "properties": {"id": "second", "pop": 456}, + "geometry": { + "type": "Polygon", + "coordinates": [[[4, 2], [5, 4], [3, 4], [4, 2]]] + } + }, + { + "type": "Feature", + "properties": {"id": "third", "pop": 789}, + "geometry": { + "type": "Polygon", + "coordinates": [[[6, 2], [10, 2], [12, 6], [8, 6], [6, 2]]] + } + }, + { + "type": "Feature", + "properties": {"id": "fourth", "pop": 101112}, + "geometry": { + "type": "Polygon", + "coordinates": [[[-2, 7], [5, 7], [5, 14], [-2, 14], [-2, 7]]] + } + } + ] +} diff --git a/tests/data/geojson/GeometryCollection01.json b/tests/data/geojson/GeometryCollection01.json index 2ad86c65..a69afe92 100644 --- a/tests/data/geojson/GeometryCollection01.json +++ b/tests/data/geojson/GeometryCollection01.json @@ -4,54 +4,18 @@ { "type": "Polygon", "coordinates": [ - [ - [ - 5.05, - 51.25 - ], - [ - 5.1, - 51.25 - ], - [ - 5.1, - 51.3 - ], - [ - 5.05, - 51.3 - ], - [ - 5.05, - 51.25 - ] - ] + [[5.05, 51.25], [5.1, 51.25], [5.1, 51.3], [5.05, 51.3], [5.05, 51.25]] ] }, { "type": "Polygon", "coordinates": [ [ - [ - 5.12, - 51.21 - ], - [ - 5.15, - 51.21 - ], - [ - 5.15, - 51.26 - ], - [ - 5.12, - 51.26 - ], - [ - 5.12, - 51.21 - ] + [5.12, 51.21], + [5.15, 51.21], + [5.15, 51.26], + [5.12, 51.26], + [5.12, 51.21] ] ] } diff --git a/tests/data/geojson/MultiPolygon01.json b/tests/data/geojson/MultiPolygon01.json index 63c42aea..ed5639c1 100644 --- a/tests/data/geojson/MultiPolygon01.json +++ b/tests/data/geojson/MultiPolygon01.json @@ -1,49 +1,7 @@ { "type": "MultiPolygon", "coordinates": [ - [ - [ - [ - 5.1, - 51.22 - ], - [ - 5.11, - 51.23 - ], - [ - 5.14, - 51.21 - ], - [ - 5.12, - 51.2 - ], - [ - 5.1, - 51.22 - ] - ] - ], - [ - [ - [ - 5.12, - 51.23 - ], - [ - 5.13, - 51.24 - ], - [ - 5.14, - 51.22 - ], - [ - 5.12, - 51.23 - ] - ] - ] + [[[5.1, 51.22], [5.11, 51.23], [5.14, 51.21], [5.12, 51.2], [5.1, 51.22]]], + [[[5.12, 51.23], [5.13, 51.24], [5.14, 51.22], [5.12, 51.23]]] ] -} \ No newline at end of file +} diff --git a/tests/data/geojson/Polygon01.json b/tests/data/geojson/Polygon01.json index cb69c683..34855928 100644 --- a/tests/data/geojson/Polygon01.json +++ b/tests/data/geojson/Polygon01.json @@ -1,27 +1,6 @@ { "type": "Polygon", "coordinates": [ - [ - [ - 5.1, - 51.22 - ], - [ - 5.11, - 51.23 - ], - [ - 5.14, - 51.21 - ], - [ - 5.12, - 51.2 - ], - [ - 5.1, - 51.22 - ] - ] + [[5.1, 51.22], [5.11, 51.23], [5.14, 51.21], [5.12, 51.2], [5.1, 51.22]] ] -} \ No newline at end of file +} diff --git a/tests/data/geojson/mol.json b/tests/data/geojson/mol.json index ad52bba7..3cae3d6b 100644 --- a/tests/data/geojson/mol.json +++ b/tests/data/geojson/mol.json @@ -1,8 +1,42 @@ { -"type": "FeatureCollection", -"crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:OGC:1.3:CRS84" } }, -"features": [ -{ "type": "Feature", "properties": { "id": 23, "name": "Mol", "class": 4 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 5.099185309788365, 51.19128726234392 ], [ 5.11418113136547, 51.196949475430756 ], [ 5.126371845690255, 51.189132203507086 
], [ 5.117041764554222, 51.181488146097188 ], [ 5.097377985552487, 51.184446883422112 ], [ 5.099185309788365, 51.19128726234392 ] ] ] } }, -{ "type": "Feature", "properties": { "id": 58, "name": "TAP", "class": 5 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 5.07838044255931, 51.21833134619353 ], [ 5.079503025677416, 51.218789086283387 ], [ 5.080438992142096, 51.218135525244996 ], [ 5.079310900631511, 51.217555407500946 ], [ 5.07838044255931, 51.21833134619353 ] ] ] } } -] + "type": "FeatureCollection", + "crs": { + "type": "name", + "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"} + }, + "features": [ + { + "type": "Feature", + "properties": {"id": 23, "name": "Mol", "class": 4}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [5.099185309788365, 51.19128726234392], + [5.11418113136547, 51.196949475430756], + [5.126371845690255, 51.189132203507086], + [5.117041764554222, 51.18148814609719], + [5.097377985552487, 51.18444688342211], + [5.099185309788365, 51.19128726234392] + ] + ] + } + }, + { + "type": "Feature", + "properties": {"id": 58, "name": "TAP", "class": 5}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [5.07838044255931, 51.21833134619353], + [5.079503025677416, 51.21878908628339], + [5.080438992142096, 51.218135525244996], + [5.079310900631511, 51.217555407500946], + [5.07838044255931, 51.21833134619353] + ] + ] + } + } + ] } diff --git a/tests/data/geojson/test_geojson_crs_from_epsg.geojson b/tests/data/geojson/test_geojson_crs_from_epsg.geojson index af9f18b7..e83ef59c 100644 --- a/tests/data/geojson/test_geojson_crs_from_epsg.geojson +++ b/tests/data/geojson/test_geojson_crs_from_epsg.geojson @@ -1,10 +1,39 @@ { -"type": "FeatureCollection", -"name": "Fields_testing", -"crs": { "type": "name", "properties": { "name": "EPSG:4326" } }, -"features": [ -{ "type": "Feature", "properties": { "id": "LT - Bo", "2016": 0, "2018": 1, "2020": 1 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 4.829697291485939, 50.862054355087274 ], [ 4.831810433973708, 50.862769340731347 ], [ 4.832504715080205, 50.862051860576408 ], [ 4.830474041336835, 50.861345984658136 ], [ 4.829697291485939, 50.862054355087274 ] ] ] } }, -{ "type": "Feature", "properties": { "id": "GFT", "2016": 1, "2018": 1, "2020": 1 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 4.798718572026502, 50.828548571482415 ], [ 4.800991986712735, 50.827265262059683 ], [ 4.796242473228493, 50.823638376357145 ], [ 4.794060093259593, 50.824541266457985 ], [ 4.798718572026502, 50.828548571482415 ] ] ] } } -] + "type": "FeatureCollection", + "name": "Fields_testing", + "crs": {"type": "name", "properties": {"name": "EPSG:4326"}}, + "features": [ + { + "type": "Feature", + "properties": {"2016": 0, "2018": 1, "2020": 1, "id": "LT - Bo"}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [4.829697291485939, 50.862054355087274], + [4.831810433973708, 50.86276934073135], + [4.832504715080205, 50.86205186057641], + [4.830474041336835, 50.861345984658136], + [4.829697291485939, 50.862054355087274] + ] + ] + } + }, + { + "type": "Feature", + "properties": {"2016": 1, "2018": 1, "2020": 1, "id": "GFT"}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [4.798718572026502, 50.828548571482415], + [4.800991986712735, 50.82726526205968], + [4.796242473228493, 50.823638376357145], + [4.794060093259593, 50.824541266457985], + [4.798718572026502, 50.828548571482415] + ] + ] + } + } + ] } - diff --git a/tests/data/geojson/test_geojson_crs_from_ogc_urn.geojson 
b/tests/data/geojson/test_geojson_crs_from_ogc_urn.geojson index 5f032071..cf733e83 100644 --- a/tests/data/geojson/test_geojson_crs_from_ogc_urn.geojson +++ b/tests/data/geojson/test_geojson_crs_from_ogc_urn.geojson @@ -1,10 +1,42 @@ { -"type": "FeatureCollection", -"name": "Fields_testing", -"crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:OGC:1.3:CRS84" } }, -"features": [ -{ "type": "Feature", "properties": { "id": "LT - Bo", "2016": 0, "2018": 1, "2020": 1 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 4.829697291485939, 50.862054355087274 ], [ 4.831810433973708, 50.862769340731347 ], [ 4.832504715080205, 50.862051860576408 ], [ 4.830474041336835, 50.861345984658136 ], [ 4.829697291485939, 50.862054355087274 ] ] ] } }, -{ "type": "Feature", "properties": { "id": "GFT", "2016": 1, "2018": 1, "2020": 1 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 4.798718572026502, 50.828548571482415 ], [ 4.800991986712735, 50.827265262059683 ], [ 4.796242473228493, 50.823638376357145 ], [ 4.794060093259593, 50.824541266457985 ], [ 4.798718572026502, 50.828548571482415 ] ] ] } } -] + "type": "FeatureCollection", + "name": "Fields_testing", + "crs": { + "type": "name", + "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"} + }, + "features": [ + { + "type": "Feature", + "properties": {"2016": 0, "2018": 1, "2020": 1, "id": "LT - Bo"}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [4.829697291485939, 50.862054355087274], + [4.831810433973708, 50.86276934073135], + [4.832504715080205, 50.86205186057641], + [4.830474041336835, 50.861345984658136], + [4.829697291485939, 50.862054355087274] + ] + ] + } + }, + { + "type": "Feature", + "properties": {"2016": 1, "2018": 1, "2020": 1, "id": "GFT"}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [4.798718572026502, 50.828548571482415], + [4.800991986712735, 50.82726526205968], + [4.796242473228493, 50.823638376357145], + [4.794060093259593, 50.824541266457985], + [4.798718572026502, 50.828548571482415] + ] + ] + } + } + ] } - diff --git a/tests/data/geojson/test_geojson_crs_unspecified.geojson b/tests/data/geojson/test_geojson_crs_unspecified.geojson index 6fc6fb1b..1f6b7075 100644 --- a/tests/data/geojson/test_geojson_crs_unspecified.geojson +++ b/tests/data/geojson/test_geojson_crs_unspecified.geojson @@ -1,9 +1,38 @@ { -"type": "FeatureCollection", -"name": "Fields_testing", -"features": [ -{ "type": "Feature", "properties": { "id": "LT - Bo", "2016": 0, "2018": 1, "2020": 1 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 4.829697291485939, 50.862054355087274 ], [ 4.831810433973708, 50.862769340731347 ], [ 4.832504715080205, 50.862051860576408 ], [ 4.830474041336835, 50.861345984658136 ], [ 4.829697291485939, 50.862054355087274 ] ] ] } }, -{ "type": "Feature", "properties": { "id": "GFT", "2016": 1, "2018": 1, "2020": 1 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 4.798718572026502, 50.828548571482415 ], [ 4.800991986712735, 50.827265262059683 ], [ 4.796242473228493, 50.823638376357145 ], [ 4.794060093259593, 50.824541266457985 ], [ 4.798718572026502, 50.828548571482415 ] ] ] } } -] + "type": "FeatureCollection", + "name": "Fields_testing", + "features": [ + { + "type": "Feature", + "properties": {"2016": 0, "2018": 1, "2020": 1, "id": "LT - Bo"}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [4.829697291485939, 50.862054355087274], + [4.831810433973708, 50.86276934073135], + [4.832504715080205, 50.86205186057641], + [4.830474041336835, 
50.861345984658136], + [4.829697291485939, 50.862054355087274] + ] + ] + } + }, + { + "type": "Feature", + "properties": {"2016": 1, "2018": 1, "2020": 1, "id": "GFT"}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [4.798718572026502, 50.828548571482415], + [4.800991986712735, 50.82726526205968], + [4.796242473228493, 50.823638376357145], + [4.794060093259593, 50.824541266457985], + [4.798718572026502, 50.828548571482415] + ] + ] + } + } + ] } - diff --git a/tests/data/geojson/test_geojson_invalid_coordinates.geojson b/tests/data/geojson/test_geojson_invalid_coordinates.geojson index ab42c886..a857b5fe 100644 --- a/tests/data/geojson/test_geojson_invalid_coordinates.geojson +++ b/tests/data/geojson/test_geojson_invalid_coordinates.geojson @@ -1,8 +1,38 @@ { -"type": "FeatureCollection", -"name": "Fields_testing", -"features": [ -{ "type": "Feature", "properties": { "id": "LT - Bo", "2016": 0, "2018": 1, "2020": 1 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 4.829697291485939, 50.862054355087274 ], [ 4.831810433973708, 50.862769340731347 ], [ 4.832504715080205, 50.862051860576408 ], [ -361.0, 50.861345984658136 ], [ 4.829697291485939, 50.862054355087274 ] ] ] } }, -{ "type": "Feature", "properties": { "id": "GFT", "2016": 1, "2018": 1, "2020": 1 }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 4.798718572026502, 50.828548571482415 ], [ 4.800991986712735, 50.827265262059683 ], [ 4.796242473228493, 50.823638376357145 ], [ 4.794060093259593, 50.824541266457985 ], [ 4.798718572026502, 50.828548571482415 ] ] ] } } -] + "type": "FeatureCollection", + "name": "Fields_testing", + "features": [ + { + "type": "Feature", + "properties": {"2016": 0, "2018": 1, "2020": 1, "id": "LT - Bo"}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [4.829697291485939, 50.862054355087274], + [4.831810433973708, 50.86276934073135], + [4.832504715080205, 50.86205186057641], + [-361.0, 50.861345984658136], + [4.829697291485939, 50.862054355087274] + ] + ] + } + }, + { + "type": "Feature", + "properties": {"2016": 1, "2018": 1, "2020": 1, "id": "GFT"}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [4.798718572026502, 50.828548571482415], + [4.800991986712735, 50.82726526205968], + [4.796242473228493, 50.823638376357145], + [4.794060093259593, 50.824541266457985], + [4.798718572026502, 50.828548571482415] + ] + ] + } + } + ] } diff --git a/tests/data/geojson/thaipolys_ad.geojson b/tests/data/geojson/thaipolys_ad.geojson index 55ea7375..5cbaae25 100644 --- a/tests/data/geojson/thaipolys_ad.geojson +++ b/tests/data/geojson/thaipolys_ad.geojson @@ -1,249 +1,106 @@ { - "type": "FeatureCollection", - "name": "thaipolys_ad", - "crs": + "type": "FeatureCollection", + "name": "thaipolys_ad", + "crs": { + "type": "name", + "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"} + }, + "features": [ { - "type": "name", - "properties": - { - "name": "urn:ogc:def:crs:OGC:1.3:CRS84" - } + "type": "Feature", + "properties": {"2016": 0, "2018": 1, "2020": 1, "id": "LT - Bo"}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [98.77100705369968, 9.252203719672897], + [98.77100761749868, 9.251332023435165], + [98.77013019392238, 9.25133146227084], + [98.77012962796312, 9.252203158454805], + [98.77100705369968, 9.252203719672897] + ] + ] + } }, - "features": - [ - { - "type": "Feature", - "properties": - { - "id": "LT - Bo", - "2016": 0, - "2018": 1, - "2020": 1 - }, - "geometry": - { - "type": "Polygon", - "coordinates": - [ - [ - [ - 98.77100705369968, - 
9.252203719672897 - ], - [ - 98.77100761749868, - 9.251332023435165 - ], - [ - 98.77013019392238, - 9.25133146227084 - ], - [ - 98.77012962796312, - 9.252203158454805 - ], - [ - 98.77100705369968, - 9.252203719672897 - ] - ] - ] - } - }, - { - "type": "Feature", - "properties": - { - "id": "yuyu", - "2016": 0, - "2018": 1, - "2020": 1 - }, - "geometry": - { - "type": "Polygon", - "coordinates": - [ - [ - [ - 100.22103486159209, - 19.937099522973114 - ], - [ - 100.22102817050522, - 19.93622889139501 - ], - [ - 100.22010752144534, - 19.93623521641539 - ], - [ - 100.22011420749023, - 19.93710584829251 - ], - [ - 100.22103486159209, - 19.937099522973114 - ] - ] - ] - } - }, - { - "type": "Feature", - "properties": - { - "id": "holahola", - "2016": 0, - "2018": 1, - "2020": 1 - }, - "geometry": - { - "type": "Polygon", - "coordinates": - [ - [ - [ - 98.41320559285191, - 9.130152677609567 - ], - [ - 98.4132064396922, - 9.129634801204366 - ], - [ - 98.41268534212357, - 9.12963395925008 - ], - [ - 98.41268449453133, - 9.130151835606735 - ], - [ - 98.41320559285191, - 9.130152677609567 - ] - ] - ] - } - }, - { - "type": "Feature", - "properties": - { - "id": "muu", - "2016": 0, - "2018": 1, - "2020": 1 - }, - "geometry": - { - "type": "Polygon", - "coordinates": - [ - [ - [ - 99.15161525072554, - 17.196952989011802 - ], - [ - 99.15161462797101, - 17.196187888971142 - ], - [ - 99.1508186217712, - 17.196188485959663 - ], - [ - 99.15081924125616, - 17.196953586028492 - ], - [ - 99.15161525072554, - 17.196952989011802 - ] - ] - ] - } - }, - { - "type": "Feature", - "properties": - { - "id": "meh", - "2016": 0, - "2018": 1, - "2020": 1 - }, - "geometry": - { - "type": "Polygon", - "coordinates": - [ - [ - [ - 103.24970664797146, - 16.149399463016426 - ], - [ - 103.24969462056812, - 16.14883702799096 - ], - [ - 103.24911269980473, - 16.14884865163908 - ], - [ - 103.24912472557321, - 16.149411087090822 - ], - [ - 103.24970664797146, - 16.149399463016426 - ] - ] - ] - } - }, - { - "type": "Feature", - "properties": - { - "id": "yeet", - "2016": 0, - "2018": 1, - "2020": 1 - }, - "geometry": - { - "type": "Polygon", - "coordinates": - [ - [ - [ - 101.54544714825106, - 13.083921911120795 - ], - [ - 101.5454421135335, - 13.083431498178864 - ], - [ - 101.54494182969499, - 13.083436432975173 - ], - [ - 101.54494686342564, - 13.083926846108417 - ], - [ - 101.54544714825106, - 13.083921911120795 - ] - ] - ] - } - } - ] -} \ No newline at end of file + { + "type": "Feature", + "properties": {"2016": 0, "2018": 1, "2020": 1, "id": "yuyu"}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [100.22103486159209, 19.937099522973114], + [100.22102817050522, 19.93622889139501], + [100.22010752144534, 19.93623521641539], + [100.22011420749023, 19.93710584829251], + [100.22103486159209, 19.937099522973114] + ] + ] + } + }, + { + "type": "Feature", + "properties": {"2016": 0, "2018": 1, "2020": 1, "id": "holahola"}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [98.41320559285191, 9.130152677609567], + [98.4132064396922, 9.129634801204366], + [98.41268534212357, 9.12963395925008], + [98.41268449453133, 9.130151835606735], + [98.41320559285191, 9.130152677609567] + ] + ] + } + }, + { + "type": "Feature", + "properties": {"2016": 0, "2018": 1, "2020": 1, "id": "muu"}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [99.15161525072554, 17.196952989011802], + [99.15161462797101, 17.196187888971142], + [99.1508186217712, 17.196188485959663], + [99.15081924125616, 17.196953586028492], + 
[99.15161525072554, 17.196952989011802] + ] + ] + } + }, + { + "type": "Feature", + "properties": {"2016": 0, "2018": 1, "2020": 1, "id": "meh"}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [103.24970664797146, 16.149399463016426], + [103.24969462056812, 16.14883702799096], + [103.24911269980473, 16.14884865163908], + [103.24912472557321, 16.149411087090822], + [103.24970664797146, 16.149399463016426] + ] + ] + } + }, + { + "type": "Feature", + "properties": {"2016": 0, "2018": 1, "2020": 1, "id": "yeet"}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [101.54544714825106, 13.083921911120795], + [101.5454421135335, 13.083431498178864], + [101.54494182969499, 13.083436432975173], + [101.54494686342564, 13.083926846108417], + [101.54544714825106, 13.083921911120795] + ] + ] + } + } + ] +} diff --git a/tests/test_delayed_vector.py b/tests/test_delayed_vector.py index d374bfe2..bb77db94 100644 --- a/tests/test_delayed_vector.py +++ b/tests/test_delayed_vector.py @@ -1,9 +1,12 @@ +import re + import pytest +from pyproj import CRS from openeo_driver.delayed_vector import DelayedVector from openeo_driver.errors import OpenEOApiException + from .data import get_path -from pyproj import CRS def test_feature_collection_bounds(): @@ -40,11 +43,7 @@ def test_geojson_url_invalid(requests_mock): def test_geojson_invalid_coordinates(): - dv = DelayedVector( - str(get_path("geojson/test_geojson_invalid_coordinates.geojson")) - ) - with pytest.raises(OpenEOApiException) as e: + dv = DelayedVector(str(get_path("geojson/test_geojson_invalid_coordinates.geojson"))) + expected_error = "Failed to parse Geojson. Invalid coordinate: [-361.0, 50.861345984658136]" + with pytest.raises(OpenEOApiException, match=re.escape(expected_error)): _ = dv.bounds - assert e.value.message.startswith( - "Failed to parse Geojson. Invalid coordinate: [-361.0, 50.861345984658136]" - ) diff --git a/tests/test_processes.py b/tests/test_processes.py index 80ffd2fc..9ed730af 100644 --- a/tests/test_processes.py +++ b/tests/test_processes.py @@ -461,6 +461,38 @@ def test_get_required_with_type(self): ): _ = args.get_required("color", expected_type=DriverDataCube) + def test_get_required_with_validator(self): + args = ProcessArgs({"color": "red", "size": 5}, process_id="wibble") + assert args.get_required("color", expected_type=str, validator=lambda v: len(v) == 3) == "red" + assert ( + args.get_required( + "color", expected_type=str, validator=ProcessArgs.validator_one_of(["red", "green", "blue"]) + ) + == "red" + ) + assert args.get_required("size", expected_type=int, validator=lambda v: v % 3 == 2) == 5 + with pytest.raises( + ProcessParameterInvalidException, + match=re.escape( + "The value passed for parameter 'color' in process 'wibble' is invalid: Failed validation." + ), + ): + _ = args.get_required("color", expected_type=str, validator=lambda v: len(v) == 10) + with pytest.raises( + ProcessParameterInvalidException, + match=re.escape("The value passed for parameter 'size' in process 'wibble' is invalid: Failed validation."), + ): + _ = args.get_required("size", expected_type=int, validator=lambda v: v % 3 == 1) + with pytest.raises( + ProcessParameterInvalidException, + match=re.escape( + "The value passed for parameter 'color' in process 'wibble' is invalid: Must be one of ['yellow', 'violet'] but got 'red'." 
+            ),
+        ):
+            _ = args.get_required(
+                "color", expected_type=str, validator=ProcessArgs.validator_one_of(["yellow", "violet"])
+            )
+
     def test_get_optional(self):
         args = ProcessArgs({"foo": "bar"}, process_id="wibble")
         assert args.get_optional("foo") == "bar"
@@ -468,6 +500,16 @@ def test_get_optional(self):
         assert args.get_optional("foo", 123) == "bar"
         assert args.get_optional("other", 123) == 123
 
+    def test_get_optional_callable_default(self):
+        args = ProcessArgs({"foo": "bar"}, process_id="wibble")
+        assert args.get_optional("foo", default=lambda: 123) == "bar"
+        assert args.get_optional("other", default=lambda: 123) == 123
+
+        # Possible, but probably a bad idea:
+        default = [1, 2, 3].pop
+        assert args.get_optional("other", default=default) == 3
+        assert args.get_optional("other", default=default) == 2
+
     def test_get_optional_with_type(self):
         args = ProcessArgs({"foo": "bar"}, process_id="wibble")
         assert args.get_optional("foo", expected_type=str) == "bar"
@@ -480,7 +522,24 @@
                 "The value passed for parameter 'foo' in process 'wibble' is invalid: Expected <class 'openeo_driver.datacube.DriverDataCube'> but got <class 'str'>."
             ),
         ):
-            _ = args.get_required("foo", expected_type=DriverDataCube)
+            _ = args.get_optional("foo", expected_type=DriverDataCube)
+
+    def test_get_optional_with_validator(self):
+        args = ProcessArgs({"foo": "bar"}, process_id="wibble")
+        assert args.get_optional("foo", validator=lambda s: all(c.islower() for c in s)) == "bar"
+        assert args.get_optional("foo", validator=ProcessArgs.validator_one_of(["bar", "meh"])) == "bar"
+        with pytest.raises(
+            ProcessParameterInvalidException,
+            match=re.escape("The value passed for parameter 'foo' in process 'wibble' is invalid: Failed validation."),
+        ):
+            _ = args.get_optional("foo", validator=lambda s: all(c.isupper() for c in s))
+        with pytest.raises(
+            ProcessParameterInvalidException,
+            match=re.escape(
+                "The value passed for parameter 'foo' in process 'wibble' is invalid: Must be one of ['nope', 'meh'] but got 'bar'."
+ ), + ): + _ = args.get_optional("foo", validator=ProcessArgs.validator_one_of(["nope", "meh"])) def test_get_deep(self): args = ProcessArgs({"foo": {"bar": {"color": "red", "size": {"x": 5, "y": 8}}}}, process_id="wibble") @@ -508,6 +567,16 @@ def test_get_deep_with_type(self): ): _ = args.get_deep("foo", "bar", "size", "x", expected_type=(DriverDataCube, str)) + def test_get_deep_with_validator(self): + args = ProcessArgs({"foo": {"bar": {"color": "red", "size": {"x": 5, "y": 8}}}}, process_id="wibble") + assert args.get_deep("foo", "bar", "size", "x", validator=lambda v: v % 5 == 0) == 5 + + with pytest.raises( + ProcessParameterInvalidException, + match=re.escape("The value passed for parameter 'foo' in process 'wibble' is invalid: Failed validation."), + ): + _ = args.get_deep("foo", "bar", "size", "y", validator=lambda v: v % 5 == 0) + def test_get_aliased(self): args = ProcessArgs({"size": 5, "color": "red"}, process_id="wibble") assert args.get_aliased(["size", "dimensions"]) == 5 @@ -543,3 +612,26 @@ def test_get_enum(self): ), ): _ = args.get_enum("color", options=["R", "G", "B"]) + + def test_validator_geojson_dict(self): + polygon = {"type": "Polygon", "coordinates": [[1, 2]]} + args = ProcessArgs({"geometry": polygon, "color": "red"}, process_id="wibble") + + validator = ProcessArgs.validator_geojson_dict() + assert args.get_required("geometry", validator=validator) == polygon + with pytest.raises( + ProcessParameterInvalidException, + match=re.escape( + "The value passed for parameter 'color' in process 'wibble' is invalid: Invalid GeoJSON: JSON object (mapping/dictionary) expected, but got str." + ), + ): + _ = args.get_required("color", validator=validator) + + validator = ProcessArgs.validator_geojson_dict(allowed_types=["FeatureCollection"]) + with pytest.raises( + ProcessParameterInvalidException, + match=re.escape( + "The value passed for parameter 'geometry' in process 'wibble' is invalid: Invalid GeoJSON: Found type 'Polygon', but expects one of ['FeatureCollection']." 
+ ), + ): + _ = args.get_required("geometry", validator=validator) diff --git a/tests/test_testing.py b/tests/test_testing.py index dac39ea7..eab3cead 100644 --- a/tests/test_testing.py +++ b/tests/test_testing.py @@ -7,13 +7,16 @@ import urllib.request import flask +import numpy import pytest import requests from openeo_driver.testing import ( ApiTester, + ApproxGeoJSONByBounds, DictSubSet, IgnoreOrder, + IsNan, ListSubSet, RegexMatcher, UrllibMocker, @@ -260,6 +263,18 @@ def test_approxify_tolerance_rel(): assert {"a": [10.1, 2.1]} != approxify({"a": [10, 2.3]}, rel=0.01) +@pytest.mark.parametrize("other", [float("nan"), numpy.nan]) +def test_is_nan(other): + assert other == IsNan() + assert IsNan() == other + + +@pytest.mark.parametrize("other", [0, 123, False, True, None, "dfd", [], {}, ()]) +def test_is_not_nan(other): + assert other != IsNan() + assert IsNan() != other + + @pytest.mark.parametrize( "format", [ @@ -284,3 +299,38 @@ def test_caplog_with_custom_formatter(caplog, format): "[WARNING] still not good (root)", "WARNING root:test_testing.py:XXX hmm bad times", ] + + +class TestApproxGeoJSONByBounds: + def test_basic(self): + geometry = {"type": "Polygon", "coordinates": [[[1, 2], [3, 1], [2, 4], [1, 2]]]} + assert geometry == ApproxGeoJSONByBounds(1, 1, 3, 4, abs=0.1) + + @pytest.mark.parametrize( + ["data", "expected_message"], + [ + ("nope", "# Not a dict"), + ({"foo": "bar"}, " # No 'type' field"), + ({"type": "Polygommm", "coordinates": [[[1, 2], [3, 1], [2, 4], [1, 2]]]}, " # Wrong type 'Polygommm'"), + ({"type": "Polygon"}, " # No 'coordinates' field"), + ], + ) + def test_invalid_construct(self, data, expected_message): + expected = ApproxGeoJSONByBounds(1, 2, 3, 4) + assert data != expected + assert expected_message in repr(expected) + + def test_out_of_bounds(self): + geometry = {"type": "Polygon", "coordinates": [[[1, 2], [3, 1], [2, 4], [1, 2]]]} + expected = ApproxGeoJSONByBounds(11, 22, 33, 44, abs=0.1) + assert geometry != expected + assert "# expected bounds [11.0, 22.0, 33.0, 44.0] != actual bounds: (1.0, 1.0, 3.0, 4.0)" in repr(expected) + + def test_types(self): + geometry = {"type": "Polygon", "coordinates": [[[1, 2], [3, 1], [2, 4], [1, 2]]]} + assert geometry == ApproxGeoJSONByBounds(1, 1, 3, 4, types=["Polygon"], abs=0.1) + assert geometry == ApproxGeoJSONByBounds(1, 1, 3, 4, types=["Polygon", "Point"], abs=0.1) + + expected = ApproxGeoJSONByBounds(1, 1, 3, 4, types=["MultiPolygon"], abs=0.1) + assert geometry != expected + assert "Wrong type 'Polygon'" in repr(expected) diff --git a/tests/test_vectorcube.py b/tests/test_vectorcube.py index c3ce62ba..b0029ced 100644 --- a/tests/test_vectorcube.py +++ b/tests/test_vectorcube.py @@ -6,11 +6,11 @@ import pyproj import pytest import xarray -from shapely.geometry import Polygon, MultiPolygon, Point +from shapely.geometry import MultiPolygon, Point, Polygon -from openeo_driver.errors import OpenEOApiException from openeo_driver.datacube import DriverVectorCube -from openeo_driver.testing import DictSubSet, ApproxGeometry +from openeo_driver.errors import OpenEOApiException +from openeo_driver.testing import ApproxGeometry, DictSubSet, IsNan from openeo_driver.util.geometry import as_geojson_feature_collection from openeo_driver.utils import EvalEnv @@ -83,6 +83,155 @@ def test_to_wkt(self, gdf): ['POLYGON ((1 1, 3 1, 2 3, 1 1))', 'POLYGON ((4 2, 5 4, 3 4, 4 2))'] ) + def test_to_internal_json_defaults(self, gdf): + vc = DriverVectorCube(gdf) + assert vc.to_internal_json() == { + "geometries": DictSubSet( + { + 
"type": "FeatureCollection", + "features": [ + DictSubSet( + { + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": (((1.0, 1.0), (3.0, 1.0), (2.0, 3.0), (1.0, 1.0)),), + }, + "properties": {"id": "first", "pop": 1234}, + } + ), + DictSubSet( + { + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": (((4.0, 2.0), (5.0, 4.0), (3.0, 4.0), (4.0, 2.0)),), + }, + "properties": {"id": "second", "pop": 5678}, + } + ), + ], + } + ), + "cube": None, + } + + @pytest.mark.parametrize( + ["columns_for_cube", "expected_cube"], + [ + ( + "numerical", + { + "name": None, + "dims": ("geometries", "properties"), + "coords": { + "geometries": {"attrs": {}, "data": [0, 1], "dims": ("geometries",)}, + "properties": {"attrs": {}, "data": ["pop"], "dims": ("properties",)}, + }, + "data": [[1234], [5678]], + "attrs": {}, + }, + ), + ( + "all", + { + "name": None, + "dims": ("geometries", "properties"), + "coords": { + "geometries": {"attrs": {}, "data": [0, 1], "dims": ("geometries",)}, + "properties": {"attrs": {}, "data": ["id", "pop"], "dims": ("properties",)}, + }, + "data": [["first", 1234], ["second", 5678]], + "attrs": {}, + }, + ), + ( + [], + { + "name": None, + "dims": ("geometries",), + "coords": { + "geometries": {"attrs": {}, "data": [0, 1], "dims": ("geometries",)}, + }, + "data": [IsNan(), IsNan()], + "attrs": {"vector_cube_dummy": True}, + }, + ), + ( + ["pop", "id"], + { + "name": None, + "dims": ("geometries", "properties"), + "coords": { + "geometries": {"attrs": {}, "data": [0, 1], "dims": ("geometries",)}, + "properties": {"attrs": {}, "data": ["pop", "id"], "dims": ("properties",)}, + }, + "data": [[1234, "first"], [5678, "second"]], + "attrs": {}, + }, + ), + ( + ["pop", "color"], + { + "name": None, + "dims": ("geometries", "properties"), + "coords": { + "geometries": {"attrs": {}, "data": [0, 1], "dims": ("geometries",)}, + "properties": {"attrs": {}, "data": ["pop", "color"], "dims": ("properties",)}, + }, + "data": [[1234.0, IsNan()], [5678.0, IsNan()]], + "attrs": {}, + }, + ), + ( + ["color"], + { + "name": None, + "dims": ("geometries", "properties"), + "coords": { + "geometries": {"attrs": {}, "data": [0, 1], "dims": ("geometries",)}, + "properties": {"attrs": {}, "data": ["color"], "dims": ("properties",)}, + }, + "data": [[IsNan()], [IsNan()]], + "attrs": {}, + }, + ), + ], + ) + def test_to_internal_json_columns_for_cube(self, gdf, columns_for_cube, expected_cube): + vc = DriverVectorCube.from_geodataframe(gdf, columns_for_cube=columns_for_cube) + internal = vc.to_internal_json() + assert internal == { + "geometries": DictSubSet( + { + "type": "FeatureCollection", + "features": [ + DictSubSet( + { + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": (((1.0, 1.0), (3.0, 1.0), (2.0, 3.0), (1.0, 1.0)),), + }, + "properties": {"id": "first", "pop": 1234}, + } + ), + DictSubSet( + { + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": (((4.0, 2.0), (5.0, 4.0), (3.0, 4.0), (4.0, 2.0)),), + }, + "properties": {"id": "second", "pop": 5678}, + } + ), + ], + } + ), + "cube": expected_cube, + } + def test_get_crs(self, gdf): vc = DriverVectorCube(gdf) assert vc.get_crs() == pyproj.CRS.from_epsg(4326) @@ -193,53 +342,134 @@ def test_from_geodataframe_default(self, gdf): assert {k: list(v.values) for k, v in cube.coords.items()} == {"geometries": [0, 1], "properties": ["pop"]} @pytest.mark.parametrize( - ["columns_for_cube", "expected"], + ["columns_for_cube", "expected_cube"], [ - ("numerical", {"shape": 
(2, 1), "coords": {"geometries": [0, 1], "properties": ["pop"]}}), - ("all", {"shape": (2, 2), "coords": {"geometries": [0, 1], "properties": ["id", "pop"]}}), - ([], None), - (["id"], {"shape": (2, 1), "coords": {"geometries": [0, 1], "properties": ["id"]}}), - (["pop", "id"], {"shape": (2, 2), "coords": {"geometries": [0, 1], "properties": ["pop", "id"]}}), - # TODO: test specifying non-existent column (to be filled with no-data): - # (["pop", "nopenope"], {"shape": (2, 2), "coords": {"geometries": [0, 1], "properties": ["pop", "nopenope"]}}), + ( + "numerical", + { + "name": None, + "dims": ("geometries", "properties"), + "coords": { + "geometries": {"attrs": {}, "data": [0, 1], "dims": ("geometries",)}, + "properties": {"attrs": {}, "data": ["pop"], "dims": ("properties",)}, + }, + "data": [[1234], [5678]], + "attrs": {}, + }, + ), + ( + "all", + { + "name": None, + "dims": ("geometries", "properties"), + "coords": { + "geometries": {"attrs": {}, "data": [0, 1], "dims": ("geometries",)}, + "properties": {"attrs": {}, "data": ["id", "pop"], "dims": ("properties",)}, + }, + "data": [["first", 1234], ["second", 5678]], + "attrs": {}, + }, + ), + ( + [], + { + "name": None, + "dims": ("geometries",), + "coords": { + "geometries": {"attrs": {}, "data": [0, 1], "dims": ("geometries",)}, + }, + "data": [IsNan(), IsNan()], + "attrs": {"vector_cube_dummy": True}, + }, + ), + ( + ["id"], + { + "name": None, + "dims": ("geometries", "properties"), + "coords": { + "geometries": {"attrs": {}, "data": [0, 1], "dims": ("geometries",)}, + "properties": {"attrs": {}, "data": ["id"], "dims": ("properties",)}, + }, + "data": [["first"], ["second"]], + "attrs": {}, + }, + ), + ( + ["pop", "id"], + { + "name": None, + "dims": ("geometries", "properties"), + "coords": { + "geometries": {"attrs": {}, "data": [0, 1], "dims": ("geometries",)}, + "properties": {"attrs": {}, "data": ["pop", "id"], "dims": ("properties",)}, + }, + "data": [[1234, "first"], [5678, "second"]], + "attrs": {}, + }, + ), + ( + ["color"], + { + "name": None, + "dims": ("geometries", "properties"), + "coords": { + "geometries": {"attrs": {}, "data": [0, 1], "dims": ("geometries",)}, + "properties": {"attrs": {}, "data": ["color"], "dims": ("properties",)}, + }, + "data": [[IsNan()], [IsNan()]], + "attrs": {}, + }, + ), + ( + ["pop", "color"], + { + "name": None, + "dims": ("geometries", "properties"), + "coords": { + "geometries": {"attrs": {}, "data": [0, 1], "dims": ("geometries",)}, + "properties": {"attrs": {}, "data": ["pop", "color"], "dims": ("properties",)}, + }, + "data": [[1234, IsNan()], [5678, IsNan()]], + "attrs": {}, + }, + ), ], ) - def test_from_geodataframe_columns_for_cube(self, gdf, columns_for_cube, expected): + def test_from_geodataframe_columns_for_cube(self, gdf, columns_for_cube, expected_cube): vc = DriverVectorCube.from_geodataframe(gdf, columns_for_cube=columns_for_cube) - assert vc.to_geojson() == DictSubSet( - { - "type": "FeatureCollection", - "features": [ - DictSubSet( - { - "type": "Feature", - "properties": {"id": "first", "pop": 1234}, - "geometry": { - "coordinates": (((1.0, 1.0), (3.0, 1.0), (2.0, 3.0), (1.0, 1.0)),), - "type": "Polygon", - }, - } - ), - DictSubSet( - { - "type": "Feature", - "properties": {"id": "second", "pop": 5678}, - "geometry": { - "coordinates": (((4.0, 2.0), (5.0, 4.0), (3.0, 4.0), (4.0, 2.0)),), - "type": "Polygon", - }, - } - ), - ], - } - ) - cube = vc.get_cube() - if expected is None: - assert cube is None - else: - assert cube.dims == ("geometries", "properties") - 
assert cube.shape == expected["shape"] - assert {k: list(v.values) for k, v in cube.coords.items()} == expected["coords"] + + assert vc.to_internal_json() == { + "geometries": DictSubSet( + { + "type": "FeatureCollection", + "features": [ + DictSubSet( + { + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": (((1.0, 1.0), (3.0, 1.0), (2.0, 3.0), (1.0, 1.0)),), + }, + "properties": {"id": "first", "pop": 1234}, + } + ), + DictSubSet( + { + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": (((4.0, 2.0), (5.0, 4.0), (3.0, 4.0), (4.0, 2.0)),), + }, + "properties": {"id": "second", "pop": 5678}, + } + ), + ], + } + ), + "cube": expected_cube, + } + @pytest.mark.parametrize(["geojson", "expected"], [ ( diff --git a/tests/test_views_execute.py b/tests/test_views_execute.py index 1a808d51..a43f4580 100644 --- a/tests/test_views_execute.py +++ b/tests/test_views_execute.py @@ -27,6 +27,7 @@ TEST_USER, TEST_USER_BEARER_TOKEN, ApiTester, + ApproxGeoJSONByBounds, DictSubSet, RegexMatcher, ephemeral_fileserver, @@ -396,27 +397,15 @@ def test_reduce_temporal_run_udf(api): assert dummy_backend.get_collection("S2_FAPAR_CLOUDCOVER").apply_tiles_spatiotemporal.call_count == 1 -def test_reduce_temporal_run_udf_legacy_client(api): - api.check_result( - "reduce_temporal_run_udf.json", - preprocess=preprocess_check_and_replace('"dimension": "t"', '"dimension": "temporal"') - ) - if api.api_version_compare.at_least("1.0.0"): - assert dummy_backend.get_collection("S2_FAPAR_CLOUDCOVER").reduce_dimension.call_count == 1 - else: - assert dummy_backend.get_collection("S2_FAPAR_CLOUDCOVER").apply_tiles_spatiotemporal.call_count == 1 - - def test_reduce_temporal_run_udf_invalid_dimension(api): resp = api.result( "reduce_temporal_run_udf.json", preprocess=preprocess_check_and_replace('"dimension": "t"', '"dimension": "tempo"') ) resp.assert_error( - 400, "ProcessParameterInvalid", - message="The value passed for parameter 'dimension' in process '{p}' is invalid: got 'tempo', but should be one of ['x', 'y', 't']".format( - p="reduce_dimension" if api.api_version_compare.at_least("1.0.0") else "reduce" - ) + 400, + "ProcessParameterInvalid", + message="The value passed for parameter 'dimension' in process 'reduce_dimension' is invalid: Must be one of ['x', 'y', 't'] but got 'tempo'.", ) @@ -428,27 +417,15 @@ def test_reduce_bands_run_udf(api): assert dummy_backend.get_collection("S2_FOOBAR").apply_tiles.call_count == 1 -def test_reduce_bands_run_udf_legacy_client(api): - api.check_result( - "reduce_bands_run_udf.json", - preprocess=preprocess_check_and_replace('"dimension": "bands"', '"dimension": "spectral_bands"') - ) - if api.api_version_compare.at_least("1.0.0"): - assert dummy_backend.get_collection("S2_FOOBAR").reduce_dimension.call_count == 1 - else: - assert dummy_backend.get_collection("S2_FOOBAR").apply_tiles.call_count == 1 - - def test_reduce_bands_run_udf_invalid_dimension(api): resp = api.result( "reduce_bands_run_udf.json", preprocess=preprocess_check_and_replace('"dimension": "bands"', '"dimension": "layers"') ) resp.assert_error( - 400, 'ProcessParameterInvalid', - message="The value passed for parameter 'dimension' in process '{p}' is invalid: got 'layers', but should be one of ['x', 'y', 't', 'bands']".format( - p="reduce_dimension" if api.api_version_compare.at_least("1.0.0") else "reduce" - ) + 400, + "ProcessParameterInvalid", + message="The value passed for parameter 'dimension' in process 'reduce_dimension' is invalid: Must be one of ['x', 'y', 't', 
'bands'] but got 'layers'.", ) @@ -464,23 +441,15 @@ def test_apply_dimension_temporal_run_udf(api): assert load_parameters.process_types == set([ProcessType.GLOBAL_TIME]) -def test_apply_dimension_temporal_run_udf_legacy_client(api): - api.check_result( - "apply_dimension_temporal_run_udf.json", - preprocess=preprocess_check_and_replace('"dimension": "t"', '"dimension": "temporal"') - ) - dummy = dummy_backend.get_collection("S2_FAPAR_CLOUDCOVER") - assert dummy.apply_dimension.call_count == 1 - - def test_apply_dimension_temporal_run_udf_invalid_temporal_dimension(api): resp = api.result( "apply_dimension_temporal_run_udf.json", preprocess=preprocess_check_and_replace('"dimension": "t"', '"dimension": "letemps"') ) resp.assert_error( - 400, 'ProcessParameterInvalid', - message="The value passed for parameter 'dimension' in process 'apply_dimension' is invalid: got 'letemps', but should be one of ['x', 'y', 't']" + 400, + "ProcessParameterInvalid", + message="The value passed for parameter 'dimension' in process 'apply_dimension' is invalid: Must be one of ['x', 'y', 't'] but got 'letemps'.", ) @@ -508,17 +477,12 @@ def test_reduce_max_bands(api): api.check_result("reduce_max.json", preprocess=preprocess_check_and_replace("PLACEHOLDER", "bands")) -def test_reduce_max_bands_legacy_style(api): - api.check_result("reduce_max.json", preprocess=preprocess_check_and_replace("PLACEHOLDER", "spectral_bands")) - - def test_reduce_max_invalid_dimension(api): res = api.result("reduce_max.json", preprocess=preprocess_check_and_replace("PLACEHOLDER", "orbit")) res.assert_error( - 400, 'ProcessParameterInvalid', - message="The value passed for parameter 'dimension' in process '{p}' is invalid: got 'orbit', but should be one of ['x', 'y', 't', 'bands']".format( - p="reduce_dimension" if api.api_version_compare.at_least("1.0.0") else "reduce" - ) + 400, + "ProcessParameterInvalid", + message="The value passed for parameter 'dimension' in process 'reduce_dimension' is invalid: Must be one of ['x', 'y', 't', 'bands'] but got 'orbit'.", ) @@ -565,32 +529,13 @@ def test_reduce_bands(api): assert set(p[0] for p in visitor.processes) == {"sum", "subtract", "divide"} -def test_reduce_bands_legacy_client(api): - api.check_result( - "reduce_bands.json", - preprocess=preprocess_check_and_replace('"dimension": "bands"', '"dimension": "spectral_bands"') - ) - dummy = dummy_backend.get_collection("S2_FOOBAR") - if api.api_version_compare.at_least("1.0.0"): - reduce_bands = dummy.reduce_dimension - else: - reduce_bands = dummy.reduce_bands - - reduce_bands.assert_called_once() - if api.api_version_compare.below("1.0.0"): - visitor = reduce_bands.call_args_list[0][0][0] - assert isinstance(visitor, dummy_backend.DummyVisitor) - assert set(p[0] for p in visitor.processes) == {"sum", "subtract", "divide"} - - def test_reduce_bands_invalid_dimension(api): res = api.result("reduce_bands.json", preprocess=preprocess_check_and_replace('"dimension": "bands"', '"dimension": "layor"')) res.assert_error( - 400, "ProcessParameterInvalid", - message="The value passed for parameter 'dimension' in process '{p}' is invalid: got 'layor', but should be one of ['x', 'y', 't', 'bands']".format( - p="reduce_dimension" if api.api_version_compare.at_least("1.0.0") else "reduce" - ) + 400, + "ProcessParameterInvalid", + message="The value passed for parameter 'dimension' in process 'reduce_dimension' is invalid: Must be one of ['x', 'y', 't', 'bands'] but got 'layor'.", ) @@ -998,21 +943,15 @@ def test_aggregate_temporal_max(api): 
api.check_result("aggregate_temporal_max.json") -def test_aggregate_temporal_max_legacy_client(api): - api.check_result( - "aggregate_temporal_max.json", - preprocess=preprocess_check_and_replace('"dimension": "t"', '"dimension": "temporal"') - ) - - def test_aggregate_temporal_max_invalid_temporal_dimension(api): resp = api.result( "aggregate_temporal_max.json", preprocess=preprocess_check_and_replace('"dimension": "t"', '"dimension": "detijd"') ) resp.assert_error( - 400, 'ProcessParameterInvalid', - message="The value passed for parameter 'dimension' in process 'aggregate_temporal' is invalid: got 'detijd', but should be one of ['x', 'y', 't']" + 400, + "ProcessParameterInvalid", + message="The value passed for parameter 'dimension' in process 'aggregate_temporal' is invalid: Must be one of ['x', 'y', 't'] but got 'detijd'.", ) @@ -1372,7 +1311,7 @@ def test_run_udf_on_vector_read_vector(api100, udf_code): "udf": udf_code, "runtime": "Python", }, - "result": "true", + "result": True, }, } resp = api100.check_result(process_graph) @@ -1417,8 +1356,8 @@ def test_run_udf_on_vector_get_geometries(api100, udf_code): "udf": udf_code, "runtime": "Python", }, - "result": "true" - } + "result": True, + }, } resp = api100.check_result(process_graph) assert resp.json == [ @@ -1463,7 +1402,7 @@ def test_run_udf_on_vector_load_uploaded_files(api100, udf_code): "udf": udf_code, "runtime": "Python", }, - "result": "true", + "result": True, }, } resp = api100.check_result(process_graph) @@ -1862,6 +1801,175 @@ def test_vector_save_result(self, api, output_format, content_type, data_prefix, assert isinstance(geometry, shapely.geometry.Polygon) assert geometry.bounds == expected + @pytest.mark.parametrize( + ["geojson", "expected"], + [ + ( + {"type": "Polygon", "coordinates": [[(1, 1), (3, 1), (2, 3), (1, 1)]]}, + [ + { + "type": "Feature", + "geometry": {"type": "Polygon", "coordinates": [[[1, 1], [3, 1], [2, 3], [1, 1]]]}, + "properties": {}, + }, + ], + ), + ( + {"type": "MultiPolygon", "coordinates": [[[(1, 1), (3, 1), (2, 3), (1, 1)]]]}, + [ + { + "type": "Feature", + "geometry": {"type": "MultiPolygon", "coordinates": [[[[1, 1], [3, 1], [2, 3], [1, 1]]]]}, + "properties": {}, + }, + ], + ), + ( + { + "type": "Feature", + "geometry": {"type": "MultiPolygon", "coordinates": [[[(1, 1), (3, 1), (2, 3), (1, 1)]]]}, + "properties": {"id": "12_3"}, + }, + [ + { + "type": "Feature", + "geometry": {"type": "MultiPolygon", "coordinates": [[[[1, 1], [3, 1], [2, 3], [1, 1]]]]}, + "properties": {"id": "12_3"}, + }, + ], + ), + ( + { + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": {"type": "Polygon", "coordinates": [[(1, 1), (3, 1), (2, 3), (1, 1)]]}, + "properties": {"id": 1}, + }, + { + "type": "Feature", + "geometry": {"type": "MultiPolygon", "coordinates": [[[(1, 1), (3, 1), (2, 3), (1, 1)]]]}, + "properties": {"id": 2}, + }, + ], + }, + [ + { + "type": "Feature", + "geometry": {"type": "Polygon", "coordinates": [[[1, 1], [3, 1], [2, 3], [1, 1]]]}, + "properties": {"id": 1}, + }, + { + "type": "Feature", + "geometry": {"type": "MultiPolygon", "coordinates": [[[[1, 1], [3, 1], [2, 3], [1, 1]]]]}, + "properties": {"id": 2}, + }, + ], + ), + ], + ) + def test_to_vector_cube(self, api100, geojson, expected): + res = api100.check_result( + { + "vc": { + "process_id": "to_vector_cube", + "arguments": {"data": geojson}, + "result": True, + } + } + ) + assert res.json == DictSubSet( + { + "type": "FeatureCollection", + "features": expected, + } + ) + + @pytest.mark.parametrize( 
+ ["geojson", "expected"], + [ + ( + {"type": "Point", "coordinates": (1, 2)}, + [ + { + "type": "Feature", + "geometry": {"type": "Point", "coordinates": [1, 2]}, + "properties": {}, + }, + ], + ), + ( + {"type": "Polygon", "coordinates": [[(1, 1), (3, 1), (2, 3), (1, 1)]]}, + [ + { + "type": "Feature", + "geometry": {"type": "Polygon", "coordinates": [[[1, 1], [3, 1], [2, 3], [1, 1]]]}, + "properties": {}, + }, + ], + ), + ( + {"type": "MultiPolygon", "coordinates": [[[(1, 1), (3, 1), (2, 3), (1, 1)]]]}, + [ + { + "type": "Feature", + "geometry": {"type": "MultiPolygon", "coordinates": [[[[1, 1], [3, 1], [2, 3], [1, 1]]]]}, + "properties": {}, + }, + ], + ), + ( + { + "type": "Feature", + "geometry": {"type": "MultiPolygon", "coordinates": [[[(1, 1), (3, 1), (2, 3), (1, 1)]]]}, + "properties": {"id": "12_3"}, + }, + [ + { + "type": "Feature", + "geometry": {"type": "MultiPolygon", "coordinates": [[[[1, 1], [3, 1], [2, 3], [1, 1]]]]}, + "properties": {"id": "12_3"}, + }, + ], + ), + ( + { + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": {"type": "Polygon", "coordinates": [[(1, 1), (3, 1), (2, 3), (1, 1)]]}, + "properties": {"id": 1}, + }, + { + "type": "Feature", + "geometry": {"type": "MultiPolygon", "coordinates": [[[(1, 1), (3, 1), (2, 3), (1, 1)]]]}, + "properties": {"id": 2}, + }, + ], + }, + [ + { + "type": "Feature", + "geometry": {"type": "Polygon", "coordinates": [[[1, 1], [3, 1], [2, 3], [1, 1]]]}, + "properties": {"id": 1}, + }, + { + "type": "Feature", + "geometry": {"type": "MultiPolygon", "coordinates": [[[[1, 1], [3, 1], [2, 3], [1, 1]]]]}, + "properties": {"id": 2}, + }, + ], + ), + ], + ) + def test_load_geojson(self, api100, geojson, expected): + res = api100.check_result( + {"vc": {"process_id": "load_geojson", "arguments": {"data": geojson}, "result": True}} + ) + assert res.json == DictSubSet({"type": "FeatureCollection", "features": expected}) + def test_no_nested_JSONResult(api): api.set_auth_bearer_token() @@ -3426,82 +3534,6 @@ def test_if_merge_cubes(api100): }) -@pytest.mark.parametrize(["geojson", "expected"], [ - ( - {"type": "Polygon", "coordinates": [[(1, 1), (3, 1), (2, 3), (1, 1)]]}, - [ - { - "type": "Feature", - "geometry": {"type": "Polygon", "coordinates": [[[1, 1], [3, 1], [2, 3], [1, 1]]]}, - "properties": {}, - }, - ], - ), - ( - {"type": "MultiPolygon", "coordinates": [[[(1, 1), (3, 1), (2, 3), (1, 1)]]]}, - [ - { - "type": "Feature", - "geometry": {"type": "MultiPolygon", "coordinates": [[[[1, 1], [3, 1], [2, 3], [1, 1]]]]}, - "properties": {}, - }, - ], - ), - ( - { - "type": "Feature", - "geometry": {"type": "MultiPolygon", "coordinates": [[[(1, 1), (3, 1), (2, 3), (1, 1)]]]}, - "properties": {"id": "12_3"}, - }, - [ - { - "type": "Feature", - "geometry": {"type": "MultiPolygon", "coordinates": [[[[1, 1], [3, 1], [2, 3], [1, 1]]]]}, - "properties": {"id": "12_3"}, - }, - ], - ), - ( - { - "type": "FeatureCollection", - "features": [ - { - "type": "Feature", - "geometry": {"type": "Polygon", "coordinates": [[(1, 1), (3, 1), (2, 3), (1, 1)]]}, - "properties": {"id": 1}, - }, - { - "type": "Feature", - "geometry": {"type": "MultiPolygon", "coordinates": [[[(1, 1), (3, 1), (2, 3), (1, 1)]]]}, - "properties": {"id": 2}, - }, - ]}, - [ - { - "type": "Feature", - "geometry": {"type": "Polygon", "coordinates": [[[1, 1], [3, 1], [2, 3], [1, 1]]]}, - "properties": {"id": 1}, - }, - { - "type": "Feature", - "geometry": {"type": "MultiPolygon", "coordinates": [[[[1, 1], [3, 1], [2, 3], [1, 1]]]]}, - "properties": {"id": 
2}, - }, - ], - ), -]) -def test_to_vector_cube(api100, geojson, expected): - res = api100.check_result({ - "vc": { - "process_id": "to_vector_cube", - "arguments": {"data": geojson}, - "result": True, - } - }) - assert res.json == DictSubSet({ - "type": "FeatureCollection", - "features": expected, - }) def test_vector_buffer_returns_error_on_empty_result_geometry(api): @@ -3584,3 +3616,303 @@ def test_request_costs_for_failed_request(api, backend_implementation): assert env["correlation_id"] == "r-abc123" get_request_costs.assert_called_with(TEST_USER, "r-abc123", False) + + +class TestVectorCubeRunUDF: + """ + Tests about running UDF based manipulations on vector cubes + + References: + - https://github.com/Open-EO/openeo-python-driver/issues/197 + - https://github.com/Open-EO/openeo-python-driver/pull/200 + - https://github.com/Open-EO/openeo-geopyspark-driver/issues/437 + """ + + @pytest.mark.parametrize( + "dimension", + [ + "properties", + "geometries", + ], + ) + def test_apply_dimension_run_udf_change_geometry(self, api100, dimension): + udf_code = """ + from openeo.udf import UdfData, FeatureCollection + def process_geometries(udf_data: UdfData) -> UdfData: + [feature_collection] = udf_data.get_feature_collection_list() + gdf = feature_collection.data + gdf["geometry"] = gdf["geometry"].buffer(distance=1, resolution=2) + udf_data.set_feature_collection_list([ + FeatureCollection(id="_", data=gdf), + ]) + """ + udf_code = textwrap.dedent(udf_code) + process_graph = { + "get_vector_data": { + "process_id": "load_uploaded_files", + "arguments": {"paths": [str(get_path("geojson/FeatureCollection02.json"))], "format": "GeoJSON"}, + }, + "apply_dimension": { + "process_id": "apply_dimension", + "arguments": { + "data": {"from_node": "get_vector_data"}, + "dimension": dimension, + "process": { + "process_graph": { + "runudf1": { + "process_id": "run_udf", + "arguments": { + "data": {"from_node": "get_vector_data"}, + "udf": udf_code, + "runtime": "Python", + }, + "result": True, + } + }, + }, + }, + "result": True, + }, + } + resp = api100.check_result(process_graph) + assert resp.json == DictSubSet( + { + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": ApproxGeoJSONByBounds(0, 0, 4, 4, types=["Polygon"], abs=0.1), + "properties": {"id": "first", "pop": 1234}, + }, + { + "type": "Feature", + "geometry": ApproxGeoJSONByBounds(2, 1, 6, 5, types=["Polygon"], abs=0.1), + "properties": {"id": "second", "pop": 5678}, + }, + ], + } + ) + + @pytest.mark.parametrize( + "dimension", + [ + # TODO: this "dimension="properties" use case does not strictly follow the openEO API spec + # `apply_dimension` only allows changing the cardinality of the provided dimension ("properties"), + # not any other dimension ("geometries" here). + "properties", + "geometries", + ], + ) + def test_apply_dimension_run_udf_filter_on_geometries(self, api100, dimension): + """ + Test to use `apply_dimension(dimension="...", process=UDF)` to filter out certain + entries from geometries dimension based on geometry (e.g. 
intersection with another geometry) + """ + udf_code = """ + from openeo.udf import UdfData, FeatureCollection + import shapely.geometry + def process_geometries(udf_data: UdfData) -> UdfData: + [feature_collection] = udf_data.get_feature_collection_list() + gdf = feature_collection.data + to_intersect = shapely.geometry.box(4, 3, 8, 4) + gdf = gdf[gdf["geometry"].intersects(to_intersect)] + udf_data.set_feature_collection_list([ + FeatureCollection(id="_", data=gdf), + ]) + """ + udf_code = textwrap.dedent(udf_code) + process_graph = { + "get_vector_data": { + "process_id": "load_uploaded_files", + "arguments": {"paths": [str(get_path("geojson/FeatureCollection10.json"))], "format": "GeoJSON"}, + }, + "apply_dimension": { + "process_id": "apply_dimension", + "arguments": { + "data": {"from_node": "get_vector_data"}, + "dimension": dimension, + "process": { + "process_graph": { + "runudf1": { + "process_id": "run_udf", + "arguments": { + "data": {"from_node": "get_vector_data"}, + "udf": udf_code, + "runtime": "Python", + }, + "result": True, + } + }, + }, + }, + "result": True, + }, + } + resp = api100.check_result(process_graph) + assert resp.json == DictSubSet( + { + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": ApproxGeoJSONByBounds(3, 2, 5, 4, types=["Polygon"], abs=0.1), + "properties": {"id": "second", "pop": 456}, + }, + { + "type": "Feature", + "geometry": ApproxGeoJSONByBounds(6, 2, 12, 6, types=["Polygon"], abs=0.1), + "properties": {"id": "third", "pop": 789}, + }, + ], + } + ) + + @pytest.mark.parametrize( + "dimension", + [ + # TODO: this "dimension="properties" use case does not strictly follow the openEO API spec + # `apply_dimension` only allows changing the cardinality of the provided dimension ("properties"), + # not any other dimension ("geometries" here). + "properties", + "geometries", + ], + ) + def test_apply_dimension_run_udf_filter_on_properties(self, api100, dimension): + """ + Test to use `apply_dimension(dimension="...", process=UDF)` to filter out certain + entries from geometries dimension, based on feature properties + + Note in case of dimension="properties": + strictly speaking, this approach draws outside the lines of the openEO API spec + as apply_dimension only allows changing the cardinality of the provided dimension ("properties" in this case), + not any other dimension (like "geometries" in this case). 
+ """ + udf_code = """ + from openeo.udf import UdfData, FeatureCollection + import shapely.geometry + def process_geometries(udf_data: UdfData) -> UdfData: + [feature_collection] = udf_data.get_feature_collection_list() + gdf = feature_collection.data + gdf = gdf[gdf["pop"] > 500] + udf_data.set_feature_collection_list([ + FeatureCollection(id="_", data=gdf), + ]) + """ + udf_code = textwrap.dedent(udf_code) + process_graph = { + "get_vector_data": { + "process_id": "load_uploaded_files", + "arguments": {"paths": [str(get_path("geojson/FeatureCollection10.json"))], "format": "GeoJSON"}, + }, + "apply_dimension": { + "process_id": "apply_dimension", + "arguments": { + "data": {"from_node": "get_vector_data"}, + "dimension": dimension, + "process": { + "process_graph": { + "runudf1": { + "process_id": "run_udf", + "arguments": { + "data": {"from_node": "get_vector_data"}, + "udf": udf_code, + "runtime": "Python", + }, + "result": True, + } + }, + }, + }, + "result": True, + }, + } + resp = api100.check_result(process_graph) + assert resp.json == DictSubSet( + { + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": ApproxGeoJSONByBounds(6.0, 2.0, 12.0, 6.0, types=["Polygon"], abs=0.1), + "properties": {"id": "third", "pop": 789}, + }, + { + "type": "Feature", + "geometry": ApproxGeoJSONByBounds(-2.0, 7.0, 5.0, 14.0, types=["Polygon"], abs=0.1), + "properties": {"id": "fourth", "pop": 101112}, + }, + ], + } + ) + + @pytest.mark.parametrize( + "dimension", + [ + "properties", + # TODO: this "dimension="geometries" use case does not strictly follow the openEO API spec + # `apply_dimension` only allows changing the cardinality of the provided dimension ("geometries"), + # not any other dimension ("properties" here). + "geometries", + ], + ) + def test_apply_dimension_run_udf_add_properties(self, api100, dimension): + """ + Test to use `apply_dimension(dimension="...", process=UDF)` to add properties + """ + udf_code = """ + from openeo.udf import UdfData, FeatureCollection + import shapely.geometry + def process_geometries(udf_data: UdfData) -> UdfData: + [feature_collection] = udf_data.get_feature_collection_list() + gdf = feature_collection.data + gdf["poppop"] = gdf["pop"] ** 2 + udf_data.set_feature_collection_list([ + FeatureCollection(id="_", data=gdf), + ]) + """ + udf_code = textwrap.dedent(udf_code) + process_graph = { + "get_vector_data": { + "process_id": "load_uploaded_files", + "arguments": {"paths": [str(get_path("geojson/FeatureCollection02.json"))], "format": "GeoJSON"}, + }, + "apply_dimension": { + "process_id": "apply_dimension", + "arguments": { + "data": {"from_node": "get_vector_data"}, + "dimension": dimension, + "process": { + "process_graph": { + "runudf1": { + "process_id": "run_udf", + "arguments": { + "data": {"from_node": "get_vector_data"}, + "udf": udf_code, + "runtime": "Python", + }, + "result": True, + } + }, + }, + }, + "result": True, + }, + } + resp = api100.check_result(process_graph) + assert resp.json == DictSubSet( + { + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": ApproxGeoJSONByBounds(1.0, 1.0, 3.0, 3.0, types=["Polygon"], abs=0.1), + "properties": {"id": "first", "pop": 1234, "poppop": 1234 * 1234}, + }, + { + "type": "Feature", + "geometry": ApproxGeoJSONByBounds(3.0, 2.0, 5.0, 4.0, types=["Polygon"], abs=0.1), + "properties": {"id": "second", "pop": 5678, "poppop": 5678 * 5678}, + }, + ], + } + ) diff --git a/tests/util/test_geometry.py b/tests/util/test_geometry.py index 
cbb2379d..fc96d0c3 100644
--- a/tests/util/test_geometry.py
+++ b/tests/util/test_geometry.py
@@ -1,4 +1,6 @@
+import contextlib
 import math
+from typing import List, Union
 
 import pyproj
 import pytest
@@ -9,19 +11,19 @@
 from shapely.geos import WKTWriter
 
 from openeo_driver.util.geometry import (
-    geojson_to_multipolygon,
-    reproject_bounding_box,
-    spatial_extent_union,
+    BoundingBox,
+    BoundingBoxException,
+    CrsRequired,
     GeometryBufferer,
     as_geojson_feature,
     as_geojson_feature_collection,
+    geojson_to_multipolygon,
+    reproject_bounding_box,
     reproject_geometry,
-    BoundingBox,
-    BoundingBoxException,
-    CrsRequired,
+    spatial_extent_union,
+    validate_geojson_basic,
 )
-
 from ..data import get_path
 
 EARTH_CIRCUMFERENCE_KM = 40075.017
@@ -746,3 +748,169 @@ def test_best_utm(self):
 
     bbox = BoundingBox(-72, -13, -71, -12, crs="EPSG:4326")
     assert bbox.best_utm() == 32719
+
+
+class TestValidateGeoJSON:
+    @staticmethod
+    @contextlib.contextmanager
+    def _checker(expected_issue: Union[str, None], raise_exception: bool):
+        """
+        Helper context manager to easily check a validate_geojson_basic result
+        for both raise_exception modes:
+
+        - "exception mode": the context manager __exit__ phase checks the result
+        - "return issue mode": the returned `check` function should be used inside the context manager body
+        """
+        checked = False
+
+        def check(result: List[str]):
+            """Check validation result in case no actual exception was thrown"""
+            nonlocal checked
+            checked = True
+            if expected_issue:
+                if raise_exception:
+                    pytest.fail("Exception should have been raised")
+                if not result:
+                    pytest.fail("No issue was reported")
+                assert expected_issue in "\n".join(result)
+            else:
+                if result:
+                    pytest.fail(f"Unexpected issue reported: {result}")
+
+        try:
+            yield check
+        except Exception as e:
+            # Check validation result in case of actual exception
+            if not raise_exception:
+                pytest.fail(f"Unexpected {e!r}: issue should be returned")
+            if not expected_issue:
+                pytest.fail(f"Unexpected {e!r}: no issue expected")
+            assert expected_issue in str(e)
+        else:
+            # No exception was thrown: check that the `check` function has been called.
+ if not checked: + raise RuntimeError("`check` function was not used") + + @pytest.mark.parametrize( + ["value", "expected_issue"], + [ + ("nope nope", "JSON object (mapping/dictionary) expected, but got str"), + (123, "JSON object (mapping/dictionary) expected, but got int"), + ({}, "No 'type' field"), + ({"type": 123}, "Invalid 'type' type: int"), + ({"type": {"Poly": "gon"}}, "Invalid 'type' type: dict"), + ({"type": "meh"}, "Invalid type 'meh'"), + ({"type": "Point"}, "No 'coordinates' field (type 'Point')"), + ({"type": "Point", "coordinates": [1, 2]}, None), + ({"type": "Polygon"}, "No 'coordinates' field (type 'Polygon')"), + ({"type": "Polygon", "coordinates": [[1, 2]]}, None), + ({"type": "MultiPolygon"}, "No 'coordinates' field (type 'MultiPolygon')"), + ({"type": "MultiPolygon", "coordinates": [[[1, 2]]]}, None), + ({"type": "GeometryCollection", "coordinates": []}, "No 'geometries' field (type 'GeometryCollection')"), + ({"type": "GeometryCollection", "geometries": []}, None), + ({"type": "Feature", "coordinates": []}, "No 'geometry' field (type 'Feature')"), + ({"type": "Feature", "geometry": {}}, "No 'properties' field (type 'Feature')"), + ({"type": "Feature", "geometry": {}, "properties": {}}, "No 'type' field"), + ( + {"type": "Feature", "geometry": {"type": "Polygon"}, "properties": {}}, + "No 'coordinates' field (type 'Polygon')", + ), + ( + {"type": "Feature", "geometry": {"type": "Polygon", "coordinates": [[1, 2]]}, "properties": {}}, + None, + ), + ( + {"type": "Feature", "geometry": {"type": "Polygonnnnn", "coordinates": [[1, 2]]}, "properties": {}}, + "Found type 'Polygonnnnn', but expects one of ", + ), + ({"type": "FeatureCollection"}, "No 'features' field (type 'FeatureCollection')"), + ({"type": "FeatureCollection", "features": []}, None), + ({"type": "FeatureCollection", "features": [{"type": "Feature"}]}, "No 'geometry' field (type 'Feature')"), + ( + {"type": "FeatureCollection", "features": [{"type": "Feature", "geometry": {}}]}, + "No 'properties' field (type 'Feature')", + ), + ( + {"type": "FeatureCollection", "features": [{"type": "Feature", "geometry": {}, "properties": {}}]}, + "No 'type' field", + ), + ( + { + "type": "FeatureCollection", + "features": [{"type": "Feature", "geometry": {"type": "Polygon"}, "properties": {}}], + }, + "No 'coordinates' field (type 'Polygon')", + ), + ( + { + "type": "FeatureCollection", + "features": [ + {"type": "Feature", "geometry": {"type": "Polygon", "coordinates": [[1, 2]]}, "properties": {}}, + {"type": "Feature", "geometry": {"type": "Polygon", "coordinates": [[3, 4]]}, "properties": {}}, + ], + }, + None, + ), + ], + ) + @pytest.mark.parametrize("raise_exception", [False, True]) + def test_validate_geojson_basic(self, value, expected_issue, raise_exception): + with self._checker(expected_issue=expected_issue, raise_exception=raise_exception) as check: + result = validate_geojson_basic(value, raise_exception=raise_exception) + check(result) + + @pytest.mark.parametrize( + ["value", "allowed_types", "expected_issue"], + [ + ( + {"type": "Point", "coordinates": [1, 2]}, + {"Polygon", "MultiPolygon"}, + "Found type 'Point', but expects one of ['MultiPolygon', 'Polygon']", + ), + ({"type": "Polygon", "coordinates": [[1, 2]]}, {"Polygon", "MultiPolygon"}, None), + ({"type": "MultiPolygon", "coordinates": [[[1, 2]]]}, {"Polygon", "MultiPolygon"}, None), + ( + {"type": "Feature", "geometry": {"type": "Polygon", "coordinates": [[1, 2]]}, "properties": {}}, + {"Polygon", "MultiPolygon"}, + "Found type 'Feature', but 
expects one of ['MultiPolygon', 'Polygon']", + ), + ( + {"type": "Feature", "geometry": {"type": "Polygon", "coordinates": [[1, 2]]}, "properties": {}}, + {"Feature"}, + None, + ), + ( + { + "type": "FeatureCollection", + "features": [ + {"type": "Feature", "geometry": {"type": "Polygon", "coordinates": [[1, 2]]}, "properties": {}}, + {"type": "Feature", "geometry": {"type": "Polygon", "coordinates": [[3, 4]]}, "properties": {}}, + ], + }, + {"Polygon", "MultiPolygon"}, + "Found type 'FeatureCollection', but expects one of ['MultiPolygon', 'Polygon']", + ), + ( + { + "type": "FeatureCollection", + "features": [ + {"type": "Feature", "geometry": {"type": "Polygon", "coordinates": [[1, 2]]}, "properties": {}}, + {"type": "Feature", "geometry": {"type": "Polygon", "coordinates": [[3, 4]]}, "properties": {}}, + ], + }, + {"FeatureCollection"}, + None, + ), + ], + ) + @pytest.mark.parametrize( + "raise_exception", + [ + False, + True, + ], + ) + def test_validate_geojson_basic_allowed_types(self, value, allowed_types, expected_issue, raise_exception): + with self._checker(expected_issue=expected_issue, raise_exception=raise_exception) as check: + result = validate_geojson_basic(value, allowed_types=allowed_types, raise_exception=raise_exception) + check(result)
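
The new `test_get_*_with_validator` cases above pin down a small validator contract: a validator is any callable that returns truthy on success, returns falsy to trigger the generic "Failed validation." reason, or raises to supply its own message (as `ProcessArgs.validator_one_of` does with "Must be one of ... but got ..."). A minimal sketch of that contract, where `check_value` is a hypothetical stand-in for the internal check the getters share, not the actual `ProcessArgs` code:

```python
from typing import Callable, Optional


class ProcessParameterInvalidException(Exception):
    # Simplified stand-in for openeo_driver.errors.ProcessParameterInvalidException.
    def __init__(self, parameter: str, process: str, reason: str):
        super().__init__(f"The value passed for parameter {parameter!r} in process {process!r} is invalid: {reason}")


def check_value(value, *, name: str, process_id: str, validator: Optional[Callable] = None):
    # A falsy return produces the generic reason; a raised exception supplies its own message.
    if validator is not None:
        try:
            valid = validator(value)
            reason = "Failed validation."
        except Exception as e:
            valid = False
            reason = str(e)
        if not valid:
            raise ProcessParameterInvalidException(parameter=name, process=process_id, reason=reason)
    return value


def validator_one_of(options: list) -> Callable:
    """Build a validator that rejects values outside a fixed set with a descriptive message."""
    def validator(value) -> bool:
        if value not in options:
            raise ValueError(f"Must be one of {options!r} but got {value!r}.")
        return True
    return validator


assert check_value("red", name="color", process_id="wibble", validator=validator_one_of(["red", "green"])) == "red"
```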
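`test_get_optional_callable_default` relies on the default being evaluated lazily, and only on the missing-argument path; that is what makes a default like `lambda: data_cube.metadata.temporal_dimension.name` cheap when the argument is present. A sketch of that lookup logic (assuming, as the `[1, 2, 3].pop` case suggests, the callable is invoked exactly once per miss):

```python
def get_optional(args: dict, name: str, default=None):
    # The callable default is only evaluated when the argument is absent.
    if name in args:
        return args[name]
    return default() if callable(default) else default


assert get_optional({"foo": "bar"}, "foo", default=lambda: 123) == "bar"
assert get_optional({"foo": "bar"}, "other", default=lambda: 123) == 123
```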
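The `IsNan` matcher exercised in test_testing.py only needs symmetric equality with NaN values, which plain `==` can never give since `nan != nan`. A minimal sketch:

```python
import math


class IsNan:
    # Matches any NaN float. float.__eq__ returns NotImplemented against this class,
    # so Python falls back to the reflected IsNan.__eq__ for both operand orders.
    def __eq__(self, other) -> bool:
        return isinstance(other, float) and math.isnan(other)


assert float("nan") == IsNan()
assert 123 != IsNan()  # ints, bools, None, strings, containers all compare unequal
```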
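`ApproxGeoJSONByBounds`, as tested above, compares a GeoJSON-like dict by its shapely bounds and stashes the failure reason so that pytest's repr of the expected object explains the mismatch. A sketch under those assumptions (the default allowed types and the exact repr format are guesses, calibrated to the tested messages):

```python
import pytest
import shapely.geometry

GEOMETRY_TYPES = ["Point", "MultiPoint", "LineString", "MultiLineString", "Polygon", "MultiPolygon"]


class ApproxGeoJSONByBounds:
    def __init__(self, xmin, ymin, xmax, ymax, *, types=None, abs=None):
        self.expected_bounds = [float(v) for v in (xmin, ymin, xmax, ymax)]
        self.types = types or GEOMETRY_TYPES
        self.abs = abs
        self.issues = []

    def __eq__(self, other) -> bool:
        # Record the first problem found, so a failing assert surfaces it via __repr__.
        if not isinstance(other, dict):
            self.issues.append("# Not a dict")
        elif "type" not in other:
            self.issues.append("# No 'type' field")
        elif other["type"] not in self.types:
            self.issues.append(f"# Wrong type {other['type']!r}")
        elif "coordinates" not in other:
            self.issues.append("# No 'coordinates' field")
        else:
            bounds = shapely.geometry.shape(other).bounds
            if bounds == pytest.approx(self.expected_bounds, abs=self.abs):
                return True
            self.issues.append(f"# expected bounds {self.expected_bounds} != actual bounds: {bounds}")
        return False

    def __repr__(self) -> str:
        return f"<ApproxGeoJSONByBounds bounds={self.expected_bounds} abs={self.abs}>" + " ".join([""] + self.issues)
```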
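The expected `"cube"` dicts in the `to_internal_json` tests match the layout of xarray's `DataArray.to_dict()` (name, dims, per-coordinate dims/attrs/data, data, attrs), which suggests the serialization is essentially the GeoDataFrame's `__geo_interface__` next to a `to_dict()` dump of the cube. A sketch under that assumption; `_geometries` and `_cube` are hypothetical attribute names, not necessarily those of `DriverVectorCube`:

```python
from typing import Optional

import geopandas as gpd
import xarray


class VectorCubeJsonSketch:
    def __init__(self, geometries: gpd.GeoDataFrame, cube: Optional[xarray.DataArray] = None):
        self._geometries = geometries
        self._cube = cube

    def to_internal_json(self) -> dict:
        # GeoDataFrame.__geo_interface__ yields the FeatureCollection mapping;
        # DataArray.to_dict() yields the nested dims/coords/data/attrs structure seen in the tests.
        return {
            "geometries": self._geometries.__geo_interface__,
            "cube": self._cube.to_dict() if self._cube is not None else None,
        }
```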
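The TestVectorCubeRunUDF cases all follow the same shape: the whole GeoDataFrame is wrapped in a single `FeatureCollection`, handed to the UDF, and whatever comes back (fewer rows, extra columns, buffered geometries) becomes the new vector cube. A sketch of that round trip using the openeo UDF helpers the tests' own UDF code imports; `run_udf_code` is assumed to behave as in the openeo client library, and this is not the driver's actual implementation:

```python
import geopandas as gpd
from openeo.udf import FeatureCollection, UdfData, run_udf_code


def run_udf_on_gdf(gdf: gpd.GeoDataFrame, udf_code: str) -> gpd.GeoDataFrame:
    # Wrap the full GeoDataFrame as one FeatureCollection, run the UDF,
    # and unwrap the (possibly filtered or extended) result.
    udf_data = UdfData(feature_collection_list=[FeatureCollection(id="_", data=gdf)])
    result = run_udf_code(code=udf_code, data=udf_data)
    [feature_collection] = result.get_feature_collection_list()
    return feature_collection.data
```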
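Finally, the TestValidateGeoJSON tables pin down `validate_geojson_basic` closely enough to sketch it: issues are collected into a list (or raised, depending on `raise_exception`), Features and FeatureCollections are validated recursively, and `allowed_types` only constrains the top level. The messages below mirror the tested ones, but this is a reconstruction from the tests, not the shipped implementation:

```python
from typing import List, Optional, Set

GEOMETRY_TYPES = {
    "Point", "MultiPoint", "LineString", "MultiLineString",
    "Polygon", "MultiPolygon", "GeometryCollection",
}
GEOJSON_TYPES = GEOMETRY_TYPES | {"Feature", "FeatureCollection"}


def validate_geojson_basic(value, *, allowed_types: Optional[Set[str]] = None, raise_exception: bool = True) -> List[str]:
    """Best-effort structural GeoJSON check: collect issues, optionally raising on them."""
    issues = []
    if not isinstance(value, dict):
        issues.append(f"JSON object (mapping/dictionary) expected, but got {type(value).__name__}")
    elif "type" not in value:
        issues.append("No 'type' field")
    elif not isinstance(value["type"], str):
        issues.append(f"Invalid 'type' type: {type(value['type']).__name__}")
    elif allowed_types is not None and value["type"] not in allowed_types:
        issues.append(f"Found type {value['type']!r}, but expects one of {sorted(allowed_types)}")
    elif value["type"] not in GEOJSON_TYPES:
        issues.append(f"Invalid type {value['type']!r}")
    elif value["type"] == "Feature":
        if "geometry" not in value:
            issues.append("No 'geometry' field (type 'Feature')")
        else:
            # Nested geometries must be real geometries, hence the tested
            # "Found type 'Polygonnnnn', but expects one of ..." message.
            issues.extend(validate_geojson_basic(value["geometry"], allowed_types=GEOMETRY_TYPES, raise_exception=False))
        if "properties" not in value:
            issues.append("No 'properties' field (type 'Feature')")
    elif value["type"] == "FeatureCollection":
        if "features" not in value:
            issues.append("No 'features' field (type 'FeatureCollection')")
        else:
            for feature in value["features"]:
                issues.extend(validate_geojson_basic(feature, allowed_types={"Feature"}, raise_exception=False))
    elif value["type"] == "GeometryCollection":
        if "geometries" not in value:
            issues.append("No 'geometries' field (type 'GeometryCollection')")
    elif "coordinates" not in value:
        issues.append(f"No 'coordinates' field (type {value['type']!r})")

    if issues and raise_exception:
        raise ValueError("; ".join(issues))
    return issues


def validator_geojson_dict(allowed_types: Optional[Set[str]] = None):
    # Shape of ProcessArgs.validator_geojson_dict implied by test_validator_geojson_dict:
    # wrap the collected issues in an "Invalid GeoJSON: ..." reason.
    def validator(value) -> bool:
        issues = validate_geojson_basic(value, allowed_types=allowed_types, raise_exception=False)
        if issues:
            raise ValueError(f"Invalid GeoJSON: {'. '.join(issues)}.")
        return True
    return validator
```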