diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index bc56c7de4..e771afb24 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -13,3 +13,7 @@ cfd2e0d1cb3c270222cd4d2e4b0688da50087494 # pre-commit end-of-file-fixer and trailing-whitespace d7ccd7b57aab823867415a9e75913d14e440ae25 c4759f2025cf777f761cb508a64e01c5c3ea3e72 + +# Import cleanups using isort +9ae624cb536c9aeda35c5e389f5d87e353c22726 +f50c2e18583619f1c67de6c803cd65c0ac2d2f5e diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index b84d1caa9..afbe5ec3b 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -1,6 +1,13 @@ name: Lint -on: [push, pull_request] +on: + push: + paths: + - '**.py' + pull_request: + paths: + - '**.py' +concurrency: ci-${{ github.ref }} jobs: build: runs-on: ubuntu-latest diff --git a/.github/workflows/sphinx2ghpages.yml b/.github/workflows/sphinx2ghpages.yml index eb9ec377f..10e883c36 100644 --- a/.github/workflows/sphinx2ghpages.yml +++ b/.github/workflows/sphinx2ghpages.yml @@ -5,6 +5,13 @@ name: Sphinx Documentation to GitHub Pages on: push: branches: [ master ] + paths: + - 'docs/**' + - 'openeo/**' + +concurrency: + group: docs-${{ github.ref }} + cancel-in-progress: true jobs: build: diff --git a/.github/workflows/unittests.yml b/.github/workflows/unittests.yml index 678c4a397..a0bb20a01 100644 --- a/.github/workflows/unittests.yml +++ b/.github/workflows/unittests.yml @@ -1,6 +1,19 @@ name: Unit tests -on: [push, pull_request] +on: + push: + paths: + - 'setup.py' + - 'pyproject.toml' + - 'openeo/**' + - 'tests/**' + pull_request: + paths: + - 'setup.py' + - 'pyproject.toml' + - 'openeo/**' + - 'tests/**' +concurrency: ci-${{ github.ref }} jobs: build: strategy: @@ -15,8 +28,6 @@ jobs: - "3.11" # Additional special cases (see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#example-adding-configurations) include: - - os: "ubuntu-20.04" - python-version: "3.6" - os: "windows-latest" python-version: "3.11" runs-on: ${{ matrix.os }} diff --git a/CHANGELOG.md b/CHANGELOG.md index c0d5cd9e7..cfe668f41 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Removed +- Bumped minimal supported Python version to 3.7 ([#460](https://github.com/Open-EO/openeo-python-client/issues/460)) + ### Fixed diff --git a/README.md b/README.md index 8ccbe2289..de23bb1f8 100644 --- a/README.md +++ b/README.md @@ -47,7 +47,7 @@ examples and in-depth discussion. ## Installation -Python 3.6 or higher is required. +Python 3.7 or higher is required. As always, it is recommended to work in some kind of virtual environment (using `venv`, `virtualenv`, conda, docker, ...) to install the `openeo` package and its dependencies: diff --git a/docs/api.rst b/docs/api.rst index 5085aa2ef..b67497b79 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -126,7 +126,7 @@ openeo.util ------------- .. automodule:: openeo.util - :members: to_bbox_dict, BBoxDict, load_json_resource, normalize_crs + :members: to_bbox_dict, BBoxDict, load_json_resource, normalize_crs, string_to_temporal_extent openeo.processes diff --git a/docs/installation.rst b/docs/installation.rst index 126d9a2b6..02fdf4049 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -11,7 +11,7 @@ The package is a pure Python implementation and its dependencies are carefully c Basic install ============= -At least *Python 3.6* is recommended. 
+At least *Python 3.7* is required (since version 0.23.0). Also, it is recommended to work in a some kind of *virtual environment* (``venv``, ``conda``, ...) to avoid polluting the base install of Python on your operating system or introducing conflicts with other applications. diff --git a/docs/process_mapping.py b/docs/process_mapping.py index 022ff50eb..930aae1fe 100644 --- a/docs/process_mapping.py +++ b/docs/process_mapping.py @@ -7,13 +7,12 @@ """ import datetime +import importlib import sys from textwrap import dedent -import importlib from openeo.internal.documentation import _process_registry - # TODO: automate running of this tool. E.g. use https://nedbatchelder.com/code/cog # TODO: support operators properly (use `+` in docs instead of `__add__`) diff --git a/examples/R_example.py b/examples/R_example.py index 09c462b67..ce52f3ee7 100644 --- a/examples/R_example.py +++ b/examples/R_example.py @@ -1,8 +1,8 @@ -import openeo import logging -from openeo.auth.auth_bearer import BearerAuth import time +import openeo +from openeo.auth.auth_bearer import BearerAuth logging.basicConfig(level=logging.INFO) diff --git a/examples/eodc_example.py b/examples/eodc_example.py index 140eb4f10..eed235864 100644 --- a/examples/eodc_example.py +++ b/examples/eodc_example.py @@ -1,7 +1,8 @@ import logging + import openeo -from openeo.rest.job import RESTJob from openeo.auth.auth_bearer import BearerAuth +from openeo.rest.job import RESTJob logging.basicConfig(level=logging.DEBUG) diff --git a/examples/gee_example.py b/examples/gee_example.py index ede297ad2..982abb81a 100644 --- a/examples/gee_example.py +++ b/examples/gee_example.py @@ -1,6 +1,7 @@ +import logging + import openeo from openeo.internal.graph_building import PGNode -import logging logging.basicConfig(level=logging.INFO) diff --git a/examples/gee_uc1_temp.py b/examples/gee_uc1_temp.py index e07a089ea..de7d632c3 100644 --- a/examples/gee_uc1_temp.py +++ b/examples/gee_uc1_temp.py @@ -1,6 +1,7 @@ -import openeo import logging +import openeo + logging.basicConfig(level=logging.INFO) GEE_DRIVER_URL = "https://earthengine.openeo.org/v1.0" diff --git a/examples/mundialis_example.py b/examples/mundialis_example.py index 0561039d4..295f4fc28 100644 --- a/examples/mundialis_example.py +++ b/examples/mundialis_example.py @@ -1,9 +1,10 @@ #!/usr/bin/env python3 import logging -import openeo import time +import openeo + logging.basicConfig(level=logging.INFO) backend_url = 'https://openeo.mundialis.de' diff --git a/examples/mundialis_extended.py b/examples/mundialis_extended.py index 9d91d6e97..50a8fabaf 100644 --- a/examples/mundialis_extended.py +++ b/examples/mundialis_extended.py @@ -1,10 +1,10 @@ #!/usr/bin/env python3 import json import logging +import sys from datetime import datetime import openeo -import sys logger = logging.getLogger(__name__) diff --git a/examples/mundialis_mini.py b/examples/mundialis_mini.py index 29e0e7d01..a7fdefe9c 100644 --- a/examples/mundialis_mini.py +++ b/examples/mundialis_mini.py @@ -1,6 +1,7 @@ #!/usr/bin/env python3 import json + import openeo backend_url = 'https://openeo.mundialis.de' diff --git a/examples/ndvi_composite.py b/examples/ndvi_composite.py index 410a2b423..11daa90a8 100644 --- a/examples/ndvi_composite.py +++ b/examples/ndvi_composite.py @@ -1,9 +1,10 @@ import os from pathlib import Path -import openeo from skimage.morphology import selem +import openeo + connection = openeo.connect("https://openeo-dev.vito.be") bbox = {"west": 4.996033, "south": 51.258922, "east": 5.091603, "north": 
51.282696} connection.authenticate_basic() @@ -75,6 +76,7 @@ def test_composite_by_udf(): from openeo.rest.datacube import DataCube + def test_debug_udf(): """ Shows how to run your UDF locally for testing. This method uses the same code as the backend, and can be used to check validity of your UDF. @@ -86,6 +88,7 @@ def test_debug_udf(): from examples.udf.median_composite import apply_datacube + def test_debug_udf_direct_invoke(): """ Shows how to run your UDF locally for testing, by invoking the function directly, breakpoints work. diff --git a/examples/phenology_example.py b/examples/phenology_example.py index a1f3a9700..8889ddb00 100644 --- a/examples/phenology_example.py +++ b/examples/phenology_example.py @@ -1,10 +1,11 @@ -from shapely.geometry import Polygon - -import openeo import logging import os from pathlib import Path + import pandas as pd +from shapely.geometry import Polygon + +import openeo #enable logging in requests library logging.basicConfig(level=logging.DEBUG) diff --git a/examples/profiling_example.py b/examples/profiling_example.py index f1ad5f4bc..8e51c9bed 100644 --- a/examples/profiling_example.py +++ b/examples/profiling_example.py @@ -1,10 +1,10 @@ # -*- coding: utf-8 -*- -from pathlib import Path -import openeo +import pstats import tarfile import tempfile -import pstats +from pathlib import Path +import openeo if __name__ == '__main__': diff --git a/examples/py3_process_wrapper-wcps_eurac.py b/examples/py3_process_wrapper-wcps_eurac.py index 6b86051b1..9ba59ae59 100644 --- a/examples/py3_process_wrapper-wcps_eurac.py +++ b/examples/py3_process_wrapper-wcps_eurac.py @@ -1,10 +1,10 @@ #!/usr/bin/env python3 import json import logging +import sys from datetime import datetime import openeo -import sys logger = logging.getLogger(__name__) diff --git a/examples/udf/median_composite.py b/examples/udf/median_composite.py index c9605de9b..5a000d530 100644 --- a/examples/udf/median_composite.py +++ b/examples/udf/median_composite.py @@ -1,5 +1,5 @@ -import xarray import numpy as np +import xarray from openeo.udf import XarrayDataCube diff --git a/examples/vito_example.py b/examples/vito_example.py index 602e5a974..d3d426e97 100644 --- a/examples/vito_example.py +++ b/examples/vito_example.py @@ -1,6 +1,7 @@ -import openeo -import logging import json +import logging + +import openeo logging.basicConfig(level=logging.INFO) diff --git a/openeo/__init__.py b/openeo/__init__.py index 61fafd7e1..0ddf2ff83 100644 --- a/openeo/__init__.py +++ b/openeo/__init__.py @@ -12,9 +12,8 @@ class BaseOpenEoException(Exception): from openeo._version import __version__ - -from openeo.rest.datacube import DataCube, UDF -from openeo.rest.connection import connect, session, Connection +from openeo.rest.connection import Connection, connect, session +from openeo.rest.datacube import UDF, DataCube from openeo.rest.job import BatchJob, RESTJob diff --git a/openeo/api/process.py b/openeo/api/process.py index 405828f24..9eaea06e8 100644 --- a/openeo/api/process.py +++ b/openeo/api/process.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import warnings from typing import Union @@ -35,7 +37,7 @@ def to_dict(self) -> dict: return d @classmethod - def raster_cube(cls, name: str = "data", description: str = "A data cube.") -> 'Parameter': + def raster_cube(cls, name: str = "data", description: str = "A data cube.") -> Parameter: """ Helper to easily create a 'raster-cube' parameter. 
@@ -46,7 +48,7 @@ def raster_cube(cls, name: str = "data", description: str = "A data cube.") -> ' return cls(name=name, description=description, schema={"type": "object", "subtype": "raster-cube"}) @classmethod - def datacube(cls, name: str = "data", description: str = "A data cube.") -> "Parameter": + def datacube(cls, name: str = "data", description: str = "A data cube.") -> Parameter: """ Helper to easily create a 'datacube' parameter. @@ -59,7 +61,7 @@ def datacube(cls, name: str = "data", description: str = "A data cube.") -> "Par return cls(name=name, description=description, schema={"type": "object", "subtype": "datacube"}) @classmethod - def string(cls, name: str, description: str = None, default=_DEFAULT_UNDEFINED, values=None) -> 'Parameter': + def string(cls, name: str, description: str = None, default=_DEFAULT_UNDEFINED, values=None) -> Parameter: """Helper to create a 'string' type parameter.""" schema = {"type": "string"} if values is not None: @@ -68,21 +70,21 @@ def string(cls, name: str, description: str = None, default=_DEFAULT_UNDEFINED, @classmethod - def integer(cls, name: str, description: str = None, default=_DEFAULT_UNDEFINED) -> 'Parameter': + def integer(cls, name: str, description: str = None, default=_DEFAULT_UNDEFINED) -> Parameter: """Helper to create a 'integer' type parameter.""" return cls(name=name, description=description, schema={"type": "integer"}, default=default) @classmethod - def number(cls, name: str, description: str = None, default=_DEFAULT_UNDEFINED) -> 'Parameter': + def number(cls, name: str, description: str = None, default=_DEFAULT_UNDEFINED) -> Parameter: """Helper to create a 'number' type parameter.""" return cls(name=name, description=description, schema={"type": "number"}, default=default) @classmethod - def boolean(cls, name: str, description: str = None, default=_DEFAULT_UNDEFINED) -> 'Parameter': + def boolean(cls, name: str, description: str = None, default=_DEFAULT_UNDEFINED) -> Parameter: """Helper to create a 'boolean' type parameter.""" return cls(name=name, description=description, schema={"type": "boolean"}, default=default) @classmethod - def array(cls, name: str, description: str = None, default=_DEFAULT_UNDEFINED) -> 'Parameter': + def array(cls, name: str, description: str = None, default=_DEFAULT_UNDEFINED) -> Parameter: """Helper to create a 'array' type parameter.""" return cls(name=name, description=description, schema={"type": "array"}, default=default) diff --git a/openeo/capabilities.py b/openeo/capabilities.py index 0bc4a1270..3e8137390 100644 --- a/openeo/capabilities.py +++ b/openeo/capabilities.py @@ -1,8 +1,9 @@ +from __future__ import annotations + import contextlib -from abc import ABC import re -from typing import Union, Tuple - +from abc import ABC +from typing import Tuple, Union # TODO Is this base class (still) useful? 
@@ -24,7 +25,7 @@ def api_version(self) -> str: raise NotImplementedError @property - def api_version_check(self) -> 'ComparableVersion': + def api_version_check(self) -> ComparableVersion: """Helper to easily check if the API version is at least or below some threshold version.""" api_version = self.api_version() if not api_version: diff --git a/openeo/config.py b/openeo/config.py index 918fa1f99..32a5827a1 100644 --- a/openeo/config.py +++ b/openeo/config.py @@ -4,13 +4,15 @@ """ +from __future__ import annotations + import logging import os import platform from configparser import ConfigParser from copy import deepcopy from pathlib import Path -from typing import Union, Any, Sequence, Iterator, Optional, List +from typing import Any, Iterator, List, Optional, Sequence, Union from openeo.util import in_interactive_mode @@ -116,13 +118,13 @@ def get(self, key: Union[str, Sequence[str]], default=None) -> Any: # TODO: option to cast/convert to certain type? return self._config.get(self._key(key), default) - def load_ini_file(self, path: Union[str, Path]) -> "ClientConfig": + def load_ini_file(self, path: Union[str, Path]) -> ClientConfig: cp = ConfigParser() read_ok = cp.read(path) self._sources.extend(read_ok) return self.load_config_parser(cp) - def load_config_parser(self, parser: ConfigParser) -> "ClientConfig": + def load_config_parser(self, parser: ConfigParser) -> ClientConfig: for section in parser.sections(): for option, value in parser.items(section=section): self._set(key=(section, option), value=value) diff --git a/openeo/extra/job_management.py b/openeo/extra/job_management.py index 2dedf64fb..3e67fc1f3 100644 --- a/openeo/extra/job_management.py +++ b/openeo/extra/job_management.py @@ -1,11 +1,10 @@ -import collections import contextlib import datetime import json import logging import time from pathlib import Path -from typing import Callable, Dict, Optional, Union +from typing import Callable, Dict, NamedTuple, Optional, Union import pandas as pd import requests @@ -20,8 +19,13 @@ _log = logging.getLogger(__name__) -# Container for backend info/settings -_Backend = collections.namedtuple("_Backend", ["get_connection", "parallel_jobs"]) +class _Backend(NamedTuple): + """Container for backend info/settings""" + + # callable to create a backend connection + get_connection: Callable[[], Connection] + # Maximum number of jobs to allow in parallel on a backend + parallel_jobs: int MAX_RETRIES = 5 diff --git a/openeo/extra/spectral_indices/spectral_indices.py b/openeo/extra/spectral_indices/spectral_indices.py index 9e3509a6e..06b9dde89 100644 --- a/openeo/extra/spectral_indices/spectral_indices.py +++ b/openeo/extra/spectral_indices/spectral_indices.py @@ -1,12 +1,17 @@ import json -import pkg_resources from typing import Dict, List import numpy as np -from openeo.processes import ProcessBuilder, array_modify, array_create +from openeo.processes import ProcessBuilder, array_create, array_modify from openeo.rest.datacube import DataCube +try: + import importlib_resources +except ImportError: + import importlib.resources as importlib_resources + + BAND_MAPPING_LANDSAT457 = { "B1": "B", "B2": "G", @@ -89,8 +94,9 @@ def load_indices() -> Dict[str, dict]: "resources/awesome-spectral-indices/spectral-indices-dict.json", "resources/extra-indices-dict.json", ]: - with pkg_resources.resource_stream("openeo.extra.spectral_indices", path) as stream: - specs.update(json.load(stream)["SpectralIndices"]) + with importlib_resources.files("openeo.extra.spectral_indices") / path as resource_path: 
+ data = json.loads(resource_path.read_text(encoding="utf8")) + specs.update(data["SpectralIndices"]) return specs diff --git a/openeo/internal/graph_building.py b/openeo/internal/graph_building.py index 085596f05..31dc8b99e 100644 --- a/openeo/internal/graph_building.py +++ b/openeo/internal/graph_building.py @@ -2,6 +2,9 @@ Functionality for abstracting, building, manipulating and processing openEO process graphs. """ + +from __future__ import annotations + import abc import collections import json @@ -78,7 +81,7 @@ class _FromNodeMixin(abc.ABC): """Mixin for classes that want to hook into the generation of a "from_node" reference.""" @abc.abstractmethod - def from_node(self) -> "PGNode": + def from_node(self) -> PGNode: # TODO: "from_node" is a bit a confusing name: # it refers to the "from_node" node reference in openEO process graphs, # but as a method name here it reads like "construct from PGNode", @@ -203,7 +206,7 @@ def to_process_graph_argument(value: Union['PGNode', str, dict]) -> dict: raise ValueError(value) @staticmethod - def from_flat_graph(flat_graph: dict, parameters: Optional[dict] = None) -> 'PGNode': + def from_flat_graph(flat_graph: dict, parameters: Optional[dict] = None) -> PGNode: """Unflatten a given flat dict representation of a process graph and return result node.""" return PGNodeGraphUnflattener.unflatten(flat_graph=flat_graph, parameters=parameters) @@ -259,7 +262,7 @@ def dimension(self): def reducer_process_graph(self) -> PGNode: return self.arguments["reducer"]["process_graph"] - def clone_with_new_reducer(self, reducer: PGNode) -> 'ReduceNode': + def clone_with_new_reducer(self, reducer: PGNode) -> ReduceNode: """Copy/clone this reduce node: keep input reference, but use new reducer""" return ReduceNode( data=self.arguments["data"]["from_node"], diff --git a/openeo/internal/process_graph_visitor.py b/openeo/internal/process_graph_visitor.py index 6512b3fb7..8315eda40 100644 --- a/openeo/internal/process_graph_visitor.py +++ b/openeo/internal/process_graph_visitor.py @@ -1,6 +1,8 @@ +from __future__ import annotations + import json from abc import ABC -from typing import Union, Tuple, Any +from typing import Any, Tuple, Union from openeo.internal.warnings import deprecated from openeo.rest import OpenEoClientException @@ -63,7 +65,7 @@ def resolve_from_node(process_graph, node, from_node): raise ProcessGraphVisitException("No result node in process graph: " + dump[:1000]) return result_node - def accept_process_graph(self, graph: dict) -> 'ProcessGraphVisitor': + def accept_process_graph(self, graph: dict) -> ProcessGraphVisitor: """ Traverse a (flat) process graph diff --git a/openeo/internal/processes/builder.py b/openeo/internal/processes/builder.py index 15d0c2b74..24816e46a 100644 --- a/openeo/internal/processes/builder.py +++ b/openeo/internal/processes/builder.py @@ -1,9 +1,9 @@ import inspect import logging import warnings -from typing import Union, Callable, List, Optional, Any, Dict +from typing import Any, Callable, Dict, List, Optional, Union -from openeo.internal.graph_building import PGNode, _FromNodeMixin, FlatGraphableMixin +from openeo.internal.graph_building import FlatGraphableMixin, PGNode, _FromNodeMixin from openeo.rest import OpenEoClientException UNSET = object() diff --git a/openeo/internal/processes/generator.py b/openeo/internal/processes/generator.py index b13eaa757..77579a222 100644 --- a/openeo/internal/processes/generator.py +++ b/openeo/internal/processes/generator.py @@ -4,7 +4,7 @@ import sys import textwrap from pathlib 
import Path -from typing import Union, List, Iterator, Optional +from typing import Iterator, List, Optional, Union from openeo.internal.processes.parse import Process, parse_all_from_dir @@ -112,7 +112,10 @@ def collect_processes(sources: List[Union[Path, str]]) -> List[Process]: def generate_process_py(processes: List[Process], output=sys.stdout, argv=None): oo_src = textwrap.dedent( """ + from __future__ import annotations + import builtins + from openeo.internal.processes.builder import ProcessBuilderBase, UNSET from openeo.internal.documentation import openeo_process @@ -125,47 +128,47 @@ class ProcessBuilder(ProcessBuilderBase): _ITERATION_LIMIT = 100 @openeo_process(process_id="add", mode="operator") - def __add__(self, other) -> 'ProcessBuilder': + def __add__(self, other) -> ProcessBuilder: return self.add(other) @openeo_process(process_id="add", mode="operator") - def __radd__(self, other) -> 'ProcessBuilder': + def __radd__(self, other) -> ProcessBuilder: return add(other, self) @openeo_process(process_id="subtract", mode="operator") - def __sub__(self, other) -> 'ProcessBuilder': + def __sub__(self, other) -> ProcessBuilder: return self.subtract(other) @openeo_process(process_id="subtract", mode="operator") - def __rsub__(self, other) -> 'ProcessBuilder': + def __rsub__(self, other) -> ProcessBuilder: return subtract(other, self) @openeo_process(process_id="multiply", mode="operator") - def __mul__(self, other) -> 'ProcessBuilder': + def __mul__(self, other) -> ProcessBuilder: return self.multiply(other) @openeo_process(process_id="multiply", mode="operator") - def __rmul__(self, other) -> 'ProcessBuilder': + def __rmul__(self, other) -> ProcessBuilder: return multiply(other, self) @openeo_process(process_id="divide", mode="operator") - def __truediv__(self, other) -> 'ProcessBuilder': + def __truediv__(self, other) -> ProcessBuilder: return self.divide(other) @openeo_process(process_id="divide", mode="operator") - def __rtruediv__(self, other) -> 'ProcessBuilder': + def __rtruediv__(self, other) -> ProcessBuilder: return divide(other, self) @openeo_process(process_id="multiply", mode="operator") - def __neg__(self) -> 'ProcessBuilder': + def __neg__(self) -> ProcessBuilder: return self.multiply(-1) @openeo_process(process_id="power", mode="operator") - def __pow__(self, other) -> 'ProcessBuilder': + def __pow__(self, other) -> ProcessBuilder: return self.power(other) @openeo_process(process_id="array_element", mode="operator") - def __getitem__(self, key) -> 'ProcessBuilder': + def __getitem__(self, key) -> ProcessBuilder: if isinstance(key, builtins.int): if key > self._ITERATION_LIMIT: raise RuntimeError( @@ -178,27 +181,27 @@ def __getitem__(self, key) -> 'ProcessBuilder': return self.array_element(label=key) @openeo_process(process_id="eq", mode="operator") - def __eq__(self, other) -> 'ProcessBuilder': + def __eq__(self, other) -> ProcessBuilder: return eq(self, other) @openeo_process(process_id="neq", mode="operator") - def __ne__(self, other) -> 'ProcessBuilder': + def __ne__(self, other) -> ProcessBuilder: return neq(self, other) @openeo_process(process_id="lt", mode="operator") - def __lt__(self, other) -> 'ProcessBuilder': + def __lt__(self, other) -> ProcessBuilder: return lt(self, other) @openeo_process(process_id="lte", mode="operator") - def __le__(self, other) -> 'ProcessBuilder': + def __le__(self, other) -> ProcessBuilder: return lte(self, other) @openeo_process(process_id="ge", mode="operator") - def __ge__(self, other) -> 'ProcessBuilder': + def __ge__(self, 
other) -> ProcessBuilder: return gte(self, other) @openeo_process(process_id="gt", mode="operator") - def __gt__(self, other) -> 'ProcessBuilder': + def __gt__(self, other) -> ProcessBuilder: return gt(self, other) """ @@ -223,7 +226,7 @@ def __gt__(self, other) -> 'ProcessBuilder': oo_mode=True, body_template="return {safe_name}({args})", optional_default="UNSET", - return_type_hint="'ProcessBuilder'", + return_type_hint="ProcessBuilder", decorator="@openeo_process", ) for p in processes: diff --git a/openeo/internal/processes/parse.py b/openeo/internal/processes/parse.py index 624221161..de6f25dbc 100644 --- a/openeo/internal/processes/parse.py +++ b/openeo/internal/processes/parse.py @@ -2,9 +2,11 @@ Functionality and tools to process openEO processes. For example: parse a bunch of JSON descriptions and generate Python (stub) functions. """ +from __future__ import annotations + import json from pathlib import Path -from typing import List, Union, Iterator +from typing import Iterator, List, Union import requests @@ -16,7 +18,7 @@ def __init__(self, schema: Union[dict, list]): self.schema = schema @classmethod - def from_dict(cls, data: dict) -> 'Schema': + def from_dict(cls, data: dict) -> Schema: return cls(schema=data) @@ -34,7 +36,7 @@ def __init__(self, name: str, description: str, schema: Schema, default=NO_DEFAU self.optional = optional @classmethod - def from_dict(cls, data: dict) -> 'Parameter': + def from_dict(cls, data: dict) -> Parameter: return cls( name=data["name"], description=data["description"], schema=Schema.from_dict(data["schema"]), default=data.get("default", cls.NO_DEFAULT), optional=data.get("optional", False) @@ -52,7 +54,7 @@ def __init__(self, description: str, schema: Schema): self.schema = schema @classmethod - def from_dict(cls, data: dict) -> 'Returns': + def from_dict(cls, data: dict) -> Returns: return cls(description=data["description"], schema=Schema.from_dict(data["schema"])) @@ -71,7 +73,7 @@ def __init__( # TODO: more properties? 
@classmethod - def from_dict(cls, data: dict) -> 'Process': + def from_dict(cls, data: dict) -> Process: """Construct openEO process from dictionary values""" return cls( id=data["id"], @@ -82,17 +84,17 @@ def from_dict(cls, data: dict) -> 'Process': ) @classmethod - def from_json(cls, data: str) -> 'Process': + def from_json(cls, data: str) -> Process: """Parse openEO process JSON description.""" return cls.from_dict(json.loads(data)) @classmethod - def from_json_url(cls, url: str) -> 'Process': + def from_json_url(cls, url: str) -> Process: """Parse openEO process JSON description from given URL.""" return cls.from_dict(requests.get(url).json()) @classmethod - def from_json_file(cls, path: Union[str, Path]) -> 'Process': + def from_json_file(cls, path: Union[str, Path]) -> Process: """Parse openEO process JSON description file.""" with Path(path).open("r") as f: return cls.from_json(f.read()) diff --git a/openeo/internal/warnings.py b/openeo/internal/warnings.py index 0d5a4bfaa..df083753a 100644 --- a/openeo/internal/warnings.py +++ b/openeo/internal/warnings.py @@ -2,6 +2,7 @@ import inspect import warnings from typing import Callable, Optional + from deprecated.sphinx import deprecated as _deprecated diff --git a/openeo/local/connection.py b/openeo/local/connection.py index 8bc652034..635e85247 100644 --- a/openeo/local/connection.py +++ b/openeo/local/connection.py @@ -11,9 +11,19 @@ from openeo.internal.graph_building import PGNode, as_flat_graph from openeo.internal.jupyter import VisualDict, VisualList -from openeo.local.collections import _get_geotiff_metadata, _get_local_collections, _get_netcdf_zarr_metadata +from openeo.local.collections import ( + _get_geotiff_metadata, + _get_local_collections, + _get_netcdf_zarr_metadata, +) from openeo.local.processing import PROCESS_REGISTRY -from openeo.metadata import Band, BandDimension, CollectionMetadata, SpatialDimension, TemporalDimension +from openeo.metadata import ( + Band, + BandDimension, + CollectionMetadata, + SpatialDimension, + TemporalDimension, +) from openeo.rest.datacube import DataCube _log = logging.getLogger(__name__) @@ -236,7 +246,7 @@ def load_stac( TemporalDimension(name=xarray_cube.openeo.temporal_dims[0], extent=[]), BandDimension( name=xarray_cube.openeo.band_dims[0], - bands=[Band(x) for x in xarray_cube[xarray_cube.openeo.band_dims[0]].values], + bands=[Band(name=x) for x in xarray_cube[xarray_cube.openeo.band_dims[0]].values], ), ], ) diff --git a/openeo/metadata.py b/openeo/metadata.py index 4e9dbf36b..43e39d206 100644 --- a/openeo/metadata.py +++ b/openeo/metadata.py @@ -1,11 +1,11 @@ +from __future__ import annotations + import logging import warnings -from collections import namedtuple -from typing import List, Union, Tuple, Callable, Any +from typing import Any, Callable, List, NamedTuple, Optional, Tuple, Union -from openeo.util import deep_get from openeo.internal.jupyter import render_component - +from openeo.util import deep_get _log = logging.getLogger(__name__) @@ -34,11 +34,11 @@ def __repr__(self): def __eq__(self, other): return self.__class__ == other.__class__ and self.__dict__ == other.__dict__ - def rename(self, name) -> 'Dimension': + def rename(self, name) -> Dimension: """Create new dimension with new name.""" return Dimension(type=self.type, name=name) - def rename_labels(self, target, source) -> 'Dimension': + def rename_labels(self, target, source) -> Dimension: """ Rename labels, if the type of dimension allows it. 
@@ -69,7 +69,7 @@ def __init__( self.crs = crs self.step = step - def rename(self, name) -> 'Dimension': + def rename(self, name) -> Dimension: return SpatialDimension(name=name, extent=self.extent, crs=self.crs, step=self.step) @@ -78,13 +78,23 @@ def __init__(self, name: str, extent: Union[Tuple[str, str], List[str]]): super().__init__(type="temporal", name=name) self.extent = extent - def rename(self, name) -> 'Dimension': + def rename(self, name) -> Dimension: return TemporalDimension(name=name, extent=self.extent) -# Simple container class for band metadata (incl. wavelength in micrometer) -Band = namedtuple("Band", ["name", "common_name", "wavelength_um", "aliases", "gsd"]) -Band.__new__.__defaults__ = (None, None, None, None,) +class Band(NamedTuple): + """ + Simple container class for band metadata. + Based on https://github.com/stac-extensions/eo#band-object + """ + + name: str + common_name: Optional[str] = None + # wavelength in micrometer + wavelength_um: Optional[float] = None + aliases: Optional[List[str]] = None + # "openeo:gsd" field (https://github.com/Open-EO/openeo-stac-extensions#GSD-Object) + gsd: Optional[dict] = None class BandDimension(Dimension): @@ -144,7 +154,7 @@ def band_name(self, band: Union[str, int], allow_common=True) -> str: return self.band_names[band] raise ValueError("Invalid band name/index {b!r}. Valid names: {n!r}".format(b=band, n=self.band_names)) - def filter_bands(self, bands: List[Union[int, str]]) -> 'BandDimension': + def filter_bands(self, bands: List[Union[int, str]]) -> BandDimension: """ Construct new BandDimension with subset of bands, based on given band indices or (common) names @@ -154,7 +164,7 @@ def filter_bands(self, bands: List[Union[int, str]]) -> 'BandDimension': bands=[self.bands[self.band_index(b)] for b in bands] ) - def append_band(self, band: Band) -> 'BandDimension': + def append_band(self, band: Band) -> BandDimension: """Create new BandDimension with appended band.""" if band.name in self.band_names: raise ValueError("Duplicate band {b!r}".format(b=band)) @@ -164,7 +174,7 @@ def append_band(self, band: Band) -> 'BandDimension': bands=self.bands + [band] ) - def rename_labels(self, target, source) -> 'Dimension': + def rename_labels(self, target, source) -> Dimension: if source: if len(target) != len(source): raise ValueError( @@ -175,10 +185,15 @@ def rename_labels(self, target, source) -> 'Dimension': for old_name, new_name in zip(source, target): band_index = self.band_index(old_name) the_band = new_bands[band_index] - new_bands[band_index] = Band(new_name, the_band.common_name, the_band.wavelength_um, the_band.aliases, - the_band.gsd) + new_bands[band_index] = Band( + name=new_name, + common_name=the_band.common_name, + wavelength_um=the_band.wavelength_um, + aliases=the_band.aliases, + gsd=the_band.gsd, + ) else: - new_bands = [Band(name=n, common_name=None, wavelength_um=None) for n in target] + new_bands = [Band(name=n) for n in target] return BandDimension(name=self.name, bands=new_bands) @@ -217,7 +232,7 @@ def __init__(self, metadata: dict, dimensions: List[Dimension] = None): self._temporal_dimension = dim @classmethod - def get_or_create(cls, metadata: Union[dict, 'CollectionMetadata', None]) -> 'CollectionMetadata': + def get_or_create(cls, metadata: Union[dict, "CollectionMetadata", None]) -> CollectionMetadata: """Get or create CollectionMetadata from given argument.""" if isinstance(metadata, cls): return metadata @@ -229,7 +244,7 @@ def __eq__(self, o: Any) -> bool: def _clone_and_update( self, metadata: 
dict = None, dimensions: List[Dimension] = None, **kwargs - ) -> 'CollectionMetadata': + ) -> CollectionMetadata: """Create a new instance (of same class) with copied/updated fields.""" cls = type(self) if dimensions == None: @@ -273,7 +288,7 @@ def _parse_dimensions(cls, spec: dict, complain: Callable[[str], None] = warning elif dim_type == "temporal": dimensions.append(TemporalDimension(name=name, extent=info.get("extent"))) elif dim_type == "bands": - bands = [Band(b, None, None) for b in info.get("values", [])] + bands = [Band(name=b) for b in info.get("values", [])] if not bands: complain("No band names in dimension {d!r}".format(d=name)) dimensions.append(BandDimension(name=name, bands=bands)) @@ -289,8 +304,16 @@ def _parse_dimensions(cls, spec: dict, complain: Callable[[str], None] = warning ) if eo_bands: # center_wavelength is in micrometer according to spec - bands_detailed = [Band(b['name'], b.get('common_name'), b.get('center_wavelength'), b.get('aliases'), - b.get('openeo:gsd')) for b in eo_bands] + bands_detailed = [ + Band( + name=b["name"], + common_name=b.get("common_name"), + wavelength_um=b.get("center_wavelength"), + aliases=b.get("aliases"), + gsd=b.get("openeo:gsd"), + ) + for b in eo_bands + ] # Update band dimension with more detailed info band_dimensions = [d for d in dimensions if d.type == "bands"] if len(band_dimensions) == 1: @@ -376,7 +399,7 @@ def band_common_names(self) -> List[str]: def get_band_index(self, band: Union[int, str]) -> int: return self.band_dimension.band_index(band) - def filter_bands(self, band_names: List[Union[int, str]]) -> 'CollectionMetadata': + def filter_bands(self, band_names: List[Union[int, str]]) -> CollectionMetadata: """ Create new `CollectionMetadata` with filtered band dimension :param band_names: list of band names/indices to keep @@ -388,7 +411,7 @@ def filter_bands(self, band_names: List[Union[int, str]]) -> 'CollectionMetadata for d in self._dimensions ]) - def append_band(self, band: Band) -> 'CollectionMetadata': + def append_band(self, band: Band) -> CollectionMetadata: """ Create new `CollectionMetadata` with given band added to band dimension. """ @@ -398,7 +421,7 @@ def append_band(self, band: Band) -> 'CollectionMetadata': for d in self._dimensions ]) - def rename_labels(self, dimension: str, target: list, source: list = None) -> 'CollectionMetadata': + def rename_labels(self, dimension: str, target: list, source: list = None) -> CollectionMetadata: """ Renames the labels of the specified dimension from source to target. @@ -415,7 +438,7 @@ def rename_labels(self, dimension: str, target: list, source: list = None) -> 'C return self._clone_and_update(dimensions=new_dimensions) - def rename_dimension(self, source: str, target: str) -> 'CollectionMetadata': + def rename_dimension(self, source: str, target: str) -> CollectionMetadata: """ Rename source dimension into target, preserving other properties """ @@ -426,7 +449,7 @@ def rename_dimension(self, source: str, target: str) -> 'CollectionMetadata': return self._clone_and_update(dimensions=new_dimensions) - def reduce_dimension(self, dimension_name: str) -> 'CollectionMetadata': + def reduce_dimension(self, dimension_name: str) -> CollectionMetadata: """Create new metadata object by collapsing/reducing a dimension.""" # TODO: option to keep reduced dimension (with a single value)? 
self.assert_valid_dimension(dimension_name) @@ -434,12 +457,12 @@ def reduce_dimension(self, dimension_name: str) -> 'CollectionMetadata': dimensions = self._dimensions[:loc] + self._dimensions[loc + 1:] return self._clone_and_update(dimensions=dimensions) - def add_dimension(self, name: str, label: Union[str, float], type: str = None) -> 'CollectionMetadata': + def add_dimension(self, name: str, label: Union[str, float], type: str = None) -> CollectionMetadata: """Create new metadata object with added dimension""" if any(d.name == name for d in self._dimensions): raise DimensionAlreadyExistsException(f"Dimension with name {name!r} already exists") if type == "bands": - dim = BandDimension(name=name, bands=[Band(label, None, None)]) + dim = BandDimension(name=name, bands=[Band(name=label)]) elif type == "spatial": dim = SpatialDimension(name=name, extent=[label, label]) elif type == "temporal": @@ -448,7 +471,7 @@ def add_dimension(self, name: str, label: Union[str, float], type: str = None) - dim = Dimension(type=type or "other", name=name) return self._clone_and_update(dimensions=self._dimensions + [dim]) - def drop_dimension(self, name: str = None) -> 'CollectionMetadata': + def drop_dimension(self, name: str = None) -> CollectionMetadata: """Drop dimension with given name""" dimension_names = self.dimension_names() if name not in dimension_names: diff --git a/openeo/processes.py b/openeo/processes.py index 517f2ab4c..d85f090fb 100644 --- a/openeo/processes.py +++ b/openeo/processes.py @@ -3,11 +3,14 @@ # It is automatically generated. # Used command line arguments: # openeo/internal/processes/generator.py specs/openeo-processes specs/openeo-processes/proposals --output openeo/processes.py -# Generated on 2023-03-15 +# Generated on 2023-08-28 + +from __future__ import annotations import builtins -from openeo.internal.processes.builder import ProcessBuilderBase, UNSET + from openeo.internal.documentation import openeo_process +from openeo.internal.processes.builder import UNSET, ProcessBuilderBase class ProcessBuilder(ProcessBuilderBase): @@ -18,47 +21,47 @@ class ProcessBuilder(ProcessBuilderBase): _ITERATION_LIMIT = 100 @openeo_process(process_id="add", mode="operator") - def __add__(self, other) -> 'ProcessBuilder': + def __add__(self, other) -> ProcessBuilder: return self.add(other) @openeo_process(process_id="add", mode="operator") - def __radd__(self, other) -> 'ProcessBuilder': + def __radd__(self, other) -> ProcessBuilder: return add(other, self) @openeo_process(process_id="subtract", mode="operator") - def __sub__(self, other) -> 'ProcessBuilder': + def __sub__(self, other) -> ProcessBuilder: return self.subtract(other) @openeo_process(process_id="subtract", mode="operator") - def __rsub__(self, other) -> 'ProcessBuilder': + def __rsub__(self, other) -> ProcessBuilder: return subtract(other, self) @openeo_process(process_id="multiply", mode="operator") - def __mul__(self, other) -> 'ProcessBuilder': + def __mul__(self, other) -> ProcessBuilder: return self.multiply(other) @openeo_process(process_id="multiply", mode="operator") - def __rmul__(self, other) -> 'ProcessBuilder': + def __rmul__(self, other) -> ProcessBuilder: return multiply(other, self) @openeo_process(process_id="divide", mode="operator") - def __truediv__(self, other) -> 'ProcessBuilder': + def __truediv__(self, other) -> ProcessBuilder: return self.divide(other) @openeo_process(process_id="divide", mode="operator") - def __rtruediv__(self, other) -> 'ProcessBuilder': + def __rtruediv__(self, other) -> 
ProcessBuilder: return divide(other, self) @openeo_process(process_id="multiply", mode="operator") - def __neg__(self) -> 'ProcessBuilder': + def __neg__(self) -> ProcessBuilder: return self.multiply(-1) @openeo_process(process_id="power", mode="operator") - def __pow__(self, other) -> 'ProcessBuilder': + def __pow__(self, other) -> ProcessBuilder: return self.power(other) @openeo_process(process_id="array_element", mode="operator") - def __getitem__(self, key) -> 'ProcessBuilder': + def __getitem__(self, key) -> ProcessBuilder: if isinstance(key, builtins.int): if key > self._ITERATION_LIMIT: raise RuntimeError( @@ -71,31 +74,31 @@ def __getitem__(self, key) -> 'ProcessBuilder': return self.array_element(label=key) @openeo_process(process_id="eq", mode="operator") - def __eq__(self, other) -> 'ProcessBuilder': + def __eq__(self, other) -> ProcessBuilder: return eq(self, other) @openeo_process(process_id="neq", mode="operator") - def __ne__(self, other) -> 'ProcessBuilder': + def __ne__(self, other) -> ProcessBuilder: return neq(self, other) @openeo_process(process_id="lt", mode="operator") - def __lt__(self, other) -> 'ProcessBuilder': + def __lt__(self, other) -> ProcessBuilder: return lt(self, other) @openeo_process(process_id="lte", mode="operator") - def __le__(self, other) -> 'ProcessBuilder': + def __le__(self, other) -> ProcessBuilder: return lte(self, other) @openeo_process(process_id="ge", mode="operator") - def __ge__(self, other) -> 'ProcessBuilder': + def __ge__(self, other) -> ProcessBuilder: return gte(self, other) @openeo_process(process_id="gt", mode="operator") - def __gt__(self, other) -> 'ProcessBuilder': + def __gt__(self, other) -> ProcessBuilder: return gt(self, other) @openeo_process - def absolute(self) -> 'ProcessBuilder': + def absolute(self) -> ProcessBuilder: """ Absolute value @@ -106,7 +109,7 @@ def absolute(self) -> 'ProcessBuilder': return absolute(x=self) @openeo_process - def add(self, y) -> 'ProcessBuilder': + def add(self, y) -> ProcessBuilder: """ Addition of two numbers @@ -118,7 +121,7 @@ def add(self, y) -> 'ProcessBuilder': return add(x=self, y=y) @openeo_process - def add_dimension(self, name, label, type=UNSET) -> 'ProcessBuilder': + def add_dimension(self, name, label, type=UNSET) -> ProcessBuilder: """ Add a new dimension @@ -133,7 +136,7 @@ def add_dimension(self, name, label, type=UNSET) -> 'ProcessBuilder': return add_dimension(data=self, name=name, label=label, type=type) @openeo_process - def aggregate_spatial(self, geometries, reducer, target_dimension=UNSET, context=UNSET) -> 'ProcessBuilder': + def aggregate_spatial(self, geometries, reducer, target_dimension=UNSET, context=UNSET) -> ProcessBuilder: """ Zonal statistics for geometries @@ -174,7 +177,7 @@ def aggregate_spatial(self, geometries, reducer, target_dimension=UNSET, context return aggregate_spatial(data=self, geometries=geometries, reducer=reducer, target_dimension=target_dimension, context=context) @openeo_process - def aggregate_spatial_window(self, reducer, size, boundary=UNSET, align=UNSET, context=UNSET) -> 'ProcessBuilder': + def aggregate_spatial_window(self, reducer, size, boundary=UNSET, align=UNSET, context=UNSET) -> ProcessBuilder: """ Zonal statistics for rectangular windows @@ -203,7 +206,7 @@ def aggregate_spatial_window(self, reducer, size, boundary=UNSET, align=UNSET, c return aggregate_spatial_window(data=self, reducer=reducer, size=size, boundary=boundary, align=align, context=context) @openeo_process - def aggregate_temporal(self, intervals, reducer, 
labels=UNSET, dimension=UNSET, context=UNSET) -> 'ProcessBuilder': + def aggregate_temporal(self, intervals, reducer, labels=UNSET, dimension=UNSET, context=UNSET) -> ProcessBuilder: """ Temporal aggregations @@ -237,7 +240,7 @@ def aggregate_temporal(self, intervals, reducer, labels=UNSET, dimension=UNSET, return aggregate_temporal(data=self, intervals=intervals, reducer=reducer, labels=labels, dimension=dimension, context=context) @openeo_process - def aggregate_temporal_period(self, period, reducer, dimension=UNSET, context=UNSET) -> 'ProcessBuilder': + def aggregate_temporal_period(self, period, reducer, dimension=UNSET, context=UNSET) -> ProcessBuilder: """ Temporal aggregations based on calendar hierarchies @@ -282,7 +285,7 @@ def aggregate_temporal_period(self, period, reducer, dimension=UNSET, context=UN return aggregate_temporal_period(data=self, period=period, reducer=reducer, dimension=dimension, context=context) @openeo_process - def all(self, ignore_nodata=UNSET) -> 'ProcessBuilder': + def all(self, ignore_nodata=UNSET) -> ProcessBuilder: """ Are all of the values true? @@ -294,7 +297,7 @@ def all(self, ignore_nodata=UNSET) -> 'ProcessBuilder': return all(data=self, ignore_nodata=ignore_nodata) @openeo_process - def and_(self, y) -> 'ProcessBuilder': + def and_(self, y) -> ProcessBuilder: """ Logical AND @@ -306,7 +309,7 @@ def and_(self, y) -> 'ProcessBuilder': return and_(x=self, y=y) @openeo_process - def anomaly(self, normals, period) -> 'ProcessBuilder': + def anomaly(self, normals, period) -> ProcessBuilder: """ Compute anomalies @@ -346,7 +349,7 @@ def anomaly(self, normals, period) -> 'ProcessBuilder': return anomaly(data=self, normals=normals, period=period) @openeo_process - def any(self, ignore_nodata=UNSET) -> 'ProcessBuilder': + def any(self, ignore_nodata=UNSET) -> ProcessBuilder: """ Is at least one value true? 
@@ -358,7 +361,7 @@ def any(self, ignore_nodata=UNSET) -> 'ProcessBuilder': return any(data=self, ignore_nodata=ignore_nodata) @openeo_process - def apply(self, process, context=UNSET) -> 'ProcessBuilder': + def apply(self, process, context=UNSET) -> ProcessBuilder: """ Apply a process to each pixel @@ -374,7 +377,7 @@ def apply(self, process, context=UNSET) -> 'ProcessBuilder': return apply(data=self, process=process, context=context) @openeo_process - def apply_dimension(self, process, dimension, target_dimension=UNSET, context=UNSET) -> 'ProcessBuilder': + def apply_dimension(self, process, dimension, target_dimension=UNSET, context=UNSET) -> ProcessBuilder: """ Apply a process to pixels along a dimension @@ -411,7 +414,7 @@ def apply_dimension(self, process, dimension, target_dimension=UNSET, context=UN return apply_dimension(data=self, process=process, dimension=dimension, target_dimension=target_dimension, context=context) @openeo_process - def apply_kernel(self, kernel, factor=UNSET, border=UNSET, replace_invalid=UNSET) -> 'ProcessBuilder': + def apply_kernel(self, kernel, factor=UNSET, border=UNSET, replace_invalid=UNSET) -> ProcessBuilder: """ Apply a spatial convolution with a kernel @@ -438,7 +441,7 @@ def apply_kernel(self, kernel, factor=UNSET, border=UNSET, replace_invalid=UNSET return apply_kernel(data=self, kernel=kernel, factor=factor, border=border, replace_invalid=replace_invalid) @openeo_process - def apply_neighborhood(self, process, size, overlap=UNSET, context=UNSET) -> 'ProcessBuilder': + def apply_neighborhood(self, process, size, overlap=UNSET, context=UNSET) -> ProcessBuilder: """ Apply a process to pixels in a n-dimensional neighborhood @@ -462,7 +465,7 @@ def apply_neighborhood(self, process, size, overlap=UNSET, context=UNSET) -> 'Pr return apply_neighborhood(data=self, process=process, size=size, overlap=overlap, context=context) @openeo_process - def arccos(self) -> 'ProcessBuilder': + def arccos(self) -> ProcessBuilder: """ Inverse cosine @@ -473,7 +476,7 @@ def arccos(self) -> 'ProcessBuilder': return arccos(x=self) @openeo_process - def arcosh(self) -> 'ProcessBuilder': + def arcosh(self) -> ProcessBuilder: """ Inverse hyperbolic cosine @@ -484,7 +487,7 @@ def arcosh(self) -> 'ProcessBuilder': return arcosh(x=self) @openeo_process - def arcsin(self) -> 'ProcessBuilder': + def arcsin(self) -> ProcessBuilder: """ Inverse sine @@ -495,7 +498,7 @@ def arcsin(self) -> 'ProcessBuilder': return arcsin(x=self) @openeo_process - def arctan(self) -> 'ProcessBuilder': + def arctan(self) -> ProcessBuilder: """ Inverse tangent @@ -506,7 +509,7 @@ def arctan(self) -> 'ProcessBuilder': return arctan(x=self) @openeo_process - def arctan2(self, x) -> 'ProcessBuilder': + def arctan2(self, x) -> ProcessBuilder: """ Inverse tangent of two numbers @@ -518,7 +521,7 @@ def arctan2(self, x) -> 'ProcessBuilder': return arctan2(y=self, x=x) @openeo_process - def ard_normalized_radar_backscatter(self, elevation_model=UNSET, contributing_area=UNSET, ellipsoid_incidence_angle=UNSET, noise_removal=UNSET, options=UNSET) -> 'ProcessBuilder': + def ard_normalized_radar_backscatter(self, elevation_model=UNSET, contributing_area=UNSET, ellipsoid_incidence_angle=UNSET, noise_removal=UNSET, options=UNSET) -> ProcessBuilder: """ CARD4L compliant SAR NRB generation @@ -544,7 +547,7 @@ def ard_normalized_radar_backscatter(self, elevation_model=UNSET, contributing_a return ard_normalized_radar_backscatter(data=self, elevation_model=elevation_model, contributing_area=contributing_area, 
ellipsoid_incidence_angle=ellipsoid_incidence_angle, noise_removal=noise_removal, options=options) @openeo_process - def ard_surface_reflectance(self, atmospheric_correction_method, cloud_detection_method, elevation_model=UNSET, atmospheric_correction_options=UNSET, cloud_detection_options=UNSET) -> 'ProcessBuilder': + def ard_surface_reflectance(self, atmospheric_correction_method, cloud_detection_method, elevation_model=UNSET, atmospheric_correction_options=UNSET, cloud_detection_options=UNSET) -> ProcessBuilder: """ CARD4L compliant Surface Reflectance generation @@ -592,7 +595,7 @@ def ard_surface_reflectance(self, atmospheric_correction_method, cloud_detection return ard_surface_reflectance(data=self, atmospheric_correction_method=atmospheric_correction_method, cloud_detection_method=cloud_detection_method, elevation_model=elevation_model, atmospheric_correction_options=atmospheric_correction_options, cloud_detection_options=cloud_detection_options) @openeo_process - def array_append(self, value, label=UNSET) -> 'ProcessBuilder': + def array_append(self, value, label=UNSET) -> ProcessBuilder: """ Append a value to an array @@ -607,7 +610,7 @@ def array_append(self, value, label=UNSET) -> 'ProcessBuilder': return array_append(data=self, value=value, label=label) @openeo_process - def array_apply(self, process, context=UNSET) -> 'ProcessBuilder': + def array_apply(self, process, context=UNSET) -> ProcessBuilder: """ Apply a process to each array element @@ -623,7 +626,7 @@ def array_apply(self, process, context=UNSET) -> 'ProcessBuilder': return array_apply(data=self, process=process, context=context) @openeo_process - def array_concat(self, array2) -> 'ProcessBuilder': + def array_concat(self, array2) -> ProcessBuilder: """ Merge two arrays @@ -635,7 +638,7 @@ def array_concat(self, array2) -> 'ProcessBuilder': return array_concat(array1=self, array2=array2) @openeo_process - def array_contains(self, value) -> 'ProcessBuilder': + def array_contains(self, value) -> ProcessBuilder: """ Check whether the array contains a given value @@ -647,7 +650,7 @@ def array_contains(self, value) -> 'ProcessBuilder': return array_contains(data=self, value=value) @openeo_process - def array_create(self=UNSET, repeat=UNSET) -> 'ProcessBuilder': + def array_create(self=UNSET, repeat=UNSET) -> ProcessBuilder: """ Create an array @@ -660,7 +663,7 @@ def array_create(self=UNSET, repeat=UNSET) -> 'ProcessBuilder': return array_create(data=self, repeat=repeat) @openeo_process - def array_create_labeled(self, labels) -> 'ProcessBuilder': + def array_create_labeled(self, labels) -> ProcessBuilder: """ Create a labeled array @@ -672,7 +675,7 @@ def array_create_labeled(self, labels) -> 'ProcessBuilder': return array_create_labeled(data=self, labels=labels) @openeo_process - def array_element(self, index=UNSET, label=UNSET, return_nodata=UNSET) -> 'ProcessBuilder': + def array_element(self, index=UNSET, label=UNSET, return_nodata=UNSET) -> ProcessBuilder: """ Get an element from an array @@ -688,7 +691,7 @@ def array_element(self, index=UNSET, label=UNSET, return_nodata=UNSET) -> 'Proce return array_element(data=self, index=index, label=label, return_nodata=return_nodata) @openeo_process - def array_filter(self, condition, context=UNSET) -> 'ProcessBuilder': + def array_filter(self, condition, context=UNSET) -> ProcessBuilder: """ Filter an array based on a condition @@ -703,7 +706,7 @@ def array_filter(self, condition, context=UNSET) -> 'ProcessBuilder': return array_filter(data=self, condition=condition, 
context=context) @openeo_process - def array_find(self, value, reverse=UNSET) -> 'ProcessBuilder': + def array_find(self, value, reverse=UNSET) -> ProcessBuilder: """ Get the index for a value in an array @@ -718,7 +721,7 @@ def array_find(self, value, reverse=UNSET) -> 'ProcessBuilder': return array_find(data=self, value=value, reverse=reverse) @openeo_process - def array_find_label(self, label) -> 'ProcessBuilder': + def array_find_label(self, label) -> ProcessBuilder: """ Get the index for a label in a labeled array @@ -731,7 +734,7 @@ def array_find_label(self, label) -> 'ProcessBuilder': return array_find_label(data=self, label=label) @openeo_process - def array_interpolate_linear(self) -> 'ProcessBuilder': + def array_interpolate_linear(self) -> ProcessBuilder: """ One-dimensional linear interpolation for arrays @@ -745,7 +748,7 @@ def array_interpolate_linear(self) -> 'ProcessBuilder': return array_interpolate_linear(data=self) @openeo_process - def array_labels(self) -> 'ProcessBuilder': + def array_labels(self) -> ProcessBuilder: """ Get the labels for an array @@ -756,7 +759,7 @@ def array_labels(self) -> 'ProcessBuilder': return array_labels(data=self) @openeo_process - def array_modify(self, values, index, length=UNSET) -> 'ProcessBuilder': + def array_modify(self, values, index, length=UNSET) -> ProcessBuilder: """ Change the content of an array (remove, insert, update) @@ -777,7 +780,7 @@ def array_modify(self, values, index, length=UNSET) -> 'ProcessBuilder': return array_modify(data=self, values=values, index=index, length=length) @openeo_process - def arsinh(self) -> 'ProcessBuilder': + def arsinh(self) -> ProcessBuilder: """ Inverse hyperbolic sine @@ -788,7 +791,7 @@ def arsinh(self) -> 'ProcessBuilder': return arsinh(x=self) @openeo_process - def artanh(self) -> 'ProcessBuilder': + def artanh(self) -> ProcessBuilder: """ Inverse hyperbolic tangent @@ -799,7 +802,7 @@ def artanh(self) -> 'ProcessBuilder': return artanh(x=self) @openeo_process - def atmospheric_correction(self, method, elevation_model=UNSET, options=UNSET) -> 'ProcessBuilder': + def atmospheric_correction(self, method, elevation_model=UNSET, options=UNSET) -> ProcessBuilder: """ Apply atmospheric correction @@ -818,7 +821,7 @@ def atmospheric_correction(self, method, elevation_model=UNSET, options=UNSET) - return atmospheric_correction(data=self, method=method, elevation_model=elevation_model, options=options) @openeo_process - def between(self, min, max, exclude_max=UNSET) -> 'ProcessBuilder': + def between(self, min, max, exclude_max=UNSET) -> ProcessBuilder: """ Between comparison @@ -832,7 +835,7 @@ def between(self, min, max, exclude_max=UNSET) -> 'ProcessBuilder': return between(x=self, min=min, max=max, exclude_max=exclude_max) @openeo_process - def ceil(self) -> 'ProcessBuilder': + def ceil(self) -> ProcessBuilder: """ Round fractions up @@ -843,7 +846,7 @@ def ceil(self) -> 'ProcessBuilder': return ceil(x=self) @openeo_process - def climatological_normal(self, period, climatology_period=UNSET) -> 'ProcessBuilder': + def climatological_normal(self, period, climatology_period=UNSET) -> ProcessBuilder: """ Compute climatology normals @@ -874,7 +877,7 @@ def climatological_normal(self, period, climatology_period=UNSET) -> 'ProcessBui return climatological_normal(data=self, period=period, climatology_period=climatology_period) @openeo_process - def clip(self, min, max) -> 'ProcessBuilder': + def clip(self, min, max) -> ProcessBuilder: """ Clip a value between a minimum and a maximum @@ -889,7 +892,7 
@@ def clip(self, min, max) -> 'ProcessBuilder': return clip(x=self, min=min, max=max) @openeo_process - def cloud_detection(self, method, options=UNSET) -> 'ProcessBuilder': + def cloud_detection(self, method, options=UNSET) -> ProcessBuilder: """ Create cloud masks @@ -909,7 +912,7 @@ def cloud_detection(self, method, options=UNSET) -> 'ProcessBuilder': return cloud_detection(data=self, method=method, options=options) @openeo_process - def constant(self) -> 'ProcessBuilder': + def constant(self) -> ProcessBuilder: """ Define a constant value @@ -920,7 +923,7 @@ def constant(self) -> 'ProcessBuilder': return constant(x=self) @openeo_process - def cos(self) -> 'ProcessBuilder': + def cos(self) -> ProcessBuilder: """ Cosine @@ -931,7 +934,7 @@ def cos(self) -> 'ProcessBuilder': return cos(x=self) @openeo_process - def cosh(self) -> 'ProcessBuilder': + def cosh(self) -> ProcessBuilder: """ Hyperbolic cosine @@ -942,7 +945,7 @@ def cosh(self) -> 'ProcessBuilder': return cosh(x=self) @openeo_process - def count(self, condition=UNSET, context=UNSET) -> 'ProcessBuilder': + def count(self, condition=UNSET, context=UNSET) -> ProcessBuilder: """ Count the number of elements @@ -958,7 +961,7 @@ def count(self, condition=UNSET, context=UNSET) -> 'ProcessBuilder': return count(data=self, condition=condition, context=context) @openeo_process - def create_raster_cube(self) -> 'ProcessBuilder': + def create_raster_cube(self) -> ProcessBuilder: """ Create an empty raster data cube @@ -967,7 +970,7 @@ def create_raster_cube(self) -> 'ProcessBuilder': return create_raster_cube() @openeo_process - def cummax(self, ignore_nodata=UNSET) -> 'ProcessBuilder': + def cummax(self, ignore_nodata=UNSET) -> ProcessBuilder: """ Cumulative maxima @@ -981,7 +984,7 @@ def cummax(self, ignore_nodata=UNSET) -> 'ProcessBuilder': return cummax(data=self, ignore_nodata=ignore_nodata) @openeo_process - def cummin(self, ignore_nodata=UNSET) -> 'ProcessBuilder': + def cummin(self, ignore_nodata=UNSET) -> ProcessBuilder: """ Cumulative minima @@ -995,7 +998,7 @@ def cummin(self, ignore_nodata=UNSET) -> 'ProcessBuilder': return cummin(data=self, ignore_nodata=ignore_nodata) @openeo_process - def cumproduct(self, ignore_nodata=UNSET) -> 'ProcessBuilder': + def cumproduct(self, ignore_nodata=UNSET) -> ProcessBuilder: """ Cumulative products @@ -1009,7 +1012,7 @@ def cumproduct(self, ignore_nodata=UNSET) -> 'ProcessBuilder': return cumproduct(data=self, ignore_nodata=ignore_nodata) @openeo_process - def cumsum(self, ignore_nodata=UNSET) -> 'ProcessBuilder': + def cumsum(self, ignore_nodata=UNSET) -> ProcessBuilder: """ Cumulative sums @@ -1023,7 +1026,7 @@ def cumsum(self, ignore_nodata=UNSET) -> 'ProcessBuilder': return cumsum(data=self, ignore_nodata=ignore_nodata) @openeo_process - def date_shift(self, value, unit) -> 'ProcessBuilder': + def date_shift(self, value, unit) -> ProcessBuilder: """ Manipulates dates and times by addition or subtraction @@ -1046,7 +1049,7 @@ def date_shift(self, value, unit) -> 'ProcessBuilder': return date_shift(date=self, value=value, unit=unit) @openeo_process - def dimension_labels(self, dimension) -> 'ProcessBuilder': + def dimension_labels(self, dimension) -> ProcessBuilder: """ Get the dimension labels @@ -1058,7 +1061,7 @@ def dimension_labels(self, dimension) -> 'ProcessBuilder': return dimension_labels(data=self, dimension=dimension) @openeo_process - def divide(self, y) -> 'ProcessBuilder': + def divide(self, y) -> ProcessBuilder: """ Division of two numbers @@ -1070,7 +1073,7 @@ def 
divide(self, y) -> 'ProcessBuilder': return divide(x=self, y=y) @openeo_process - def drop_dimension(self, name) -> 'ProcessBuilder': + def drop_dimension(self, name) -> ProcessBuilder: """ Remove a dimension @@ -1084,7 +1087,7 @@ def drop_dimension(self, name) -> 'ProcessBuilder': return drop_dimension(data=self, name=name) @openeo_process - def e(self) -> 'ProcessBuilder': + def e(self) -> ProcessBuilder: """ Euler's number (e) @@ -1093,7 +1096,7 @@ def e(self) -> 'ProcessBuilder': return e() @openeo_process - def eq(self, y, delta=UNSET, case_sensitive=UNSET) -> 'ProcessBuilder': + def eq(self, y, delta=UNSET, case_sensitive=UNSET) -> ProcessBuilder: """ Equal to comparison @@ -1111,7 +1114,7 @@ def eq(self, y, delta=UNSET, case_sensitive=UNSET) -> 'ProcessBuilder': return eq(x=self, y=y, delta=delta, case_sensitive=case_sensitive) @openeo_process - def exp(self) -> 'ProcessBuilder': + def exp(self) -> ProcessBuilder: """ Exponentiation to the base e @@ -1122,7 +1125,7 @@ def exp(self) -> 'ProcessBuilder': return exp(p=self) @openeo_process - def extrema(self, ignore_nodata=UNSET) -> 'ProcessBuilder': + def extrema(self, ignore_nodata=UNSET) -> ProcessBuilder: """ Minimum and maximum values @@ -1138,7 +1141,7 @@ def extrema(self, ignore_nodata=UNSET) -> 'ProcessBuilder': return extrema(data=self, ignore_nodata=ignore_nodata) @openeo_process - def filter_bands(self, bands=UNSET, wavelengths=UNSET) -> 'ProcessBuilder': + def filter_bands(self, bands=UNSET, wavelengths=UNSET) -> ProcessBuilder: """ Filter the bands by names @@ -1161,7 +1164,7 @@ def filter_bands(self, bands=UNSET, wavelengths=UNSET) -> 'ProcessBuilder': return filter_bands(data=self, bands=bands, wavelengths=wavelengths) @openeo_process - def filter_bbox(self, extent) -> 'ProcessBuilder': + def filter_bbox(self, extent) -> ProcessBuilder: """ Spatial filter using a bounding box @@ -1175,7 +1178,7 @@ def filter_bbox(self, extent) -> 'ProcessBuilder': return filter_bbox(data=self, extent=extent) @openeo_process - def filter_labels(self, condition, dimension, context=UNSET) -> 'ProcessBuilder': + def filter_labels(self, condition, dimension, context=UNSET) -> ProcessBuilder: """ Filter dimension labels based on a condition @@ -1194,7 +1197,7 @@ def filter_labels(self, condition, dimension, context=UNSET) -> 'ProcessBuilder' return filter_labels(data=self, condition=condition, dimension=dimension, context=context) @openeo_process - def filter_spatial(self, geometries) -> 'ProcessBuilder': + def filter_spatial(self, geometries) -> ProcessBuilder: """ Spatial filter using geometries @@ -1208,7 +1211,7 @@ def filter_spatial(self, geometries) -> 'ProcessBuilder': return filter_spatial(data=self, geometries=geometries) @openeo_process - def filter_temporal(self, extent, dimension=UNSET) -> 'ProcessBuilder': + def filter_temporal(self, extent, dimension=UNSET) -> ProcessBuilder: """ Temporal filter based on temporal intervals @@ -1230,7 +1233,7 @@ def filter_temporal(self, extent, dimension=UNSET) -> 'ProcessBuilder': return filter_temporal(data=self, extent=extent, dimension=dimension) @openeo_process - def first(self, ignore_nodata=UNSET) -> 'ProcessBuilder': + def first(self, ignore_nodata=UNSET) -> ProcessBuilder: """ First element @@ -1244,7 +1247,7 @@ def first(self, ignore_nodata=UNSET) -> 'ProcessBuilder': return first(data=self, ignore_nodata=ignore_nodata) @openeo_process - def fit_class_random_forest(self, target, max_variables, num_trees=UNSET, seed=UNSET) -> 'ProcessBuilder': + def fit_class_random_forest(self, target, 
max_variables, num_trees=UNSET, seed=UNSET) -> ProcessBuilder: """ Train a random forest classification model @@ -1269,7 +1272,7 @@ def fit_class_random_forest(self, target, max_variables, num_trees=UNSET, seed=U return fit_class_random_forest(predictors=self, target=target, max_variables=max_variables, num_trees=num_trees, seed=seed) @openeo_process - def fit_curve(self, parameters, function, dimension) -> 'ProcessBuilder': + def fit_curve(self, parameters, function, dimension) -> ProcessBuilder: """ Curve fitting @@ -1289,7 +1292,7 @@ def fit_curve(self, parameters, function, dimension) -> 'ProcessBuilder': return fit_curve(data=self, parameters=parameters, function=function, dimension=dimension) @openeo_process - def fit_regr_random_forest(self, target, max_variables, num_trees=UNSET, seed=UNSET) -> 'ProcessBuilder': + def fit_regr_random_forest(self, target, max_variables, num_trees=UNSET, seed=UNSET) -> ProcessBuilder: """ Train a random forest regression model @@ -1314,7 +1317,7 @@ def fit_regr_random_forest(self, target, max_variables, num_trees=UNSET, seed=UN return fit_regr_random_forest(predictors=self, target=target, max_variables=max_variables, num_trees=num_trees, seed=seed) @openeo_process - def flatten_dimensions(self, dimensions, target_dimension, label_separator=UNSET) -> 'ProcessBuilder': + def flatten_dimensions(self, dimensions, target_dimension, label_separator=UNSET) -> ProcessBuilder: """ Combine multiple dimensions into a single dimension @@ -1335,7 +1338,7 @@ def flatten_dimensions(self, dimensions, target_dimension, label_separator=UNSET return flatten_dimensions(data=self, dimensions=dimensions, target_dimension=target_dimension, label_separator=label_separator) @openeo_process - def floor(self) -> 'ProcessBuilder': + def floor(self) -> ProcessBuilder: """ Round fractions down @@ -1346,7 +1349,7 @@ def floor(self) -> 'ProcessBuilder': return floor(x=self) @openeo_process - def gt(self, y) -> 'ProcessBuilder': + def gt(self, y) -> ProcessBuilder: """ Greater than comparison @@ -1359,7 +1362,7 @@ def gt(self, y) -> 'ProcessBuilder': return gt(x=self, y=y) @openeo_process - def gte(self, y) -> 'ProcessBuilder': + def gte(self, y) -> ProcessBuilder: """ Greater than or equal to comparison @@ -1372,7 +1375,7 @@ def gte(self, y) -> 'ProcessBuilder': return gte(x=self, y=y) @openeo_process - def if_(self, accept, reject=UNSET) -> 'ProcessBuilder': + def if_(self, accept, reject=UNSET) -> ProcessBuilder: """ If-Then-Else conditional @@ -1385,7 +1388,7 @@ def if_(self, accept, reject=UNSET) -> 'ProcessBuilder': return if_(value=self, accept=accept, reject=reject) @openeo_process - def inspect(self, code=UNSET, level=UNSET, message=UNSET) -> 'ProcessBuilder': + def inspect(self, code=UNSET, level=UNSET, message=UNSET) -> ProcessBuilder: """ Add information to the logs @@ -1400,7 +1403,7 @@ def inspect(self, code=UNSET, level=UNSET, message=UNSET) -> 'ProcessBuilder': return inspect(data=self, code=code, level=level, message=message) @openeo_process - def int(self) -> 'ProcessBuilder': + def int(self) -> ProcessBuilder: """ Integer part of a number @@ -1411,7 +1414,7 @@ def int(self) -> 'ProcessBuilder': return int(x=self) @openeo_process - def is_infinite(self) -> 'ProcessBuilder': + def is_infinite(self) -> ProcessBuilder: """ Value is an infinite number @@ -1422,7 +1425,7 @@ def is_infinite(self) -> 'ProcessBuilder': return is_infinite(x=self) @openeo_process - def is_nan(self) -> 'ProcessBuilder': + def is_nan(self) -> ProcessBuilder: """ Value is not a number @@ 
-1433,7 +1436,7 @@ def is_nan(self) -> 'ProcessBuilder': return is_nan(x=self) @openeo_process - def is_nodata(self) -> 'ProcessBuilder': + def is_nodata(self) -> ProcessBuilder: """ Value is a no-data value @@ -1444,7 +1447,7 @@ def is_nodata(self) -> 'ProcessBuilder': return is_nodata(x=self) @openeo_process - def is_valid(self) -> 'ProcessBuilder': + def is_valid(self) -> ProcessBuilder: """ Value is valid data @@ -1455,7 +1458,7 @@ def is_valid(self) -> 'ProcessBuilder': return is_valid(x=self) @openeo_process - def last(self, ignore_nodata=UNSET) -> 'ProcessBuilder': + def last(self, ignore_nodata=UNSET) -> ProcessBuilder: """ Last element @@ -1469,7 +1472,7 @@ def last(self, ignore_nodata=UNSET) -> 'ProcessBuilder': return last(data=self, ignore_nodata=ignore_nodata) @openeo_process - def linear_scale_range(self, inputMin, inputMax, outputMin=UNSET, outputMax=UNSET) -> 'ProcessBuilder': + def linear_scale_range(self, inputMin, inputMax, outputMin=UNSET, outputMax=UNSET) -> ProcessBuilder: """ Linear transformation between two ranges @@ -1485,7 +1488,7 @@ def linear_scale_range(self, inputMin, inputMax, outputMin=UNSET, outputMax=UNSE return linear_scale_range(x=self, inputMin=inputMin, inputMax=inputMax, outputMin=outputMin, outputMax=outputMax) @openeo_process - def ln(self) -> 'ProcessBuilder': + def ln(self) -> ProcessBuilder: """ Natural logarithm @@ -1496,7 +1499,7 @@ def ln(self) -> 'ProcessBuilder': return ln(x=self) @openeo_process - def load_collection(self, spatial_extent, temporal_extent, bands=UNSET, properties=UNSET) -> 'ProcessBuilder': + def load_collection(self, spatial_extent, temporal_extent, bands=UNSET, properties=UNSET) -> ProcessBuilder: """ Load a collection @@ -1541,7 +1544,7 @@ def load_collection(self, spatial_extent, temporal_extent, bands=UNSET, properti return load_collection(id=self, spatial_extent=spatial_extent, temporal_extent=temporal_extent, bands=bands, properties=properties) @openeo_process - def load_ml_model(self) -> 'ProcessBuilder': + def load_ml_model(self) -> ProcessBuilder: """ Load a ML model @@ -1554,7 +1557,7 @@ def load_ml_model(self) -> 'ProcessBuilder': return load_ml_model(id=self) @openeo_process - def load_result(self, spatial_extent=UNSET, temporal_extent=UNSET, bands=UNSET) -> 'ProcessBuilder': + def load_result(self, spatial_extent=UNSET, temporal_extent=UNSET, bands=UNSET) -> ProcessBuilder: """ Load batch job results @@ -1592,7 +1595,7 @@ def load_result(self, spatial_extent=UNSET, temporal_extent=UNSET, bands=UNSET) return load_result(id=self, spatial_extent=spatial_extent, temporal_extent=temporal_extent, bands=bands) @openeo_process - def load_uploaded_files(self, format, options=UNSET) -> 'ProcessBuilder': + def load_uploaded_files(self, format, options=UNSET) -> ProcessBuilder: """ Load files from the user workspace @@ -1611,7 +1614,7 @@ def load_uploaded_files(self, format, options=UNSET) -> 'ProcessBuilder': return load_uploaded_files(paths=self, format=format, options=options) @openeo_process - def log(self, base) -> 'ProcessBuilder': + def log(self, base) -> ProcessBuilder: """ Logarithm to a base @@ -1623,7 +1626,7 @@ def log(self, base) -> 'ProcessBuilder': return log(x=self, base=base) @openeo_process - def lt(self, y) -> 'ProcessBuilder': + def lt(self, y) -> ProcessBuilder: """ Less than comparison @@ -1635,7 +1638,7 @@ def lt(self, y) -> 'ProcessBuilder': return lt(x=self, y=y) @openeo_process - def lte(self, y) -> 'ProcessBuilder': + def lte(self, y) -> ProcessBuilder: """ Less than or equal to comparison @@ 
-1648,7 +1651,7 @@ def lte(self, y) -> 'ProcessBuilder': return lte(x=self, y=y) @openeo_process - def mask(self, mask, replacement=UNSET) -> 'ProcessBuilder': + def mask(self, mask, replacement=UNSET) -> ProcessBuilder: """ Apply a raster mask @@ -1663,7 +1666,7 @@ def mask(self, mask, replacement=UNSET) -> 'ProcessBuilder': return mask(data=self, mask=mask, replacement=replacement) @openeo_process - def mask_polygon(self, mask, replacement=UNSET, inside=UNSET) -> 'ProcessBuilder': + def mask_polygon(self, mask, replacement=UNSET, inside=UNSET) -> ProcessBuilder: """ Apply a polygon mask @@ -1684,7 +1687,7 @@ def mask_polygon(self, mask, replacement=UNSET, inside=UNSET) -> 'ProcessBuilder return mask_polygon(data=self, mask=mask, replacement=replacement, inside=inside) @openeo_process - def max(self, ignore_nodata=UNSET) -> 'ProcessBuilder': + def max(self, ignore_nodata=UNSET) -> ProcessBuilder: """ Maximum value @@ -1698,7 +1701,7 @@ def max(self, ignore_nodata=UNSET) -> 'ProcessBuilder': return max(data=self, ignore_nodata=ignore_nodata) @openeo_process - def mean(self, ignore_nodata=UNSET) -> 'ProcessBuilder': + def mean(self, ignore_nodata=UNSET) -> ProcessBuilder: """ Arithmetic mean (average) @@ -1712,7 +1715,7 @@ def mean(self, ignore_nodata=UNSET) -> 'ProcessBuilder': return mean(data=self, ignore_nodata=ignore_nodata) @openeo_process - def median(self, ignore_nodata=UNSET) -> 'ProcessBuilder': + def median(self, ignore_nodata=UNSET) -> ProcessBuilder: """ Statistical median @@ -1726,7 +1729,7 @@ def median(self, ignore_nodata=UNSET) -> 'ProcessBuilder': return median(data=self, ignore_nodata=ignore_nodata) @openeo_process - def merge_cubes(self, cube2, overlap_resolver=UNSET, context=UNSET) -> 'ProcessBuilder': + def merge_cubes(self, cube2, overlap_resolver=UNSET, context=UNSET) -> ProcessBuilder: """ Merge two data cubes @@ -1744,7 +1747,7 @@ def merge_cubes(self, cube2, overlap_resolver=UNSET, context=UNSET) -> 'ProcessB return merge_cubes(cube1=self, cube2=cube2, overlap_resolver=overlap_resolver, context=context) @openeo_process - def min(self, ignore_nodata=UNSET) -> 'ProcessBuilder': + def min(self, ignore_nodata=UNSET) -> ProcessBuilder: """ Minimum value @@ -1758,7 +1761,7 @@ def min(self, ignore_nodata=UNSET) -> 'ProcessBuilder': return min(data=self, ignore_nodata=ignore_nodata) @openeo_process - def mod(self, y) -> 'ProcessBuilder': + def mod(self, y) -> ProcessBuilder: """ Modulo @@ -1770,7 +1773,7 @@ def mod(self, y) -> 'ProcessBuilder': return mod(x=self, y=y) @openeo_process - def multiply(self, y) -> 'ProcessBuilder': + def multiply(self, y) -> ProcessBuilder: """ Multiplication of two numbers @@ -1782,7 +1785,7 @@ def multiply(self, y) -> 'ProcessBuilder': return multiply(x=self, y=y) @openeo_process - def nan(self) -> 'ProcessBuilder': + def nan(self) -> ProcessBuilder: """ Not a Number (NaN) @@ -1791,7 +1794,7 @@ def nan(self) -> 'ProcessBuilder': return nan() @openeo_process - def ndvi(self, nir=UNSET, red=UNSET, target_band=UNSET) -> 'ProcessBuilder': + def ndvi(self, nir=UNSET, red=UNSET, target_band=UNSET) -> ProcessBuilder: """ Normalized Difference Vegetation Index @@ -1819,7 +1822,7 @@ def ndvi(self, nir=UNSET, red=UNSET, target_band=UNSET) -> 'ProcessBuilder': return ndvi(data=self, nir=nir, red=red, target_band=target_band) @openeo_process - def neq(self, y, delta=UNSET, case_sensitive=UNSET) -> 'ProcessBuilder': + def neq(self, y, delta=UNSET, case_sensitive=UNSET) -> ProcessBuilder: """ Not equal to comparison @@ -1837,7 +1840,7 @@ def neq(self, y, 
delta=UNSET, case_sensitive=UNSET) -> 'ProcessBuilder': return neq(x=self, y=y, delta=delta, case_sensitive=case_sensitive) @openeo_process - def normalized_difference(self, y) -> 'ProcessBuilder': + def normalized_difference(self, y) -> ProcessBuilder: """ Normalized difference @@ -1849,7 +1852,7 @@ def normalized_difference(self, y) -> 'ProcessBuilder': return normalized_difference(x=self, y=y) @openeo_process - def not_(self) -> 'ProcessBuilder': + def not_(self) -> ProcessBuilder: """ Inverting a boolean @@ -1860,7 +1863,7 @@ def not_(self) -> 'ProcessBuilder': return not_(x=self) @openeo_process - def or_(self, y) -> 'ProcessBuilder': + def or_(self, y) -> ProcessBuilder: """ Logical OR @@ -1872,7 +1875,7 @@ def or_(self, y) -> 'ProcessBuilder': return or_(x=self, y=y) @openeo_process - def order(self, asc=UNSET, nodata=UNSET) -> 'ProcessBuilder': + def order(self, asc=UNSET, nodata=UNSET) -> ProcessBuilder: """ Create a permutation @@ -1887,7 +1890,7 @@ def order(self, asc=UNSET, nodata=UNSET) -> 'ProcessBuilder': return order(data=self, asc=asc, nodata=nodata) @openeo_process - def pi(self) -> 'ProcessBuilder': + def pi(self) -> ProcessBuilder: """ Pi (π) @@ -1896,7 +1899,7 @@ def pi(self) -> 'ProcessBuilder': return pi() @openeo_process - def power(self, p) -> 'ProcessBuilder': + def power(self, p) -> ProcessBuilder: """ Exponentiation @@ -1908,7 +1911,7 @@ def power(self, p) -> 'ProcessBuilder': return power(base=self, p=p) @openeo_process - def predict_curve(self, parameters, function, dimension, labels=UNSET) -> 'ProcessBuilder': + def predict_curve(self, parameters, function, dimension, labels=UNSET) -> ProcessBuilder: """ Predict values @@ -1927,7 +1930,7 @@ def predict_curve(self, parameters, function, dimension, labels=UNSET) -> 'Proce return predict_curve(data=self, parameters=parameters, function=function, dimension=dimension, labels=labels) @openeo_process - def predict_random_forest(self, model) -> 'ProcessBuilder': + def predict_random_forest(self, model) -> ProcessBuilder: """ Predict values from a Random Forest model @@ -1941,7 +1944,7 @@ def predict_random_forest(self, model) -> 'ProcessBuilder': return predict_random_forest(data=self, model=model) @openeo_process - def product(self, ignore_nodata=UNSET) -> 'ProcessBuilder': + def product(self, ignore_nodata=UNSET) -> ProcessBuilder: """ Compute the product by multiplying numbers @@ -1955,7 +1958,7 @@ def product(self, ignore_nodata=UNSET) -> 'ProcessBuilder': return product(data=self, ignore_nodata=ignore_nodata) @openeo_process - def quantiles(self, probabilities=UNSET, q=UNSET, ignore_nodata=UNSET) -> 'ProcessBuilder': + def quantiles(self, probabilities=UNSET, q=UNSET, ignore_nodata=UNSET) -> ProcessBuilder: """ Quantiles @@ -1976,7 +1979,7 @@ def quantiles(self, probabilities=UNSET, q=UNSET, ignore_nodata=UNSET) -> 'Proce return quantiles(data=self, probabilities=probabilities, q=q, ignore_nodata=ignore_nodata) @openeo_process - def rearrange(self, order) -> 'ProcessBuilder': + def rearrange(self, order) -> ProcessBuilder: """ Rearrange an array based on a permutation @@ -1988,7 +1991,7 @@ def rearrange(self, order) -> 'ProcessBuilder': return rearrange(data=self, order=order) @openeo_process - def reduce_dimension(self, reducer, dimension, context=UNSET) -> 'ProcessBuilder': + def reduce_dimension(self, reducer, dimension, context=UNSET) -> ProcessBuilder: """ Reduce dimensions @@ -2007,7 +2010,7 @@ def reduce_dimension(self, reducer, dimension, context=UNSET) -> 'ProcessBuilder return
reduce_dimension(data=self, reducer=reducer, dimension=dimension, context=context) @openeo_process - def reduce_spatial(self, reducer, context=UNSET) -> 'ProcessBuilder': + def reduce_spatial(self, reducer, context=UNSET) -> ProcessBuilder: """ Reduce spatial dimensions 'x' and 'y' @@ -2024,7 +2027,7 @@ def reduce_spatial(self, reducer, context=UNSET) -> 'ProcessBuilder': return reduce_spatial(data=self, reducer=reducer, context=context) @openeo_process - def rename_dimension(self, source, target) -> 'ProcessBuilder': + def rename_dimension(self, source, target) -> ProcessBuilder: """ Rename a dimension @@ -2041,7 +2044,7 @@ def rename_dimension(self, source, target) -> 'ProcessBuilder': return rename_dimension(data=self, source=source, target=target) @openeo_process - def rename_labels(self, dimension, target, source=UNSET) -> 'ProcessBuilder': + def rename_labels(self, dimension, target, source=UNSET) -> ProcessBuilder: """ Rename dimension labels @@ -2064,7 +2067,7 @@ def rename_labels(self, dimension, target, source=UNSET) -> 'ProcessBuilder': return rename_labels(data=self, dimension=dimension, target=target, source=source) @openeo_process - def resample_cube_spatial(self, target, method=UNSET) -> 'ProcessBuilder': + def resample_cube_spatial(self, target, method=UNSET) -> ProcessBuilder: """ Resample the spatial dimensions to match a target data cube @@ -2090,7 +2093,7 @@ def resample_cube_spatial(self, target, method=UNSET) -> 'ProcessBuilder': return resample_cube_spatial(data=self, target=target, method=method) @openeo_process - def resample_cube_temporal(self, target, dimension=UNSET, valid_within=UNSET) -> 'ProcessBuilder': + def resample_cube_temporal(self, target, dimension=UNSET, valid_within=UNSET) -> ProcessBuilder: """ Resample temporal dimensions to match a target data cube @@ -2117,7 +2120,7 @@ def resample_cube_temporal(self, target, dimension=UNSET, valid_within=UNSET) -> return resample_cube_temporal(data=self, target=target, dimension=dimension, valid_within=valid_within) @openeo_process - def resample_spatial(self, resolution=UNSET, projection=UNSET, method=UNSET, align=UNSET) -> 'ProcessBuilder': + def resample_spatial(self, resolution=UNSET, projection=UNSET, method=UNSET, align=UNSET) -> ProcessBuilder: """ Resample and warp the spatial dimensions @@ -2152,7 +2155,7 @@ def resample_spatial(self, resolution=UNSET, projection=UNSET, method=UNSET, ali return resample_spatial(data=self, resolution=resolution, projection=projection, method=method, align=align) @openeo_process - def round(self, p=UNSET) -> 'ProcessBuilder': + def round(self, p=UNSET) -> ProcessBuilder: """ Round to a specified precision @@ -2166,7 +2169,7 @@ def round(self, p=UNSET) -> 'ProcessBuilder': return round(x=self, p=p) @openeo_process - def run_udf(self, udf, runtime, version=UNSET, context=UNSET) -> 'ProcessBuilder': + def run_udf(self, udf, runtime, version=UNSET, context=UNSET) -> ProcessBuilder: """ Run a UDF @@ -2183,7 +2186,7 @@ def run_udf(self, udf, runtime, version=UNSET, context=UNSET) -> 'ProcessBuilder return run_udf(data=self, udf=udf, runtime=runtime, version=version, context=context) @openeo_process - def run_udf_externally(self, url, context=UNSET) -> 'ProcessBuilder': + def run_udf_externally(self, url, context=UNSET) -> ProcessBuilder: """ Run an externally hosted UDF container @@ -2197,7 +2200,7 @@ def run_udf_externally(self, url, context=UNSET) -> 'ProcessBuilder': return run_udf_externally(data=self, url=url, context=context) @openeo_process - def sar_backscatter(self, 
coefficient=UNSET, elevation_model=UNSET, mask=UNSET, contributing_area=UNSET, local_incidence_angle=UNSET, ellipsoid_incidence_angle=UNSET, noise_removal=UNSET, options=UNSET) -> 'ProcessBuilder': + def sar_backscatter(self, coefficient=UNSET, elevation_model=UNSET, mask=UNSET, contributing_area=UNSET, local_incidence_angle=UNSET, ellipsoid_incidence_angle=UNSET, noise_removal=UNSET, options=UNSET) -> ProcessBuilder: """ Computes backscatter from SAR input @@ -2228,7 +2231,7 @@ def sar_backscatter(self, coefficient=UNSET, elevation_model=UNSET, mask=UNSET, return sar_backscatter(data=self, coefficient=coefficient, elevation_model=elevation_model, mask=mask, contributing_area=contributing_area, local_incidence_angle=local_incidence_angle, ellipsoid_incidence_angle=ellipsoid_incidence_angle, noise_removal=noise_removal, options=options) @openeo_process - def save_ml_model(self, options=UNSET) -> 'ProcessBuilder': + def save_ml_model(self, options=UNSET) -> ProcessBuilder: """ Save a ML model @@ -2240,7 +2243,7 @@ def save_ml_model(self, options=UNSET) -> 'ProcessBuilder': return save_ml_model(data=self, options=options) @openeo_process - def save_result(self, format, options=UNSET) -> 'ProcessBuilder': + def save_result(self, format, options=UNSET) -> ProcessBuilder: """ Save processed data @@ -2258,7 +2261,7 @@ def save_result(self, format, options=UNSET) -> 'ProcessBuilder': return save_result(data=self, format=format, options=options) @openeo_process - def sd(self, ignore_nodata=UNSET) -> 'ProcessBuilder': + def sd(self, ignore_nodata=UNSET) -> ProcessBuilder: """ Standard deviation @@ -2272,7 +2275,7 @@ def sd(self, ignore_nodata=UNSET) -> 'ProcessBuilder': return sd(data=self, ignore_nodata=ignore_nodata) @openeo_process - def sgn(self) -> 'ProcessBuilder': + def sgn(self) -> ProcessBuilder: """ Signum @@ -2283,7 +2286,7 @@ def sgn(self) -> 'ProcessBuilder': return sgn(x=self) @openeo_process - def sin(self) -> 'ProcessBuilder': + def sin(self) -> ProcessBuilder: """ Sine @@ -2294,7 +2297,7 @@ def sin(self) -> 'ProcessBuilder': return sin(x=self) @openeo_process - def sinh(self) -> 'ProcessBuilder': + def sinh(self) -> ProcessBuilder: """ Hyperbolic sine @@ -2305,7 +2308,7 @@ def sinh(self) -> 'ProcessBuilder': return sinh(x=self) @openeo_process - def sort(self, asc=UNSET, nodata=UNSET) -> 'ProcessBuilder': + def sort(self, asc=UNSET, nodata=UNSET) -> ProcessBuilder: """ Sort data @@ -2320,7 +2323,7 @@ def sort(self, asc=UNSET, nodata=UNSET) -> 'ProcessBuilder': return sort(data=self, asc=asc, nodata=nodata) @openeo_process - def sqrt(self) -> 'ProcessBuilder': + def sqrt(self) -> ProcessBuilder: """ Square root @@ -2331,7 +2334,7 @@ def sqrt(self) -> 'ProcessBuilder': return sqrt(x=self) @openeo_process - def subtract(self, y) -> 'ProcessBuilder': + def subtract(self, y) -> ProcessBuilder: """ Subtraction of two numbers @@ -2343,7 +2346,7 @@ def subtract(self, y) -> 'ProcessBuilder': return subtract(x=self, y=y) @openeo_process - def sum(self, ignore_nodata=UNSET) -> 'ProcessBuilder': + def sum(self, ignore_nodata=UNSET) -> ProcessBuilder: """ Compute the sum by adding up numbers @@ -2357,7 +2360,7 @@ def sum(self, ignore_nodata=UNSET) -> 'ProcessBuilder': return sum(data=self, ignore_nodata=ignore_nodata) @openeo_process - def tan(self) -> 'ProcessBuilder': + def tan(self) -> ProcessBuilder: """ Tangent @@ -2368,7 +2371,7 @@ def tan(self) -> 'ProcessBuilder': return tan(x=self) @openeo_process - def tanh(self) -> 'ProcessBuilder': + def tanh(self) -> ProcessBuilder: """ Hyperbolic 
tangent @@ -2379,7 +2382,7 @@ def tanh(self) -> 'ProcessBuilder': return tanh(x=self) @openeo_process - def text_begins(self, pattern, case_sensitive=UNSET) -> 'ProcessBuilder': + def text_begins(self, pattern, case_sensitive=UNSET) -> ProcessBuilder: """ Text begins with another text @@ -2392,7 +2395,7 @@ def text_begins(self, pattern, case_sensitive=UNSET) -> 'ProcessBuilder': return text_begins(data=self, pattern=pattern, case_sensitive=case_sensitive) @openeo_process - def text_concat(self, separator=UNSET) -> 'ProcessBuilder': + def text_concat(self, separator=UNSET) -> ProcessBuilder: """ Concatenate elements to a single text @@ -2407,7 +2410,7 @@ def text_concat(self, separator=UNSET) -> 'ProcessBuilder': return text_concat(data=self, separator=separator) @openeo_process - def text_contains(self, pattern, case_sensitive=UNSET) -> 'ProcessBuilder': + def text_contains(self, pattern, case_sensitive=UNSET) -> ProcessBuilder: """ Text contains another text @@ -2420,7 +2423,7 @@ def text_contains(self, pattern, case_sensitive=UNSET) -> 'ProcessBuilder': return text_contains(data=self, pattern=pattern, case_sensitive=case_sensitive) @openeo_process - def text_ends(self, pattern, case_sensitive=UNSET) -> 'ProcessBuilder': + def text_ends(self, pattern, case_sensitive=UNSET) -> ProcessBuilder: """ Text ends with another text @@ -2433,7 +2436,7 @@ def text_ends(self, pattern, case_sensitive=UNSET) -> 'ProcessBuilder': return text_ends(data=self, pattern=pattern, case_sensitive=case_sensitive) @openeo_process - def trim_cube(self) -> 'ProcessBuilder': + def trim_cube(self) -> ProcessBuilder: """ Remove dimension labels with no-data values @@ -2445,7 +2448,7 @@ def trim_cube(self) -> 'ProcessBuilder': return trim_cube(data=self) @openeo_process - def unflatten_dimension(self, dimension, target_dimensions, label_separator=UNSET) -> 'ProcessBuilder': + def unflatten_dimension(self, dimension, target_dimensions, label_separator=UNSET) -> ProcessBuilder: """ Split a single dimensions into multiple dimensions @@ -2466,7 +2469,7 @@ def unflatten_dimension(self, dimension, target_dimensions, label_separator=UNSE return unflatten_dimension(data=self, dimension=dimension, target_dimensions=target_dimensions, label_separator=label_separator) @openeo_process - def variance(self, ignore_nodata=UNSET) -> 'ProcessBuilder': + def variance(self, ignore_nodata=UNSET) -> ProcessBuilder: """ Variance @@ -2480,7 +2483,7 @@ def variance(self, ignore_nodata=UNSET) -> 'ProcessBuilder': return variance(data=self, ignore_nodata=ignore_nodata) @openeo_process - def vector_buffer(self, distance) -> 'ProcessBuilder': + def vector_buffer(self, distance) -> ProcessBuilder: """ Buffer geometries by distance @@ -2497,7 +2500,7 @@ def vector_buffer(self, distance) -> 'ProcessBuilder': return vector_buffer(geometries=self, distance=distance) @openeo_process - def vector_to_random_points(self, geometry_count=UNSET, total_count=UNSET, group=UNSET, seed=UNSET) -> 'ProcessBuilder': + def vector_to_random_points(self, geometry_count=UNSET, total_count=UNSET, group=UNSET, seed=UNSET) -> ProcessBuilder: """ Sample random points from geometries @@ -2520,7 +2523,7 @@ def vector_to_random_points(self, geometry_count=UNSET, total_count=UNSET, group return vector_to_random_points(data=self, geometry_count=geometry_count, total_count=total_count, group=group, seed=seed) @openeo_process - def vector_to_regular_points(self, distance, group=UNSET) -> 'ProcessBuilder': + def vector_to_regular_points(self, distance, group=UNSET) -> 
ProcessBuilder: """ Sample regular points from geometries @@ -2545,7 +2548,7 @@ def vector_to_regular_points(self, distance, group=UNSET) -> 'ProcessBuilder': return vector_to_regular_points(data=self, distance=distance, group=group) @openeo_process - def xor(self, y) -> 'ProcessBuilder': + def xor(self, y) -> ProcessBuilder: """ Logical XOR (exclusive or) diff --git a/openeo/rest/_datacube.py b/openeo/rest/_datacube.py index c1c9956df..10e253211 100644 --- a/openeo/rest/_datacube.py +++ b/openeo/rest/_datacube.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import logging import pathlib import re @@ -11,7 +13,10 @@ import openeo.processes from openeo.internal.graph_building import FlatGraphableMixin, PGNode, _FromNodeMixin from openeo.internal.jupyter import render_component -from openeo.internal.processes.builder import convert_callable_to_pgnode, get_parameter_names +from openeo.internal.processes.builder import ( + convert_callable_to_pgnode, + get_parameter_names, +) from openeo.internal.warnings import UserDeprecationWarning from openeo.rest import OpenEoClientException from openeo.util import dict_no_none, str_truncate @@ -33,7 +38,7 @@ class _ProcessGraphAbstraction(_FromNodeMixin, FlatGraphableMixin): raster data cubes, vector cubes, ML models, ... """ - def __init__(self, pgnode: PGNode, connection: "Connection"): + def __init__(self, pgnode: PGNode, connection: Connection): self._pg = pgnode self._connection = connection @@ -60,7 +65,7 @@ def _api_version(self): return self._connection.capabilities().api_version_check @property - def connection(self) -> "Connection": + def connection(self) -> Connection: return self._connection def result_node(self) -> PGNode: @@ -162,7 +167,7 @@ def __init__( def __repr__(self): return f"<{type(self).__name__} runtime={self._runtime!r} code={str_truncate(self.code, width=200)!r}>" - def get_runtime(self, connection: "openeo.Connection") -> str: + def get_runtime(self, connection: Connection) -> str: return self._runtime or self._guess_runtime(connection=connection) @classmethod @@ -172,7 +177,7 @@ def from_file( runtime: Optional[str] = None, version: Optional[str] = None, context: Optional[dict] = None, - ) -> "UDF": + ) -> UDF: """ Load a UDF from a local file. @@ -197,7 +202,7 @@ def from_url( runtime: Optional[str] = None, version: Optional[str] = None, context: Optional[dict] = None, - ) -> "UDF": + ) -> UDF: """ Load a UDF from a URL. @@ -216,7 +221,7 @@ def from_url( code=code, runtime=runtime, version=version, context=context, _source=url ) - def _guess_runtime(self, connection: "openeo.Connection") -> str: + def _guess_runtime(self, connection: Connection) -> str: """Guess UDF runtime from UDF source (path) or source code.""" # First, guess UDF language language = None @@ -250,9 +255,7 @@ def _guess_runtime_from_suffix(self, suffix: str) -> Union[str]: ".r": "R", }.get(suffix.lower()) - def get_run_udf_callback( - self, connection: "openeo.Connection", data_parameter: str = "data" - ) -> PGNode: + def get_run_udf_callback(self, connection: Connection, data_parameter: str = "data") -> PGNode: """ For internal use: construct `run_udf` node to be used as callback in `apply`, `reduce_dimension`, ... 
""" diff --git a/openeo/rest/_testing.py b/openeo/rest/_testing.py index 267df89d7..de37bd213 100644 --- a/openeo/rest/_testing.py +++ b/openeo/rest/_testing.py @@ -1,5 +1,5 @@ import re -from typing import Union, Optional +from typing import Optional, Union from openeo import Connection, DataCube from openeo.rest.vectorcube import VectorCube diff --git a/openeo/rest/auth/cli.py b/openeo/rest/auth/cli.py index b1616c8cb..29784d4a0 100644 --- a/openeo/rest/auth/cli.py +++ b/openeo/rest/auth/cli.py @@ -8,7 +8,7 @@ from pathlib import Path from typing import List, Tuple -from openeo import connect, Connection +from openeo import Connection, connect from openeo.capabilities import ApiVersionException from openeo.rest.auth.config import AuthConfig, RefreshTokenStore from openeo.rest.auth.oidc import OidcProviderInfo diff --git a/openeo/rest/auth/config.py b/openeo/rest/auth/config.py index 352e373bd..d76697de7 100644 --- a/openeo/rest/auth/config.py +++ b/openeo/rest/auth/config.py @@ -8,15 +8,15 @@ import json import logging -import stat import platform +import stat from datetime import datetime from pathlib import Path -from typing import Union, Tuple, Dict +from typing import Dict, Tuple, Union from openeo import __version__ from openeo.config import get_user_config_dir, get_user_data_dir -from openeo.util import rfc3339, deep_get, deep_set +from openeo.util import deep_get, deep_set, rfc3339 try: # Use oschmod when available (fall back to POSIX-only functionality from stdlib otherwise) diff --git a/openeo/rest/auth/oidc.py b/openeo/rest/auth/oidc.py index c99da91f3..a1ed2bf3f 100644 --- a/openeo/rest/auth/oidc.py +++ b/openeo/rest/auth/oidc.py @@ -3,6 +3,8 @@ """ +from __future__ import annotations + import base64 import contextlib import enum @@ -12,7 +14,6 @@ import inspect import json import logging -import math import random import string import threading @@ -20,9 +21,8 @@ import urllib.parse import warnings import webbrowser -from collections import namedtuple from queue import Empty, Queue -from typing import Callable, List, Optional, Tuple, Union +from typing import Callable, List, NamedTuple, Optional, Tuple, Union import requests @@ -200,11 +200,12 @@ class OidcException(OpenEoClientException): pass -# Container for result of access_token request. 
-AccessTokenResult = namedtuple( - typename="AccessTokenResult", - field_names=["access_token", "id_token", "refresh_token"] -) +class AccessTokenResult(NamedTuple): + """Container for result of access_token request.""" + + access_token: str + id_token: Optional[str] = None + refresh_token: Optional[str] = None def jwt_decode(token: str) -> Tuple[dict, dict]: @@ -273,7 +274,7 @@ def __init__( self.default_clients = default_clients @classmethod - def from_dict(cls, data: dict) -> "OidcProviderInfo": + def from_dict(cls, data: dict) -> OidcProviderInfo: return cls( provider_id=data["id"], title=data["title"], issuer=data["issuer"], @@ -451,7 +452,11 @@ def sha256_hash(code: str) -> str: return base64.urlsafe_b64encode(data).decode('ascii').replace('=', '') -AuthCodeResult = namedtuple("AuthCodeResult", ["auth_code", "nonce", "code_verifier", "redirect_uri"]) +class AuthCodeResult(NamedTuple): + auth_code: str + nonce: str + code_verifier: str + redirect_uri: str class OidcAuthCodePkceAuthenticator(OidcAuthenticator): @@ -664,10 +669,12 @@ def _get_token_endpoint_post_data(self) -> dict: return data -VerificationInfo = namedtuple( - "VerificationInfo", - ["verification_uri", "verification_uri_complete", "device_code", "user_code", "interval"] -) +class VerificationInfo(NamedTuple): + verification_uri: str + verification_uri_complete: Optional[str] + device_code: str + user_code: str + interval: int def _like_print(display: Callable) -> Callable: diff --git a/openeo/rest/connection.py b/openeo/rest/connection.py index dd46605fb..8e5e1f0ad 100644 --- a/openeo/rest/connection.py +++ b/openeo/rest/connection.py @@ -1,6 +1,8 @@ """ This module provides a Connection object to manage and persist settings when interacting with the OpenEO API. """ +from __future__ import annotations + import datetime import json import logging @@ -10,44 +12,60 @@ import warnings from collections import OrderedDict from pathlib import Path, PurePosixPath -from typing import Dict, List, Tuple, Union, Callable, Optional, Any, Iterator, Iterable +from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Tuple, Union import requests -from requests import Response -from requests.auth import HTTPBasicAuth, AuthBase import shapely.geometry.base +from requests import Response +from requests.auth import AuthBase, HTTPBasicAuth import openeo from openeo.capabilities import ApiVersionException, ComparableVersion -from openeo.config import get_config_option, config_log +from openeo.config import config_log, get_config_option from openeo.internal.documentation import openeo_process -from openeo.internal.graph_building import PGNode, as_flat_graph, FlatGraphableMixin +from openeo.internal.graph_building import FlatGraphableMixin, PGNode, as_flat_graph from openeo.internal.jupyter import VisualDict, VisualList from openeo.internal.processes.builder import ProcessBuilderBase -from openeo.internal.warnings import legacy_alias, deprecated -from openeo.metadata import CollectionMetadata, SpatialDimension, TemporalDimension, BandDimension, Band -from openeo.rest import OpenEoClientException, OpenEoApiError, OpenEoRestError +from openeo.internal.warnings import deprecated, legacy_alias +from openeo.metadata import ( + Band, + BandDimension, + CollectionMetadata, + SpatialDimension, + TemporalDimension, +) +from openeo.rest import OpenEoApiError, OpenEoClientException, OpenEoRestError from openeo.rest._datacube import build_child_callback -from openeo.rest.auth.auth import NullAuth, BearerAuth, BasicBearerAuth, 
OidcBearerAuth -from openeo.rest.auth.config import RefreshTokenStore, AuthConfig -from openeo.rest.auth.oidc import OidcClientCredentialsAuthenticator, OidcAuthCodePkceAuthenticator, \ - OidcClientInfo, OidcAuthenticator, OidcRefreshTokenAuthenticator, OidcResourceOwnerPasswordAuthenticator, \ - OidcDeviceAuthenticator, OidcProviderInfo, OidcException, DefaultOidcClientGrant, GrantsChecker +from openeo.rest.auth.auth import BasicBearerAuth, BearerAuth, NullAuth, OidcBearerAuth +from openeo.rest.auth.config import AuthConfig, RefreshTokenStore +from openeo.rest.auth.oidc import ( + DefaultOidcClientGrant, + GrantsChecker, + OidcAuthCodePkceAuthenticator, + OidcAuthenticator, + OidcClientCredentialsAuthenticator, + OidcClientInfo, + OidcDeviceAuthenticator, + OidcException, + OidcProviderInfo, + OidcRefreshTokenAuthenticator, + OidcResourceOwnerPasswordAuthenticator, +) from openeo.rest.datacube import DataCube -from openeo.rest.mlmodel import MlModel -from openeo.rest.userfile import UserFile from openeo.rest.job import BatchJob, RESTJob +from openeo.rest.mlmodel import MlModel from openeo.rest.rest_capabilities import RESTCapabilities from openeo.rest.service import Service -from openeo.rest.udp import RESTUserDefinedProcess, Parameter +from openeo.rest.udp import Parameter, RESTUserDefinedProcess +from openeo.rest.userfile import UserFile from openeo.rest.vectorcube import VectorCube from openeo.util import ( - ensure_list, + ContextTimer, + LazyLoadCache, dict_no_none, - rfc3339, + ensure_list, load_json_resource, - LazyLoadCache, - ContextTimer, + rfc3339, str_truncate, url_join, ) @@ -306,7 +324,7 @@ def _get_refresh_token_store(self) -> RefreshTokenStore: self._refresh_token_store = RefreshTokenStore() return self._refresh_token_store - def authenticate_basic(self, username: Optional[str] = None, password: Optional[str] = None) -> "Connection": + def authenticate_basic(self, username: Optional[str] = None, password: Optional[str] = None) -> Connection: """ Authenticate a user to the backend using basic username and password. @@ -431,7 +449,7 @@ def _authenticate_oidc( store_refresh_token: bool = False, fallback_refresh_token_to_store: Optional[str] = None, oidc_auth_renewer: Optional[OidcAuthenticator] = None, - ) -> "Connection": + ) -> Connection: """ Authenticate through OIDC and set up bearer token (based on OIDC access_token) for further requests. """ @@ -465,7 +483,7 @@ def authenticate_oidc_authorization_code( server_address: Optional[Tuple[str, int]] = None, webbrowser_open: Optional[Callable] = None, store_refresh_token=False, - ) -> "Connection": + ) -> Connection: """ OpenID Connect Authorization Code Flow (with PKCE). 
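# The AccessTokenResult, AuthCodeResult and VerificationInfo changes in
# openeo/rest/auth/oidc.py above all follow one pattern: replace a bare
# collections.namedtuple with a typing.NamedTuple subclass, which adds type hints
# and per-field defaults while staying tuple-compatible. A minimal standalone
# sketch of that pattern (TokenPair is a made-up container, not a class from this
# code base):
from typing import NamedTuple, Optional


class TokenPair(NamedTuple):
    """Container for an access/refresh token pair."""

    access_token: str
    refresh_token: Optional[str] = None


pair = TokenPair(access_token="abc123")
assert pair.refresh_token is None      # defaulted field
assert pair == ("abc123", None)        # still behaves like a plain tuple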
@@ -489,7 +507,7 @@ def authenticate_oidc_client_credentials( client_id: Optional[str] = None, client_secret: Optional[str] = None, provider_id: Optional[str] = None, - ) -> 'Connection': + ) -> Connection: """ Authenticate with :ref:`OIDC Client Credentials flow ` @@ -528,7 +546,7 @@ def authenticate_oidc_resource_owner_password_credentials( client_secret: Optional[str] = None, provider_id: Optional[str] = None, store_refresh_token: bool = False, - ) -> "Connection": + ) -> Connection: """ OpenId Connect Resource Owner Password Credentials """ @@ -549,7 +567,7 @@ def authenticate_oidc_refresh_token( provider_id: Optional[str] = None, *, store_refresh_token: bool = False, - ) -> "Connection": + ) -> Connection: """ Authenticate with :ref:`OIDC Refresh Token flow ` @@ -594,7 +612,7 @@ def authenticate_oidc_device( use_pkce: Optional[bool] = None, max_poll_time: float = OidcDeviceAuthenticator.DEFAULT_MAX_POLL_TIME, **kwargs, - ) -> "Connection": + ) -> Connection: """ Authenticate with the :ref:`OIDC Device Code flow ` @@ -1160,12 +1178,15 @@ def load_result( :return: a :py:class:`DataCube` """ # TODO: add check that back-end supports `load_result` process? - metadata = CollectionMetadata({}, dimensions=[ - SpatialDimension(name="x", extent=[]), - SpatialDimension(name="y", extent=[]), - TemporalDimension(name='t', extent=[]), - BandDimension(name="bands", bands=[Band("unknown")]), - ]) + metadata = CollectionMetadata( + {}, + dimensions=[ + SpatialDimension(name="x", extent=[]), + SpatialDimension(name="y", extent=[]), + TemporalDimension(name="t", extent=[]), + BandDimension(name="bands", bands=[Band(name="unknown")]), + ], + ) cube = self.datacube_from_process( process_id="load_result", id=id, @@ -1289,7 +1310,7 @@ def load_stac( SpatialDimension(name="x", extent=[]), SpatialDimension(name="y", extent=[]), TemporalDimension(name="t", extent=[]), - BandDimension(name="bands", bands=[Band("unknown")]), + BandDimension(name="bands", bands=[Band(name="unknown")]), ], ) arguments = {"url": url} @@ -1308,7 +1329,7 @@ def load_stac( cube.metadata = metadata return cube - def load_ml_model(self, id: Union[str, BatchJob]) -> "MlModel": + def load_ml_model(self, id: Union[str, BatchJob]) -> MlModel: """ Loads a machine learning model from a STAC Item. diff --git a/openeo/rest/conversions.py b/openeo/rest/conversions.py index 89c259b50..6268bed1a 100644 --- a/openeo/rest/conversions.py +++ b/openeo/rest/conversions.py @@ -2,6 +2,8 @@ Helpers for data conversions between Python ecosystem data types and openEO data structures. """ +from __future__ import annotations + import typing import numpy as np @@ -11,9 +13,10 @@ if typing.TYPE_CHECKING: # Imports for type checking only (circular import issue at runtime). 
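# The `from __future__ import annotations` lines added above (oidc.py, connection.py,
# conversions.py, ...) turn on postponed evaluation of annotations (PEP 563): annotations
# are no longer evaluated at definition time, so quoted forward references like
# "Connection" can be written as plain Connection, and names imported only under
# typing.TYPE_CHECKING can still appear in type hints. A minimal standalone sketch of the
# combination; `some_heavy_module` / `HeavyThing` are placeholders, not real modules:
from __future__ import annotations

import typing

if typing.TYPE_CHECKING:
    # Only seen by type checkers; never imported at runtime, which is how the
    # circular-import problem mentioned above is avoided.
    from some_heavy_module import HeavyThing


def describe(thing: HeavyThing) -> str:
    # Fine at runtime even though HeavyThing is not defined here: the annotation
    # stays an unevaluated string.
    return f"got {type(thing).__name__}"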
- from openeo.udf import XarrayDataCube import xarray + from openeo.udf import XarrayDataCube + class InvalidTimeSeriesException(ValueError): pass @@ -94,28 +97,28 @@ def timeseries_json_to_pandas(timeseries: dict, index: str = "date", auto_collap @deprecated("Use :py:meth:`XarrayDataCube.from_file` instead.", version="0.7.0") -def datacube_from_file(filename, fmt='netcdf') -> "XarrayDataCube": +def datacube_from_file(filename, fmt="netcdf") -> XarrayDataCube: from openeo.udf.xarraydatacube import XarrayDataCube return XarrayDataCube.from_file(path=filename, fmt=fmt) @deprecated("Use :py:meth:`XarrayDataCube.save_to_file` instead.", version="0.7.0") -def datacube_to_file(datacube: "XarrayDataCube", filename, fmt='netcdf'): +def datacube_to_file(datacube: XarrayDataCube, filename, fmt="netcdf"): return datacube.save_to_file(path=filename, fmt=fmt) @deprecated("Use :py:meth:`XarrayIO.to_json_file` instead", version="0.7.0") -def _save_DataArray_to_JSON(filename, array: "xarray.DataArray"): +def _save_DataArray_to_JSON(filename, array: xarray.DataArray): from openeo.udf.xarraydatacube import XarrayIO return XarrayIO.to_json_file(array=array, path=filename) @deprecated("Use :py:meth:`XarrayIO.to_netcdf_file` instead", version="0.7.0") -def _save_DataArray_to_NetCDF(filename, array: "xarray.DataArray"): +def _save_DataArray_to_NetCDF(filename, array: xarray.DataArray): from openeo.udf.xarraydatacube import XarrayIO return XarrayIO.to_netcdf_file(array=array, path=filename) @deprecated("Use :py:meth:`XarrayDataCube.plot` instead.", version="0.7.0") -def datacube_plot(datacube: "XarrayDataCube", *args, **kwargs): +def datacube_plot(datacube: XarrayDataCube, *args, **kwargs): datacube.plot(*args, **kwargs) diff --git a/openeo/rest/datacube.py b/openeo/rest/datacube.py index 734fdfe8f..4ba99d688 100644 --- a/openeo/rest/datacube.py +++ b/openeo/rest/datacube.py @@ -7,42 +7,65 @@ Symbolic reference to the current data cube, to be used as argument in :py:meth:`DataCube.process()` calls """ +from __future__ import annotations + import datetime import logging import pathlib import typing import warnings from builtins import staticmethod -from typing import List, Dict, Union, Tuple, Optional, Any, Iterable +from typing import Any, Dict, Iterable, List, Optional, Tuple, Union import numpy as np import shapely.geometry import shapely.geometry.base -from shapely.geometry import Polygon, MultiPolygon, mapping +from shapely.geometry import MultiPolygon, Polygon, mapping import openeo import openeo.processes from openeo.api.process import Parameter from openeo.internal.documentation import openeo_process from openeo.internal.graph_building import PGNode, ReduceNode, _FromNodeMixin -from openeo.internal.processes.builder import get_parameter_names, convert_callable_to_pgnode -from openeo.internal.warnings import legacy_alias, UserDeprecationWarning, deprecated from openeo.internal.jupyter import in_jupyter_context -from openeo.metadata import CollectionMetadata, Band, BandDimension, TemporalDimension, SpatialDimension +from openeo.internal.processes.builder import ( + convert_callable_to_pgnode, + get_parameter_names, +) +from openeo.internal.warnings import UserDeprecationWarning, deprecated, legacy_alias +from openeo.metadata import ( + Band, + BandDimension, + CollectionMetadata, + SpatialDimension, + TemporalDimension, +) from openeo.processes import ProcessBuilder -from openeo.rest import BandMathException, OperatorException, OpenEoClientException -from openeo.rest._datacube import 
_ProcessGraphAbstraction, THIS, UDF, build_child_callback +from openeo.rest import BandMathException, OpenEoClientException, OperatorException +from openeo.rest._datacube import ( + THIS, + UDF, + _ProcessGraphAbstraction, + build_child_callback, +) from openeo.rest.job import BatchJob from openeo.rest.mlmodel import MlModel from openeo.rest.service import Service from openeo.rest.udp import RESTUserDefinedProcess from openeo.rest.vectorcube import VectorCube -from openeo.util import get_temporal_extent, dict_no_none, rfc3339, guess_format, normalize_crs +from openeo.util import ( + dict_no_none, + get_temporal_extent, + guess_format, + normalize_crs, + rfc3339, +) if typing.TYPE_CHECKING: # Imports for type checking only (circular import issue at runtime). - from openeo.rest.connection import Connection import xarray + + from openeo.rest.connection import Connection from openeo.udf import XarrayDataCube @@ -62,7 +85,7 @@ class DataCube(_ProcessGraphAbstraction): # TODO: set this based on back-end or user preference? _DEFAULT_RASTER_FORMAT = "GTiff" - def __init__(self, graph: PGNode, connection: 'openeo.Connection', metadata: CollectionMetadata = None): + def __init__(self, graph: PGNode, connection: Connection, metadata: CollectionMetadata = None): super().__init__(pgnode=graph, connection=connection) self.metadata = CollectionMetadata.get_or_create(metadata) @@ -73,7 +96,7 @@ def process( metadata: Optional[CollectionMetadata] = None, namespace: Optional[str] = None, **kwargs, - ) -> "DataCube": + ) -> DataCube: """ Generic helper to create a new DataCube by applying a process. @@ -88,7 +111,7 @@ def process( graph_add_node = legacy_alias(process, "graph_add_node", since="0.1.1") - def process_with_node(self, pg: PGNode, metadata: Optional[CollectionMetadata] = None) -> 'DataCube': + def process_with_node(self, pg: PGNode, metadata: Optional[CollectionMetadata] = None) -> DataCube: """ Generic helper to create a new DataCube by applying a process (given as process graph node) @@ -104,16 +127,16 @@ def process_with_node(self, pg: PGNode, metadata: Optional[CollectionMetadata] = @classmethod @openeo_process def load_collection( - cls, - collection_id: str, - connection: 'openeo.Connection' = None, - spatial_extent: Optional[Dict[str, float]] = None, - temporal_extent: Optional[List[Union[str, datetime.datetime, datetime.date, PGNode]]] = None, - bands: Optional[List[str]] = None, - fetch_metadata=True, - properties: Optional[Dict[str, Union[str, PGNode, typing.Callable]]] = None, - max_cloud_cover: Optional[float] = None, - ) -> 'DataCube': + cls, + collection_id: str, + connection: Connection = None, + spatial_extent: Optional[Dict[str, float]] = None, + temporal_extent: Optional[List[Union[str, datetime.datetime, datetime.date, PGNode]]] = None, + bands: Optional[List[str]] = None, + fetch_metadata=True, + properties: Optional[Dict[str, Union[str, PGNode, typing.Callable]]] = None, + max_cloud_cover: Optional[float] = None, + ) -> DataCube: """ Create a new Raster Data cube. @@ -149,7 +172,7 @@ def load_collection( metadata = metadata.filter_bands(bands) else: # Ensure minimal metadata with best effort band dimension guess (based on `bands` argument). 
- band_dimension = BandDimension("bands", bands=[Band(b, None, None) for b in bands]) + band_dimension = BandDimension("bands", bands=[Band(name=b) for b in bands]) metadata = CollectionMetadata({}, dimensions=[band_dimension]) arguments['bands'] = bands if max_cloud_cover: @@ -178,8 +201,7 @@ def load_collection( ) @classmethod - def load_disk_collection(cls, connection: 'openeo.Connection', file_format: str, glob_pattern: str, - **options) -> 'DataCube': + def load_disk_collection(cls, connection: Connection, file_format: str, glob_pattern: str, **options) -> DataCube: """ Loads image data from disk as a DataCube. This is backed by a non-standard process ('load_disk_data'). This will eventually be replaced by standard options such as @@ -201,12 +223,15 @@ def load_disk_collection(cls, connection: 'openeo.Connection', file_format: str, } ) - metadata = CollectionMetadata({}, dimensions=[ - SpatialDimension(name="x", extent=[]), - SpatialDimension(name="y", extent=[]), - TemporalDimension(name='t', extent=[]), - BandDimension(name="bands", bands=[Band("unknown")]), - ]) + metadata = CollectionMetadata( + {}, + dimensions=[ + SpatialDimension(name="x", extent=[]), + SpatialDimension(name="y", extent=[]), + TemporalDimension(name="t", extent=[]), + BandDimension(name="bands", bands=[Band(name="unknown")]), + ], + ) return cls(graph=pg, connection=connection, metadata=metadata) @classmethod @@ -243,7 +268,7 @@ def filter_temporal( start_date: Union[str, datetime.datetime, datetime.date] = None, end_date: Union[str, datetime.datetime, datetime.date] = None, extent: Union[list, tuple] = None - ) -> 'DataCube': + ) -> DataCube: """ Limit the DataCube to a certain date range, which can be specified in several ways: @@ -274,7 +299,7 @@ def filter_bbox( crs=None, base=None, height=None, bbox=None - ) -> 'DataCube': + ) -> DataCube: """ Limits the data cube to the specified bounding box. @@ -379,10 +404,7 @@ def filter_bbox( ) @openeo_process - def filter_spatial( - self, - geometries - ) -> 'DataCube': + def filter_spatial(self, geometries) -> DataCube: """ Limits the data cube over the spatial dimensions to the specified geometries. @@ -414,7 +436,7 @@ def filter_spatial( ) @openeo_process - def filter_bands(self, bands: Union[List[Union[str, int]], str]) -> 'DataCube': + def filter_bands(self, bands: Union[List[Union[str, int]], str]) -> DataCube: """ Filter the data cube by the given bands @@ -434,7 +456,7 @@ def filter_bands(self, bands: Union[List[Union[str, int]], str]) -> 'DataCube': band_filter = legacy_alias(filter_bands, "band_filter", since="0.1.0") - def band(self, band: Union[str, int]) -> "DataCube": + def band(self, band: Union[str, int]) -> DataCube: """ Filter out a single band @@ -454,7 +476,7 @@ def band(self, band: Union[str, int]) -> "DataCube": def resample_spatial( self, resolution: Union[float, Tuple[float, float]], projection: Union[int, str] = None, method: str = 'near', align: str = 'upper-left' - ) -> 'DataCube': + ) -> DataCube: return self.process('resample_spatial', { 'data': THIS, 'resolution': resolution, @@ -463,7 +485,7 @@ def resample_spatial( 'align': align }) - def resample_cube_spatial(self, target: "DataCube", method: str = "near") -> 'DataCube': + def resample_cube_spatial(self, target: DataCube, method: str = "near") -> DataCube: """ Resamples the spatial dimensions (x,y) from a source data cube to align with the corresponding dimensions of the given target data cube. 
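# The Band(b, None, None) -> Band(name=b) and Band("unknown") -> Band(name="unknown")
# changes above come down to: pass only the field you actually know, by keyword, and let
# the rest default, instead of padding positional arguments with None. A standalone
# sketch with a simplified Band record (the extra field names are illustrative, not the
# exact signature of openeo.metadata.Band):
from typing import NamedTuple, Optional


class Band(NamedTuple):
    name: str
    common_name: Optional[str] = None
    wavelength_um: Optional[float] = None


b_positional = Band("B04", None, None)   # fragile: must track field order and count
b_keyword = Band(name="B04")             # robust: unknown fields keep their defaults
assert b_positional == b_keyword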
@@ -480,8 +502,8 @@ def resample_cube_spatial(self, target: "DataCube", method: str = "near") -> 'Da @openeo_process def resample_cube_temporal( - self, target: "DataCube", dimension: Optional[str] = None, valid_within: Optional[int] = None - ) -> 'DataCube': + self, target: DataCube, dimension: Optional[str] = None, valid_within: Optional[int] = None + ) -> DataCube: """ Resamples one or more given temporal dimensions from a source data cube to align with the corresponding dimensions of the given target data cube using the nearest neighbor method. @@ -505,7 +527,7 @@ def resample_cube_temporal( dict_no_none({"data": self, "target": target, "dimension": dimension, "valid_within": valid_within}) ) - def _operator_binary(self, operator: str, other: Union['DataCube', int, float], reverse=False) -> 'DataCube': + def _operator_binary(self, operator: str, other: Union[DataCube, int, float], reverse=False) -> DataCube: """Generic handling of (mathematical) binary operator""" band_math_mode = self._in_bandmath_mode() if band_math_mode: @@ -525,7 +547,7 @@ def _operator_binary(self, operator: str, other: Union['DataCube', int, float], f"Unsupported operator {operator!r} with `other` type {type(other)!r} (band math mode={band_math_mode})" ) - def _operator_unary(self, operator: str, **kwargs) -> 'DataCube': + def _operator_unary(self, operator: str, **kwargs) -> DataCube: band_math_mode = self._in_bandmath_mode() if band_math_mode: return self._bandmath_operator_unary(operator, **kwargs) @@ -538,7 +560,7 @@ def _apply_operator( other: Optional[Union[int, float]] = None, reverse: Optional[bool] = None, extra_arguments: Optional[dict] = None, - ) -> "DataCube": + ) -> DataCube: """ Apply a unary or binary operator/process, by appending to existing `apply` node, or starting a new one. 
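# The import shuffles throughout this diff follow isort's default layout: one block per
# group (standard library, third party, first party), blocks separated by a blank line,
# names sorted within each block, and long `from ... import` lists wrapped in
# parentheses with one name per line. A condensed sketch of the resulting layout, using
# imports that appear in the modules touched above (running it needs the openeo package
# and its dependencies installed):
import json
import logging
from pathlib import Path

import requests
import shapely.geometry.base

import openeo
from openeo.metadata import (
    Band,
    BandDimension,
    CollectionMetadata,
)
from openeo.util import dict_no_none, rfc3339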
@@ -578,30 +600,30 @@ def _apply_operator( ) @openeo_process(mode="operator") - def add(self, other: Union['DataCube', int, float], reverse=False) -> 'DataCube': + def add(self, other: Union[DataCube, int, float], reverse=False) -> DataCube: return self._operator_binary("add", other, reverse=reverse) @openeo_process(mode="operator") - def subtract(self, other: Union['DataCube', int, float], reverse=False) -> 'DataCube': + def subtract(self, other: Union[DataCube, int, float], reverse=False) -> DataCube: return self._operator_binary("subtract", other, reverse=reverse) @openeo_process(mode="operator") - def divide(self, other: Union['DataCube', int, float], reverse=False) -> 'DataCube': + def divide(self, other: Union[DataCube, int, float], reverse=False) -> DataCube: return self._operator_binary("divide", other, reverse=reverse) @openeo_process(mode="operator") - def multiply(self, other: Union['DataCube', int, float], reverse=False) -> 'DataCube': + def multiply(self, other: Union[DataCube, int, float], reverse=False) -> DataCube: return self._operator_binary("multiply", other, reverse=reverse) @openeo_process - def normalized_difference(self, other: 'DataCube') -> 'DataCube': + def normalized_difference(self, other: DataCube) -> DataCube: # This DataCube method is only a convenience function when in band math mode assert self._in_bandmath_mode() assert other._in_bandmath_mode() return self._operator_binary("normalized_difference", other) @openeo_process(process_id="or", mode="operator") - def logical_or(self, other: 'DataCube') -> 'DataCube': + def logical_or(self, other: DataCube) -> DataCube: """ Apply element-wise logical `or` operation @@ -611,7 +633,7 @@ def logical_or(self, other: 'DataCube') -> 'DataCube': return self._operator_binary("or", other) @openeo_process(process_id="and", mode="operator") - def logical_and(self, other: "DataCube") -> "DataCube": + def logical_and(self, other: DataCube) -> DataCube: """ Apply element-wise logical `and` operation @@ -621,15 +643,15 @@ def logical_and(self, other: "DataCube") -> "DataCube": return self._operator_binary("and", other) @openeo_process(process_id="not", mode="operator") - def __invert__(self) -> "DataCube": + def __invert__(self) -> DataCube: return self._operator_unary("not") @openeo_process(process_id="neq", mode="operator") - def __ne__(self, other: Union["DataCube", int, float]) -> "DataCube": + def __ne__(self, other: Union[DataCube, int, float]) -> DataCube: return self._operator_binary("neq", other) @openeo_process(process_id="eq", mode="operator") - def __eq__(self, other: Union["DataCube", int, float]) -> "DataCube": + def __eq__(self, other: Union[DataCube, int, float]) -> DataCube: """ Pixelwise comparison of this data cube with another cube or constant. @@ -639,7 +661,7 @@ def __eq__(self, other: Union["DataCube", int, float]) -> "DataCube": return self._operator_binary("eq", other) @openeo_process(process_id="gt", mode="operator") - def __gt__(self, other: Union["DataCube", int, float]) -> "DataCube": + def __gt__(self, other: Union[DataCube, int, float]) -> DataCube: """ Pairwise comparison of the bands in this data cube with the bands in the 'other' data cube. 
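# The __add__ / __sub__ / __truediv__ / __gt__ / ... overloads above are what make
# band-math expressions on DataCube objects work: each Python operator is turned into
# the corresponding openEO process inside the band-math callback. A usage sketch; the
# backend URL, collection id and band names are placeholders:
import openeo

connection = openeo.connect("https://openeo.example.com")
cube = connection.load_collection("SENTINEL2_EXAMPLE", bands=["B04", "B08"])
red = cube.band("B04")
nir = cube.band("B08")
ndvi = (nir - red) / (nir + red)   # __sub__, __add__, __truediv__ -> subtract/add/divide
vegetation = ndvi > 0.3            # __gt__ -> gt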
@@ -649,11 +671,11 @@ def __gt__(self, other: Union["DataCube", int, float]) -> "DataCube": return self._operator_binary("gt", other) @openeo_process(process_id="ge", mode="operator") - def __ge__(self, other: Union["DataCube", int, float]) -> "DataCube": + def __ge__(self, other: Union[DataCube, int, float]) -> DataCube: return self._operator_binary("gte", other) @openeo_process(process_id="lt", mode="operator") - def __lt__(self, other: Union["DataCube", int, float]) -> "DataCube": + def __lt__(self, other: Union[DataCube, int, float]) -> DataCube: """ Pairwise comparison of the bands in this data cube with the bands in the 'other' data cube. The number of bands in both data cubes has to be the same. @@ -664,51 +686,51 @@ def __lt__(self, other: Union["DataCube", int, float]) -> "DataCube": return self._operator_binary("lt", other) @openeo_process(process_id="le", mode="operator") - def __le__(self, other: Union["DataCube", int, float]) -> "DataCube": + def __le__(self, other: Union[DataCube, int, float]) -> DataCube: return self._operator_binary("lte", other) @openeo_process(process_id="add", mode="operator") - def __add__(self, other) -> "DataCube": + def __add__(self, other) -> DataCube: return self.add(other) @openeo_process(process_id="add", mode="operator") - def __radd__(self, other) -> "DataCube": + def __radd__(self, other) -> DataCube: return self.add(other, reverse=True) @openeo_process(process_id="subtract", mode="operator") - def __sub__(self, other) -> "DataCube": + def __sub__(self, other) -> DataCube: return self.subtract(other) @openeo_process(process_id="subtract", mode="operator") - def __rsub__(self, other) -> "DataCube": + def __rsub__(self, other) -> DataCube: return self.subtract(other, reverse=True) @openeo_process(process_id="multiply", mode="operator") - def __neg__(self) -> "DataCube": + def __neg__(self) -> DataCube: return self.multiply(-1) @openeo_process(process_id="multiply", mode="operator") - def __mul__(self, other) -> "DataCube": + def __mul__(self, other) -> DataCube: return self.multiply(other) @openeo_process(process_id="multiply", mode="operator") - def __rmul__(self, other) -> "DataCube": + def __rmul__(self, other) -> DataCube: return self.multiply(other, reverse=True) @openeo_process(process_id="divide", mode="operator") - def __truediv__(self, other) -> "DataCube": + def __truediv__(self, other) -> DataCube: return self.divide(other) @openeo_process(process_id="divide", mode="operator") - def __rtruediv__(self, other) -> "DataCube": + def __rtruediv__(self, other) -> DataCube: return self.divide(other, reverse=True) @openeo_process(process_id="power", mode="operator") - def __rpow__(self, other) -> "DataCube": + def __rpow__(self, other) -> DataCube: return self._power(other, reverse=True) @openeo_process(process_id="power", mode="operator") - def __pow__(self, other) -> "DataCube": + def __pow__(self, other) -> DataCube: return self._power(other, reverse=False) def _power(self, other, reverse=False): @@ -726,23 +748,23 @@ def power(self, p: float): return self._power(other=p, reverse=False) @openeo_process(process_id="ln", mode="operator") - def ln(self) -> "DataCube": + def ln(self) -> DataCube: return self._operator_unary("ln") @openeo_process(process_id="log", mode="operator") - def logarithm(self, base: float) -> "DataCube": + def logarithm(self, base: float) -> DataCube: return self._operator_unary("log", base=base) @openeo_process(process_id="log", mode="operator") - def log2(self) -> "DataCube": + def log2(self) -> DataCube: return 
self.logarithm(base=2) @openeo_process(process_id="log", mode="operator") - def log10(self) -> "DataCube": + def log10(self) -> DataCube: return self.logarithm(base=10) @openeo_process(process_id="or", mode="operator") - def __or__(self, other) -> "DataCube": + def __or__(self, other) -> DataCube: return self.logical_or(other) @openeo_process(process_id="and", mode="operator") @@ -750,8 +772,8 @@ def __and__(self, other): return self.logical_and(other) def _bandmath_operator_binary_cubes( - self, operator, other: "DataCube", left_arg_name="x", right_arg_name="y" - ) -> "DataCube": + self, operator, other: DataCube, left_arg_name="x", right_arg_name="y" + ) -> DataCube: """Band math binary operator with cube as right hand side argument""" left = self._get_bandmath_node() right = other._get_bandmath_node() @@ -768,7 +790,7 @@ def _bandmath_operator_binary_cubes( ) return self.process_with_node(left.clone_with_new_reducer(merged)) - def _bandmath_operator_binary_scalar(self, operator: str, other: Union[int, float], reverse=False) -> 'DataCube': + def _bandmath_operator_binary_scalar(self, operator: str, other: Union[int, float], reverse=False) -> DataCube: """Band math binary operator with scalar value (int or float) as right hand side argument""" node = self._get_bandmath_node() x = {'from_node': node.reducer_process_graph()} @@ -779,7 +801,7 @@ def _bandmath_operator_binary_scalar(self, operator: str, other: Union[int, floa PGNode(operator, x=x, y=y) )) - def _bandmath_operator_unary(self, operator: str, **kwargs) -> 'DataCube': + def _bandmath_operator_unary(self, operator: str, **kwargs) -> DataCube: node = self._get_bandmath_node() return self.process_with_node(node.clone_with_new_reducer( PGNode(operator, x={'from_node': node.reducer_process_graph()}, **kwargs) @@ -796,8 +818,9 @@ def _get_bandmath_node(self) -> ReduceNode: raise BandMathException("Must be in band math mode already") return self._pg - def _merge_operator_binary_cubes(self, operator: str, other: 'DataCube', left_arg_name="x", - right_arg_name="y") -> 'DataCube': + def _merge_operator_binary_cubes( + self, operator: str, other: DataCube, left_arg_name="x", right_arg_name="y" + ) -> DataCube: """Merge two cubes with given operator as overlap_resolver.""" # TODO #123 reuse an existing merge_cubes process graph if it already exists? return self.merge_cubes(other, overlap_resolver=PGNode( @@ -936,7 +959,7 @@ def aggregate_spatial_window( align: str = "upper-left", context: Optional[dict] = None, # TODO arguments: target dimension, context - ) -> "DataCube": + ) -> DataCube: """ Aggregates statistics over the horizontal spatial dimensions (axes x and y) of the data cube. @@ -999,7 +1022,7 @@ def apply_dimension( dimension: str = "t", target_dimension: Optional[str] = None, context: Optional[dict] = None, - ) -> "DataCube": + ) -> DataCube: """ Applies a process to all pixel values along a dimension of a raster data cube. For example, if the temporal dimension is specified the process will work on a time series of pixel values. 
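# Illustrative sketch, not part of this patch: a typical apply_dimension call that
# collapses the time dimension into a handful of per-pixel statistics, mirroring the
# pattern used in tests/rest/datacube/test_processbuilder.py further down this diff.
# Backend URL and collection id are placeholder assumptions.
import openeo
from openeo.processes import array_concat, mean, quantiles, sd

connection = openeo.connect("https://openeo.example.test")  # hypothetical backend
cube = connection.load_collection("SENTINEL2_L2A")
stats = cube.apply_dimension(
    process=lambda d: array_concat(quantiles(d, [0.25, 0.5, 0.75]), [sd(d), mean(d)]),
    dimension="t",
    target_dimension="bands",
)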
@@ -1086,7 +1109,7 @@ def reduce_dimension( context: Optional[dict] = None, process_id="reduce_dimension", band_math_mode: bool = False, - ) -> "DataCube": + ) -> DataCube: """ Add a reduce process with given reducer callback along given dimension @@ -1125,12 +1148,12 @@ def reduce_dimension( # @openeo_process def chunk_polygon( - self, - chunks: Union[shapely.geometry.base.BaseGeometry, dict, str, pathlib.Path, Parameter, "VectorCube"], - process: Union[str, PGNode, typing.Callable], - mask_value: float = None, - context: Optional[dict] = None, - ) -> 'DataCube': + self, + chunks: Union[shapely.geometry.base.BaseGeometry, dict, str, pathlib.Path, Parameter, VectorCube], + process: Union[str, PGNode, typing.Callable], + mask_value: float = None, + context: Optional[dict] = None, + ) -> DataCube: """ Apply a process to spatial chunks of a data cube. @@ -1167,7 +1190,7 @@ def chunk_polygon( ), ) - def reduce_bands(self, reducer: Union[str, PGNode, typing.Callable, UDF]) -> 'DataCube': + def reduce_bands(self, reducer: Union[str, PGNode, typing.Callable, UDF]) -> DataCube: """ Shortcut for :py:meth:`reduce_dimension` along the band dimension @@ -1175,7 +1198,7 @@ def reduce_bands(self, reducer: Union[str, PGNode, typing.Callable, UDF]) -> 'Da """ return self.reduce_dimension(dimension=self.metadata.band_dimension.name, reducer=reducer, band_math_mode=True) - def reduce_temporal(self, reducer: Union[str, PGNode, typing.Callable, UDF]) -> 'DataCube': + def reduce_temporal(self, reducer: Union[str, PGNode, typing.Callable, UDF]) -> DataCube: """ Shortcut for :py:meth:`reduce_dimension` along the temporal dimension @@ -1187,9 +1210,7 @@ def reduce_temporal(self, reducer: Union[str, PGNode, typing.Callable, UDF]) -> "Use :py:meth:`reduce_bands` with :py:class:`UDF ` as reducer.", version="0.13.0", ) - def reduce_bands_udf( - self, code: str, runtime: Optional[str] = None, version: Optional[str] = None - ) -> "DataCube": + def reduce_bands_udf(self, code: str, runtime: Optional[str] = None, version: Optional[str] = None) -> DataCube: """ Use `reduce_dimension` process with given UDF along band/spectral dimension. """ @@ -1260,7 +1281,7 @@ def apply_neighborhood( size: List[Dict], overlap: List[dict] = None, context: Optional[dict] = None, - ) -> "DataCube": + ) -> DataCube: """ Applies a focal process to a data cube. @@ -1297,7 +1318,7 @@ def apply( self, process: Union[str, typing.Callable, UDF, PGNode], context: Optional[dict] = None, - ) -> "DataCube": + ) -> DataCube: """ Applies a unary process (a local operation) to each value of the specified or all dimensions in the data cube. @@ -1335,7 +1356,7 @@ def apply( ) @openeo_process(process_id="min", mode="reduce_dimension") - def min_time(self) -> 'DataCube': + def min_time(self) -> DataCube: """ Finds the minimum value of a time series for all bands of the input dataset. @@ -1344,7 +1365,7 @@ def min_time(self) -> 'DataCube': return self.reduce_temporal("min") @openeo_process(process_id="max", mode="reduce_dimension") - def max_time(self) -> 'DataCube': + def max_time(self) -> DataCube: """ Finds the maximum value of a time series for all bands of the input dataset. @@ -1353,7 +1374,7 @@ def max_time(self) -> 'DataCube': return self.reduce_temporal("max") @openeo_process(process_id="mean", mode="reduce_dimension") - def mean_time(self) -> "DataCube": + def mean_time(self) -> DataCube: """ Finds the mean value of a time series for all bands of the input dataset. 
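# Illustrative sketch, not part of this patch: mean_time() is just the
# reduce_temporal("mean") shortcut shown above, so both lines below build the same
# process graph. Backend URL and collection id are placeholder assumptions.
import openeo

connection = openeo.connect("https://openeo.example.test")  # hypothetical backend
cube = connection.load_collection("SENTINEL2_L2A")
composite_a = cube.mean_time()
composite_b = cube.reduce_temporal("mean")  # equivalent to mean_time()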
@@ -1362,7 +1383,7 @@ def mean_time(self) -> "DataCube": return self.reduce_temporal("mean") @openeo_process(process_id="median", mode="reduce_dimension") - def median_time(self) -> "DataCube": + def median_time(self) -> DataCube: """ Finds the median value of a time series for all bands of the input dataset. @@ -1371,7 +1392,7 @@ def median_time(self) -> "DataCube": return self.reduce_temporal("median") @openeo_process(process_id="count", mode="reduce_dimension") - def count_time(self) -> "DataCube": + def count_time(self) -> DataCube: """ Counts the number of images with a valid mask in a time series for all bands of the input dataset. @@ -1387,7 +1408,7 @@ def aggregate_temporal( labels: Optional[List[str]] = None, dimension: Optional[str] = None, context: Optional[dict] = None, - ) -> "DataCube": + ) -> DataCube: """ Computes a temporal aggregation based on an array of date and/or time intervals. @@ -1435,7 +1456,7 @@ def aggregate_temporal_period( reducer: Union[str, PGNode, typing.Callable], dimension: Optional[str] = None, context: Optional[Dict] = None, - ) -> "DataCube": + ) -> DataCube: """ Computes a temporal aggregation based on calendar hierarchies such as years, months or seasons. For other calendar hierarchies aggregate_temporal can be used. @@ -1476,7 +1497,7 @@ def aggregate_temporal_period( ) @openeo_process - def ndvi(self, nir: str = None, red: str = None, target_band: str = None) -> 'DataCube': + def ndvi(self, nir: str = None, red: str = None, target_band: str = None) -> DataCube: """ Normalized Difference Vegetation Index (NDVI) @@ -1489,7 +1510,7 @@ def ndvi(self, nir: str = None, red: str = None, target_band: str = None) -> 'Da if target_band is None: metadata = self.metadata.reduce_dimension(self.metadata.band_dimension.name) else: - metadata = self.metadata.append_band(Band(target_band, "ndvi", None)) + metadata = self.metadata.append_band(Band(name=target_band, common_name="ndvi")) return self.process( process_id="ndvi", arguments=dict_no_none( @@ -1521,7 +1542,7 @@ def rename_dimension(self, source: str, target: str): ) @openeo_process - def rename_labels(self, dimension: str, target: list, source: list = None) -> 'DataCube': + def rename_labels(self, dimension: str, target: list, source: list = None) -> DataCube: """ Renames the labels of the specified dimension in the data cube from source to target. @@ -1543,7 +1564,7 @@ def rename_labels(self, dimension: str, target: list, source: list = None) -> 'D ) @openeo_process(mode="apply") - def linear_scale_range(self, input_min, input_max, output_min, output_max) -> 'DataCube': + def linear_scale_range(self, input_min, input_max, output_min, output_max) -> DataCube: """ Performs a linear transformation between the input and output range. @@ -1568,7 +1589,7 @@ def linear_scale_range(self, input_min, input_max, output_min, output_max) -> 'D return self.apply(lambda x: x.linear_scale_range(input_min, input_max, output_min, output_max)) @openeo_process - def mask(self, mask: "DataCube" = None, replacement=None) -> "DataCube": + def mask(self, mask: DataCube = None, replacement=None) -> DataCube: """ Applies a mask to a raster data cube. To apply a vector mask use `mask_polygon`. 
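# Illustrative sketch, not part of this patch: combining the comparison and logical
# operators from the hunks above with DataCube.mask() to mask out cloudy pixels.
# Backend URL, collection id and the SCL class values are placeholder assumptions.
import openeo

connection = openeo.connect("https://openeo.example.test")  # hypothetical backend
s2 = connection.load_collection("SENTINEL2_L2A", bands=["B04", "SCL"])
scl = s2.band("SCL")
cloud_mask = (scl == 8) | (scl == 9)  # uses the __eq__ and __or__ overloads above
masked = s2.mask(mask=cloud_mask)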
@@ -1588,11 +1609,12 @@ def mask(self, mask: "DataCube" = None, replacement=None) -> "DataCube": @openeo_process def mask_polygon( - self, - mask: Union[shapely.geometry.base.BaseGeometry, dict, str, pathlib.Path, Parameter, "VectorCube"], - srs: str = None, - replacement=None, inside: bool = None - ) -> 'DataCube': + self, + mask: Union[shapely.geometry.base.BaseGeometry, dict, str, pathlib.Path, Parameter, VectorCube], + srs: str = None, + replacement=None, + inside: bool = None, + ) -> DataCube: """ Applies a polygon mask to a raster data cube. To apply a raster mask use `mask`. @@ -1626,11 +1648,11 @@ def mask_polygon( @openeo_process def merge_cubes( - self, - other: 'DataCube', - overlap_resolver: Union[str, PGNode, typing.Callable] = None, - context: Optional[dict] = None, - ) -> 'DataCube': + self, + other: DataCube, + overlap_resolver: Union[str, PGNode, typing.Callable] = None, + context: Optional[dict] = None, + ) -> DataCube: """ Merging two data cubes @@ -1672,7 +1694,7 @@ def merge_cubes( def apply_kernel( self, kernel: Union[np.ndarray, List[List[float]]], factor=1.0, border=0, replace_invalid=0 - ) -> "DataCube": + ) -> DataCube: """ Applies a focal operation based on a weighted kernel to each value of the specified dimensions in the data cube. @@ -1703,7 +1725,7 @@ def apply_kernel( @openeo_process def resolution_merge( self, high_resolution_bands: List[str], low_resolution_bands: List[str], method: str = None - ) -> "DataCube": + ) -> DataCube: """ Resolution merging algorithms try to improve the spatial resolution of lower resolution bands (e.g. Sentinel-2 20M) based on higher resolution bands. (e.g. Sentinel-2 10M). @@ -1810,7 +1832,7 @@ def polygonal_standarddeviation_timeseries( def ard_surface_reflectance( self, atmospheric_correction_method: str, cloud_detection_method: str, elevation_model: str = None, atmospheric_correction_options: dict = None, cloud_detection_options: dict = None, - ) -> 'DataCube': + ) -> DataCube: """ Computes CARD4L compliant surface reflectance values from optical input. @@ -1831,12 +1853,7 @@ def ard_surface_reflectance( }) @openeo_process - def atmospheric_correction( - self, - method: str = None, - elevation_model: str = None, - options: dict = None - ) -> 'DataCube': + def atmospheric_correction(self, method: str = None, elevation_model: str = None, options: dict = None) -> DataCube: """ Applies an atmospheric correction that converts top of atmosphere reflectance values into bottom of atmosphere/top of canopy reflectance values. @@ -1860,7 +1877,7 @@ def save_result( self, format: str = _DEFAULT_RASTER_FORMAT, options: Optional[dict] = None, - ) -> "DataCube": + ) -> DataCube: formats = set(self._connection.list_output_formats().keys()) # TODO: map format to correct casing too? if format.lower() not in {f.lower() for f in formats}: @@ -1879,7 +1896,7 @@ def _ensure_save_result( self, format: Optional[str] = None, options: Optional[dict] = None, - ) -> "DataCube": + ) -> DataCube: """ Make sure there is a (final) `save_result` node in the process graph. If there is already one: check if it is consistent with the given format/options (if any) @@ -2147,7 +2164,7 @@ def execute_local_udf(udf: str, datacube: Union[str, 'xarray.DataArray', 'Xarray def ard_normalized_radar_backscatter( self, elevation_model: str = None, contributing_area=False, ellipsoid_incidence_angle: bool = False, noise_removal: bool = True - ) -> "DataCube": + ) -> DataCube: """ Computes CARD4L compliant backscatter (gamma0) from SAR input. 
This method is a variant of :py:meth:`~openeo.rest.datacube.DataCube.sar_backscatter`, @@ -2183,7 +2200,7 @@ def sar_backscatter( ellipsoid_incidence_angle: bool = False, noise_removal: bool = True, options: Optional[dict] = None - ) -> "DataCube": + ) -> DataCube: """ Computes backscatter from SAR input. @@ -2313,7 +2330,7 @@ def predict_random_forest(self, model: Union[str, BatchJob, MlModel], dimension: return self.reduce_dimension(dimension=dimension, reducer=reducer, context=model) @openeo_process - def dimension_labels(self, dimension: str) -> "DataCube": + def dimension_labels(self, dimension: str) -> DataCube: """ Gives all labels for a dimension in the data cube. The labels have the same order as in the data cube. diff --git a/openeo/rest/job.py b/openeo/rest/job.py index 9d85712d7..541662c4a 100644 --- a/openeo/rest/job.py +++ b/openeo/rest/job.py @@ -1,18 +1,25 @@ +from __future__ import annotations + import datetime import json import logging import time import typing from pathlib import Path -from typing import List, Union, Dict, Optional +from typing import Dict, List, Optional, Union import requests -from openeo.api.logs import LogEntry, normalize_log_level, log_level_name +from openeo.api.logs import LogEntry, log_level_name, normalize_log_level from openeo.internal.documentation import openeo_endpoint -from openeo.internal.jupyter import render_component, render_error, VisualDict, VisualList +from openeo.internal.jupyter import ( + VisualDict, + VisualList, + render_component, + render_error, +) from openeo.internal.warnings import deprecated, legacy_alias -from openeo.rest import OpenEoClientException, JobFailedException, OpenEoApiError +from openeo.rest import JobFailedException, OpenEoApiError, OpenEoClientException from openeo.util import ensure_dir if typing.TYPE_CHECKING: @@ -35,7 +42,7 @@ class BatchJob: # TODO #425 method to bootstrap `load_stac` directly from a BatchJob object - def __init__(self, job_id: str, connection: 'Connection'): + def __init__(self, job_id: str, connection: Connection): self.job_id = job_id """Unique identifier of the batch job (string).""" @@ -94,7 +101,7 @@ def estimate(self): estimate_job = legacy_alias(estimate, since="0.20.0", mode="soft") @openeo_endpoint("POST /jobs/{job_id}/results") - def start(self) -> "BatchJob": + def start(self) -> BatchJob: """ Start this batch job. @@ -160,7 +167,7 @@ def download_results(self, target: Union[str, Path] = None) -> Dict[Path, dict]: def get_result(self): return _Result(self) - def get_results(self) -> "JobResults": + def get_results(self) -> JobResults: """ Get handle to batch job results for result metadata inspection or downloading resulting assets. 
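# Illustrative sketch, not part of this patch: the typical BatchJob round trip that the
# annotations in this file describe. Backend URL, collection id and output folder are
# placeholder assumptions.
import openeo

connection = openeo.connect("https://openeo.example.test").authenticate_oidc()  # hypothetical backend
cube = connection.load_collection("SENTINEL2_L2A", bands=["B04"])
job = cube.create_job(out_format="GTiff")
job.start_and_wait()
job.get_results().download_files("output/")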
@@ -221,7 +228,7 @@ def logs( def run_synchronous( self, outputfile: Union[str, Path, None] = None, print=print, max_poll_interval=60, connection_retry_interval=30 - ) -> 'BatchJob': + ) -> BatchJob: """Start the job, wait for it to finish and download result""" self.start_and_wait( print=print, max_poll_interval=max_poll_interval, connection_retry_interval=connection_retry_interval @@ -233,7 +240,7 @@ def run_synchronous( def start_and_wait( self, print=print, max_poll_interval: int = 60, connection_retry_interval: int = 30, soft_error_max=10 - ) -> "BatchJob": + ) -> BatchJob: """ Start the batch job, poll its status and wait till it finishes (or fails) diff --git a/openeo/rest/mlmodel.py b/openeo/rest/mlmodel.py index 8291c4dba..5029fb46d 100644 --- a/openeo/rest/mlmodel.py +++ b/openeo/rest/mlmodel.py @@ -1,7 +1,9 @@ +from __future__ import annotations + import logging import pathlib import typing -from typing import Union, Optional +from typing import Optional, Union from openeo.internal.documentation import openeo_process from openeo.internal.graph_building import PGNode @@ -24,7 +26,8 @@ class MlModel(_ProcessGraphAbstraction): .. versionadded:: 0.10.0 """ - def __init__(self, graph: PGNode, connection: 'Connection'): + + def __init__(self, graph: PGNode, connection: Connection): super().__init__(pgnode=graph, connection=connection) def save_ml_model(self, options: Optional[dict] = None): @@ -41,7 +44,7 @@ def save_ml_model(self, options: Optional[dict] = None): @staticmethod @openeo_process - def load_ml_model(connection: "Connection", id: Union[str, BatchJob]) -> "MlModel": + def load_ml_model(connection: Connection, id: Union[str, BatchJob]) -> MlModel: """ Loads a machine learning model from a STAC Item. diff --git a/openeo/rest/service.py b/openeo/rest/service.py index dd88c6a59..a12383695 100644 --- a/openeo/rest/service.py +++ b/openeo/rest/service.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import typing from typing import List, Optional, Union @@ -12,7 +14,7 @@ class Service: """Represents a secondary web service in openeo.""" - def __init__(self, service_id: str, connection: 'Connection'): + def __init__(self, service_id: str, connection: Connection): # Unique identifier of the secondary web service (string) self.service_id = service_id self.connection = connection diff --git a/openeo/rest/udp.py b/openeo/rest/udp.py index f53ec07a0..b390ed761 100644 --- a/openeo/rest/udp.py +++ b/openeo/rest/udp.py @@ -1,5 +1,7 @@ +from __future__ import annotations + import typing -from typing import List, Union, Optional +from typing import List, Optional, Union from openeo.api.process import Parameter from openeo.internal.graph_building import as_flat_graph @@ -60,7 +62,8 @@ class RESTUserDefinedProcess: """ Wrapper for a user-defined process stored (or to be stored) on an openEO back-end """ - def __init__(self, user_defined_process_id: str, connection: 'Connection'): + + def __init__(self, user_defined_process_id: str, connection: Connection): self.user_defined_process_id = user_defined_process_id self._connection = connection diff --git a/openeo/rest/userfile.py b/openeo/rest/userfile.py index 98c176c51..0732c671c 100644 --- a/openeo/rest/userfile.py +++ b/openeo/rest/userfile.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import typing from pathlib import Path, PurePosixPath from typing import Any, Dict, Optional, Union @@ -18,7 +20,7 @@ def __init__( self, path: Union[str, PurePosixPath, None], *, - connection: "Connection", + connection: Connection, 
metadata: Optional[dict] = None, ): if path: @@ -35,7 +37,7 @@ def __init__( self.connection = connection @classmethod - def from_metadata(cls, metadata: dict, connection: "Connection") -> "UserFile": + def from_metadata(cls, metadata: dict, connection: Connection) -> UserFile: """Build :py:class:`UserFile` from a workspace file metadata dictionary.""" return cls(path=None, connection=connection, metadata=metadata) @@ -69,7 +71,7 @@ def download(self, target: Union[Path, str] = None) -> Path: return target - def upload(self, source: Union[Path, str]) -> "UserFile": + def upload(self, source: Union[Path, str]) -> UserFile: """ Uploads a local file to the path corresponding to this :py:class:`UserFile` in the user workspace and returns new :py:class:`UserFile` of newly uploaded file. diff --git a/openeo/rest/vectorcube.py b/openeo/rest/vectorcube.py index a266c67bb..74c02cbe4 100644 --- a/openeo/rest/vectorcube.py +++ b/openeo/rest/vectorcube.py @@ -1,7 +1,9 @@ +from __future__ import annotations + import json import pathlib import typing -from typing import List, Optional, Union, Tuple, Callable +from typing import Callable, List, Optional, Tuple, Union import shapely.geometry.base @@ -11,10 +13,15 @@ from openeo.internal.graph_building import PGNode from openeo.internal.warnings import legacy_alias from openeo.metadata import CollectionMetadata, Dimension -from openeo.rest._datacube import THIS, UDF, _ProcessGraphAbstraction, build_child_callback +from openeo.rest._datacube import ( + THIS, + UDF, + _ProcessGraphAbstraction, + build_child_callback, +) from openeo.rest.job import BatchJob from openeo.rest.mlmodel import MlModel -from openeo.util import dict_no_none, guess_format, to_bbox_dict, InvalidBBoxException +from openeo.util import InvalidBBoxException, dict_no_none, guess_format, to_bbox_dict if typing.TYPE_CHECKING: # Imports for type checking only (circular import issue at runtime). @@ -30,7 +37,7 @@ class VectorCube(_ProcessGraphAbstraction): A geometry is specified in a 'coordinate reference system'. https://www.w3.org/TR/sdw-bp/#dfn-coordinate-reference-system-(crs) """ - def __init__(self, graph: PGNode, connection: 'Connection', metadata: CollectionMetadata = None): + def __init__(self, graph: PGNode, connection: Connection, metadata: CollectionMetadata = None): super().__init__(pgnode=graph, connection=connection) self.metadata = metadata or self._build_metadata() @@ -51,7 +58,7 @@ def process( metadata: Optional[CollectionMetadata] = None, namespace: Optional[str] = None, **kwargs, - ) -> "VectorCube": + ) -> VectorCube: """ Generic helper to create a new DataCube by applying a process. @@ -66,10 +73,10 @@ def process( @openeo_process def load_geojson( cls, - connection: "openeo.Connection", + connection: Connection, data: Union[dict, str, pathlib.Path, shapely.geometry.base.BaseGeometry, Parameter], properties: Optional[List[str]] = None, - ) -> "VectorCube": + ) -> VectorCube: """ Converts GeoJSON data as defined by RFC 7946 into a vector data cube. 
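# Illustrative sketch, not part of this patch: feeding a GeoJSON geometry to the
# VectorCube.load_geojson() class method shown in the hunk above. Backend URL and the
# geometry itself are placeholder assumptions.
import openeo
from openeo.rest.vectorcube import VectorCube

connection = openeo.connect("https://openeo.example.test")  # hypothetical backend
geometry = {"type": "Polygon", "coordinates": [[[5.0, 51.2], [5.1, 51.2], [5.1, 51.3], [5.0, 51.2]]]}
vector_cube = VectorCube.load_geojson(connection, data=geometry)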
@@ -115,9 +122,7 @@ def load_geojson( @classmethod @openeo_process - def load_url( - cls, connection: "openeo.Connection", url: str, format: str, options: Optional[dict] = None - ) -> "VectorCube": + def load_url(cls, connection: Connection, url: str, format: str, options: Optional[dict] = None) -> VectorCube: """ Loads a file from a URL @@ -144,7 +149,7 @@ def run_udf( runtime: Optional[str] = None, version: Optional[str] = None, context: Optional[dict] = None, - ) -> "VectorCube": + ) -> VectorCube: """ Run a UDF on the vector cube. @@ -194,7 +199,7 @@ def _ensure_save_result( self, format: Optional[str] = None, options: Optional[dict] = None, - ) -> "VectorCube": + ) -> VectorCube: """ Make sure there is a (final) `save_result` node in the process graph. If there is already one: check if it is consistent with the given format/options (if any) @@ -328,7 +333,7 @@ def create_job( send_job = legacy_alias(create_job, name="send_job", since="0.10.0") @openeo_process - def filter_bands(self, bands: List[str]) -> "VectorCube": + def filter_bands(self, bands: List[str]) -> VectorCube: """ .. versionadded:: 0.22.0 """ @@ -348,7 +353,7 @@ def filter_bbox( north: Optional[float] = None, extent: Optional[Union[dict, List[float], Tuple[float, float, float, float], Parameter]] = None, crs: Optional[int] = None, - ) -> "VectorCube": + ) -> VectorCube: """ .. versionadded:: 0.22.0 """ @@ -370,7 +375,7 @@ def filter_bbox( @openeo_process def filter_labels( self, condition: Union[PGNode, Callable], dimension: str, context: Optional[dict] = None - ) -> "VectorCube": + ) -> VectorCube: """ .. versionadded:: 0.22.0 """ @@ -384,7 +389,7 @@ def filter_labels( @openeo_process def filter_vector( self, geometries: Union["VectorCube", shapely.geometry.base.BaseGeometry, dict], relation: str = "intersects" - ) -> "VectorCube": + ) -> VectorCube: """ .. versionadded:: 0.22.0 """ @@ -488,7 +493,7 @@ def apply_dimension( dimension: str, target_dimension: Optional[str] = None, context: Optional[dict] = None, - ) -> "VectorCube": + ) -> VectorCube: """ Applies a process to all values along a dimension of a data cube. For example, if the temporal dimension is specified the process will work on the values of a time series. 
diff --git a/openeo/udf/__init__.py b/openeo/udf/__init__.py index 16ce2cb76..387b8bc3d 100644 --- a/openeo/udf/__init__.py +++ b/openeo/udf/__init__.py @@ -7,7 +7,7 @@ class OpenEoUdfException(BaseOpenEoException): from openeo.udf.debug import inspect from openeo.udf.feature_collection import FeatureCollection -from openeo.udf.run_code import run_udf_code, execute_local_udf +from openeo.udf.run_code import execute_local_udf, run_udf_code from openeo.udf.structured_data import StructuredData from openeo.udf.udf_data import UdfData from openeo.udf.xarraydatacube import XarrayDataCube diff --git a/openeo/udf/feature_collection.py b/openeo/udf/feature_collection.py index df3fbc222..329c618cc 100644 --- a/openeo/udf/feature_collection.py +++ b/openeo/udf/feature_collection.py @@ -3,8 +3,9 @@ """ # Note: this module was initially developed under the ``openeo-udf`` project (https://github.com/Open-EO/openeo-udf) +from __future__ import annotations -from typing import Optional, Union, Any, List +from typing import Any, List, Optional, Union import pandas import shapely.geometry @@ -93,7 +94,7 @@ def to_dict(self) -> dict: return data @classmethod - def from_dict(cls, data: dict) -> "FeatureCollection": + def from_dict(cls, data: dict) -> FeatureCollection: """ Create a feature collection from a python dictionary that was created from the JSON definition of the FeatureCollection diff --git a/openeo/udf/structured_data.py b/openeo/udf/structured_data.py index 9dbce8822..038bb37be 100644 --- a/openeo/udf/structured_data.py +++ b/openeo/udf/structured_data.py @@ -4,6 +4,7 @@ # Note: this module was initially developed under the ``openeo-udf`` project (https://github.com/Open-EO/openeo-udf) +from __future__ import annotations import builtins from typing import Union @@ -38,7 +39,7 @@ def to_dict(self) -> dict: ) @classmethod - def from_dict(cls, data: dict) -> "StructuredData": + def from_dict(cls, data: dict) -> StructuredData: return cls( data=data["data"], description=data.get("description"), diff --git a/openeo/udf/udf_data.py b/openeo/udf/udf_data.py index 8ea798665..e07ccdf8b 100644 --- a/openeo/udf/udf_data.py +++ b/openeo/udf/udf_data.py @@ -4,8 +4,9 @@ # Note: this module was initially developed under the ``openeo-udf`` project (https://github.com/Open-EO/openeo-udf) +from __future__ import annotations -from typing import Optional, List, Union +from typing import List, Optional, Union from openeo.udf.feature_collection import FeatureCollection from openeo.udf.structured_data import StructuredData @@ -113,7 +114,7 @@ def to_dict(self) -> dict: } @classmethod - def from_dict(cls, udf_dict: dict) -> "UdfData": + def from_dict(cls, udf_dict: dict) -> UdfData: """ Create a udf data object from a python dictionary that was created from the JSON definition of the UdfData class diff --git a/openeo/udf/xarraydatacube.py b/openeo/udf/xarraydatacube.py index ba4a6ce7a..fa47beec6 100644 --- a/openeo/udf/xarraydatacube.py +++ b/openeo/udf/xarraydatacube.py @@ -4,18 +4,19 @@ # Note: this module was initially developed under the ``openeo-udf`` project (https://github.com/Open-EO/openeo-udf) +from __future__ import annotations import collections import json import typing from pathlib import Path -from typing import Union, Optional +from typing import Optional, Union import numpy import xarray from openeo.udf import OpenEoUdfException -from openeo.util import dict_no_none, deep_get +from openeo.util import deep_get, dict_no_none if typing.TYPE_CHECKING: # Imports for type checking only (circular import issue 
at runtime).
@@ -81,7 +82,7 @@ def to_dict(self) -> dict:
         })

     @classmethod
-    def from_dict(cls, xdc_dict: dict) -> "XarrayDataCube":
+    def from_dict(cls, xdc_dict: dict) -> XarrayDataCube:
         """
         Create a :py:class:`XarrayDataCube` from a Python dictionary that was created from
         the JSON definition of the data cube
@@ -120,7 +121,7 @@ def _guess_format(path: Union[str, Path]) -> str:
         raise ValueError("Can not guess format of {p}".format(p=path))

     @classmethod
-    def from_file(cls, path: Union[str, Path], fmt=None, **kwargs) -> "XarrayDataCube":
+    def from_file(cls, path: Union[str, Path], fmt=None, **kwargs) -> XarrayDataCube:
         """
         Load data file as :py:class:`XarrayDataCube` in memory
diff --git a/openeo/util.py b/openeo/util.py
index d5b46e3e2..6cead402e 100644
--- a/openeo/util.py
+++ b/openeo/util.py
@@ -1,7 +1,11 @@
 """
 Various utilities and helpers.
 """
-# TODO: split this kitchen-sink in thematic submodules
+
+# TODO #465 split this kitchen-sink in thematic submodules
+
+from __future__ import annotations
+
 import datetime as dt
 import functools
 import json
@@ -281,9 +285,159 @@ def get_temporal_extent(*args,
     elif extent:
         assert start_date is None and end_date is None
         start_date, end_date = extent
+    if start_date and not end_date and isinstance(start_date, str):
+        start_date, end_date = string_to_temporal_extent(start_date)
     return convertor(start_date) if start_date else None, convertor(end_date) if end_date else None
+
+def string_to_temporal_extent(
+    start_date: Union[str, dt.datetime, dt.date]
+) -> Tuple[Union[dt.date, dt.datetime, str], Union[dt.date, dt.datetime, None]]:
+    """Convert a string into a date range when it is an abbreviation for an entire year or month.
+
+    The result is a 2-tuple ``(start, end)`` that represents the period as a
+    half-open interval, where the end date is not included in the period.
+
+    The intent of this function is to only convert values into a period
+    when they are clearly abbreviations, and in all other cases leave the original
+    start_date as it was, because there can be too many complications otherwise.
+
+    The reason is that calling functions, e.g. ``get_temporal_extent``,
+    allow you to specify both a start date **and** an end date, but either date
+    can be ``None``. What such an open-ended interval means depends very much on
+    what the calling function/method is meant to do, so the caller should really
+    handle that themselves.
+
+    When we don't convert, the return value is the tuple ``(start_date, None)``,
+    using the original parameter value start_date, unprocessed.
+
+    :param start_date:
+
+        - Typically a string that represents either a year, a year + month, a day,
+          or a datetime, and it always indicates the *beginning* of that period.
+        - Other allowed data types are ``datetime.date`` and ``datetime.datetime``,
+          and in that case we return the tuple ``(start_date, None)`` where
+          ``start_date`` is our original input parameter ``start_date`` as-is.
+          Similarly, strings that represent a date or datetime are not processed
+          any further and the return value is also ``(start_date, None)``.
+        - Any other type raises a TypeError.
+
+        - Allowed string formats are:
+            - For year: "yyyy"
+            - For year + month: "yyyy-mm"
+              Separators other than "-" technically work, but they are discouraged.
+            - For date and datetime you must follow the RFC 3339 format. See also: class ``Rfc3339``
+
+    :return:
+        The result is a 2-tuple of the form ``(start, end)`` that represents
+        the period as a half-open interval, where the end date is not included,
+        i.e. end is the first day that is no longer part of the time slot.
+
+        When start_date was indeed an abbreviation and thus was converted to
+        a period, the element types will be ``(datetime.date, datetime.date)``.
+
+        If no conversion happened, we return the original start_date wrapped in a
+        2-tuple: ``(start_date, None)``, so the type is the same as the input's type.
+
+    :raises TypeError:
+        when start_date is none of the following types:
+        str, datetime.date, datetime.datetime
+
+    :raises ValueError:
+        when start_date was a string but was not recognized as a year,
+        a month, a date, or a datetime.
+
+    Examples
+    --------
+
+    >>> import datetime
+    >>>
+    >>> # 1. Year: use all data from the start of 2021 to the end of 2021.
+    >>> string_to_temporal_extent("2021")
+    (datetime.date(2021, 1, 1), datetime.date(2022, 1, 1))
+    >>>
+    >>> # 2. Year + month: all data from the start of August 2022 to the end of August 2022.
+    >>> string_to_temporal_extent("2022-08")
+    (datetime.date(2022, 8, 1), datetime.date(2022, 9, 1))
+    >>>
+    >>> # 3. We received a full date 2022-08-15:
+    >>> # In this case we should not process start_date. The calling function/method must
+    >>> # handle the end date, depending on what an interval with an open end means for the caller.
+    >>> # See for example how ``get_temporal_extent`` handles this.
+    >>> string_to_temporal_extent("2022-08-15")
+    ('2022-08-15', None)
+    >>>
+    >>> # 4. Similar to 3), but with a datetime.date instead of a string containing a date.
+    >>> string_to_temporal_extent(datetime.date(2022, 8, 15))
+    (datetime.date(2022, 8, 15), None)
+    >>>
+    >>> # 5. Similar to 3) & 4), but with a datetime.datetime instance.
+    >>> string_to_temporal_extent(datetime.datetime(2022, 8, 15, 0, 0))
+    (datetime.datetime(2022, 8, 15, 0, 0), None)
+    """
+    supported_types = (str, dt.date, dt.datetime)
+    if not isinstance(start_date, supported_types):
+        raise TypeError(
+            "Value of start_date must be one of the following types: "
+            + "str, datetime.date or datetime.datetime, "
+            + f"but it is {type(start_date)}, value={start_date}"
+        )
+
+    # Skip the conversion if start_date is not a string, or if it represents a full day.
+    # If it is a day, we want to let the upstream function handle that case,
+    # because a day could be either a start date or an end date.
+    if not isinstance(start_date, str):
+        return start_date, None
+
+    # Use a separate and stricter regular expression to detect a day, a month,
+    # or a year. Having a regex that only matches one type of period makes it
+    # easier to check that it is effectively only a year, or only a month,
+    # but not a day. Datetime strings are more complex, so we use rfc3339 to
+    # check whether or not the string represents a datetime.
+    regex_day = re.compile(r"^(\d{4})[:/_-](\d{2})[:/_-](\d{2})$")
+    regex_month = re.compile(r"^(\d{4})[:/_-](\d{2})$")
+    regex_year = re.compile(r"^\d{4}$")
+
+    try:
+        rfc3339.parse_datetime(start_date)
+        is_date_time = True
+    except ValueError:
+        is_date_time = False
+
+    match_day = regex_day.match(start_date)
+    match_month = regex_month.match(start_date)
+    match_year = regex_year.match(start_date)
+
+    if is_date_time or match_day:
+        return start_date, None
+
+    if not (match_year or match_month):
+        raise ValueError(
+            f"The value of start_date='{start_date}' does not represent any of: "
+            + "a year ('yyyy'), a year + month ('yyyy-mm'), a date, or a datetime."
+ ) + + if match_month: + year_start = int(match_month.group(1)) + month_start = int(match_month.group(2)) + if month_start == 12: + year_end = year_start + 1 + month_end = 1 + else: + month_end = month_start + 1 + year_end = year_start + else: + year_start = int(start_date) + year_end = year_start + 1 + month_start = 1 + month_end = 1 + + date_start = dt.date(year_start, month_start, 1) + date_end = dt.date(year_end, month_end, 1) + + return date_start, date_end + + class ContextTimer: """ Context manager to measure the "wall clock" time (in seconds) inside/for a block of code. @@ -318,7 +472,7 @@ def elapsed(self) -> float: # Currently elapsed inside context. return self._clock() - self.start - def __enter__(self) -> 'ContextTimer': + def __enter__(self) -> ContextTimer: self.start = self._clock() return self @@ -359,7 +513,7 @@ def __init__(self, title: str = "Timing", logger: Union[logging.Logger, str, Cal self.title = title if isinstance(logger, str): logger = logging.getLogger(logger) - if isinstance(logger, logging.Logger): + if isinstance(logger, (logging.Logger, logging.LoggerAdapter)): self._log = logger.info elif callable(logger): self._log = logger @@ -559,7 +713,7 @@ def __init__(self, *, west: float, south: float, east: float, north: float, crs: # TODO: provide west, south, east, north, crs as @properties? Read-only or read-write? @classmethod - def from_any(cls, x: Any, *, crs: Optional[str] = None) -> 'BBoxDict': + def from_any(cls, x: Any, *, crs: Optional[str] = None) -> BBoxDict: if isinstance(x, dict): if crs and "crs" in x and crs != x["crs"]: raise InvalidBBoxException(f"Two CRS values specified: {crs} and {x['crs']}") @@ -573,7 +727,7 @@ def from_any(cls, x: Any, *, crs: Optional[str] = None) -> 'BBoxDict': raise InvalidBBoxException(f"Can not construct BBoxDict from {x!r}") @classmethod - def from_dict(cls, data: dict) -> 'BBoxDict': + def from_dict(cls, data: dict) -> BBoxDict: """Build from dictionary with at least keys "west", "south", "east", and "north".""" expected_fields = {"west", "south", "east", "north"} # TODO: also support upper case fields? @@ -587,7 +741,7 @@ def from_dict(cls, data: dict) -> 'BBoxDict': return cls(west=data["west"], south=data["south"], east=data["east"], north=data["north"], crs=data.get("crs")) @classmethod - def from_sequence(cls, seq: Union[list, tuple], crs: Optional[str] = None) -> 'BBoxDict': + def from_sequence(cls, seq: Union[list, tuple], crs: Optional[str] = None) -> BBoxDict: """Build from sequence of 4 bounds (west, south, east and north).""" if len(seq) != 4: raise InvalidBBoxException(f"Expected sequence with 4 items, but got {len(seq)}.") diff --git a/pyproject.toml b/pyproject.toml index 8f5576a5a..bb86d577f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,5 +6,9 @@ build-backend = "setuptools.build_meta" [tool.black] line-length = 120 +[tool.isort] +# Run isort in black-compatible mode (https://pycqa.github.io/isort/docs/configuration/black_compatibility.html) +profile = "black" + [tool.ruff] line-length = 120 diff --git a/setup.py b/setup.py index 28fcd0eea..1b14f8ca4 100644 --- a/setup.py +++ b/setup.py @@ -1,4 +1,4 @@ -from setuptools import setup, find_packages +from setuptools import find_packages, setup # Load the openeo version info. 
# @@ -27,7 +27,7 @@ "geopandas", "flake8>=5.0.0", "time_machine", - "pyproj", # Pyproj is an optional, best-effort runtime dependency # TODO #460 set a high enough minimum version when py3.6 support can be dropped + "pyproj>=3.2.0", # Pyproj is an optional, best-effort runtime dependency ] docs_require = [ @@ -60,7 +60,7 @@ long_description=long_description, long_description_content_type="text/markdown", url="https://github.com/Open-EO/openeo-python-client", - python_requires=">=3.6", + python_requires=">=3.7", packages=find_packages(include=["openeo*"]), include_package_data=True, tests_require=tests_require, @@ -73,6 +73,7 @@ "pandas>0.20.0", "deprecated>=1.2.12", 'oschmod>=0.3.12; sys_platform == "win32"', + "importlib_resources; python_version<'3.9'", ], extras_require={ "tests": tests_require, @@ -88,7 +89,6 @@ "console_scripts": ["openeo-auth=openeo.rest.auth.cli:main"], }, classifiers=[ - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", diff --git a/specs/update-subrepos.py b/specs/update-subrepos.py index dbf0d6d11..e5f430915 100755 --- a/specs/update-subrepos.py +++ b/specs/update-subrepos.py @@ -11,7 +11,6 @@ # TODO: windows support? -import collections import contextlib import logging import logging.config @@ -19,13 +18,16 @@ import subprocess import sys from pathlib import Path -from typing import List - +from typing import List, NamedTuple _log = logging.getLogger(__name__) _ROOT_DIR = Path(__file__).parent -SubRepo = collections.namedtuple("SubRepo", ["url", "rev", "path"]) + +class SubRepo(NamedTuple): + url: str + rev: str + path: str def main(): diff --git a/tests/api/test_logs.py b/tests/api/test_logs.py index 034ef5e27..5d228a42f 100644 --- a/tests/api/test_logs.py +++ b/tests/api/test_logs.py @@ -1,8 +1,9 @@ import logging -from openeo.api.logs import LogEntry, normalize_log_level, log_level_name import pytest +from openeo.api.logs import LogEntry, log_level_name, normalize_log_level + def test_log_entry_empty(): with pytest.raises(ValueError, match="Missing required fields"): diff --git a/tests/extra/spectral_indices/test_spectral_indices.py b/tests/extra/spectral_indices/test_spectral_indices.py index 9e8e1863d..c49f28f37 100644 --- a/tests/extra/spectral_indices/test_spectral_indices.py +++ b/tests/extra/spectral_indices/test_spectral_indices.py @@ -2,11 +2,11 @@ from openeo.extra.spectral_indices import ( append_and_rescale_indices, - compute_and_rescale_indices, - compute_indices, + append_index, append_indices, + compute_and_rescale_indices, compute_index, - append_index, + compute_indices, list_indices, load_indices, ) diff --git a/tests/extra/test_job_management.py b/tests/extra/test_job_management.py index 952b40ef6..ef7c08d99 100644 --- a/tests/extra/test_job_management.py +++ b/tests/extra/test_job_management.py @@ -1,6 +1,4 @@ import json -import multiprocessing -import platform import threading from unittest import mock @@ -11,19 +9,14 @@ # httpretty avoids this specific problem because it mocks at the socket level, # But I would rather not have two dependencies with almost the same goal. import httpretty - -# Alias to avoid conflicting names in httpretty. 
-from httpretty.core import httpretty as corehttpretty - import pandas as pd import pytest import requests import shapely.geometry.point as shpt - import openeo -from openeo.extra.job_management import MultiBackendJobManager, MAX_RETRIES from openeo import BatchJob +from openeo.extra.job_management import MAX_RETRIES, MultiBackendJobManager class TestMultiBackendJobManager: @@ -363,12 +356,10 @@ def test_is_resilient_to_backend_failures( # First fail the max times the connection should retry, then succeed. after that response_list = [ - corehttpretty.Response( - f"Simulate error HTTP {http_error_status}", status=http_error_status - ) + httpretty.Response(f"Simulate error HTTP {http_error_status}", status=http_error_status) ] * MAX_RETRIES response_list += [ - corehttpretty.Response( + httpretty.Response( body=json.dumps( { "id": job_id, @@ -439,7 +430,7 @@ def test_resilient_backend_reports_error_when_max_retries_exceeded( # in running mode at one point. # Namely, we want to check that it flags the job stopped with an error. response_list = [ - corehttpretty.Response( + httpretty.Response( body=json.dumps( { "id": job_id, @@ -449,11 +440,9 @@ def test_resilient_backend_reports_error_when_max_retries_exceeded( ) ) ] - response_list += [ - corehttpretty.Response( - f"Simulate error HTTP {http_error_status}", status=http_error_status - ) - ] * (MAX_RETRIES + 1) + response_list += [httpretty.Response(f"Simulate error HTTP {http_error_status}", status=http_error_status)] * ( + MAX_RETRIES + 1 + ) httpretty.register_uri( "GET", f"{backend}/jobs/{job_id}", responses=response_list diff --git a/tests/internal/processes/test_builder.py b/tests/internal/processes/test_builder.py index c8a88a98c..d71433294 100644 --- a/tests/internal/processes/test_builder.py +++ b/tests/internal/processes/test_builder.py @@ -7,7 +7,11 @@ import openeo.processes from openeo.internal.graph_building import PGNode -from openeo.internal.processes.builder import ProcessBuilderBase, convert_callable_to_pgnode, get_parameter_names +from openeo.internal.processes.builder import ( + ProcessBuilderBase, + convert_callable_to_pgnode, + get_parameter_names, +) from openeo.rest import OpenEoClientException diff --git a/tests/internal/processes/test_generator.py b/tests/internal/processes/test_generator.py index fd7515e3f..7749b9f9c 100644 --- a/tests/internal/processes/test_generator.py +++ b/tests/internal/processes/test_generator.py @@ -1,7 +1,11 @@ from io import StringIO from textwrap import dedent -from openeo.internal.processes.generator import PythonRenderer, generate_process_py, collect_processes +from openeo.internal.processes.generator import ( + PythonRenderer, + collect_processes, + generate_process_py, +) from openeo.internal.processes.parse import Process from tests import get_test_resource @@ -225,7 +229,7 @@ def test_generate_process_py(): generate_process_py(processes, output=output) lines = output.getvalue().split("\n") assert "class ProcessBuilder(ProcessBuilderBase):" in lines - assert " def incr(self) -> 'ProcessBuilder':" in lines - assert " def add(self, y) -> 'ProcessBuilder':" in lines + assert " def incr(self) -> ProcessBuilder:" in lines + assert " def add(self, y) -> ProcessBuilder:" in lines assert "def incr(x) -> ProcessBuilder:" in lines assert "def add(x, y) -> ProcessBuilder:" in lines diff --git a/tests/internal/processes/test_parse.py b/tests/internal/processes/test_parse.py index dd6f4e412..db2dbf351 100644 --- a/tests/internal/processes/test_parse.py +++ b/tests/internal/processes/test_parse.py 
@@ -1,4 +1,4 @@ -from openeo.internal.processes.parse import Parameter, Schema, Returns, Process +from openeo.internal.processes.parse import Parameter, Process, Returns, Schema def test_schema(): diff --git a/tests/internal/test_graphbuilding.py b/tests/internal/test_graphbuilding.py index acc0f9d7a..46c6171c3 100644 --- a/tests/internal/test_graphbuilding.py +++ b/tests/internal/test_graphbuilding.py @@ -5,7 +5,12 @@ import openeo.processes from openeo.api.process import Parameter -from openeo.internal.graph_building import FlatGraphNodeIdGenerator, PGNode, PGNodeGraphUnflattener, ReduceNode +from openeo.internal.graph_building import ( + FlatGraphNodeIdGenerator, + PGNode, + PGNodeGraphUnflattener, + ReduceNode, +) from openeo.internal.process_graph_visitor import ProcessGraphVisitException diff --git a/tests/internal/test_process_graph_visitor.py b/tests/internal/test_process_graph_visitor.py index f0a3a9926..2edaf240c 100644 --- a/tests/internal/test_process_graph_visitor.py +++ b/tests/internal/test_process_graph_visitor.py @@ -1,9 +1,12 @@ -from unittest.mock import MagicMock, call, ANY +from unittest.mock import ANY, MagicMock, call import pytest -from openeo.internal.process_graph_visitor import ProcessGraphVisitor, ProcessGraphUnflattener, \ - ProcessGraphVisitException +from openeo.internal.process_graph_visitor import ( + ProcessGraphUnflattener, + ProcessGraphVisitException, + ProcessGraphVisitor, +) def test_visit_node(): diff --git a/tests/internal/test_warnings.py b/tests/internal/test_warnings.py index 3ba7dad82..4e7f6da8d 100644 --- a/tests/internal/test_warnings.py +++ b/tests/internal/test_warnings.py @@ -3,7 +3,7 @@ import pytest -from openeo.internal.warnings import legacy_alias, UserDeprecationWarning, deprecated +from openeo.internal.warnings import UserDeprecationWarning, deprecated, legacy_alias def test_user_deprecation_warning(pytester): diff --git a/tests/local/test_local_collection.py b/tests/local/test_local_collection.py index 4115ecd0d..08ecd04ab 100644 --- a/tests/local/test_local_collection.py +++ b/tests/local/test_local_collection.py @@ -1,7 +1,7 @@ -import pytest -import xarray as xr import numpy as np import pandas as pd +import pytest +import xarray as xr try: from openeo.local import LocalConnection diff --git a/tests/rest/auth/test_config.py b/tests/rest/auth/test_config.py index d9c3a59d9..bb7a3f0ef 100644 --- a/tests/rest/auth/test_config.py +++ b/tests/rest/auth/test_config.py @@ -1,5 +1,5 @@ -import logging import json +import logging import platform import re from unittest import mock @@ -7,7 +7,12 @@ import pytest import openeo.rest.auth.config -from openeo.rest.auth.config import RefreshTokenStore, AuthConfig, PrivateJsonFile, get_file_mode +from openeo.rest.auth.config import ( + AuthConfig, + PrivateJsonFile, + RefreshTokenStore, + get_file_mode, +) class TestPrivateJsonFile: diff --git a/tests/rest/auth/test_testing.py b/tests/rest/auth/test_testing.py index 407588388..9b50d30a2 100644 --- a/tests/rest/auth/test_testing.py +++ b/tests/rest/auth/test_testing.py @@ -1,7 +1,7 @@ from openeo.rest.auth.oidc import ( - OidcProviderInfo, - OidcClientInfo, OidcClientCredentialsAuthenticator, + OidcClientInfo, + OidcProviderInfo, ) from openeo.rest.auth.testing import OidcMock diff --git a/tests/rest/datacube/test_bandmath.py b/tests/rest/datacube/test_bandmath.py index 41d13b4fd..00de0f36e 100644 --- a/tests/rest/datacube/test_bandmath.py +++ b/tests/rest/datacube/test_bandmath.py @@ -10,8 +10,9 @@ import openeo from openeo.rest import 
BandMathException -from .. import get_download_graph + from ... import load_json_resource +from .. import get_download_graph from .test_datacube import _get_leaf_node diff --git a/tests/rest/datacube/test_base_datacube.py b/tests/rest/datacube/test_base_datacube.py index 7392bb425..c49ee2bf2 100644 --- a/tests/rest/datacube/test_base_datacube.py +++ b/tests/rest/datacube/test_base_datacube.py @@ -1,5 +1,5 @@ from openeo.internal.graph_building import PGNode -from openeo.rest._datacube import build_child_callback, UDF +from openeo.rest._datacube import UDF, build_child_callback def test_build_child_callback_str(): diff --git a/tests/rest/datacube/test_datacube100.py b/tests/rest/datacube/test_datacube100.py index d3183d394..cad00babf 100644 --- a/tests/rest/datacube/test_datacube100.py +++ b/tests/rest/datacube/test_datacube100.py @@ -8,8 +8,8 @@ import io import json import pathlib -import sys import re +import sys import textwrap from typing import Optional @@ -27,10 +27,11 @@ from openeo.internal.warnings import UserDeprecationWarning from openeo.rest import OpenEoClientException from openeo.rest.connection import Connection -from openeo.rest.datacube import THIS, DataCube, ProcessBuilder, UDF +from openeo.rest.datacube import THIS, UDF, DataCube, ProcessBuilder from openeo.rest.vectorcube import VectorCube -from .conftest import API_URL, setup_collection_metadata, DEFAULT_S2_METADATA + from ... import load_json_resource +from .conftest import API_URL, DEFAULT_S2_METADATA, setup_collection_metadata basic_geometry_types = [ ( @@ -192,8 +193,8 @@ def _get_normalizable_crs_inputs(): yield "EPSG:32631" yield 32631 if pyproj.__version__ >= ComparableVersion("3.3.1"): + # TODO drop this skip once support for python 3.7 is dropped (pyproj 3.3.0 requires at least python 3.8) # pyproj below 3.3.1 does not support int-like strings - # TODO #460 this skip is only necessary for python 3.6 and lower yield "32631" yield "+proj=utm +zone=31 +datum=WGS84 +units=m +no_defs" # is also EPSG:32631, in proj format if pyproj.__version__ >= ComparableVersion("3.1.0"): @@ -2393,7 +2394,7 @@ def test_unflatten_dimension(con100): def test_merge_if(con100): """https://github.com/Open-EO/openeo-python-client/issues/275""" - from openeo.processes import if_, eq + from openeo.processes import eq, if_ s1 = con100.load_collection("S2") s2 = con100.load_collection("SENTINEL2_RADIOMETRY_10M") diff --git a/tests/rest/datacube/test_mlmodel.py b/tests/rest/datacube/test_mlmodel.py index 07df0faad..3bfd996a7 100644 --- a/tests/rest/datacube/test_mlmodel.py +++ b/tests/rest/datacube/test_mlmodel.py @@ -2,6 +2,7 @@ from openeo import BatchJob from openeo.rest.mlmodel import MlModel + from .conftest import API_URL FEATURE_COLLECTION_1 = { diff --git a/tests/rest/datacube/test_processbuilder.py b/tests/rest/datacube/test_processbuilder.py index 59eea367b..595c7bebd 100644 --- a/tests/rest/datacube/test_processbuilder.py +++ b/tests/rest/datacube/test_processbuilder.py @@ -6,6 +6,7 @@ import openeo.processes from openeo.internal.graph_building import PGNode from openeo.processes import ProcessBuilder + from ... 
import load_json_resource @@ -51,7 +52,7 @@ def test_apply_callback_chain_lambda_method(con100): def test_apply_callback_chain_lambda_functions(con100): im = con100.load_collection("S2") - from openeo.processes import absolute, cos, add + from openeo.processes import absolute, add, cos result = im.apply(lambda data: add(cos(absolute(data)), 1.23)) assert result.flat_graph() == load_json_resource('data/1.0.0/apply_chain.json') @@ -73,7 +74,7 @@ def transform(x: ProcessBuilder) -> ProcessBuilder: def test_apply_callback_chain_custom_function_functions(con100): - from openeo.processes import absolute, cos, add + from openeo.processes import absolute, add, cos def transform(x: ProcessBuilder) -> ProcessBuilder: return add(cos(absolute(x)), y=1.23) @@ -303,7 +304,7 @@ def test_apply_dimension_bandmath_lambda(con100): def test_apply_dimension_time_to_bands(con100): - from openeo.processes import array_concat, quantiles, sd, mean + from openeo.processes import array_concat, mean, quantiles, sd im = con100.load_collection("S2") res = im.apply_dimension( process=lambda d: array_concat(quantiles(d, [0.25, 0.5, 0.75]), [sd(d), mean(d)]), diff --git a/tests/rest/datacube/test_vectorcube.py b/tests/rest/datacube/test_vectorcube.py index beb447da7..3f2ea69d3 100644 --- a/tests/rest/datacube/test_vectorcube.py +++ b/tests/rest/datacube/test_vectorcube.py @@ -4,12 +4,12 @@ import pytest import shapely.geometry +import openeo.processes from openeo.api.process import Parameter from openeo.internal.graph_building import PGNode from openeo.rest._testing import DummyBackend from openeo.rest.vectorcube import VectorCube from openeo.util import InvalidBBoxException -import openeo.processes @pytest.fixture diff --git a/tests/rest/datacube/test_zonal_stats.py b/tests/rest/datacube/test_zonal_stats.py index 799a56f87..bd456bd2b 100644 --- a/tests/rest/datacube/test_zonal_stats.py +++ b/tests/rest/datacube/test_zonal_stats.py @@ -4,8 +4,9 @@ import openeo.processes from openeo.api.process import Parameter from openeo.capabilities import ComparableVersion -from .. import get_execute_graph + from ... import load_json_resource +from .. 
diff --git a/tests/rest/test_conversions.py b/tests/rest/test_conversions.py
index 9f71b6b70..9aa568260 100644
--- a/tests/rest/test_conversions.py
+++ b/tests/rest/test_conversions.py
@@ -3,7 +3,10 @@
 import pytest
 from pandas.testing import assert_frame_equal, assert_series_equal
 
-from openeo.rest.conversions import timeseries_json_to_pandas, InvalidTimeSeriesException
+from openeo.rest.conversions import (
+    InvalidTimeSeriesException,
+    timeseries_json_to_pandas,
+)
 
 DATE1 = "2019-01-11T11:11:11Z"
 DATE2 = "2019-02-22T22:22:22Z"
diff --git a/tests/rest/test_job.py b/tests/rest/test_job.py
index 4737271fd..38d23907c 100644
--- a/tests/rest/test_job.py
+++ b/tests/rest/test_job.py
@@ -11,6 +11,7 @@
 import openeo.rest.job
 from openeo.rest import JobFailedException, OpenEoClientException
 from openeo.rest.job import BatchJob, ResultAsset
+
 from .test_connection import _credentials_basic_handler
 
 API_URL = "https://oeo.test"
diff --git a/tests/rest/test_job_results.py b/tests/rest/test_job_results.py
index 9996fd167..d6f6edc4f 100644
--- a/tests/rest/test_job_results.py
+++ b/tests/rest/test_job_results.py
@@ -1,6 +1,7 @@
-import openeo
 import pytest
 
+import openeo
+
 API_URL = "https://oeo.net"
diff --git a/tests/rest/test_udp.py b/tests/rest/test_udp.py
index 89f89e98a..b793fcb64 100644
--- a/tests/rest/test_udp.py
+++ b/tests/rest/test_udp.py
@@ -5,6 +5,7 @@
 import openeo
 from openeo.api.process import Parameter
 from openeo.rest.udp import RESTUserDefinedProcess, build_process_dict
+
 from .. import load_json_resource
 
 API_URL = "https://oeo.test"
diff --git a/tests/test_capabilities.py b/tests/test_capabilities.py
index ced489d4b..335c0e64d 100644
--- a/tests/test_capabilities.py
+++ b/tests/test_capabilities.py
@@ -1,6 +1,6 @@
 import pytest
 
-from openeo.capabilities import ComparableVersion, ApiVersionException
+from openeo.capabilities import ApiVersionException, ComparableVersion
 
 
 class TestComparableVersion:
diff --git a/tests/test_config.py b/tests/test_config.py
index c2e889090..089fcbdc6 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -20,7 +20,6 @@
     get_user_data_dir,
 )
 
-
 DATA_ROOT_DIR = Path(__file__).parent / "data"
diff --git a/tests/test_metadata.py b/tests/test_metadata.py
index 0d34fcc96..5a482673f 100644
--- a/tests/test_metadata.py
+++ b/tests/test_metadata.py
@@ -1,10 +1,19 @@
-import contextlib
+from __future__ import annotations
+
 from typing import List
 
 import pytest
 
-from openeo.metadata import CollectionMetadata, Band, SpatialDimension, Dimension, TemporalDimension, BandDimension, \
-    MetadataException, DimensionAlreadyExistsException
+from openeo.metadata import (
+    Band,
+    BandDimension,
+    CollectionMetadata,
+    Dimension,
+    DimensionAlreadyExistsException,
+    MetadataException,
+    SpatialDimension,
+    TemporalDimension,
+)
 
 
 def test_metadata_get():
@@ -27,6 +36,11 @@ def test_metadata_extent():
     assert metadata.extent == {"spatial": {"xmin": 4, "xmax": 10}}
 
 
+def test_band_minimal():
+    band = Band("red")
+    assert band.name == "red"
+
+
 def test_band_dimension():
     bdim = BandDimension(name="spectral", bands=[
         Band("B02", "blue", 0.490),
@@ -546,7 +560,7 @@ def __init__(self, metadata: dict, dimensions: List[Dimension] = None, bbox=None
 
     def _clone_and_update(
         self, metadata: dict = None, dimensions: List[Dimension] = None, bbox=None, **kwargs
-    ) -> 'MyCollectionMetadata':
+    ) -> MyCollectionMetadata:
         return super()._clone_and_update(metadata=metadata, dimensions=dimensions, bbox=bbox or self.bbox, **kwargs)
 
     def filter_bbox(self, bbox):
diff --git a/tests/test_util.py b/tests/test_util.py
index 15406bcac..bc81ef877 100644
--- a/tests/test_util.py
+++ b/tests/test_util.py
@@ -3,8 +3,8 @@
 import logging
 import os
 import pathlib
-import sys
 import re
+import sys
 import unittest.mock as mock
 from typing import List, Union
@@ -17,12 +17,12 @@
     BBoxDict,
     ContextTimer,
     DeepKeyError,
+    InvalidBBoxException,
     LazyLoadCache,
     Rfc3339,
     SimpleProgressBar,
     TimingLogger,
     clip,
-    normalize_crs,
     deep_get,
     deep_set,
     dict_no_none,
@@ -31,12 +31,13 @@
     first_not_none,
     get_temporal_extent,
     guess_format,
+    normalize_crs,
     repr_truncate,
     rfc3339,
     str_truncate,
+    string_to_temporal_extent,
     to_bbox_dict,
     url_join,
-    InvalidBBoxException,
 )
@@ -392,7 +393,10 @@ def test_get_temporal_extent():
     assert get_temporal_extent(start_date="2019-03-15", end_date="2019-10-11") == ("2019-03-15", "2019-10-11")
     assert get_temporal_extent(start_date="2019-03-15") == ("2019-03-15", None)
     assert get_temporal_extent(end_date="2019-10-11") == (None, "2019-10-11")
-
+    assert get_temporal_extent(start_date="2019") == ("2019-01-01", "2020-01-01")
+    assert get_temporal_extent(start_date="2019-01") == ("2019-01-01", "2019-02-01")
+    assert get_temporal_extent(start_date="2019-11") == ("2019-11-01", "2019-12-01")
+    assert get_temporal_extent(start_date="2019-12") == ("2019-12-01", "2020-01-01")
 
 def test_context_timer_basic():
     with mock.patch.object(ContextTimer, "_clock", new=_fake_clock([3, 5, 8, 13])):
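The four new assertions in the test_get_temporal_extent hunk above pin down the year/month shorthand behaviour: a bare year expands to a half-open interval covering that whole year, and a "YYYY-MM" value expands to that month. A minimal usage sketch, distilled only from those assertions (not part of the diff itself):

    from openeo.util import get_temporal_extent

    # Bare year: expands to [Jan 1 of that year, Jan 1 of the next year)
    assert get_temporal_extent(start_date="2019") == ("2019-01-01", "2020-01-01")
    # Year-month: expands to [first day of that month, first day of the next month)
    assert get_temporal_extent(start_date="2019-11") == ("2019-11-01", "2019-12-01")
    # A full date is passed through unchanged; the other bound stays None when not given.
    assert get_temporal_extent(start_date="2019-03-15") == ("2019-03-15", None)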
@@ -946,7 +950,7 @@ class TestNormalizeCrs:
     def test_normalize_crs_succeeds_with_correct_crses(self, epsg_input, expected):
         """Happy path, values that are allowed"""
         if isinstance(epsg_input, str) and epsg_input.isnumeric() and pyproj.__version__ < ComparableVersion("3.3.1"):
-            # TODO #460 this skip is only necessary for python 3.6 and lower
+            # TODO drop this skip once support for python 3.7 is dropped (pyproj 3.3.0 requires at least python 3.8)
             pytest.skip("pyproj below 3.3.1 does not support int-like strings")
         assert normalize_crs(epsg_input) == expected
@@ -984,11 +988,6 @@ def test_normalize_crs_without_pyproj_accept_non_epsg_string(self, caplog):
             in caplog.text
         )
 
-    @pytest.mark.skipif(
-        # TODO #460 this skip is only necessary for python 3.6 and lower
-        pyproj.__version__ < ComparableVersion("3.1.0"),
-        reason="WKT2 format support requires pyproj 3.1.0 or higher",
-    )
     def test_normalize_crs_succeeds_with_wkt2_input(self):
         """Test can handle WKT2 strings.
@@ -1070,7 +1069,7 @@ def test_normalize_crs_without_pyproj_succeeds_with_wkt2_input(self):
         }
 
     @pytest.mark.skipif(
-        # TODO #460 this skip is only necessary for python 3.6 and lower
+        # TODO drop this skip once support for python 3.7 is dropped (pyproj 3.3.0 requires at least python 3.8)
         pyproj.__version__ < ComparableVersion("3.3.0"),
         reason="PROJJSON format support requires pyproj 3.3.0 or higher",
     )
@@ -1110,3 +1109,62 @@ def test_normalize_crs_succeeds_with_correct_projstring(self, epsg_input, expect
     def test_normalize_crs_handles_incorrect_crs(self, epsg_input, use_pyproj):
         with pytest.raises(ValueError):
             normalize_crs(epsg_input, use_pyproj=use_pyproj)
+
+
+@pytest.mark.parametrize(
+    ["date_input", "expected_start", "expected_end"],
+    [
+        ("2023", dt.date(2023, 1, 1), dt.date(2024, 1, 1)),
+        ("1999", dt.date(1999, 1, 1), dt.date(2000, 1, 1)),
+        ("2023-03", dt.date(2023, 3, 1), dt.date(2023, 4, 1)),
+        ("2023/03", dt.date(2023, 3, 1), dt.date(2023, 4, 1)),
+        ("2023-01", dt.date(2023, 1, 1), dt.date(2023, 2, 1)),
+        ("2023/01", dt.date(2023, 1, 1), dt.date(2023, 2, 1)),
+        ("2022-12", dt.date(2022, 12, 1), dt.date(2023, 1, 1)),
+        ("2022/12", dt.date(2022, 12, 1), dt.date(2023, 1, 1)),
+        ("2022-11", dt.date(2022, 11, 1), dt.date(2022, 12, 1)),
+        ("2022/11", dt.date(2022, 11, 1), dt.date(2022, 12, 1)),
+        ("2022-12-31", "2022-12-31", None),
+        ("2022/12/31", "2022/12/31", None),
+        ("2022-11-30", "2022-11-30", None),
+        ("2022/11/30", "2022/11/30", None),
+        ("2022-12-31T12:33:05Z", "2022-12-31T12:33:05Z", None),
+        (dt.date(2022, 11, 1), dt.date(2022, 11, 1), None),
+        (dt.datetime(2022, 11, 1, 15, 30, 00), dt.datetime(2022, 11, 1, 15, 30, 00), None),
+    ],
+)
+def test_string_to_temporal_extent(date_input: str, expected_start: dt.date, expected_end: dt.date):
+    actual_start, actual_end = string_to_temporal_extent(date_input)
+    assert actual_start == expected_start
+    assert actual_end == expected_end
+
+
+@pytest.mark.parametrize(
+    "date_input",
+    [
+        "foobar",
+        "20-22-12-31",
+        "2022/12/31/aa1/bb/cc",
+        "20-2--12",
+        "2021-2--12",
+        "2021-1-1-",
+        "2021-2-",
+        "-2021-2",
+    ],
+)
+def test_string_to_temporal_extent_raises_valueerror(date_input: Union[str, dt.date, dt.datetime]):
+    with pytest.raises(ValueError):
+        string_to_temporal_extent(date_input)
+
+
+@pytest.mark.parametrize(
+    "date_input",
+    [
+        2000,
+        {},
+        (),
+    ],
+)
+def test_string_to_temporal_extent_raises_typeerror(date_input: any):
+    with pytest.raises(TypeError):
+        string_to_temporal_extent(date_input)
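Taken together, the parametrized tests above document the contract of the new openeo.util.string_to_temporal_extent helper: year and month strings (with "-" or "/" separators) expand to a (start, end) pair of datetime.date objects covering a half-open interval, full dates and datetimes are passed through with None as end, malformed strings raise ValueError, and non-string/non-date input raises TypeError. A short sketch distilled from those test cases (not part of the diff itself):

    import datetime as dt

    from openeo.util import string_to_temporal_extent

    # Month shorthand: half-open month interval.
    assert string_to_temporal_extent("2023-03") == (dt.date(2023, 3, 1), dt.date(2023, 4, 1))
    # Year shorthand: half-open year interval.
    assert string_to_temporal_extent("2023") == (dt.date(2023, 1, 1), dt.date(2024, 1, 1))
    # A full date is passed through as-is; no end date is derived.
    assert string_to_temporal_extent("2022-12-31") == ("2022-12-31", None)

    try:
        string_to_temporal_extent("foobar")  # malformed string
    except ValueError:
        pass

    try:
        string_to_temporal_extent(2000)  # unsupported input type
    except TypeError:
        pass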
diff --git a/tests/udf/test_run_code.py b/tests/udf/test_run_code.py
index ec640aa7a..5b28afde2 100644
--- a/tests/udf/test_run_code.py
+++ b/tests/udf/test_run_code.py
@@ -7,8 +7,15 @@
 import xarray
 
 from openeo.udf import UdfData, XarrayDataCube
-from openeo.udf.run_code import run_udf_code, _get_annotation_str, _annotation_is_pandas_series, \
-    _annotation_is_udf_datacube, _annotation_is_udf_data, execute_local_udf
+from openeo.udf.run_code import (
+    _annotation_is_pandas_series,
+    _annotation_is_udf_data,
+    _annotation_is_udf_datacube,
+    _get_annotation_str,
+    execute_local_udf,
+    run_udf_code,
+)
+
 from .test_xarraydatacube import _build_xdc
 
 UDF_CODE_PATH = Path(__file__).parent / "udf_code"
diff --git a/tests/udf/test_udf_data.py b/tests/udf/test_udf_data.py
index f3246f572..cce6aedb5 100644
--- a/tests/udf/test_udf_data.py
+++ b/tests/udf/test_udf_data.py
@@ -4,7 +4,7 @@
 from geopandas import GeoDataFrame
 from shapely.geometry import Point
 
-from openeo.udf import StructuredData, UdfData, XarrayDataCube, FeatureCollection
+from openeo.udf import FeatureCollection, StructuredData, UdfData, XarrayDataCube
 
 
 def test_structured_data_list():
diff --git a/tests/udf/test_xarraydatacube.py b/tests/udf/test_xarraydatacube.py
index b459ea91a..cfa4dea4b 100644
--- a/tests/udf/test_xarraydatacube.py
+++ b/tests/udf/test_xarraydatacube.py
@@ -1,6 +1,6 @@
 import itertools
-from typing import Union, Optional, List, Tuple, Iterator, NamedTuple
-import collections
+from typing import Iterator, List, NamedTuple, Optional, Tuple, Union
+
 import numpy
 import pytest
 import xarray
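The remaining hunks only clean up imports in the UDF test modules. For context, the XarrayDataCube class they exercise is a thin wrapper around an xarray.DataArray; a minimal sketch of that wrapper (array shape and labels are arbitrary here, not taken from the diff):

    import numpy
    import xarray

    from openeo.udf import XarrayDataCube

    # Wrap a small labeled array and read it back out.
    array = xarray.DataArray(
        numpy.zeros((2, 3)),
        dims=["x", "y"],
        coords={"x": [0, 1], "y": [10, 20, 30]},
    )
    cube = XarrayDataCube(array)
    assert cube.get_array().shape == (2, 3)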