detect if file format available
clausmichele committed Dec 20, 2023
1 parent f55ddf4 commit 28d0363
Showing 2 changed files with 101 additions and 17 deletions.
18 changes: 15 additions & 3 deletions src/openeo_test_suite/tests/workflows/L1/test_load_save.py
@@ -1,15 +1,18 @@
import numpy as np
import pytest
import rioxarray
import xarray as xr

# import openeo_processes_dask


def test_load_save_netcdf(
netcdf_not_supported,
cube_red_nir,
collection_dims,
tmp_path,
):
if netcdf_not_supported:
pytest.skip("NetCDF not supported as output file format!")

filename = tmp_path / "test_load_save_netcdf.nc"
b_dim = collection_dims["b_dim"]
x_dim = collection_dims["x_dim"]
@@ -32,11 +35,15 @@ def test_load_save_netcdf(


def test_load_save_10x10_netcdf(
netcdf_not_supported,
cube_red_10x10,
collection_dims,
tmp_path,
bounding_box_32632_10x10,
):
if netcdf_not_supported:
pytest.skip("NetCDF not supported as output file format!")

filename = tmp_path / "test_load_save_10x10_netcdf.nc"
b_dim = collection_dims["b_dim"]
x_dim = collection_dims["x_dim"]
@@ -66,7 +73,12 @@ def test_load_save_10x10_netcdf(

# The next test will fail if the back-end can only store 3D (x, y, bands) cubes as GeoTIFF
# In this test, only a single acquisition in time should be loaded
def test_load_save_geotiff(cube_one_day_red, tmp_path):


def test_load_save_geotiff(geotiff_not_supported, cube_one_day_red, tmp_path):
if geotiff_not_supported:
pytest.skip("GeoTIFF not supported as output file format!")

filename = tmp_path / "test_load_save_geotiff.tiff"
cube_one_day_red.download(filename)

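A possible alternative to the per-test guard above (not what this commit implements): calling pytest.skip() inside a fixture skips every test that requests it, so the boolean fixture plus explicit if-check could be folded into a single "require" fixture. The fixture name below is hypothetical; connection and list_file_formats() are the same objects used in conftest.py.

import pytest


@pytest.fixture
def require_netcdf_output(connection):
    # Hypothetical variant: skip the requesting test directly from the fixture
    # when NetCDF is not offered as an output file format.
    output_formats = [x.lower() for x in dict(connection.list_file_formats()["output"])]
    if not ({"netcdf", "nc"} & set(output_formats)):
        pytest.skip("NetCDF not supported as output file format!")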
100 changes: 86 additions & 14 deletions src/openeo_test_suite/tests/workflows/conftest.py
@@ -50,8 +50,21 @@ def cube_one_day_red(
"bands": ["B04"],
}
if "http" in s2_collection:
# check whether the processes are available
processes = ["load_stac", "save_result"]
for id in processes:
try:
connection.describe_process(id)
except:
pytest.skip("Process {} not supported by the backend".format(id))
cube = connection.load_stac(s2_collection, **params)
else:
processes = ["load_collection", "save_result"]
for id in processes:
try:
connection.describe_process(id)
except:
pytest.skip("Process {} not supported by the backend".format(id))
cube = connection.load_collection(s2_collection, **params)
return cube

@@ -69,8 +82,20 @@ def cube_one_day_red_nir(
"bands": ["B04", "B08"],
}
if "http" in s2_collection:
processes = ["load_stac", "save_result"]
for id in processes:
try:
connection.describe_process(id)
except:
pytest.skip("Process {} not supported by the backend".format(id))
cube = connection.load_stac(s2_collection, **params)
else:
processes = ["load_collection", "save_result"]
for id in processes:
try:
connection.describe_process(id)
except:
pytest.skip("Process {} not supported by the backend".format(id))
cube = connection.load_collection(s2_collection, **params)
return cube

@@ -88,8 +113,20 @@ def cube_red_nir(
"bands": ["B04", "B08"],
}
if "http" in s2_collection:
processes = ["load_stac", "save_result"]
for id in processes:
try:
connection.describe_process(id)
except:
pytest.skip("Process {} not supported by the backend".format(id))
cube = connection.load_stac(s2_collection, **params)
else:
processes = ["load_collection", "save_result"]
for id in processes:
try:
connection.describe_process(id)
except:
pytest.skip("Process {} not supported by the backend".format(id))
cube = connection.load_collection(s2_collection, **params)
return cube

@@ -107,25 +144,37 @@ def cube_red_10x10(
"bands": ["B04"],
}
if "http" in s2_collection:
processes = ["load_stac", "save_result"]
for id in processes:
try:
connection.describe_process(id)
except:
pytest.skip("Process {} not supported by the backend".format(id))
cube = connection.load_stac(s2_collection, **params)
else:
processes = ["load_collection", "save_result"]
for id in processes:
try:
connection.describe_process(id)
except:
pytest.skip("Process {} not supported by the backend".format(id))
cube = connection.load_collection(s2_collection, **params)
return cube
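The availability check introduced here is repeated verbatim in each of the cube fixtures above; a small helper along the following lines (hypothetical, not part of this commit, using only the describe_process and pytest.skip calls already present in the diff) could factor it out:

import pytest


def skip_if_processes_missing(connection, process_ids):
    # Skip the requesting test when the back-end does not expose one of the
    # required processes.
    for pid in process_ids:
        try:
            connection.describe_process(pid)
        except Exception:
            pytest.skip("Process {} not supported by the backend".format(pid))

Each fixture would then reduce the duplicated block to a single call such as skip_if_processes_missing(connection, ["load_stac", "save_result"]).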


@pytest.fixture
def cube_full_extent(
connection,
temporal_interval,
s2_collection,
) -> dict:
if "http" in s2_collection:
cube = connection.load_stac(s2_collection, temporal_extent=temporal_interval)
else:
# Maybe not the best idea to load a full openEO collection?
# It would work fine if the STAC sample collection is replicated
return None
return cube
# @pytest.fixture
# def cube_full_extent(
# connection,
# temporal_interval,
# s2_collection,
# ) -> dict:
# if "http" in s2_collection:
# cube = connection.load_stac(s2_collection, temporal_extent=temporal_interval)
# else:
# # Maybe not the best idea to load a full openEO collection?
# # It would work fine if the STAC sample collection is replicated
# return None
# return cube


@pytest.fixture
@@ -166,7 +215,6 @@ def temporal_interval_one_day():
return ["2022-06-01", "2022-06-03"]


# TODO: the dimension names are back-end specific, even though they should be the ones from the STAC metadata
@pytest.fixture
def collection_dims(
connection,
@@ -198,3 +246,27 @@ def collection_dims(
if stac_dict["cube:dimensions"][dim]["axis"] == "z":
collection_dims["z_dim"] = dim
return collection_dims
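Only the tail of collection_dims is visible in this hunk. In outline, the fixture maps STAC datacube-style cube:dimensions metadata to back-end dimension names; the following is a condensed sketch of that idea (assuming stac_dict holds the collection metadata, as in the visible lines), not the literal fixture body:

collection_dims = {}
for dim, meta in stac_dict["cube:dimensions"].items():
    if meta.get("axis") == "x":
        collection_dims["x_dim"] = dim
    elif meta.get("axis") == "y":
        collection_dims["y_dim"] = dim
    elif meta.get("axis") == "z":
        collection_dims["z_dim"] = dim
    elif meta.get("type") == "temporal":
        collection_dims["t_dim"] = dim
    elif meta.get("type") == "bands":
        collection_dims["b_dim"] = dim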


@pytest.fixture
def geotiff_not_supported(connection):
output_file_formats = [
x.lower() for x in dict(connection.list_file_formats()["output"])
]
geotiff_not_available = (
False
if len(set(["geotiff", "gtiff", "tiff", "tif"]) & set(output_file_formats)) > 0
else True
)
return geotiff_not_available


@pytest.fixture
def netcdf_not_supported(connection):
output_file_formats = [
x.lower() for x in dict(connection.list_file_formats()["output"])
]
netcdf_not_available = (
False if len(set(["nc", "netcdf"]) & set(output_file_formats)) > 0 else True
)
return netcdf_not_available
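Both new fixtures rely on connection.list_file_formats(), which returns a mapping with "input" and "output" sections keyed by format name, so the detection reduces to a case-insensitive membership test. A condensed sketch (assuming a valid openEO connection object, as elsewhere in this conftest):

formats = connection.list_file_formats()  # {"input": {...}, "output": {...}}
output_formats = {name.lower() for name in formats["output"]}
netcdf_supported = bool({"netcdf", "nc"} & output_formats)
geotiff_supported = bool({"geotiff", "gtiff", "tiff", "tif"} & output_formats)

A test then requests the corresponding fixture and skips itself when the value is True, as test_load_save_netcdf and test_load_save_geotiff do above.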
