From c4063ab8fbfeb6d5b781121069a6131931a27105 Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Tue, 12 Dec 2023 13:21:50 +1100 Subject: [PATCH 01/11] Timezone handling cleanup. --- datacube_ows/data.py | 4 +--- datacube_ows/utils.py | 12 ++++-------- 2 files changed, 5 insertions(+), 11 deletions(-) diff --git a/datacube_ows/data.py b/datacube_ows/data.py index 407985b7..f7e6bc43 100644 --- a/datacube_ows/data.py +++ b/datacube_ows/data.py @@ -859,15 +859,13 @@ def feature_info(args): pt_native = None for d in all_time_datasets.coords["time"].values: dt_datasets = all_time_datasets.sel(time=d) - dt = datetime.utcfromtimestamp(d.astype(int) * 1e-9) - if params.product.time_resolution.is_solar(): - dt = solar_date(dt, tz) for ds in dt_datasets.values.item(): if pt_native is None: pt_native = geo_point.to_crs(ds.crs) elif pt_native.crs != ds.crs: pt_native = geo_point.to_crs(ds.crs) if ds.extent and ds.extent.contains(pt_native): + dt = stacker.group_by.group_by_func(ds) if params.product.time_resolution.is_subday(): feature_json["data_available_for_dates"].append(dt.isoformat()) else: diff --git a/datacube_ows/utils.py b/datacube_ows/utils.py index f8382987..eb7e39c4 100644 --- a/datacube_ows/utils.py +++ b/datacube_ows/utils.py @@ -87,13 +87,6 @@ def group_by_begin_datetime(pnames: Optional[List[str]] = None, ) -def group_by_subday() -> "datacube.api.query.GroupBy": - """ - Returns an ODC GroupBy object, suitable for sub-day level data - - :return: - """ - def group_by_solar(pnames: Optional[List[str]] = None) -> "datacube.api.query.GroupBy": from datacube.api.query import GroupBy, solar_day base_sort_key = lambda ds: ds.time.begin @@ -105,9 +98,12 @@ def group_by_solar(pnames: Optional[List[str]] = None) -> "datacube.api.query.Gr sort_key = lambda ds: (index.get(ds.type.name), base_sort_key(ds)) else: sort_key = base_sort_key + # Wrap solar_day so we consistently get a datetime. + solar_day_py = lambda x: datetime.utcfromtimestamp(solar_day(x).astype(int) * 1e-9) + # dt = datetime.utcfromtimestamp(d.astype(int) * 1e-9) return GroupBy( dimension='time', - group_by_func=solar_day, + group_by_func=solar_day_py, units='seconds since 1970-01-01 00:00:00', sort_key=sort_key ) From dbd514c4e4b3acc7c9d85ca89efff7d9fc8d2969 Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Tue, 12 Dec 2023 14:15:53 +1100 Subject: [PATCH 02/11] Oops, read the std library docs more closely next time. --- datacube_ows/utils.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/datacube_ows/utils.py b/datacube_ows/utils.py index eb7e39c4..192efb61 100644 --- a/datacube_ows/utils.py +++ b/datacube_ows/utils.py @@ -99,8 +99,7 @@ def group_by_solar(pnames: Optional[List[str]] = None) -> "datacube.api.query.Gr else: sort_key = base_sort_key # Wrap solar_day so we consistently get a datetime. - solar_day_py = lambda x: datetime.utcfromtimestamp(solar_day(x).astype(int) * 1e-9) - # dt = datetime.utcfromtimestamp(d.astype(int) * 1e-9) + solar_day_py = lambda x: datetime.datetime.fromtimestamp(solar_day(x).astype(int) * 1e-9, tz=datetime.timezone.utc) return GroupBy( dimension='time', group_by_func=solar_day_py, From a7a3f5d99daac12b65bcbc56c0236c3a793d1886 Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Tue, 12 Dec 2023 14:42:56 +1100 Subject: [PATCH 03/11] What is pylint's problem here? 
--- datacube_ows/utils.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/datacube_ows/utils.py b/datacube_ows/utils.py index 192efb61..7e6c42d6 100644 --- a/datacube_ows/utils.py +++ b/datacube_ows/utils.py @@ -99,7 +99,10 @@ def group_by_solar(pnames: Optional[List[str]] = None) -> "datacube.api.query.Gr else: sort_key = base_sort_key # Wrap solar_day so we consistently get a datetime. - solar_day_py = lambda x: datetime.datetime.fromtimestamp(solar_day(x).astype(int) * 1e-9, tz=datetime.timezone.utc) + solar_day_py = lambda x: datetime.datetime.fromtimestamp( + solar_day(x).astype(int) * 1e-9, + tz=datetime.timezone.utc + ) return GroupBy( dimension='time', group_by_func=solar_day_py, From 3e9acd69266b225c9ccb8317651490dfafa3e90f Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Tue, 12 Dec 2023 14:49:38 +1100 Subject: [PATCH 04/11] What *IS* pylint's problem here? --- datacube_ows/utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/datacube_ows/utils.py b/datacube_ows/utils.py index 7e6c42d6..8d54572d 100644 --- a/datacube_ows/utils.py +++ b/datacube_ows/utils.py @@ -99,8 +99,9 @@ def group_by_solar(pnames: Optional[List[str]] = None) -> "datacube.api.query.Gr else: sort_key = base_sort_key # Wrap solar_day so we consistently get a datetime. + # (Don't know why I have to disable pylint for GHA check - passes fine locally.) solar_day_py = lambda x: datetime.datetime.fromtimestamp( - solar_day(x).astype(int) * 1e-9, + (solar_day(x).astype(int) * 1e-9), # pylint: disable=too-many-function-args tz=datetime.timezone.utc ) return GroupBy( From 0df2c6dcf671c7407c0733380c51c5c9f1b9ec19 Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Tue, 12 Dec 2023 15:41:48 +1100 Subject: [PATCH 05/11] I hate np.datetime64 type. --- datacube_ows/utils.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/datacube_ows/utils.py b/datacube_ows/utils.py index 8d54572d..a6182cd9 100644 --- a/datacube_ows/utils.py +++ b/datacube_ows/utils.py @@ -5,6 +5,7 @@ # SPDX-License-Identifier: Apache-2.0 import datetime import logging +import numpy as np from functools import wraps from time import monotonic from typing import Any, Callable, List, Optional, TypeVar @@ -100,10 +101,7 @@ def group_by_solar(pnames: Optional[List[str]] = None) -> "datacube.api.query.Gr sort_key = base_sort_key # Wrap solar_day so we consistently get a datetime. # (Don't know why I have to disable pylint for GHA check - passes fine locally.) 
- solar_day_py = lambda x: datetime.datetime.fromtimestamp( - (solar_day(x).astype(int) * 1e-9), # pylint: disable=too-many-function-args - tz=datetime.timezone.utc - ) + solar_day_py = lambda x: solar_day(x).tolist() return GroupBy( dimension='time', group_by_func=solar_day_py, From 9a330ad0fbea20060cde82adabab4bfc7cb4d867 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 12 Dec 2023 04:44:05 +0000 Subject: [PATCH 06/11] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- datacube_ows/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacube_ows/utils.py b/datacube_ows/utils.py index a6182cd9..4b93660e 100644 --- a/datacube_ows/utils.py +++ b/datacube_ows/utils.py @@ -5,11 +5,11 @@ # SPDX-License-Identifier: Apache-2.0 import datetime import logging -import numpy as np from functools import wraps from time import monotonic from typing import Any, Callable, List, Optional, TypeVar +import numpy as np import pytz F = TypeVar('F', bound=Callable[..., Any]) From 39f4fadfb4d8425b6034c8a593a004a1b7ab64f5 Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Tue, 12 Dec 2023 15:46:48 +1100 Subject: [PATCH 07/11] More lintage. --- datacube_ows/utils.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/datacube_ows/utils.py b/datacube_ows/utils.py index 4b93660e..499dcc6a 100644 --- a/datacube_ows/utils.py +++ b/datacube_ows/utils.py @@ -8,8 +8,6 @@ from functools import wraps from time import monotonic from typing import Any, Callable, List, Optional, TypeVar - -import numpy as np import pytz F = TypeVar('F', bound=Callable[..., Any]) From 1c6d274420a385bbbab33f911a36a2952660d554 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 12 Dec 2023 04:47:14 +0000 Subject: [PATCH 08/11] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- datacube_ows/utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/datacube_ows/utils.py b/datacube_ows/utils.py index 499dcc6a..f0ce14f1 100644 --- a/datacube_ows/utils.py +++ b/datacube_ows/utils.py @@ -8,6 +8,7 @@ from functools import wraps from time import monotonic from typing import Any, Callable, List, Optional, TypeVar + import pytz F = TypeVar('F', bound=Callable[..., Any]) From 1a177ddddbad4e620a738bbbbe0aec4b7a3aa490 Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Wed, 13 Dec 2023 13:11:52 +1100 Subject: [PATCH 09/11] Always use numpy datetimes for group bys. 
--- datacube_ows/utils.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/datacube_ows/utils.py b/datacube_ows/utils.py index f0ce14f1..be381816 100644 --- a/datacube_ows/utils.py +++ b/datacube_ows/utils.py @@ -10,6 +10,7 @@ from typing import Any, Callable, List, Optional, TypeVar import pytz +from numpy import datetime64 as npdt64 F = TypeVar('F', bound=Callable[..., Any]) @@ -65,20 +66,20 @@ def group_by_begin_datetime(pnames: Optional[List[str]] = None, else: sort_key = base_sort_key if truncate_dates: - grp_by = lambda ds: datetime.datetime( + grp_by = lambda ds: npdt64(datetime.datetime( ds.time.begin.year, ds.time.begin.month, ds.time.begin.day, - tzinfo=pytz.utc) + tzinfo=pytz.utc)) else: - grp_by = lambda ds: datetime.datetime( + grp_by = lambda ds: npdt64(datetime.datetime( ds.time.begin.year, ds.time.begin.month, ds.time.begin.day, ds.time.begin.hour, ds.time.begin.minute, ds.time.begin.second, - tzinfo=ds.time.begin.tzinfo) + tzinfo=ds.time.begin.tzinfo)) return GroupBy( dimension='time', group_by_func=grp_by, @@ -98,12 +99,9 @@ def group_by_solar(pnames: Optional[List[str]] = None) -> "datacube.api.query.Gr sort_key = lambda ds: (index.get(ds.type.name), base_sort_key(ds)) else: sort_key = base_sort_key - # Wrap solar_day so we consistently get a datetime. - # (Don't know why I have to disable pylint for GHA check - passes fine locally.) - solar_day_py = lambda x: solar_day(x).tolist() return GroupBy( dimension='time', - group_by_func=solar_day_py, + group_by_func=solar_day, units='seconds since 1970-01-01 00:00:00', sort_key=sort_key ) @@ -122,7 +120,7 @@ def group_by_mosaic(pnames: Optional[List[str]] = None) -> "datacube.api.query.G sort_key = lambda ds: (solar_day(ds), base_sort_key(ds)) return GroupBy( dimension='time', - group_by_func=lambda n: datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc), + group_by_func=lambda n: npdt64(datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc)), units='seconds since 1970-01-01 00:00:00', sort_key=sort_key ) From 83f8d024400c6f22187e9760fe316c1b7782e8ed Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Wed, 13 Dec 2023 14:07:41 +1100 Subject: [PATCH 10/11] Feature info datetime64 aware. --- datacube_ows/data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacube_ows/data.py b/datacube_ows/data.py index f7e6bc43..f42c3bd3 100644 --- a/datacube_ows/data.py +++ b/datacube_ows/data.py @@ -865,7 +865,7 @@ def feature_info(args): elif pt_native.crs != ds.crs: pt_native = geo_point.to_crs(ds.crs) if ds.extent and ds.extent.contains(pt_native): - dt = stacker.group_by.group_by_func(ds) + dt = stacker.group_by.group_by_func(ds).tolist() if params.product.time_resolution.is_subday(): feature_json["data_available_for_dates"].append(dt.isoformat()) else: From 0bea88b12553e8f2263dd037051553ed93022767 Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Wed, 13 Dec 2023 14:22:16 +1100 Subject: [PATCH 11/11] Add an explanatory comment. 
--- datacube_ows/data.py | 1 + 1 file changed, 1 insertion(+) diff --git a/datacube_ows/data.py b/datacube_ows/data.py index f42c3bd3..0e859a61 100644 --- a/datacube_ows/data.py +++ b/datacube_ows/data.py @@ -865,6 +865,7 @@ def feature_info(args): elif pt_native.crs != ds.crs: pt_native = geo_point.to_crs(ds.crs) if ds.extent and ds.extent.contains(pt_native): + # tolist() converts a numpy datetime64 to a python datatime dt = stacker.group_by.group_by_func(ds).tolist() if params.product.time_resolution.is_subday(): feature_json["data_available_for_dates"].append(dt.isoformat())
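
For reference, a minimal self-contained sketch of the numpy.datetime64 / Python datetime conversions this series revolves around. It uses only numpy and the standard library; the timestamps and variable names are illustrative, not code from datacube-ows:

import datetime

import numpy as np

# An xarray "time" coordinate value arrives as a nanosecond-precision datetime64.
d = np.datetime64("2023-12-12T02:21:50.000000000")    # dtype datetime64[ns]

# Nanoseconds since the epoch -> timezone-aware Python datetime in UTC.
# (datetime.utcfromtimestamp() returns a naive datetime and is deprecated as of
# Python 3.12, which is presumably why the series switches to
# fromtimestamp(..., tz=timezone.utc).)
ns = d.astype("int64")                                 # int64 nanoseconds since the epoch
dt = datetime.datetime.fromtimestamp(ns * 1e-9, tz=datetime.timezone.utc)
print(dt.isoformat())                                  # 2023-12-12T02:21:50+00:00

# tolist()/item() on a datetime64 returns a *different* Python type depending
# on the unit, which is the footgun behind the later .tolist() changes:
print(np.datetime64("2023-12-12", "D").tolist())           # datetime.date(2023, 12, 12)
print(np.datetime64("2023-12-12T02:21:50", "s").tolist())  # datetime.datetime(2023, 12, 12, 2, 21, 50)
print(np.datetime64("2023-12-12T02:21:50", "ns").tolist()) # plain int: 1702347710000000000

# A naive Python datetime also converts cleanly back to a datetime64
# (microsecond precision):
print(np.datetime64(datetime.datetime(1970, 1, 1)))    # 1970-01-01T00:00:00.000000

This unit-dependence is worth keeping in mind when reading the final patches, which keep the group-by functions returning numpy datetimes and convert to a Python object with .tolist() only at the point of use in feature_info().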