Skip to content

Commit

Permalink
Timezone handling cleanup. (#982)
Browse files Browse the repository at this point in the history
* Timezone handling cleanup.

* Oops, read the std library docs more closely next time.

* What is pylint's problem here?

* What *IS* pylint's problem here?

* I hate np.datetime64 type.

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* More lintage.

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Always use numpy datetimes for group bys.

* Feature info datetime64 aware.

* Add an explanatory comment.

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
  • Loading branch information
SpacemanPaul and pre-commit-ci[bot] authored Dec 13, 2023
1 parent ba0e871 commit 19b4226
Show file tree
Hide file tree
Showing 2 changed files with 8 additions and 15 deletions.
5 changes: 2 additions & 3 deletions datacube_ows/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -859,15 +859,14 @@ def feature_info(args):
pt_native = None
for d in all_time_datasets.coords["time"].values:
dt_datasets = all_time_datasets.sel(time=d)
dt = datetime.utcfromtimestamp(d.astype(int) * 1e-9)
if params.product.time_resolution.is_solar():
dt = solar_date(dt, tz)
for ds in dt_datasets.values.item():
if pt_native is None:
pt_native = geo_point.to_crs(ds.crs)
elif pt_native.crs != ds.crs:
pt_native = geo_point.to_crs(ds.crs)
if ds.extent and ds.extent.contains(pt_native):
# tolist() converts a numpy datetime64 to a python datetime
dt = stacker.group_by.group_by_func(ds).tolist()
if params.product.time_resolution.is_subday():
feature_json["data_available_for_dates"].append(dt.isoformat())
else:
Expand Down
18 changes: 6 additions & 12 deletions datacube_ows/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from typing import Any, Callable, List, Optional, TypeVar

import pytz
from numpy import datetime64 as npdt64

F = TypeVar('F', bound=Callable[..., Any])

Expand Down Expand Up @@ -65,20 +66,20 @@ def group_by_begin_datetime(pnames: Optional[List[str]] = None,
else:
sort_key = base_sort_key
if truncate_dates:
grp_by = lambda ds: datetime.datetime(
grp_by = lambda ds: npdt64(datetime.datetime(
ds.time.begin.year,
ds.time.begin.month,
ds.time.begin.day,
tzinfo=pytz.utc)
tzinfo=pytz.utc))
else:
grp_by = lambda ds: datetime.datetime(
grp_by = lambda ds: npdt64(datetime.datetime(
ds.time.begin.year,
ds.time.begin.month,
ds.time.begin.day,
ds.time.begin.hour,
ds.time.begin.minute,
ds.time.begin.second,
tzinfo=ds.time.begin.tzinfo)
tzinfo=ds.time.begin.tzinfo))
return GroupBy(
dimension='time',
group_by_func=grp_by,
Expand All @@ -87,13 +88,6 @@ def group_by_begin_datetime(pnames: Optional[List[str]] = None,
)


def group_by_subday() -> "datacube.api.query.GroupBy":
"""
Returns an ODC GroupBy object, suitable for sub-day level data
:return:
"""

def group_by_solar(pnames: Optional[List[str]] = None) -> "datacube.api.query.GroupBy":
from datacube.api.query import GroupBy, solar_day
base_sort_key = lambda ds: ds.time.begin
Expand Down Expand Up @@ -126,7 +120,7 @@ def group_by_mosaic(pnames: Optional[List[str]] = None) -> "datacube.api.query.G
sort_key = lambda ds: (solar_day(ds), base_sort_key(ds))
return GroupBy(
dimension='time',
group_by_func=lambda n: datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc),
group_by_func=lambda n: npdt64(datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc)),
units='seconds since 1970-01-01 00:00:00',
sort_key=sort_key
)
Expand Down

0 comments on commit 19b4226

Please sign in to comment.