
Merge pull request #329 from ocefpaf/py311
Support Python 3.10 and up
ocefpaf authored Feb 27, 2024
2 parents 21e3f3c + a185941 commit 219d8fb
Showing 8 changed files with 46 additions and 52 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/tests.yml
@@ -10,7 +10,7 @@ jobs:
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        python-version: ["3.8", "3.9", "3.10", "3.11"]
+        python-version: ["3.10", "3.11", "3.12"]
         os: [windows-latest, ubuntu-latest, macos-latest]
       fail-fast: false

8 changes: 3 additions & 5 deletions erddapy/core/griddap.py
@@ -1,21 +1,19 @@
 """Griddap handling."""

 import functools
-from typing import Dict, List, Optional, Tuple, Union

 import pandas as pd

 from erddapy.core.url import urlopen

-ListLike = Union[List[str], Tuple[str]]
-OptionalStr = Optional[str]
+ListLike = list[str] | tuple[str]


 @functools.lru_cache(maxsize=128)
 def _griddap_get_constraints(
     dataset_url: str,
     step: int,
-) -> Tuple[Dict, List, List]:
+) -> tuple[dict, list, list]:
     """
     Fetch metadata of griddap dataset and set initial constraints.
@@ -64,7 +62,7 @@ def _griddap_get_constraints(
     return constraints_dict, dim_names, variable_names


-def _griddap_check_constraints(user_constraints: Dict, original_constraints: Dict):
+def _griddap_check_constraints(user_constraints: dict, original_constraints: dict):
     """Check that constraints changed by user match those expected by dataset."""
     if user_constraints.keys() != original_constraints.keys():
         raise ValueError(
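The pattern in this file repeats across the whole PR: builtin generics from PEP 585 (`tuple[dict, list, list]`) replace their `typing` counterparts, and PEP 604 unions (`X | Y`) replace `Union` and `Optional`. Note that `ListLike = list[str] | tuple[str]` is evaluated at import time, not just inside an annotation, so the `|` operator between types needs Python 3.10 at runtime; that is precisely what forces the new version floor. A minimal before/after sketch (illustrative only, not code from this repository):

```python
# Pre-3.10 spelling: every container and union comes from typing.
from typing import Dict, List, Optional, Tuple, Union

OldListLike = Union[List[str], Tuple[str]]
OldOptionalStr = Optional[str]

# 3.10+ spelling: builtin generics (PEP 585) + union operator (PEP 604).
# This alias runs at import time, so `|` between types requires 3.10.
ListLike = list[str] | tuple[str]
OptionalStr = str | None


def label_all(items: ListLike, prefix: OptionalStr = None) -> dict[str, int]:
    """Toy function using the new-style aliases in its annotations."""
    prefix = prefix or ""
    return {f"{prefix}{item}": n for n, item in enumerate(items)}
```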
16 changes: 8 additions & 8 deletions erddapy/core/interfaces.py
@@ -5,7 +5,7 @@
 XArray, Iris, etc. objects.
 """

-from typing import TYPE_CHECKING, Dict, Optional
+from typing import TYPE_CHECKING

 import pandas as pd

@@ -19,8 +19,8 @@

 def to_pandas(
     url: str,
-    requests_kwargs: Optional[Dict] = None,
-    pandas_kwargs: Optional[Dict] = None,
+    requests_kwargs: dict | None = None,
+    pandas_kwargs: dict | None = None,
 ) -> "pd.DataFrame":
     """
     Convert a URL to Pandas DataFrame.
@@ -39,7 +39,7 @@ def to_pandas(
 def to_ncCF(
     url: str,
     protocol: str = None,
-    requests_kwargs: Optional[Dict] = None,
+    requests_kwargs: dict | None = None,
 ) -> "Dataset":
     """
     Convert a URL to a netCDF4 Dataset.
@@ -58,8 +58,8 @@ def to_ncCF(
 def to_xarray(
     url: str,
     response="opendap",
-    requests_kwargs: Optional[Dict] = None,
-    xarray_kwargs: Optional[Dict] = None,
+    requests_kwargs: dict | None = None,
+    xarray_kwargs: dict | None = None,
 ) -> "xr.Dataset":
     """
     Convert a URL to an xarray dataset.
@@ -83,8 +83,8 @@ def to_xarray(

 def to_iris(
     url: str,
-    requests_kwargs: Optional[Dict] = None,
-    iris_kwargs: Optional[Dict] = None,
+    requests_kwargs: dict | None = None,
+    iris_kwargs: dict | None = None,
 ):
     """
     Convert a URL to an iris CubeList.
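Only the hints change here; the helpers behave as before. A minimal usage sketch of `to_pandas` under the assumptions flagged in the comments (the URL is a placeholder, and `skiprows=(1,)` assumes the units row ERDDAP typically puts in CSV responses):

```python
from erddapy.core.interfaces import to_pandas

# Placeholder tabledap CSV endpoint; any ERDDAP .csv URL follows the same shape.
url = "https://example.com/erddap/tabledap/some_dataset_id.csv"

df = to_pandas(
    url,
    requests_kwargs={"timeout": 30},   # forwarded to the HTTP request
    pandas_kwargs={"skiprows": (1,)},  # skip the units row ERDDAP appends
)
print(df.head())
```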
5 changes: 3 additions & 2 deletions erddapy/core/netcdf.py
@@ -1,15 +1,16 @@
 """Handles netCDF responses."""

 import platform
+from collections.abc import Generator
 from contextlib import contextmanager
 from pathlib import Path
-from typing import BinaryIO, Dict, Generator, Optional
+from typing import BinaryIO
 from urllib.parse import urlparse

 from erddapy.core.url import urlopen


-def _nc_dataset(url, requests_kwargs: Optional[Dict] = None):
+def _nc_dataset(url, requests_kwargs: dict | None = None):
     """Return a netCDF4-python Dataset from memory and fallbacks to disk if that fails."""
     from netCDF4 import Dataset

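`_nc_dataset` hands the downloaded bytes to netCDF4 in memory and falls back to a local file only if that fails (for example, a libnetcdf build without in-memory support; the `platform` import serves related platform-specific handling). A sketch of the general pattern, not erddapy's exact implementation, with a hypothetical helper name:

```python
import tempfile

from netCDF4 import Dataset


def open_nc(name: str, data: bytes) -> Dataset:
    """Open a netCDF payload from memory, falling back to a temp file on disk."""
    try:
        # netCDF4-python can open an in-memory buffer via the `memory` kwarg.
        return Dataset(name, memory=data)
    except OSError:
        with tempfile.NamedTemporaryFile(suffix=".nc", delete=False) as tmp:
            tmp.write(data)
        return Dataset(tmp.name)
```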
26 changes: 13 additions & 13 deletions erddapy/core/url.py
@@ -5,15 +5,15 @@
 import io
 from collections import OrderedDict
 from datetime import datetime
-from typing import BinaryIO, Dict, List, Optional, Tuple, Union
+from typing import BinaryIO
 from urllib import parse

 import httpx
 import pytz
 from pandas import to_datetime

-ListLike = Union[List[str], Tuple[str]]
-OptionalStr = Optional[str]
+ListLike = list[str] | tuple[str]
+OptionalStr = str | None


 def _sort_url(url):
@@ -39,7 +39,7 @@ def _sort_url(url):


 @functools.lru_cache(maxsize=128)
-def _urlopen(url: str, auth: Optional[tuple] = None, **kwargs: Dict) -> BinaryIO:
+def _urlopen(url: str, auth: tuple | None = None, **kwargs: dict) -> BinaryIO:
     if "timeout" not in kwargs.keys():
         kwargs["timeout"] = 60
     response = httpx.get(url, follow_redirects=True, auth=auth, **kwargs)
@@ -52,7 +52,7 @@ def _urlopen(url: str, auth: Optional[tuple] = None, **kwargs: Dict) -> BinaryIO

 def urlopen(
     url: str,
-    requests_kwargs: Optional[Dict] = None,
+    requests_kwargs: dict | None = None,
 ) -> BinaryIO:
     """Thin wrapper around httpx get content.
@@ -68,7 +68,7 @@


 @functools.lru_cache(maxsize=128)
-def check_url_response(url: str, **kwargs: Dict) -> str:
+def check_url_response(url: str, **kwargs: dict) -> str:
     """
     Shortcut to `raise_for_status` instead of fetching the whole content.
@@ -81,7 +81,7 @@ def check_url_response(url: str, **kwargs: Dict) -> str:
     return url


-def _distinct(url: str, distinct: Optional[bool] = False) -> str:
+def _distinct(url: str, distinct: bool | None = False) -> str:
     """
     Sort all of the rows in the results table.
@@ -114,7 +114,7 @@ def _multi_urlopen(url: str) -> BinaryIO:
     return data


-def _quote_string_constraints(kwargs: Dict) -> Dict:
+def _quote_string_constraints(kwargs: dict) -> dict:
     """
     Quote constraints of String variables.
@@ -126,7 +126,7 @@
     }


-def _format_constraints_url(kwargs: Dict) -> str:
+def _format_constraints_url(kwargs: dict) -> str:
     """Join the constraint variables with separator '&' to add to the download link."""
     return "".join([f"&{k}{v}" for k, v in kwargs.items()])

@@ -138,7 +138,7 @@ def _check_substrings(constraint):


 def parse_dates(
-    date_time: Union[datetime, str],
+    date_time: datetime | str,
     dayfirst=False,
     yearfirst=False,
 ) -> float:
@@ -170,7 +170,7 @@ def parse_dates(
 def get_search_url(
     server: str,
     response: str = "html",
-    search_for: Optional[str] = None,
+    search_for: str | None = None,
     protocol: str = "tabledap",
     items_per_page: int = 1_000_000,
     page: int = 1,
@@ -362,8 +362,8 @@ def get_download_url(
     server: str,
     dataset_id: OptionalStr = None,
     protocol: OptionalStr = None,
-    variables: Optional[ListLike] = None,
-    dim_names: Optional[ListLike] = None,
+    variables: ListLike | None = None,
+    dim_names: ListLike | None = None,
     response=None,
     constraints=None,
     distinct=False,
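The touched helpers accept the same inputs as before. A hedged sketch exercising two of the re-annotated signatures (the URL is a placeholder):

```python
from datetime import datetime

from erddapy.core.url import parse_dates, urlopen

# `date_time` is now `datetime | str`; both forms yield the
# seconds-since-1970 float that ERDDAP constraints expect.
print(parse_dates("2024-02-27T00:00:00Z"))
print(parse_dates(datetime(2024, 2, 27)))

# `requests_kwargs` is `dict | None`; leaving it out keeps the 60 s default
# timeout that `_urlopen` injects.
data = urlopen(
    "https://example.com/erddap/tabledap/allDatasets.csv",
    requests_kwargs={"timeout": 30},
)
print(data.read(80))  # urlopen returns an in-memory binary buffer
```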
31 changes: 15 additions & 16 deletions erddapy/erddapy.py
@@ -3,7 +3,6 @@
 import functools
 import hashlib
 from pathlib import Path
-from typing import Dict, List, Optional, Tuple, Union
 from urllib.request import urlretrieve

 import pandas as pd
@@ -41,8 +40,8 @@
     "ERDDAP",
 ]

-ListLike = Union[List[str], Tuple[str]]
-OptionalStr = Optional[str]
+ListLike = list[str] | tuple[str]
+OptionalStr = str | None


 class ERDDAP:
@@ -127,21 +126,21 @@ def __init__(
         self.response = response

         # Initialized only via properties.
-        self.constraints: Optional[Dict] = None
-        self.server_functions: Optional[Dict] = None
+        self.constraints: dict | None = None
+        self.server_functions: dict | None = None
         self.dataset_id: OptionalStr = None
-        self.requests_kwargs: Dict = {}
-        self.auth: Optional[tuple] = None
-        self.variables: Optional[ListLike] = None
-        self.dim_names: Optional[ListLike] = None
+        self.requests_kwargs: dict = {}
+        self.auth: tuple | None = None
+        self.variables: ListLike | None = None
+        self.dim_names: ListLike | None = None

         self._get_variables = functools.lru_cache(maxsize=128)(
             self._get_variables_uncached,
         )
         # Caching the last `dataset_id` and `variables` list request for quicker multiple accesses,
         # will be overridden when requesting a new `dataset_id`.
         self._dataset_id: OptionalStr = None
-        self._variables: Dict = {}
+        self._variables: dict = {}

     def griddap_initialize(
         self,
@@ -278,8 +277,8 @@ def get_download_url(
         self,
         dataset_id: OptionalStr = None,
         protocol: OptionalStr = None,
-        variables: Optional[ListLike] = None,
-        dim_names: Optional[ListLike] = None,
+        variables: ListLike | None = None,
+        dim_names: ListLike | None = None,
         response=None,
         constraints=None,
         distinct=False,
@@ -345,7 +344,7 @@ def get_download_url(

     def to_pandas(
         self,
-        requests_kwargs: Optional[Dict] = None,
+        requests_kwargs: dict | None = None,
         **kw,
     ) -> "pd.DataFrame":
         """Save a data request to a pandas.DataFrame.
@@ -374,7 +373,7 @@ def to_ncCF(self, protocol: str = None, **kw):

     def to_xarray(
         self,
-        requests_kwargs: Optional[Dict] = None,
+        requests_kwargs: dict | None = None,
         **kw,
     ):
         """Load the data request into a xarray.Dataset.
@@ -405,7 +404,7 @@ def to_iris(self, **kw):
         url = self.get_download_url(response=response, distinct=distinct)
         return to_iris(url, iris_kwargs=dict(**kw))

-    def _get_variables_uncached(self, dataset_id: OptionalStr = None) -> Dict:
+    def _get_variables_uncached(self, dataset_id: OptionalStr = None) -> dict:
         if not dataset_id:
             dataset_id = self.dataset_id

@@ -427,7 +426,7 @@ def _get_variables_uncached(self, dataset_id: OptionalStr = None) -> Dict:
            variables.update({variable: attributes})
         return variables

-    def get_var_by_attr(self, dataset_id: OptionalStr = None, **kwargs) -> List[str]:
+    def get_var_by_attr(self, dataset_id: OptionalStr = None, **kwargs) -> list[str]:
         """
         Return a variable based on its attributes.
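For context, a typical end-to-end use of the re-annotated class, following the pattern in erddapy's documentation; the server and dataset below are illustrative examples, not part of this diff:

```python
from erddapy import ERDDAP

e = ERDDAP(
    server="https://gliders.ioos.us/erddap",  # any ERDDAP endpoint works here
    protocol="tabledap",
    response="csv",
)
e.dataset_id = "whoi_406-20160902T1700"  # example glider deployment
e.constraints = {"time>=": "2016-09-03T00:00:00Z"}
e.variables = ["longitude", "latitude", "temperature", "time"]

# Extra keywords are forwarded to pandas.read_csv.
df = e.to_pandas(index_col="time (UTC)", parse_dates=True)
print(df.head())
```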
5 changes: 2 additions & 3 deletions erddapy/multiple_server_search.py
@@ -1,7 +1,6 @@
 """Multiple Server Search."""

 import multiprocessing
-from typing import Dict

 import pandas as pd

@@ -19,7 +18,7 @@
 from erddapy.servers.servers import servers


-def _format_results(dfs: Dict[str, pd.DataFrame]) -> pd.DataFrame:
+def _format_results(dfs: dict[str, pd.DataFrame]) -> pd.DataFrame:
     """Format dictionary of results into a Pandas dataframe."""
     # we return None for bad server, so we need to filter them here
     df_all = pd.concat([list(df.values())[0] for df in dfs if df is not None])
@@ -30,7 +29,7 @@ def fetch_results(
     url: str,
     key: str,
     protocol,
-) -> Dict[str, pd.DataFrame]:
+) -> dict[str, pd.DataFrame]:
     """
     Fetch search results from multiple servers.
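`fetch_results` maps one search URL to a `{key: DataFrame}` result, and `_format_results` concatenates those per-server frames, skipping the `None` returned for a misbehaving server. A hedged single-fetch sketch (server choice and key are illustrative):

```python
from erddapy.core.url import get_search_url
from erddapy.multiple_server_search import fetch_results

server = "https://gliders.ioos.us/erddap"
url = get_search_url(server, response="csv", search_for="temperature")

# Returns {"ioos_gliders": DataFrame} on success, None on a bad server.
results = fetch_results(url, key="ioos_gliders", protocol="tabledap")
if results is not None:
    print(results["ioos_gliders"].head())
```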
5 changes: 1 addition & 4 deletions pyproject.toml
@@ -16,11 +16,9 @@ maintainers = [
     {name = "Callum Rollo", email = "[email protected]"},
     {name = "Filipe Fernandes", email = "[email protected]"},
 ]
-requires-python = ">=3.8"
+requires-python = ">=3.10"
 classifiers = [
     "Programming Language :: Python :: 3 :: Only",
-    "Programming Language :: Python :: 3.8",
-    "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
@@ -56,7 +54,6 @@ select = [
     "T20", # flake8-print
     "UP",  # upgrade
 ]
-target-version = "py38"
 line-length = 79

 [tool.ruff.per-file-ignores]
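With `requires-python = ">=3.10"` published, pip refuses to install new releases on 3.8/3.9, and ruff can infer its `target-version` from that same field, which is presumably why the explicit `target-version = "py38"` line could be dropped. A small illustration of how the specifier gates interpreter versions, using the `packaging` library that pip relies on for this check:

```python
from packaging.specifiers import SpecifierSet

requires_python = SpecifierSet(">=3.10")
for version in ("3.8", "3.9", "3.10", "3.12"):
    # `in` checks whether the version satisfies the specifier.
    print(version, version in requires_python)
# 3.8 False, 3.9 False, 3.10 True, 3.12 True
```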
