
Commit

Deprecate `redirect_empty_polars_concat` and `redirect_empty_numpy_concatenate` (#733)
dycw authored Sep 15, 2024
1 parent 70c6ab6 commit e523029
Showing 6 changed files with 7 additions and 63 deletions.
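For callers of these helpers, the change amounts to catching the underlying `ValueError` directly rather than the library-specific redirect errors. A minimal before/after sketch, assuming `numpy` and `polars` are installed; the commented-out lines show the API removed in this commit, and the empty-array/empty-frame fallbacks are purely illustrative:

```python
import numpy as np
import polars as pl

# Before (removed here): the context managers re-raised the ValueError as
# EmptyNumpyConcatenateError / EmptyPolarsConcatError.
#
#     with redirect_empty_numpy_concatenate():
#         arr = np.concatenate(arrays)
#     with redirect_empty_polars_concat():
#         df = pl.concat(frames)

# After: catch the underlying ValueError directly.
arrays: list[np.ndarray] = []
try:
    arr = np.concatenate(arrays)
except ValueError:  # "need at least one array to concatenate"
    arr = np.array([])

frames: list[pl.DataFrame] = []
try:
    df = pl.concat(frames)
except ValueError:  # "cannot concat empty list"
    df = pl.DataFrame()
```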
9 changes: 0 additions & 9 deletions src/tests/test_numpy.py
@@ -8,7 +8,6 @@
 from numpy import (
     arange,
     array,
-    concatenate,
     eye,
     full,
     inf,
@@ -28,7 +27,6 @@
 from utilities.numpy import (
     DEFAULT_RNG,
     AsIntError,
-    EmptyNumpyConcatenateError,
     FlatN0EmptyError,
     FlatN0MultipleError,
     NDArrayF,
@@ -94,7 +92,6 @@
     maximum,
     minimum,
     pct_change,
-    redirect_empty_numpy_concatenate,
     shift,
     shift_bool,
 )
@@ -1133,12 +1130,6 @@ def test_error(self) -> None:
         _ = pct_change(arr, n=0)


-class TestRedirectEmptyNumpyConcatenate:
-    def test_main(self) -> None:
-        with raises(EmptyNumpyConcatenateError), redirect_empty_numpy_concatenate():
-            _ = concatenate([])
-
-
 class TestShift:
     @mark.parametrize(
         ("n", "expected_v"),
9 changes: 0 additions & 9 deletions src/tests/test_polars.py
@@ -21,7 +21,6 @@
     Struct,
     Utf8,
     col,
-    concat,
     datetime_range,
     int_range,
     lit,
@@ -40,7 +39,6 @@
     DatetimeUSEastern,
     DatetimeUTC,
     DropNullStructSeriesError,
-    EmptyPolarsConcatError,
     IsNotNullStructSeriesError,
     IsNullStructSeriesError,
     SetFirstRowAsColumnsError,
@@ -64,7 +62,6 @@
     join,
     nan_sum_agg,
     nan_sum_cols,
-    redirect_empty_polars_concat,
     set_first_row_as_columns,
     struct_data_type,
     yield_struct_series_dataclasses,
@@ -631,12 +628,6 @@ def test_main(
         assert df["z"].item() == expected


-class TestRedirectEmptyPolarsConcat:
-    def test_main(self) -> None:
-        with raises(EmptyPolarsConcatError), redirect_empty_polars_concat():
-            _ = concat([])
-
-
 class TestSetFirstRowAsColumns:
     def test_empty(self) -> None:
         df = DataFrame()
2 changes: 1 addition & 1 deletion src/utilities/__init__.py
@@ -1,3 +1,3 @@
 from __future__ import annotations

-__version__ = "0.54.3"
+__version__ = "0.55.0"
18 changes: 0 additions & 18 deletions src/utilities/numpy.py
@@ -1,6 +1,5 @@
 from __future__ import annotations

-from contextlib import contextmanager
 from dataclasses import dataclass
 from functools import reduce
 from itertools import repeat
@@ -39,7 +38,6 @@
 from numpy.typing import NDArray
 from typing_extensions import override

-from utilities.errors import redirect_error
 from utilities.iterables import is_iterable_not_str

 if TYPE_CHECKING:
@@ -777,20 +775,6 @@ def __str__(self) -> str:
         return "Shift must be non-zero"


-@contextmanager
-def redirect_empty_numpy_concatenate() -> Iterator[None]:
-    """Redirect to the `EmptyNumpyConcatenateError`."""
-    with redirect_error(
-        ValueError,
-        EmptyNumpyConcatenateError,
-        match="need at least one array to concatenate",
-    ):
-        yield
-
-
-class EmptyNumpyConcatenateError(Exception): ...
-
-
 def shift(array: NDArrayF | NDArrayI, /, *, n: int = 1, axis: int = -1) -> NDArrayF:
     """Shift the elements of an array."""
     if n == 0:
@@ -821,7 +805,6 @@ def shift_bool(
 __all__ = [
     "DEFAULT_RNG",
     "AsIntError",
-    "EmptyNumpyConcatenateError",
     "FlatN0EmptyError",
     "FlatN0Error",
     "FlatN0MultipleError",
@@ -904,7 +887,6 @@ def shift_bool(
     "maximum",
     "minimum",
     "pct_change",
-    "redirect_empty_numpy_concatenate",
     "shift",
     "shift_bool",
 ]
16 changes: 1 addition & 15 deletions src/utilities/polars.py
@@ -3,7 +3,7 @@
 import datetime as dt
 from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence
 from collections.abc import Set as AbstractSet
-from contextlib import contextmanager, suppress
+from contextlib import suppress
 from dataclasses import dataclass
 from datetime import timezone
 from enum import Enum
@@ -592,18 +592,6 @@ def func(x: Expr, y: Expr, /) -> Expr:
     return reduce(func, all_exprs)


-@contextmanager
-def redirect_empty_polars_concat() -> Iterator[None]:
-    """Redirect to the `EmptyPolarsConcatError`."""
-    with redirect_error(
-        ValueError, EmptyPolarsConcatError, match="cannot concat empty list"
-    ):
-        yield
-
-
-class EmptyPolarsConcatError(Exception): ...
-
-
 def set_first_row_as_columns(df: DataFrame, /) -> DataFrame:
     """Set the first row of a DataFrame as its columns."""
     with redirect_error(OutOfBoundsError, SetFirstRowAsColumnsError(f"{df=}")):
@@ -792,7 +780,6 @@ def zoned_datetime(
     "DatetimeUSEastern",
     "DatetimeUTC",
     "DropNullStructSeriesError",
-    "EmptyPolarsConcatError",
     "IsNullStructSeriesError",
     "SetFirstRowAsColumnsError",
     "YieldStructSeriesElementsError",
@@ -809,7 +796,6 @@
     "join",
     "nan_sum_agg",
     "nan_sum_cols",
-    "redirect_empty_polars_concat",
     "set_first_row_as_columns",
     "struct_data_type",
     "yield_struct_series_dataclasses",
16 changes: 5 additions & 11 deletions src/utilities/sqlalchemy_polars.py
@@ -41,11 +41,7 @@
     chunked,
     one,
 )
-from utilities.polars import (
-    EmptyPolarsConcatError,
-    redirect_empty_polars_concat,
-    zoned_datetime,
-)
+from utilities.polars import zoned_datetime
 from utilities.sqlalchemy import (
     CHUNK_SIZE_FRAC,
     EngineOrConnection,
@@ -325,9 +321,8 @@ def select_to_dataframe(
         for sel in sels
     )
     try:
-        with redirect_empty_polars_concat():
-            return concat(dfs)
-    except EmptyPolarsConcatError:
+        return concat(dfs)
+    except ValueError:
         return DataFrame(schema=prepared.schema)
     dfs = (
         select_to_dataframe(
@@ -448,9 +443,8 @@ async def select_to_dataframe_async(
         for sel in sels
     ]
     try:
-        with redirect_empty_polars_concat():
-            return concat(dfs)
-    except EmptyPolarsConcatError:
+        return concat(dfs)
+    except ValueError:
         return DataFrame(schema=prepared.schema)

     async def yield_dfs() -> AsyncIterator[DataFrame]:
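Downstream, `select_to_dataframe` keeps its existing behaviour of falling back to an empty frame with the expected schema when there is nothing to concatenate; only the exception type it catches changes. A standalone sketch of that fallback pattern, where the schema is a hypothetical stand-in for `prepared.schema`:

```python
import polars as pl

# Hypothetical stand-in for the prepared statement's schema.
schema = {"id": pl.Int64, "value": pl.Float64}

dfs: list[pl.DataFrame] = []  # e.g. the query produced no partial frames

try:
    df = pl.concat(dfs)
except ValueError:  # raised by polars for an empty input list
    df = pl.DataFrame(schema=schema)

print(df.schema)  # columns are preserved even though the frame is empty
```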
