test(cleanup): remove dead xfail_version clutter (#9944)
Remove some test decorator clutter that is no longer required.
cpcloud authored Aug 28, 2024
1 parent ae47736 commit d7b77bf
Showing 8 changed files with 6 additions and 39 deletions.
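
For context, every decorator deleted in this commit follows the pattern sketched below. xfail_version is a custom marker registered by ibis's test-suite conftest, not a built-in pytest marker; based on the usages in the hunks that follow, it maps backend names to version specifiers and marks a test as an expected failure when the installed backend version matches one of the specs, optionally constrained by raises= and documented with reason=. A minimal illustrative sketch (the backend name and version spec here are placeholders, not taken from this commit):

import pytest

# Illustrative only: the xfail_version marker is defined inside ibis's own
# test suite, so outside that suite pytest would treat it as an unknown marker.
@pytest.mark.xfail_version(
    somebackend=["somebackend<1.0"],  # xfail when this version spec matches
    raises=Exception,                 # expected failure type (optional)
    reason="known bug in old backend releases",
)
def test_example():
    ...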
4 changes: 0 additions & 4 deletions ibis/backends/datafusion/tests/test_select.py
@@ -7,10 +7,6 @@
 pytest.importorskip("datafusion")


-@pytest.mark.xfail_version(
-    datafusion=["datafusion==28.0.0"],
-    reason="datafusion panics with with the float_col * 2 filter",
-)
 def test_where_multiple_conditions(alltypes, alltypes_df):
     expr = alltypes.filter(
         [
15 changes: 3 additions & 12 deletions ibis/backends/tests/test_aggregation.py
@@ -126,20 +126,14 @@ def mean_udf(s):
 ]


-def make_argidx_params(marks, grouped=False):
+def make_argidx_params(marks):
     marks = [pytest.mark.notyet(marks, raises=com.OperationNotDefinedError)]
     return [
         param(
             lambda t: t.timestamp_col.argmin(t.id),
             lambda s: s.timestamp_col.iloc[s.id.argmin()],
             id="argmin",
-            marks=marks
-            + [
-                pytest.mark.xfail_version(
-                    polars=["polars>=0.19.12,<1"], raises=BaseException
-                )
-            ]
-            * grouped,
+            marks=marks,
         ),
         param(
             lambda t: t.double_col.argmax(t.id),
@@ -167,7 +161,7 @@ def test_aggregate(backend, alltypes, df, result_fn, expected_fn):

 @pytest.mark.parametrize(
     ("result_fn", "expected_fn"),
-    aggregate_test_params + make_argidx_params(argidx_not_grouped_marks, grouped=True),
+    aggregate_test_params + make_argidx_params(argidx_not_grouped_marks),
 )
 def test_aggregate_grouped(backend, alltypes, df, result_fn, expected_fn):
     grouping_key_col = "bigint_col"
@@ -1596,9 +1590,6 @@ def test_agg_sort(alltypes):
     query.order_by(alltypes.year)


-@pytest.mark.xfail_version(
-    polars=["polars==0.14.31"], reason="projection of scalars is broken"
-)
 def test_filter(backend, alltypes, df):
     expr = (
         alltypes[_.string_col == "1"]
6 changes: 1 addition & 5 deletions ibis/backends/tests/test_join.py
@@ -58,11 +58,7 @@ def check_eq(left, right, how, **kwargs):
             # TODO: mysql will likely never support full outer join
             # syntax, but we might be able to work around that using
             # LEFT JOIN UNION RIGHT JOIN
-            marks=[
-                pytest.mark.notimpl(["mysql"]),
-                sqlite_right_or_full_mark,
-                pytest.mark.xfail_version(datafusion=["datafusion<31"]),
-            ],
+            marks=[pytest.mark.notimpl(["mysql"]), sqlite_right_or_full_mark],
         ),
     ],
 )
6 changes: 0 additions & 6 deletions ibis/backends/tests/test_map.py
@@ -241,9 +241,6 @@ def test_map_table(backend):


 @pytest.mark.notimpl(["pandas", "dask"])
-@pytest.mark.xfail_version(
-    duckdb=["duckdb<0.8.0"], raises=exc.UnsupportedOperationError
-)
 @mark_notimpl_risingwave_hstore
 def test_column_map_values(backend):
     table = backend.map
@@ -254,9 +251,6 @@ def test_column_map_values(backend):


 @pytest.mark.notimpl(["pandas", "dask"])
-@pytest.mark.xfail_version(
-    duckdb=["duckdb<0.8.0"], raises=exc.UnsupportedOperationError
-)
 def test_column_map_merge(backend):
     table = backend.map
     expr = table.select(
1 change: 0 additions & 1 deletion ibis/backends/tests/test_string.py
@@ -638,7 +638,6 @@ def test_string(backend, alltypes, df, result_func, expected_func):
                     "Polars does not support columnar argument Subtract(StringLength(date_string_col), 1)"
                 ),
             ),
-            pytest.mark.xfail_version(datafusion=["datafusion==35"]),
         ],
     ),
     param(
11 changes: 2 additions & 9 deletions ibis/backends/tests/test_temporal.py
@@ -20,7 +20,6 @@
 from ibis.backends.tests.errors import (
     ArrowInvalid,
     ClickHouseDatabaseError,
-    DuckDBBinderException,
     DuckDBInvalidInputException,
     ExaQueryError,
     GoogleBadRequest,
@@ -978,12 +977,6 @@ def test_timestamp_comparison_filter(backend, con, alltypes, df, func_name):
         raises=TypeError,
         reason="Invalid comparison between dtype=datetime64[ns, UTC] and datetime",
     ),
-    pytest.mark.xfail_version(
-        duckdb=["duckdb>=0.10,<0.10.2"],
-        raises=DuckDBBinderException,
-        # perhaps we should consider disallowing this in ibis as well
-        reason="DuckDB doesn't allow comparing timestamp with and without timezones starting at version 0.10",
-    ),
 ]


@@ -1674,8 +1667,8 @@ def test_extract_time_from_timestamp(con, microsecond):
     reason="BigQuery returns DateOffset arrays",
     raises=AssertionError,
 )
-@pytest.mark.xfail_version(
-    datafusion=["datafusion"],
+@pytest.mark.notyet(
+    ["datafusion"],
     raises=Exception,
     reason='This feature is not implemented: Can\'t create a scalar from array of type "Duration(Second)"',
 )
1 change: 0 additions & 1 deletion ibis/backends/tests/test_vectorized_udf.py
@@ -538,7 +538,6 @@ def test_elementwise_udf_overwrite_destruct_and_assign(udf_backend, udf_alltypes
     udf_backend.assert_frame_equal(result, expected, check_like=True)


-@pytest.mark.xfail_version(pyspark=["pyspark<3.1"])
 @pytest.mark.parametrize("method", ["destructure", "lift", "unpack"])
 def test_elementwise_udf_destructure_exact_once(udf_alltypes, method, tmp_path):
     with pytest.warns(FutureWarning, match="v9.0"):
1 change: 0 additions & 1 deletion ibis/backends/tests/test_window.py
@@ -634,7 +634,6 @@ def test_simple_ungrouped_unbound_following_window(
     raises=PsycoPg2InternalError,
     reason="Feature is not yet implemented: Window function with empty PARTITION BY is not supported yet",
 )
-@pytest.mark.xfail_version(datafusion=["datafusion==35"])
 def test_simple_ungrouped_window_with_scalar_order_by(alltypes):
     t = alltypes[alltypes.double_col < 50].order_by("id")
     w = ibis.window(rows=(0, None), order_by=ibis.null())
