diff --git a/docs/changes/newsfragments/251.misc b/docs/changes/newsfragments/251.misc
new file mode 100644
index 000000000..c611f7f11
--- /dev/null
+++ b/docs/changes/newsfragments/251.misc
@@ -0,0 +1 @@
+Remove ``pytest-lazy-fixture`` and enable support for ``pytest >= 8.0.0`` by `Synchon Mandal`_
diff --git a/julearn/conftest.py b/julearn/conftest.py
index 8b074b789..a331f6388 100644
--- a/julearn/conftest.py
+++ b/julearn/conftest.py
@@ -131,7 +131,7 @@ def X_types_iris(request: FixtureRequest) -> Optional[Dict]:


 @fixture(params=["rf", "svm", "gauss", "ridge"], scope="function")
-def models_all_problem_types(request: FixtureRequest) -> str:
+def model(request: FixtureRequest) -> str:
     """Return different models that work with classification and regression.

     Parameters
@@ -149,7 +149,7 @@ def models_all_problem_types(request: FixtureRequest) -> str:


 @fixture(params=["regression", "classification"], scope="function")
-def all_problem_types(request: FixtureRequest) -> str:
+def problem_type(request: FixtureRequest) -> str:
     """Return different problem types.

     Parameters
@@ -160,7 +160,7 @@ def all_problem_types(request: FixtureRequest) -> str:
     Returns
     -------
     str
-        The problem type (one of {"regression", "classification"}).
+        The problem type.

     """

@@ -245,7 +245,7 @@ def get(step: str) -> Dict:
     ],
     scope="function",
 )
-def preprocessing(request: FixtureRequest) -> Union[str, List[str]]:
+def preprocess(request: FixtureRequest) -> Union[str, List[str]]:
     """Return different preprocessing steps.

     Parameters
diff --git a/julearn/models/tests/test_dynamic.py b/julearn/models/tests/test_dynamic.py
index 9393639f9..ecc90a1e0 100644
--- a/julearn/models/tests/test_dynamic.py
+++ b/julearn/models/tests/test_dynamic.py
@@ -12,7 +12,6 @@
 import pandas as pd
 import pytest
 from pytest import FixtureRequest, fixture
-from pytest_lazyfixture import lazy_fixture
 from sklearn.ensemble import RandomForestClassifier
 from sklearn.model_selection import ShuffleSplit, train_test_split

@@ -64,7 +63,7 @@
     ],
     scope="module",
 )
-def all_deslib_algorithms(request: FixtureRequest) -> str:
+def algo_name(request: FixtureRequest) -> str:
     """Return different algorithms for the iris dataset features.

     Parameters
@@ -81,10 +80,6 @@ def all_deslib_algorithms(request: FixtureRequest) -> str:
     return request.param


-@pytest.mark.parametrize(
-    "algo_name",
-    [lazy_fixture("all_deslib_algorithms")],
-)
 @pytest.mark.skip("Deslib is not compatible with new python. Waiting for PR.")
 def test_algorithms(
     df_iris: pd.DataFrame,
diff --git a/julearn/pipeline/test/test_pipeline_creator.py b/julearn/pipeline/test/test_pipeline_creator.py
index 4b7f36c7d..f0918aa17 100644
--- a/julearn/pipeline/test/test_pipeline_creator.py
+++ b/julearn/pipeline/test/test_pipeline_creator.py
@@ -5,11 +5,10 @@
 # License: AGPL

 import warnings
-from typing import Callable, Dict, List
+from typing import Callable, Dict, List, Union

 import pandas as pd
 import pytest
-from pytest_lazyfixture import lazy_fixture
 from sklearn.dummy import DummyClassifier
 from sklearn.ensemble import RandomForestClassifier
 from sklearn.model_selection import GridSearchCV, RandomizedSearchCV
@@ -23,16 +22,8 @@
 from julearn.transformers import get_transformer


-@pytest.mark.parametrize(
-    "model,preprocess,problem_type",
-    [
-        lazy_fixture(
-            ["models_all_problem_types", "preprocessing", "all_problem_types"]
-        )
-    ],
-)
 def test_construction_working(
-    model: str, preprocess: List[str], problem_type: str
+    model: str, preprocess: Union[str, List[str]], problem_type: str
 ) -> None:
     """Test that the pipeline constructions works as expected.

@@ -40,7 +31,7 @@ def test_construction_working(
     Parameters
     ----------
     model : str
         The model to test.
-    preprocess : List[str]
+    preprocess : str or list of str
         The preprocessing steps to test.
     problem_type : str
         The problem type to test.
@@ -77,19 +68,11 @@ def test_construction_working(
     assert len(preprocess) + 2 == len(pipeline.steps)


-@pytest.mark.parametrize(
-    "model,preprocess,problem_type",
-    [
-        lazy_fixture(
-            ["models_all_problem_types", "preprocessing", "all_problem_types"]
-        )
-    ],
-)
 def test_fit_and_transform_no_error(
     X_iris: pd.DataFrame,  # noqa: N803
     y_iris: pd.Series,
     model: str,
-    preprocess: List[str],
+    preprocess: Union[str, List[str]],
     problem_type: str,
 ) -> None:
     """Test that the pipeline fit and transform does not give an error.
@@ -102,7 +85,7 @@ def test_fit_and_transform_no_error(
         The iris dataset target variable.
     model : str
         The model to test.
-    preprocess : List[str]
+    preprocess : str or list of str
         The preprocessing steps to test.
     problem_type : str
         The problem type to test.
@@ -117,18 +100,10 @@ def test_fit_and_transform_no_error(
     pipeline[:-1].transform(X_iris)


-@pytest.mark.parametrize(
-    "model,preprocess,problem_type",
-    [
-        lazy_fixture(
-            ["models_all_problem_types", "preprocessing", "all_problem_types"]
-        ),
-    ],
-)
 def test_hyperparameter_tuning(
     X_types_iris: Dict[str, List[str]],  # noqa: N803
     model: str,
-    preprocess: List[str],
+    preprocess: Union[str, List[str]],
     problem_type: str,
     get_tuning_params: Callable,
     search_params: Dict[str, List],
 ) -> None:
     """Test hyperparameter tuning.
@@ -137,11 +112,11 @@ def test_hyperparameter_tuning(
     Parameters
     ----------
-    X_types_iris : Dict[str, List[str]]
+    X_types_iris : dict
         The iris dataset features types.
     model : str
         The model to test.
-    preprocess : List[str]
+    preprocess : str or list of str
         The preprocessing steps to test.
     problem_type : str
         The problem type to test.
diff --git a/tox.ini b/tox.ini
index 25bde3cb4..a17da50a4 100644
--- a/tox.ini
+++ b/tox.ini
@@ -12,13 +12,8 @@ python =
 [testenv]
 skip_install = false
 deps =
-    pytest<8.0.0
-    pytest-lazy-fixture
+    pytest
     seaborn
-    deslib
-    panel>=1.0.0b1
-    bokeh>=3.0.0
-    param
 commands =
     pytest

@@ -39,8 +34,7 @@ commands =
 [testenv:test]
 skip_install = false
 deps =
-    pytest<8.0.0
-    pytest-lazy-fixture
+    pytest
     seaborn
     deslib
     panel>=1.0.0b1
@@ -52,8 +46,7 @@ commands =
 [testenv:coverage]
 skip_install = false
 deps =
-    pytest<8.0.0
-    pytest-lazy-fixture
+    pytest
     pytest-cov
     seaborn
     deslib
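Note on the pattern applied throughout this changeset: ``pytest-lazy-fixture`` does not work on ``pytest >= 8.0.0`` (hence the old ``pytest<8.0.0`` pin in ``tox.ini``), so the parametrized fixtures are renamed to match the test argument names (``model``, ``preprocess``, ``problem_type``, ``algo_name``) and the ``lazy_fixture`` indirection is dropped; pytest then runs each test once per fixture param on its own. Below is a minimal sketch of the before/after, loosely modeled on the ``problem_type`` fixture from ``julearn/conftest.py``; the ``test_something`` function is hypothetical and only illustrates the mechanism.

```python
from pytest import FixtureRequest, fixture


# A parametrized fixture: pytest instantiates it once per param.
@fixture(params=["regression", "classification"], scope="function")
def problem_type(request: FixtureRequest) -> str:
    """Return different problem types."""
    return request.param


# Old style (pytest < 8 only), removed by this changeset:
#
#   @pytest.mark.parametrize(
#       "problem_type", [lazy_fixture("all_problem_types")]
#   )
#   def test_something(problem_type: str) -> None: ...
#
# New style: the test simply names the fixture; no plugin needed.
def test_something(problem_type: str) -> None:  # hypothetical test
    assert problem_type in ("regression", "classification")
```

With this pattern the test above is collected twice (once per param), which is exactly what the removed ``lazy_fixture`` parametrization used to achieve.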