From 67b5de3e8b274d8ff962f26ac5c6915c01aac1b3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Radoslav=20Bod=C3=B3?= Date: Fri, 2 Jul 2021 11:02:57 +0200 Subject: [PATCH 01/30] githubactions: init --- .github/workflows/tests.yml | 73 +++++++++++++++++++++++++++++++++++++ 1 file changed, 73 insertions(+) create mode 100644 .github/workflows/tests.yml diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 0000000..e7a6c99 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,73 @@ +name: Tests CI +on: + - push + - pull_request + +jobs: + tests: + name: ${{ matrix.tox }} + runs-on: ubuntu-latest + + services: + mariadb: + image: mariadb:latest + ports: + - 3306:3306 + env: + MYSQL_ALLOW_EMPTY_PASSWORD: yes + options: --health-cmd="mysqladmin ping" --health-interval=5s --health-timeout=2s --health-retries=3 + + postgres: + image: postgres + ports: + - 5432:5432 + env: + POSTGRES_USER: postgres + POSTGRES_HOST_AUTH_METHOD: trust + POSTGRES_DB: test_sqlalchemy_filters + POSTGRES_INITDB_ARGS: "--encoding=UTF8 --lc-collate=en_US.utf8 --lc-ctype=en_US.utf8" + options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 + + + strategy: + fail-fast: false + matrix: + include: + - {python: '2.7', tox: "py27-sqlalchemy1.0"} + - {python: '2.7', tox: "py27-sqlalchemy1.1"} + - {python: '2.7', tox: "py27-sqlalchemy1.2"} + - {python: '2.7', tox: "py27-sqlalchemy1.3"} + - {python: '2.7', tox: "py27-sqlalchemylatest"} + + - {python: '3.5', tox: "py35-sqlalchemy1.0"} + - {python: '3.5', tox: "py35-sqlalchemy1.1"} + - {python: '3.5', tox: "py35-sqlalchemy1.2"} + - {python: '3.5', tox: "py35-sqlalchemy1.3"} + - {python: '3.5', tox: "py35-sqlalchemylatest"} + + - {python: '3.6', tox: "py36-sqlalchemy1.0"} + - {python: '3.6', tox: "py36-sqlalchemy1.1"} + - {python: '3.6', tox: "py36-sqlalchemy1.2"} + - {python: '3.6', tox: "py36-sqlalchemy1.3"} + - {python: '3.6', tox: "py36-sqlalchemylatest"} + + - {python: '3.7', tox: "py37-sqlalchemy1.0"} + - {python: '3.7', tox: "py37-sqlalchemy1.1"} + - {python: '3.7', tox: "py37-sqlalchemy1.2"} + - {python: '3.7', tox: "py37-sqlalchemy1.3"} + - {python: '3.7', tox: "py37-sqlalchemylatest"} + + - {python: '3.8', tox: "py38-sqlalchemy1.0"} + - {python: '3.8', tox: "py38-sqlalchemy1.1"} + - {python: '3.8', tox: "py38-sqlalchemy1.2"} + - {python: '3.8', tox: "py38-sqlalchemy1.3"} + - {python: '3.8', tox: "py38-sqlalchemylatest"} + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + + - run: pip install tox + - run: tox -e ${{ matrix.tox }} From 499e669baa8b03085a0be7587f813ae9a0df60f1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Radoslav=20Bod=C3=B3?= Date: Fri, 2 Jul 2021 11:12:39 +0200 Subject: [PATCH 02/30] githubactions: handle py27 vs sqlalchemy-utils vs sqlalchemy version dependencies --- .github/workflows/tests.yml | 1 - setup.py | 3 ++- tox.ini | 5 ++++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index e7a6c99..eebf8b9 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -37,7 +37,6 @@ jobs: - {python: '2.7', tox: "py27-sqlalchemy1.1"} - {python: '2.7', tox: "py27-sqlalchemy1.2"} - {python: '2.7', tox: "py27-sqlalchemy1.3"} - - {python: '2.7', tox: "py27-sqlalchemylatest"} - {python: '3.5', tox: "py35-sqlalchemy1.0"} - {python: '3.5', tox: "py35-sqlalchemy1.1"} diff --git a/setup.py b/setup.py index 5e9c10e..1631980 100644 --- 
a/setup.py +++ b/setup.py @@ -25,7 +25,8 @@ 'dev': [ 'pytest>=4.6.9', 'coverage~=5.0.4', - 'sqlalchemy-utils~=0.36.3', + # for sqlalchemy1.4 >= 0.37 is required + 'sqlalchemy-utils>=0.36.3', 'flake8', 'restructuredtext-lint', 'Pygments', diff --git a/tox.ini b/tox.ini index 46a9424..79a7e42 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = {py27,py35,py36,py37,py38}-sqlalchemy{1.0,1.1,1.2,1.3,latest} +envlist = {py27}-sqlalchemy{1.0,1.1,1.2,1.3},{py35,py36,py37,py38}-sqlalchemy{1.0,1.1,1.2,1.3,latest} skipsdist = True [testenv] @@ -14,6 +14,9 @@ deps = # https://docs.pytest.org/en/latest/py27-py34-deprecation.html py27: pytest<5.0.0 {py35,py36,py37,py38}: pytest~=5.4.1 + # https://github.com/kvesteri/sqlalchemy-utils/blob/master/CHANGES.rst#0364-2020-04-30 + py27: sqlalchemy-utils==0.36.3 + {py35,py36,py37,py38}: sqlalchemy-utils~=0.37.8 sqlalchemy1.0: sqlalchemy>=1.0,<1.1 sqlalchemy1.1: sqlalchemy>=1.1,<1.2 sqlalchemy1.2: sqlalchemy>=1.2,<1.3 From b13f662383eb55c538a419bdda00f627565a7682 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Radoslav=20Bod=C3=B3?= Date: Fri, 2 Jul 2021 17:51:03 +0200 Subject: [PATCH 03/30] add sqlalchemy 1.4 support --- sqlalchemy_filters/models.py | 67 ++++++++++++++++++++++++++--------- test/interface/test_models.py | 8 +++-- 2 files changed, 56 insertions(+), 19 deletions(-) diff --git a/sqlalchemy_filters/models.py b/sqlalchemy_filters/models.py index 1c79516..5bfa47e 100644 --- a/sqlalchemy_filters/models.py +++ b/sqlalchemy_filters/models.py @@ -1,12 +1,19 @@ +from sqlalchemy import __version__ as sqlalchemy_version from sqlalchemy.exc import InvalidRequestError +from sqlalchemy.orm import mapperlib from sqlalchemy.inspection import inspect -from sqlalchemy.orm.mapper import Mapper from sqlalchemy.util import symbol import types from .exceptions import BadQuery, FieldNotFound, BadSpec +def sqlalchemy_version_lt(version): + """compares sqla version < version""" + + return tuple(sqlalchemy_version.split('.')) < tuple(version.split('.')) + + class Field(object): def __init__(self, model, field_name): @@ -61,20 +68,33 @@ def get_query_models(query): A dictionary with all the models included in the query. 
""" models = [col_desc['entity'] for col_desc in query.column_descriptions] - models.extend(mapper.class_ for mapper in query._join_entities) + try: + join_entities = ( + query._join_entities + if sqlalchemy_version_lt('1.4') + else query._compile_state()._join_entities + ) + models.extend(mapper.class_ for mapper in join_entities) + except InvalidRequestError: # pragma: nocover + pass # handle compilation errors in sqla 1.4 # account also query.select_from entities - if ( - hasattr(query, '_select_from_entity') and - (query._select_from_entity is not None) - ): - model_class = ( - query._select_from_entity.class_ - if isinstance(query._select_from_entity, Mapper) # sqlalchemy>=1.1 - else query._select_from_entity # sqlalchemy==1.0 - ) - if model_class not in models: - models.append(model_class) + model_class = None + if sqlalchemy_version_lt('1.4'): # pragma: nocover ; sqlalchemy<1.4 + if query._select_from_entity: + model_class = ( + query._select_from_entity + if sqlalchemy_version_lt('1.1') + else query._select_from_entity.class_ + ) + else: # pragma: nocover ; sqlalchemy>=1.4 + if query._from_obj: + for registry in mapperlib._all_registries(): + for mapper in registry.mappers: + if query._from_obj[0] in mapper.tables: + model_class = mapper.class_ + if model_class and (model_class not in models): + models.append(model_class) return {model.__name__: model for model in models} @@ -152,13 +172,26 @@ def auto_join(query, *model_names): """ # every model has access to the registry, so we can use any from the query query_models = get_query_models(query).values() - model_registry = list(query_models)[-1]._decl_class_registry + last_model = list(query_models)[-1] + model_registry = ( + last_model._decl_class_registry + if sqlalchemy_version_lt('1.4') + else last_model.registry._class_registry + ) for name in model_names: model = get_model_class_by_name(model_registry, name) - if model not in get_query_models(query).values(): - try: - query = query.join(model) + if model and (model not in get_query_models(query).values()): + try: # pragma: nocover + if sqlalchemy_version_lt('1.4'): + query = query.join(model) + else: + # https://docs.sqlalchemy.org/en/14/changelog/migration_14.html + # Many Core and ORM statement objects now perform much of + # their construction and validation in the compile phase + tmp = query.join(model) + tmp._compile_state() + query = tmp except InvalidRequestError: pass # can't be autojoined return query diff --git a/test/interface/test_models.py b/test/interface/test_models.py index 0531095..86853ac 100644 --- a/test/interface/test_models.py +++ b/test/interface/test_models.py @@ -5,7 +5,7 @@ from sqlalchemy_filters.exceptions import BadSpec, BadQuery from sqlalchemy_filters.models import ( auto_join, get_default_model, get_query_models, get_model_class_by_name, - get_model_from_spec + get_model_from_spec, sqlalchemy_version_lt ) from test.models import Base, Bar, Foo, Qux @@ -132,7 +132,11 @@ class TestGetModelClassByName: @pytest.fixture def registry(self): - return Base._decl_class_registry + return ( + Base._decl_class_registry + if sqlalchemy_version_lt('1.4') + else Base.registry._class_registry + ) def test_exists(self, registry): assert get_model_class_by_name(registry, 'Foo') == Foo From 8c469e5949bcc66bc6c665fbc3fb76c5fd86977a Mon Sep 17 00:00:00 2001 From: Radoslav Bodo Date: Mon, 12 Jul 2021 00:11:11 +0200 Subject: [PATCH 04/30] sqlalchemy 1.4 infer tables from query without columns (sqlalchemy-datatables usecase) --- sqlalchemy_filters/models.py | 46 
+++++++++++++++++++++++------------ test/interface/test_models.py | 7 ++++++ 2 files changed, 38 insertions(+), 15 deletions(-) diff --git a/sqlalchemy_filters/models.py b/sqlalchemy_filters/models.py index 5bfa47e..e9e1c3b 100644 --- a/sqlalchemy_filters/models.py +++ b/sqlalchemy_filters/models.py @@ -58,6 +58,16 @@ def _is_hybrid_method(orm_descriptor): return orm_descriptor.extension_type == symbol('HYBRID_METHOD') +def get_model_from_table(table): # pragma: nocover + """Resolve model class from table object""" + + for registry in mapperlib._all_registries(): + for mapper in registry.mappers: + if table in mapper.tables: + return mapper.class_ + return None + + def get_query_models(query): """Get models from query. @@ -68,31 +78,37 @@ def get_query_models(query): A dictionary with all the models included in the query. """ models = [col_desc['entity'] for col_desc in query.column_descriptions] - try: - join_entities = ( - query._join_entities - if sqlalchemy_version_lt('1.4') - else query._compile_state()._join_entities - ) - models.extend(mapper.class_ for mapper in join_entities) - except InvalidRequestError: # pragma: nocover - pass # handle compilation errors in sqla 1.4 + + # account joined entities + if sqlalchemy_version_lt('1.4'): # pragma: nocover + models.extend(mapper.class_ for mapper in query._join_entities) + else: # pragma: nocover + try: + models.extend( + mapper.class_ + for mapper + in query._compile_state()._join_entities + ) + except InvalidRequestError: + # query might not contain columns yet, hence cannot be compiled + # try to infer the models from various internals + for table_tuple in query._setup_joins + query._legacy_setup_joins: + model_class = get_model_from_table(table_tuple[0]) + if model_class: + models.append(model_class) # account also query.select_from entities model_class = None - if sqlalchemy_version_lt('1.4'): # pragma: nocover ; sqlalchemy<1.4 + if sqlalchemy_version_lt('1.4'): # pragma: nocover if query._select_from_entity: model_class = ( query._select_from_entity if sqlalchemy_version_lt('1.1') else query._select_from_entity.class_ ) - else: # pragma: nocover ; sqlalchemy>=1.4 + else: # pragma: nocover if query._from_obj: - for registry in mapperlib._all_registries(): - for mapper in registry.mappers: - if query._from_obj[0] in mapper.tables: - model_class = mapper.class_ + model_class = get_model_from_table(query._from_obj[0]) if model_class and (model_class not in models): models.append(model_class) diff --git a/test/interface/test_models.py b/test/interface/test_models.py index 86853ac..8910efb 100644 --- a/test/interface/test_models.py +++ b/test/interface/test_models.py @@ -33,6 +33,13 @@ def test_query_with_select_from_model(self, session): assert {'Bar': Bar} == entities + def test_query_with_select_from_and_join_model(self, session): + query = session.query().select_from(Bar).join(Foo) + + entities = get_query_models(query) + + assert {'Bar': Bar, 'Foo': Foo} == entities + def test_query_with_multiple_models(self, session): query = session.query(Bar, Qux) From 58a9f767d39fb5b262bc5ea62acac4c2ae9bbfa4 Mon Sep 17 00:00:00 2001 From: Tom V Date: Thu, 6 Apr 2023 14:26:28 +0100 Subject: [PATCH 05/30] add sqlalchemy 1.4 to tox conf --- tox.ini | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 79a7e42..5f82b7e 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = {py27}-sqlalchemy{1.0,1.1,1.2,1.3},{py35,py36,py37,py38}-sqlalchemy{1.0,1.1,1.2,1.3,latest} +envlist = 
{py27}-sqlalchemy{1.0,1.1,1.2,1.3,1.4},{py35,py36,py37,py38}-sqlalchemy{1.0,1.1,1.2,1.3,1.4,latest} skipsdist = True [testenv] @@ -21,5 +21,6 @@ deps = sqlalchemy1.1: sqlalchemy>=1.1,<1.2 sqlalchemy1.2: sqlalchemy>=1.2,<1.3 sqlalchemy1.3: sqlalchemy>=1.3,<1.4 + sqlalchemy1.4: sqlalchemy>=1.4,<1.5 commands = make coverage ARGS='-x -vv' From 342e89c82b5ae502de1dbd554644ed674a6b7746 Mon Sep 17 00:00:00 2001 From: Tom V Date: Thu, 6 Apr 2023 14:36:55 +0100 Subject: [PATCH 06/30] github actions, actions/setup-python v2->v4 --- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index eebf8b9..ea3e912 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -64,7 +64,7 @@ jobs: steps: - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python }} From 02b72d582511ac366ccafb87aa2e609bd41e1cd8 Mon Sep 17 00:00:00 2001 From: Tom V Date: Thu, 6 Apr 2023 14:40:10 +0100 Subject: [PATCH 07/30] github actions needs patch level python versions and add sqlalchemy 1.4 --- .github/workflows/tests.yml | 45 ++++++++++++++++++++----------------- 1 file changed, 25 insertions(+), 20 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index ea3e912..e1f1b24 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -37,30 +37,35 @@ jobs: - {python: '2.7', tox: "py27-sqlalchemy1.1"} - {python: '2.7', tox: "py27-sqlalchemy1.2"} - {python: '2.7', tox: "py27-sqlalchemy1.3"} + - {python: '2.7', tox: "py27-sqlalchemy1.4"} - - {python: '3.5', tox: "py35-sqlalchemy1.0"} - - {python: '3.5', tox: "py35-sqlalchemy1.1"} - - {python: '3.5', tox: "py35-sqlalchemy1.2"} - - {python: '3.5', tox: "py35-sqlalchemy1.3"} - - {python: '3.5', tox: "py35-sqlalchemylatest"} + - {python: '3.5.10', tox: "py35-sqlalchemy1.0"} + - {python: '3.5.10', tox: "py35-sqlalchemy1.1"} + - {python: '3.5.10', tox: "py35-sqlalchemy1.2"} + - {python: '3.5.10', tox: "py35-sqlalchemy1.3"} + - {python: '3.5.10', tox: "py35-sqlalchemy1.4"} + - {python: '3.5.10', tox: "py35-sqlalchemylatest"} - - {python: '3.6', tox: "py36-sqlalchemy1.0"} - - {python: '3.6', tox: "py36-sqlalchemy1.1"} - - {python: '3.6', tox: "py36-sqlalchemy1.2"} - - {python: '3.6', tox: "py36-sqlalchemy1.3"} - - {python: '3.6', tox: "py36-sqlalchemylatest"} + - {python: '3.6.15', tox: "py36-sqlalchemy1.0"} + - {python: '3.6.15', tox: "py36-sqlalchemy1.1"} + - {python: '3.6.15', tox: "py36-sqlalchemy1.2"} + - {python: '3.6.15', tox: "py36-sqlalchemy1.3"} + - {python: '3.6.15', tox: "py36-sqlalchemy1.4"} + - {python: '3.6.15', tox: "py36-sqlalchemylatest"} - - {python: '3.7', tox: "py37-sqlalchemy1.0"} - - {python: '3.7', tox: "py37-sqlalchemy1.1"} - - {python: '3.7', tox: "py37-sqlalchemy1.2"} - - {python: '3.7', tox: "py37-sqlalchemy1.3"} - - {python: '3.7', tox: "py37-sqlalchemylatest"} + - {python: '3.7.16', tox: "py37-sqlalchemy1.0"} + - {python: '3.7.16', tox: "py37-sqlalchemy1.1"} + - {python: '3.7.16', tox: "py37-sqlalchemy1.2"} + - {python: '3.7.16', tox: "py37-sqlalchemy1.3"} + - {python: '3.7.16', tox: "py37-sqlalchemy1.4"} + - {python: '3.7.16', tox: "py37-sqlalchemylatest"} - - {python: '3.8', tox: "py38-sqlalchemy1.0"} - - {python: '3.8', tox: "py38-sqlalchemy1.1"} - - {python: '3.8', tox: "py38-sqlalchemy1.2"} - - {python: '3.8', tox: "py38-sqlalchemy1.3"} - - {python: '3.8', tox: "py38-sqlalchemylatest"} + - {python: '3.8.16', tox: 
"py38-sqlalchemy1.0"} + - {python: '3.8.16', tox: "py38-sqlalchemy1.1"} + - {python: '3.8.16', tox: "py38-sqlalchemy1.2"} + - {python: '3.8.16', tox: "py38-sqlalchemy1.3"} + - {python: '3.8.16', tox: "py38-sqlalchemy1.4"} + - {python: '3.8.16', tox: "py38-sqlalchemylatest"} steps: - uses: actions/checkout@v2 From c13776d5bc9b7f7db30387afd600183dffa0c600 Mon Sep 17 00:00:00 2001 From: Tom V Date: Thu, 6 Apr 2023 14:43:01 +0100 Subject: [PATCH 08/30] GHA: downgrade ubuntu 22.04->20.04 for more python versions --- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index e1f1b24..3da4883 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -6,7 +6,7 @@ on: jobs: tests: name: ${{ matrix.tox }} - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 services: mariadb: From e9219fe45792591cd9194e033bdbf1a09b90c0d4 Mon Sep 17 00:00:00 2001 From: Tom V Date: Thu, 6 Apr 2023 14:45:31 +0100 Subject: [PATCH 09/30] revert python patch versions --- .github/workflows/tests.yml | 48 ++++++++++++++++++------------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 3da4883..76f78f2 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -39,33 +39,33 @@ jobs: - {python: '2.7', tox: "py27-sqlalchemy1.3"} - {python: '2.7', tox: "py27-sqlalchemy1.4"} - - {python: '3.5.10', tox: "py35-sqlalchemy1.0"} - - {python: '3.5.10', tox: "py35-sqlalchemy1.1"} - - {python: '3.5.10', tox: "py35-sqlalchemy1.2"} - - {python: '3.5.10', tox: "py35-sqlalchemy1.3"} - - {python: '3.5.10', tox: "py35-sqlalchemy1.4"} - - {python: '3.5.10', tox: "py35-sqlalchemylatest"} + - {python: '3.5', tox: "py35-sqlalchemy1.0"} + - {python: '3.5', tox: "py35-sqlalchemy1.1"} + - {python: '3.5', tox: "py35-sqlalchemy1.2"} + - {python: '3.5', tox: "py35-sqlalchemy1.3"} + - {python: '3.5', tox: "py35-sqlalchemy1.4"} + - {python: '3.5', tox: "py35-sqlalchemylatest"} - - {python: '3.6.15', tox: "py36-sqlalchemy1.0"} - - {python: '3.6.15', tox: "py36-sqlalchemy1.1"} - - {python: '3.6.15', tox: "py36-sqlalchemy1.2"} - - {python: '3.6.15', tox: "py36-sqlalchemy1.3"} - - {python: '3.6.15', tox: "py36-sqlalchemy1.4"} - - {python: '3.6.15', tox: "py36-sqlalchemylatest"} + - {python: '3.6', tox: "py36-sqlalchemy1.0"} + - {python: '3.6', tox: "py36-sqlalchemy1.1"} + - {python: '3.6', tox: "py36-sqlalchemy1.2"} + - {python: '3.6', tox: "py36-sqlalchemy1.3"} + - {python: '3.6', tox: "py36-sqlalchemy1.4"} + - {python: '3.6', tox: "py36-sqlalchemylatest"} - - {python: '3.7.16', tox: "py37-sqlalchemy1.0"} - - {python: '3.7.16', tox: "py37-sqlalchemy1.1"} - - {python: '3.7.16', tox: "py37-sqlalchemy1.2"} - - {python: '3.7.16', tox: "py37-sqlalchemy1.3"} - - {python: '3.7.16', tox: "py37-sqlalchemy1.4"} - - {python: '3.7.16', tox: "py37-sqlalchemylatest"} + - {python: '3.7', tox: "py37-sqlalchemy1.0"} + - {python: '3.7', tox: "py37-sqlalchemy1.1"} + - {python: '3.7', tox: "py37-sqlalchemy1.2"} + - {python: '3.7', tox: "py37-sqlalchemy1.3"} + - {python: '3.7', tox: "py37-sqlalchemy1.4"} + - {python: '3.7', tox: "py37-sqlalchemylatest"} - - {python: '3.8.16', tox: "py38-sqlalchemy1.0"} - - {python: '3.8.16', tox: "py38-sqlalchemy1.1"} - - {python: '3.8.16', tox: "py38-sqlalchemy1.2"} - - {python: '3.8.16', tox: "py38-sqlalchemy1.3"} - - {python: '3.8.16', tox: "py38-sqlalchemy1.4"} - - {python: '3.8.16', tox: "py38-sqlalchemylatest"} + - {python: 
'3.8', tox: "py38-sqlalchemy1.0"} + - {python: '3.8', tox: "py38-sqlalchemy1.1"} + - {python: '3.8', tox: "py38-sqlalchemy1.2"} + - {python: '3.8', tox: "py38-sqlalchemy1.3"} + - {python: '3.8', tox: "py38-sqlalchemy1.4"} + - {python: '3.8', tox: "py38-sqlalchemylatest"} steps: - uses: actions/checkout@v2 From 79844ebbe88c9bcdd9e2889bc48c4f94f6374202 Mon Sep 17 00:00:00 2001 From: Tom V Date: Thu, 6 Apr 2023 14:50:43 +0100 Subject: [PATCH 10/30] GHA: pin tox --- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 76f78f2..b327ea8 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -73,5 +73,5 @@ jobs: with: python-version: ${{ matrix.python }} - - run: pip install tox + - run: pip install tox==3.15.1 - run: tox -e ${{ matrix.tox }} From 7a69686ab0e2aacf416799e6cded857f518f31f3 Mon Sep 17 00:00:00 2001 From: Tom V Date: Thu, 6 Apr 2023 14:55:07 +0100 Subject: [PATCH 11/30] remove py2.7/sqlalchemy 1.4; add py3.9 --- .github/workflows/tests.yml | 8 +++++++- tox.ini | 6 +++--- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index b327ea8..1d1462e 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -37,7 +37,6 @@ jobs: - {python: '2.7', tox: "py27-sqlalchemy1.1"} - {python: '2.7', tox: "py27-sqlalchemy1.2"} - {python: '2.7', tox: "py27-sqlalchemy1.3"} - - {python: '2.7', tox: "py27-sqlalchemy1.4"} - {python: '3.5', tox: "py35-sqlalchemy1.0"} - {python: '3.5', tox: "py35-sqlalchemy1.1"} @@ -67,6 +66,13 @@ jobs: - {python: '3.8', tox: "py38-sqlalchemy1.4"} - {python: '3.8', tox: "py38-sqlalchemylatest"} + - {python: '3.9', tox: "py39-sqlalchemy1.0"} + - {python: '3.9', tox: "py39-sqlalchemy1.1"} + - {python: '3.9', tox: "py39-sqlalchemy1.2"} + - {python: '3.9', tox: "py39-sqlalchemy1.3"} + - {python: '3.9', tox: "py39-sqlalchemy1.4"} + - {python: '3.9', tox: "py39-sqlalchemylatest"} + steps: - uses: actions/checkout@v2 - uses: actions/setup-python@v4 diff --git a/tox.ini b/tox.ini index 5f82b7e..aa6c825 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = {py27}-sqlalchemy{1.0,1.1,1.2,1.3,1.4},{py35,py36,py37,py38}-sqlalchemy{1.0,1.1,1.2,1.3,1.4,latest} +envlist = {py27}-sqlalchemy{1.0,1.1,1.2,1.3,1.4},{py35,py36,py37,py38,py39}-sqlalchemy{1.0,1.1,1.2,1.3,1.4,latest} skipsdist = True [testenv] @@ -13,10 +13,10 @@ deps = py27: funcsigs # https://docs.pytest.org/en/latest/py27-py34-deprecation.html py27: pytest<5.0.0 - {py35,py36,py37,py38}: pytest~=5.4.1 + {py35,py36,py37,py38,py39}: pytest~=5.4.1 # https://github.com/kvesteri/sqlalchemy-utils/blob/master/CHANGES.rst#0364-2020-04-30 py27: sqlalchemy-utils==0.36.3 - {py35,py36,py37,py38}: sqlalchemy-utils~=0.37.8 + {py35,py36,py37,py38,py39}: sqlalchemy-utils~=0.37.8 sqlalchemy1.0: sqlalchemy>=1.0,<1.1 sqlalchemy1.1: sqlalchemy>=1.1,<1.2 sqlalchemy1.2: sqlalchemy>=1.2,<1.3 From 31d226d27415f7356ccbc3e7e70fea0f07ce997c Mon Sep 17 00:00:00 2001 From: Tom V Date: Thu, 6 Apr 2023 17:43:36 +0100 Subject: [PATCH 12/30] tox 4 doesn't support Python<3.7 --- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 1d1462e..3bcde57 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -79,5 +79,5 @@ jobs: with: python-version: ${{ matrix.python }} - - run: pip install tox==3.15.1 + - run: pip 
install tox~=3.28 - run: tox -e ${{ matrix.tox }} From eba355115b7a70ce174b7f39333f4979f49e2491 Mon Sep 17 00:00:00 2001 From: Tom V Date: Thu, 6 Apr 2023 18:12:00 +0100 Subject: [PATCH 13/30] update changelog --- CHANGELOG.rst | 8 ++++++++ README.rst | 2 +- setup.py | 1 + 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 821e30a..2cad1f1 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -4,6 +4,14 @@ Release Notes Here you can see the full list of changes between sqlalchemy-filters versions, where semantic versioning is used: *major.minor.patch*. +0.13.0 +------ + +Released TBD + +* Add support for SQLAlchemy 1.4 (#69) thanks to @bodik +* Add support for Python 3.9 + 0.12.0 ------ diff --git a/README.rst b/README.rst index 8c3b8f2..bffb33b 100644 --- a/README.rst +++ b/README.rst @@ -499,7 +499,7 @@ SQLAlchemy support ------------------ The following SQLAlchemy_ versions are supported: ``1.0``, ``1.1``, -``1.2``, ``1.3``. +``1.2``, ``1.3``, ``1.4``. Changelog diff --git a/setup.py b/setup.py index 1631980..03b66a0 100644 --- a/setup.py +++ b/setup.py @@ -47,6 +47,7 @@ "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", "Topic :: Database", "Topic :: Database :: Front-Ends", "Topic :: Software Development :: Libraries :: Python Modules", From c0ce3025bbf5668201c23b25467609d6b7942305 Mon Sep 17 00:00:00 2001 From: Tom V Date: Thu, 6 Apr 2023 18:21:07 +0100 Subject: [PATCH 14/30] bump version --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 03b66a0..72f113c 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ setup( name='sqlalchemy-filters', - version='0.12.0', + version='0.13.0', description='A library to filter SQLAlchemy queries.', long_description=readme, long_description_content_type='text/x-rst', From 9a50614f74f2a4d655e01d1d6055b2ee4f8e5400 Mon Sep 17 00:00:00 2001 From: Tom V Date: Thu, 13 Apr 2023 08:59:02 +0100 Subject: [PATCH 15/30] use coverage-conditional-plugin --- setup.py | 1 + sqlalchemy_filters/models.py | 16 ++++++++-------- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/setup.py b/setup.py index 72f113c..a2efdcf 100644 --- a/setup.py +++ b/setup.py @@ -30,6 +30,7 @@ 'flake8', 'restructuredtext-lint', 'Pygments', + 'coverage-conditional-plugin', ], 'mysql': ['mysql-connector-python-rf==2.2.2'], 'postgresql': ['psycopg2==2.8.4'], diff --git a/sqlalchemy_filters/models.py b/sqlalchemy_filters/models.py index e9e1c3b..b60b35b 100644 --- a/sqlalchemy_filters/models.py +++ b/sqlalchemy_filters/models.py @@ -58,7 +58,7 @@ def _is_hybrid_method(orm_descriptor): return orm_descriptor.extension_type == symbol('HYBRID_METHOD') -def get_model_from_table(table): # pragma: nocover +def get_model_from_table(table): # pragma: sqlalchemy_gte_1_4 """Resolve model class from table object""" for registry in mapperlib._all_registries(): @@ -80,9 +80,9 @@ def get_query_models(query): models = [col_desc['entity'] for col_desc in query.column_descriptions] # account joined entities - if sqlalchemy_version_lt('1.4'): # pragma: nocover + if sqlalchemy_version_lt('1.4'): # pragma: sqlalchemy_lt_1_4 models.extend(mapper.class_ for mapper in query._join_entities) - else: # pragma: nocover + else: # pragma: sqlalchemy_gte_1_4 try: models.extend( mapper.class_ @@ -99,14 +99,14 @@ def get_query_models(query): # account also query.select_from entities model_class = None - 
if sqlalchemy_version_lt('1.4'): # pragma: nocover + if sqlalchemy_version_lt('1.4'): # pragma: sqlalchemy_lt_1_4 if query._select_from_entity: model_class = ( query._select_from_entity if sqlalchemy_version_lt('1.1') else query._select_from_entity.class_ ) - else: # pragma: nocover + else: # pragma: sqlalchemy_gte_1_4 if query._from_obj: model_class = get_model_from_table(query._from_obj[0]) if model_class and (model_class not in models): @@ -198,10 +198,10 @@ def auto_join(query, *model_names): for name in model_names: model = get_model_class_by_name(model_registry, name) if model and (model not in get_query_models(query).values()): - try: # pragma: nocover - if sqlalchemy_version_lt('1.4'): + try: + if sqlalchemy_version_lt('1.4'): # pragma: sqlalchemy_lt_1_4 query = query.join(model) - else: + else: # pragma: sqlalchemy_gte_1_4 # https://docs.sqlalchemy.org/en/14/changelog/migration_14.html # Many Core and ORM statement objects now perform much of # their construction and validation in the compile phase From 326368b73a8d4fa9f80c63c1a5f3b46b8dea2496 Mon Sep 17 00:00:00 2001 From: Tom V Date: Thu, 13 Apr 2023 09:11:14 +0100 Subject: [PATCH 16/30] only support in life Python versions see https://endoflife.date/python --- .github/workflows/tests.yml | 26 +++-------- .travis.yml | 87 ----------------------------------- README.rst | 8 ---- setup.py | 4 +- sqlalchemy_filters/filters.py | 14 +----- tox.ini | 10 +--- 6 files changed, 12 insertions(+), 137 deletions(-) delete mode 100644 .travis.yml diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 3bcde57..fe8cb61 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -33,25 +33,6 @@ jobs: fail-fast: false matrix: include: - - {python: '2.7', tox: "py27-sqlalchemy1.0"} - - {python: '2.7', tox: "py27-sqlalchemy1.1"} - - {python: '2.7', tox: "py27-sqlalchemy1.2"} - - {python: '2.7', tox: "py27-sqlalchemy1.3"} - - - {python: '3.5', tox: "py35-sqlalchemy1.0"} - - {python: '3.5', tox: "py35-sqlalchemy1.1"} - - {python: '3.5', tox: "py35-sqlalchemy1.2"} - - {python: '3.5', tox: "py35-sqlalchemy1.3"} - - {python: '3.5', tox: "py35-sqlalchemy1.4"} - - {python: '3.5', tox: "py35-sqlalchemylatest"} - - - {python: '3.6', tox: "py36-sqlalchemy1.0"} - - {python: '3.6', tox: "py36-sqlalchemy1.1"} - - {python: '3.6', tox: "py36-sqlalchemy1.2"} - - {python: '3.6', tox: "py36-sqlalchemy1.3"} - - {python: '3.6', tox: "py36-sqlalchemy1.4"} - - {python: '3.6', tox: "py36-sqlalchemylatest"} - - {python: '3.7', tox: "py37-sqlalchemy1.0"} - {python: '3.7', tox: "py37-sqlalchemy1.1"} - {python: '3.7', tox: "py37-sqlalchemy1.2"} @@ -73,6 +54,13 @@ jobs: - {python: '3.9', tox: "py39-sqlalchemy1.4"} - {python: '3.9', tox: "py39-sqlalchemylatest"} + - {python: '3.10', tox: "py39-sqlalchemy1.0"} + - {python: '3.10', tox: "py39-sqlalchemy1.1"} + - {python: '3.10', tox: "py39-sqlalchemy1.2"} + - {python: '3.10', tox: "py39-sqlalchemy1.3"} + - {python: '3.10', tox: "py39-sqlalchemy1.4"} + - {python: '3.10', tox: "py39-sqlalchemylatest"} + steps: - uses: actions/checkout@v2 - uses: actions/setup-python@v4 diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 6c9de39..0000000 --- a/.travis.yml +++ /dev/null @@ -1,87 +0,0 @@ -language: python -python: 3.7 - -dist: xenial - -services: - - docker - -before_install: - - make mysql-container - - make postgres-container - -install: - - pip install tox - -matrix: - include: - - stage: test - python: 2.7 - env: TOX_ENV="py27-sqlalchemy1.0" - - python: 2.7 - env: 
TOX_ENV="py27-sqlalchemy1.1" - - python: 2.7 - env: TOX_ENV="py27-sqlalchemy1.2" - - python: 2.7 - env: TOX_ENV="py27-sqlalchemy1.3" - - python: 2.7 - env: TOX_ENV="py27-sqlalchemylatest" - - - python: 3.5 - env: TOX_ENV="py35-sqlalchemy1.0" - - python: 3.5 - env: TOX_ENV="py35-sqlalchemy1.1" - - python: 3.5 - env: TOX_ENV="py35-sqlalchemy1.2" - - python: 3.5 - env: TOX_ENV="py35-sqlalchemy1.3" - - python: 3.5 - env: TOX_ENV="py35-sqlalchemylatest" - - - python: 3.6 - env: TOX_ENV="py36-sqlalchemy1.0" - - python: 3.6 - env: TOX_ENV="py36-sqlalchemy1.1" - - python: 3.6 - env: TOX_ENV="py36-sqlalchemy1.2" - - python: 3.6 - env: TOX_ENV="py36-sqlalchemy1.3" - - python: 3.6 - env: TOX_ENV="py36-sqlalchemylatest" - - - python: 3.7 - env: TOX_ENV="py37-sqlalchemy1.0" - - python: 3.7 - env: TOX_ENV="py37-sqlalchemy1.1" - - python: 3.7 - env: TOX_ENV="py37-sqlalchemy1.2" - - python: 3.7 - env: TOX_ENV="py37-sqlalchemy1.3" - - python: 3.7 - env: TOX_ENV="py37-sqlalchemylatest" - - - python: 3.8 - env: TOX_ENV="py38-sqlalchemy1.0" - - python: 3.8 - env: TOX_ENV="py38-sqlalchemy1.1" - - python: 3.8 - env: TOX_ENV="py38-sqlalchemy1.2" - - python: 3.8 - env: TOX_ENV="py38-sqlalchemy1.3" - - python: 3.8 - env: TOX_ENV="py38-sqlalchemylatest" - - - stage: deploy - script: skip - deploy: - provider: pypi - user: mattbennett - password: - secure: "x27Zk+mvbYOtuf3XojYBh3O32KTv79SebMzkXE1lmtSms/pvRT+OG5eUAvWq1tro0shQdvv1X86aK9UHNzkRpIJEvbyH8zP3nWr/TlI62iZJC6y9m/c6JWwIUBk3p+CDHSwA1i6QXrxnffRfE/KSlAuTOY2FDYFDUBdVrEQrqIJa68Ij9200kxNcYWC9JmJKLaGl1l/V9M2K7YDsll3PxBByrCQeM95oeuoL3U81fPV0CJnsPNXAMtQHC9+dv+WE9UCo9QVsxWrnJGvCVUAfNQ7Ub+pECwSqzIL1O1ltqi0JH5CafwF6EMcQWxjh6R7IuN224QoAGTBzJ+ngKMxV7aQr58rkP82CyGaYEI7w9pvM9cZ6nBVIdgfCe8EfXCg+tTYy6v7SptT66wzP+GdX7sOuNFDYadmXGHZdwtUtEDFHwiCETRJNt/t5ONejlktY0fXlRUCta5UbYj755iryhk1lE7Ldj3sU336WjG74iAFdAd0fO8gVM0OwKLYZeaU2mN4JBI4qcp6n2S0sNtoyqXzm9OL/76Is5vAzUQavto9ao9bw0HN+ZWVLyE4+NLKS3ti+Pi/YofieoUFZ6kQJQRGja3tt7RCl0MYHqdIQQWx9MJWdiDpkyqqtR+0Q2sSYo8g4TsJe8zCuQLOflwd9pLKXVwdfqMUVGFCbdzZ/rig=" - on: - tags: true - repo: juliotrigo/sqlalchemy-filters - distributions: "sdist bdist_wheel" - -script: - - tox -e $TOX_ENV diff --git a/README.rst b/README.rst index bffb33b..cf8c880 100644 --- a/README.rst +++ b/README.rst @@ -487,14 +487,6 @@ The following RDBMS are supported (tested): - PostgreSQL -Python 2 --------- - -There is no active support for Python 2. However, it is compatible as of -February 2019, if you install ``funcsigs``, included in the ``python2`` -extra requirements. 
- - SQLAlchemy support ------------------ diff --git a/setup.py b/setup.py index a2efdcf..06d99f3 100644 --- a/setup.py +++ b/setup.py @@ -34,7 +34,6 @@ ], 'mysql': ['mysql-connector-python-rf==2.2.2'], 'postgresql': ['psycopg2==2.8.4'], - 'python2': ['funcsigs>=1.0.2'], }, zip_safe=True, license='Apache License, Version 2.0', @@ -44,11 +43,10 @@ "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", "Topic :: Database", "Topic :: Database :: Front-Ends", "Topic :: Software Development :: Libraries :: Python Modules", diff --git a/sqlalchemy_filters/filters.py b/sqlalchemy_filters/filters.py index 356c4fd..ba9d919 100644 --- a/sqlalchemy_filters/filters.py +++ b/sqlalchemy_filters/filters.py @@ -1,17 +1,7 @@ # -*- coding: utf-8 -*- from collections import namedtuple -try: - from collections.abc import Iterable -except ImportError: # pragma: no cover - # For python2 capability. - from collections import Iterable -try: - from inspect import signature -except ImportError: # pragma: no cover - # For python2 capability. NOTE: This is in not handled in install_requires - # but rather in extras_require. You can install with - # 'pip install sqlalchemy-filters[python2]' - from funcsigs import signature +from collections.abc import Iterable +from inspect import signature from itertools import chain from six import string_types diff --git a/tox.ini b/tox.ini index aa6c825..ca2fca8 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = {py27}-sqlalchemy{1.0,1.1,1.2,1.3,1.4},{py35,py36,py37,py38,py39}-sqlalchemy{1.0,1.1,1.2,1.3,1.4,latest} +envlist = {py37,py38,py39,py310}-sqlalchemy{1.0,1.1,1.2,1.3,1.4,latest} skipsdist = True [testenv] @@ -10,13 +10,7 @@ extras = mysql postgresql deps = - py27: funcsigs - # https://docs.pytest.org/en/latest/py27-py34-deprecation.html - py27: pytest<5.0.0 - {py35,py36,py37,py38,py39}: pytest~=5.4.1 - # https://github.com/kvesteri/sqlalchemy-utils/blob/master/CHANGES.rst#0364-2020-04-30 - py27: sqlalchemy-utils==0.36.3 - {py35,py36,py37,py38,py39}: sqlalchemy-utils~=0.37.8 + {py37,py38,py39,py310}: sqlalchemy-utils~=0.37.8 sqlalchemy1.0: sqlalchemy>=1.0,<1.1 sqlalchemy1.1: sqlalchemy>=1.1,<1.2 sqlalchemy1.2: sqlalchemy>=1.2,<1.3 From 879d05a9a034f1f1b322ce7316750b6a02c5b1cb Mon Sep 17 00:00:00 2001 From: Tom V Date: Thu, 13 Apr 2023 10:22:28 +0100 Subject: [PATCH 17/30] fix conditional coverage --- .coveragerc | 8 ++++++++ setup.cfg | 2 ++ sqlalchemy_filters/models.py | 14 +++++++------- 3 files changed, 17 insertions(+), 7 deletions(-) create mode 100644 .coveragerc create mode 100644 setup.cfg diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..2a1f049 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,8 @@ +[coverage:run] +plugins = + coverage_conditional_plugin + +[coverage:coverage_conditional_plugin] +rules = + "package_version('sqlalchemy') < (1, 4)": no_cover_sqlalchemy_lt_1_4 + "package_version('sqlalchemy') >= (1, 4)": no_cover_sqlalchemy_gte_1_4 diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..2bcd70e --- /dev/null +++ b/setup.cfg @@ -0,0 +1,2 @@ +[flake8] +max-line-length = 88 diff --git a/sqlalchemy_filters/models.py b/sqlalchemy_filters/models.py index b60b35b..b4f3084 100644 --- 
a/sqlalchemy_filters/models.py +++ b/sqlalchemy_filters/models.py @@ -58,7 +58,7 @@ def _is_hybrid_method(orm_descriptor): return orm_descriptor.extension_type == symbol('HYBRID_METHOD') -def get_model_from_table(table): # pragma: sqlalchemy_gte_1_4 +def get_model_from_table(table): # pragma: no_cover_sqlalchemy_lt_1_4 """Resolve model class from table object""" for registry in mapperlib._all_registries(): @@ -80,9 +80,9 @@ def get_query_models(query): models = [col_desc['entity'] for col_desc in query.column_descriptions] # account joined entities - if sqlalchemy_version_lt('1.4'): # pragma: sqlalchemy_lt_1_4 + if sqlalchemy_version_lt('1.4'): # pragma: no_cover_sqlalchemy_gte_1_4 models.extend(mapper.class_ for mapper in query._join_entities) - else: # pragma: sqlalchemy_gte_1_4 + else: # pragma: no_cover_sqlalchemy_lt_1_4 try: models.extend( mapper.class_ @@ -99,14 +99,14 @@ def get_query_models(query): # account also query.select_from entities model_class = None - if sqlalchemy_version_lt('1.4'): # pragma: sqlalchemy_lt_1_4 + if sqlalchemy_version_lt('1.4'): # pragma: no_cover_sqlalchemy_gte_1_4 if query._select_from_entity: model_class = ( query._select_from_entity if sqlalchemy_version_lt('1.1') else query._select_from_entity.class_ ) - else: # pragma: sqlalchemy_gte_1_4 + else: # pragma: no_cover_sqlalchemy_lt_1_4 if query._from_obj: model_class = get_model_from_table(query._from_obj[0]) if model_class and (model_class not in models): @@ -199,9 +199,9 @@ def auto_join(query, *model_names): model = get_model_class_by_name(model_registry, name) if model and (model not in get_query_models(query).values()): try: - if sqlalchemy_version_lt('1.4'): # pragma: sqlalchemy_lt_1_4 + if sqlalchemy_version_lt('1.4'): # pragma: no_cover_sqlalchemy_gte_1_4 query = query.join(model) - else: # pragma: sqlalchemy_gte_1_4 + else: # pragma: no_cover_sqlalchemy_lt_1_4 # https://docs.sqlalchemy.org/en/14/changelog/migration_14.html # Many Core and ORM statement objects now perform much of # their construction and validation in the compile phase From 2eb0761bd8917178bf68ca38dde0365ae6e53f87 Mon Sep 17 00:00:00 2001 From: Tom V Date: Thu, 13 Apr 2023 10:42:06 +0100 Subject: [PATCH 18/30] improve coverage --- test/interface/test_models.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/test/interface/test_models.py b/test/interface/test_models.py index 8910efb..9a2d3f5 100644 --- a/test/interface/test_models.py +++ b/test/interface/test_models.py @@ -5,12 +5,21 @@ from sqlalchemy_filters.exceptions import BadSpec, BadQuery from sqlalchemy_filters.models import ( auto_join, get_default_model, get_query_models, get_model_class_by_name, - get_model_from_spec, sqlalchemy_version_lt + get_model_from_spec, sqlalchemy_version_lt, get_model_from_table ) from test.models import Base, Bar, Foo, Qux class TestGetQueryModels(object): + def test_returns_none_for_unknown_table(self): + + class FakeUnmappedTable: + pass + + table = FakeUnmappedTable() + + result = get_model_from_table(table) + assert result is None def test_query_with_no_models(self, session): query = session.query() From 5e0f731575cd855b0e9133db9abe55921e9a833a Mon Sep 17 00:00:00 2001 From: Tom V Date: Thu, 13 Apr 2023 10:44:18 +0100 Subject: [PATCH 19/30] fix github actions --- .github/workflows/tests.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index fe8cb61..26808d7 100644 --- a/.github/workflows/tests.yml +++ 
b/.github/workflows/tests.yml @@ -54,12 +54,12 @@ jobs: - {python: '3.9', tox: "py39-sqlalchemy1.4"} - {python: '3.9', tox: "py39-sqlalchemylatest"} - - {python: '3.10', tox: "py39-sqlalchemy1.0"} - - {python: '3.10', tox: "py39-sqlalchemy1.1"} - - {python: '3.10', tox: "py39-sqlalchemy1.2"} - - {python: '3.10', tox: "py39-sqlalchemy1.3"} - - {python: '3.10', tox: "py39-sqlalchemy1.4"} - - {python: '3.10', tox: "py39-sqlalchemylatest"} + - {python: '3.10', tox: "py310-sqlalchemy1.0"} + - {python: '3.10', tox: "py310-sqlalchemy1.1"} + - {python: '3.10', tox: "py310-sqlalchemy1.2"} + - {python: '3.10', tox: "py310-sqlalchemy1.3"} + - {python: '3.10', tox: "py310-sqlalchemy1.4"} + - {python: '3.10', tox: "py310-sqlalchemylatest"} steps: - uses: actions/checkout@v2 From eeb35d8b5ed5a20c1565822f9ca6ff7b74ad0ce4 Mon Sep 17 00:00:00 2001 From: Tom V Date: Thu, 13 Apr 2023 10:51:16 +0100 Subject: [PATCH 20/30] skip testing get_model_from_table when not used --- test/interface/test_models.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/test/interface/test_models.py b/test/interface/test_models.py index 9a2d3f5..2ef7f88 100644 --- a/test/interface/test_models.py +++ b/test/interface/test_models.py @@ -11,6 +11,9 @@ class TestGetQueryModels(object): + @pytest.mark.skipif( + sqlalchemy_version_lt('1.4'), reason='tests sqlalchemy 1.4 code' + ) def test_returns_none_for_unknown_table(self): class FakeUnmappedTable: From d0dc3297cca1ddc3533617e23db67ef1fbd05efb Mon Sep 17 00:00:00 2001 From: Tom V Date: Thu, 13 Apr 2023 10:51:39 +0100 Subject: [PATCH 21/30] always use newer sqlalchemy-utils --- setup.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 06d99f3..29212fb 100644 --- a/setup.py +++ b/setup.py @@ -25,8 +25,7 @@ 'dev': [ 'pytest>=4.6.9', 'coverage~=5.0.4', - # for sqlalchemy1.4 >= 0.37 is required - 'sqlalchemy-utils>=0.36.3', + 'sqlalchemy-utils>=0.37', 'flake8', 'restructuredtext-lint', 'Pygments', From 51f011f9aa6c8898aa399391164e4e017d35898d Mon Sep 17 00:00:00 2001 From: Tom V Date: Thu, 13 Apr 2023 10:59:43 +0100 Subject: [PATCH 22/30] sqlalchemy <= 1.1 error for py3.10 --- .github/workflows/tests.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 26808d7..1d1ab19 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -54,8 +54,8 @@ jobs: - {python: '3.9', tox: "py39-sqlalchemy1.4"} - {python: '3.9', tox: "py39-sqlalchemylatest"} - - {python: '3.10', tox: "py310-sqlalchemy1.0"} - - {python: '3.10', tox: "py310-sqlalchemy1.1"} + # sqlalchemy <= 1.1 error with: + # AttributeError: module 'collections' has no attribute 'MutableMapping' - {python: '3.10', tox: "py310-sqlalchemy1.2"} - {python: '3.10', tox: "py310-sqlalchemy1.3"} - {python: '3.10', tox: "py310-sqlalchemy1.4"} From 364f229fc17fd49f3f173e279eb5aa93f05d675c Mon Sep 17 00:00:00 2001 From: Tom V Date: Thu, 13 Apr 2023 11:10:56 +0100 Subject: [PATCH 23/30] set python_requires='>=3.7' --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 29212fb..5dc0a57 100644 --- a/setup.py +++ b/setup.py @@ -20,6 +20,7 @@ author_email='wearehiring@student.com', url='https://github.com/juliotrigo/sqlalchemy-filters', packages=find_packages(exclude=['test', 'test.*']), + python_requires='>=3.7', install_requires=['sqlalchemy>=1.0.16', 'six>=1.10.0'], extras_require={ 'dev': [ From 7746db8c94c56da7c8911e91722d68a3e1173346 Mon Sep 17 00:00:00 2001 From: Tom 
V Date: Thu, 13 Apr 2023 11:14:56 +0100 Subject: [PATCH 24/30] don't test broken versions --- .github/workflows/tests.yml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 1d1ab19..f831037 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -33,33 +33,32 @@ jobs: fail-fast: false matrix: include: + # sqlalchemylatest (i.e. > 2.0.0) is not yet supported + # for any version of python + - {python: '3.7', tox: "py37-sqlalchemy1.0"} - {python: '3.7', tox: "py37-sqlalchemy1.1"} - {python: '3.7', tox: "py37-sqlalchemy1.2"} - {python: '3.7', tox: "py37-sqlalchemy1.3"} - {python: '3.7', tox: "py37-sqlalchemy1.4"} - - {python: '3.7', tox: "py37-sqlalchemylatest"} - {python: '3.8', tox: "py38-sqlalchemy1.0"} - {python: '3.8', tox: "py38-sqlalchemy1.1"} - {python: '3.8', tox: "py38-sqlalchemy1.2"} - {python: '3.8', tox: "py38-sqlalchemy1.3"} - {python: '3.8', tox: "py38-sqlalchemy1.4"} - - {python: '3.8', tox: "py38-sqlalchemylatest"} - {python: '3.9', tox: "py39-sqlalchemy1.0"} - {python: '3.9', tox: "py39-sqlalchemy1.1"} - {python: '3.9', tox: "py39-sqlalchemy1.2"} - {python: '3.9', tox: "py39-sqlalchemy1.3"} - {python: '3.9', tox: "py39-sqlalchemy1.4"} - - {python: '3.9', tox: "py39-sqlalchemylatest"} - # sqlalchemy <= 1.1 error with: + # python3.10 with sqlalchemy <= 1.1 errors with: # AttributeError: module 'collections' has no attribute 'MutableMapping' - {python: '3.10', tox: "py310-sqlalchemy1.2"} - {python: '3.10', tox: "py310-sqlalchemy1.3"} - {python: '3.10', tox: "py310-sqlalchemy1.4"} - - {python: '3.10', tox: "py310-sqlalchemylatest"} steps: - uses: actions/checkout@v2 From a8fdd354e5da118dc755d6fbabc910155c01b470 Mon Sep 17 00:00:00 2001 From: Tom V Date: Thu, 13 Apr 2023 11:20:07 +0100 Subject: [PATCH 25/30] update status badge --- README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index cf8c880..2dd9cda 100644 --- a/README.rst +++ b/README.rst @@ -16,8 +16,8 @@ SQLAlchemy filters .. image:: https://img.shields.io/pypi/format/sqlalchemy-filters.svg :target: https://pypi.org/project/sqlalchemy-filters/ -.. image:: https://travis-ci.org/juliotrigo/sqlalchemy-filters.svg?branch=master - :target: https://travis-ci.org/juliotrigo/sqlalchemy-filters +.. image:: https://github.com/juliotrigo/sqlalchemy-filters/actions/workflows/tests.yml/badge.svg + :target: https://github.com/juliotrigo/sqlalchemy-filters/actions Filtering From c53c8da87f12b1c407919e9714df003d009bfa42 Mon Sep 17 00:00:00 2001 From: Tom V Date: Thu, 13 Apr 2023 12:08:48 +0100 Subject: [PATCH 26/30] update changelog --- CHANGELOG.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 2cad1f1..4cf510f 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -7,11 +7,11 @@ versions, where semantic versioning is used: *major.minor.patch*. 
0.13.0 ------ -Released TBD +Released 2023-04-13 * Add support for SQLAlchemy 1.4 (#69) thanks to @bodik -* Add support for Python 3.9 - +* Add support for Python 3.9 & Python 3.10 +* Drop support for Python 2.7, 3.5 & 3.6 0.12.0 ------ From fcbaabbeb423e98267a7a04946eefc1f96812cc1 Mon Sep 17 00:00:00 2001 From: Tom V Date: Mon, 8 Jan 2024 23:03:33 +0000 Subject: [PATCH 27/30] pin mariadb to the lts --- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index f831037..9d24827 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -10,7 +10,7 @@ jobs: services: mariadb: - image: mariadb:latest + image: mariadb:10 ports: - 3306:3306 env: From 7e3abc72db94685863af4c76d2626514212fb245 Mon Sep 17 00:00:00 2001 From: Ron Rademaker Date: Wed, 14 Feb 2024 12:43:02 +0000 Subject: [PATCH 28/30] Run black --- .gitignore | 1 + poetry.lock | 410 +++++++++++------ pyproject.toml | 3 + sqlalchemy_filters/filters.py | 119 +++-- sqlalchemy_filters/models.py | 81 ++-- sqlalchemy_filters/pagination.py | 11 +- sqlalchemy_filters/sorting.py | 32 +- test/conftest.py | 72 ++- test/interface/test_filters.py | 720 ++++++++++++++---------------- test/interface/test_pagination.py | 292 ++++++------ test/interface/test_sorting.py | 564 ++++++++++++----------- test/models.py | 26 +- 12 files changed, 1221 insertions(+), 1110 deletions(-) diff --git a/.gitignore b/.gitignore index 8b3857e..6adab9e 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,4 @@ __pycache__/ .coverage.* .cache .tox +.python-version diff --git a/poetry.lock b/poetry.lock index fdfcd6a..7cec1f2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,3 +1,5 @@ +# This file is automatically @generated by Poetry 1.4.1 and should not be changed by hand. + [[package]] name = "atomicwrites" version = "1.4.0" @@ -5,6 +7,10 @@ description = "Atomic file writes." category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, + {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, +] [[package]] name = "attrs" @@ -13,12 +19,78 @@ description = "Classes Without Boilerplate" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, + {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, +] + +[package.extras] +dev = ["coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"] +docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"] +tests-no-zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"] + +[[package]] +name = "black" +version = "24.2.0" +description = "The uncompromising code formatter." 
+category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "black-24.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6981eae48b3b33399c8757036c7f5d48a535b962a7c2310d19361edeef64ce29"}, + {file = "black-24.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d533d5e3259720fdbc1b37444491b024003e012c5173f7d06825a77508085430"}, + {file = "black-24.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61a0391772490ddfb8a693c067df1ef5227257e72b0e4108482b8d41b5aee13f"}, + {file = "black-24.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:992e451b04667116680cb88f63449267c13e1ad134f30087dec8527242e9862a"}, + {file = "black-24.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:163baf4ef40e6897a2a9b83890e59141cc8c2a98f2dda5080dc15c00ee1e62cd"}, + {file = "black-24.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e37c99f89929af50ffaf912454b3e3b47fd64109659026b678c091a4cd450fb2"}, + {file = "black-24.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9de21bafcba9683853f6c96c2d515e364aee631b178eaa5145fc1c61a3cc92"}, + {file = "black-24.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:9db528bccb9e8e20c08e716b3b09c6bdd64da0dd129b11e160bf082d4642ac23"}, + {file = "black-24.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d84f29eb3ee44859052073b7636533ec995bd0f64e2fb43aeceefc70090e752b"}, + {file = "black-24.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e08fb9a15c914b81dd734ddd7fb10513016e5ce7e6704bdd5e1251ceee51ac9"}, + {file = "black-24.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:810d445ae6069ce64030c78ff6127cd9cd178a9ac3361435708b907d8a04c693"}, + {file = "black-24.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:ba15742a13de85e9b8f3239c8f807723991fbfae24bad92d34a2b12e81904982"}, + {file = "black-24.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e53a8c630f71db01b28cd9602a1ada68c937cbf2c333e6ed041390d6968faf4"}, + {file = "black-24.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93601c2deb321b4bad8f95df408e3fb3943d85012dddb6121336b8e24a0d1218"}, + {file = "black-24.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0057f800de6acc4407fe75bb147b0c2b5cbb7c3ed110d3e5999cd01184d53b0"}, + {file = "black-24.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:faf2ee02e6612577ba0181f4347bcbcf591eb122f7841ae5ba233d12c39dcb4d"}, + {file = "black-24.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:057c3dc602eaa6fdc451069bd027a1b2635028b575a6c3acfd63193ced20d9c8"}, + {file = "black-24.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:08654d0797e65f2423f850fc8e16a0ce50925f9337fb4a4a176a7aa4026e63f8"}, + {file = "black-24.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca610d29415ee1a30a3f30fab7a8f4144e9d34c89a235d81292a1edb2b55f540"}, + {file = "black-24.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:4dd76e9468d5536abd40ffbc7a247f83b2324f0c050556d9c371c2b9a9a95e31"}, + {file = "black-24.2.0-py3-none-any.whl", hash = "sha256:e8a6ae970537e67830776488bca52000eaa37fa63b9988e8c487458d9cd5ace6"}, + {file = "black-24.2.0.tar.gz", hash = "sha256:bce4f25c27c3435e4dace4815bcb2008b87e167e3bf4ee47ccdc5ce906eb4894"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} 
[package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] -docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "colorama" @@ -27,6 +99,10 @@ description = "Cross-platform colored terminal text." category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, +] [[package]] name = "iniconfig" @@ -35,6 +111,22 @@ description = "iniconfig: brain-dead simple config-ini parsing" category = "dev" optional = false python-versions = "*" +files = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+category = "dev" +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] [[package]] name = "mysql-connector-python-rf" @@ -43,17 +135,49 @@ description = "MySQL driver written in Python" category = "main" optional = true python-versions = "*" +files = [ + {file = "mysql-connector-python-rf-2.2.2.tar.gz", hash = "sha256:f8a19ef79d2e28ad28c740e3b757d60ff6077e1a08c357eb31dfa6839aaaf294"}, +] [[package]] name = "packaging" -version = "21.0" +version = "23.2" description = "Core utilities for Python packages" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] -[package.dependencies] -pyparsing = ">=2.0.2" +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] [[package]] name = "pluggy" @@ -62,6 +186,10 @@ description = "plugin and hook calling mechanisms for python" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] [package.extras] dev = ["pre-commit", "tox"] @@ -74,6 +202,44 @@ description = "psycopg2 - Python-PostgreSQL Database Adapter" category = "main" optional = true python-versions = ">=3.6" +files = [ + {file = "psycopg2-binary-2.9.1.tar.gz", hash = "sha256:b0221ca5a9837e040ebf61f48899926b5783668b7807419e4adae8175a31f773"}, + {file = "psycopg2_binary-2.9.1-cp310-cp310-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:24b0b6688b9f31a911f2361fe818492650795c9e5d3a1bc647acbd7440142a4f"}, + {file = "psycopg2_binary-2.9.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:542875f62bc56e91c6eac05a0deadeae20e1730be4c6334d8f04c944fcd99759"}, + {file = "psycopg2_binary-2.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:661509f51531ec125e52357a489ea3806640d0ca37d9dada461ffc69ee1e7b6e"}, + {file = "psycopg2_binary-2.9.1-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:d92272c7c16e105788efe2cfa5d680f07e34e0c29b03c1908f8636f55d5f915a"}, + {file = "psycopg2_binary-2.9.1-cp310-cp310-manylinux_2_24_ppc64le.whl", hash = "sha256:736b8797b58febabb85494142c627bd182b50d2a7ec65322983e71065ad3034c"}, + {file = "psycopg2_binary-2.9.1-cp310-cp310-win32.whl", hash = "sha256:ebccf1123e7ef66efc615a68295bf6fdba875a75d5bba10a05073202598085fc"}, + {file = "psycopg2_binary-2.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:1f6ca4a9068f5c5c57e744b4baa79f40e83e3746875cac3c45467b16326bab45"}, + {file = "psycopg2_binary-2.9.1-cp36-cp36m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:c250a7ec489b652c892e4f0a5d122cc14c3780f9f643e1a326754aedf82d9a76"}, + {file = "psycopg2_binary-2.9.1-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aef9aee84ec78af51107181d02fe8773b100b01c5dfde351184ad9223eab3698"}, + {file = "psycopg2_binary-2.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123c3fb684e9abfc47218d3784c7b4c47c8587951ea4dd5bc38b6636ac57f616"}, + {file = "psycopg2_binary-2.9.1-cp36-cp36m-manylinux_2_24_aarch64.whl", hash = "sha256:995fc41ebda5a7a663a254a1dcac52638c3e847f48307b5416ee373da15075d7"}, + {file = "psycopg2_binary-2.9.1-cp36-cp36m-manylinux_2_24_ppc64le.whl", hash = "sha256:fbb42a541b1093385a2d8c7eec94d26d30437d0e77c1d25dae1dcc46741a385e"}, + {file = "psycopg2_binary-2.9.1-cp36-cp36m-win32.whl", hash = "sha256:20f1ab44d8c352074e2d7ca67dc00843067788791be373e67a0911998787ce7d"}, + {file = 
"psycopg2_binary-2.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f6fac64a38f6768e7bc7b035b9e10d8a538a9fadce06b983fb3e6fa55ac5f5ce"}, + {file = "psycopg2_binary-2.9.1-cp37-cp37m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:1e3a362790edc0a365385b1ac4cc0acc429a0c0d662d829a50b6ce743ae61b5a"}, + {file = "psycopg2_binary-2.9.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f8559617b1fcf59a9aedba2c9838b5b6aa211ffedecabca412b92a1ff75aac1a"}, + {file = "psycopg2_binary-2.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a36c7eb6152ba5467fb264d73844877be8b0847874d4822b7cf2d3c0cb8cdcb0"}, + {file = "psycopg2_binary-2.9.1-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:2f62c207d1740b0bde5c4e949f857b044818f734a3d57f1d0d0edc65050532ed"}, + {file = "psycopg2_binary-2.9.1-cp37-cp37m-manylinux_2_24_ppc64le.whl", hash = "sha256:cfc523edecddaef56f6740d7de1ce24a2fdf94fd5e704091856a201872e37f9f"}, + {file = "psycopg2_binary-2.9.1-cp37-cp37m-win32.whl", hash = "sha256:1e85b74cbbb3056e3656f1cc4781294df03383127a8114cbc6531e8b8367bf1e"}, + {file = "psycopg2_binary-2.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:1473c0215b0613dd938db54a653f68251a45a78b05f6fc21af4326f40e8360a2"}, + {file = "psycopg2_binary-2.9.1-cp38-cp38-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:35c4310f8febe41f442d3c65066ca93cccefd75013df3d8c736c5b93ec288140"}, + {file = "psycopg2_binary-2.9.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c13d72ed6af7fd2c8acbd95661cf9477f94e381fce0792c04981a8283b52917"}, + {file = "psycopg2_binary-2.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14db1752acdd2187d99cb2ca0a1a6dfe57fc65c3281e0f20e597aac8d2a5bd90"}, + {file = "psycopg2_binary-2.9.1-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:aed4a9a7e3221b3e252c39d0bf794c438dc5453bc2963e8befe9d4cd324dff72"}, + {file = "psycopg2_binary-2.9.1-cp38-cp38-manylinux_2_24_ppc64le.whl", hash = "sha256:da113b70f6ec40e7d81b43d1b139b9db6a05727ab8be1ee559f3a69854a69d34"}, + {file = "psycopg2_binary-2.9.1-cp38-cp38-win32.whl", hash = "sha256:4235f9d5ddcab0b8dbd723dca56ea2922b485ea00e1dafacf33b0c7e840b3d32"}, + {file = "psycopg2_binary-2.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:988b47ac70d204aed01589ed342303da7c4d84b56c2f4c4b8b00deda123372bf"}, + {file = "psycopg2_binary-2.9.1-cp39-cp39-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:7360647ea04db2e7dff1648d1da825c8cf68dc5fbd80b8fb5b3ee9f068dcd21a"}, + {file = "psycopg2_binary-2.9.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca86db5b561b894f9e5f115d6a159fff2a2570a652e07889d8a383b5fae66eb4"}, + {file = "psycopg2_binary-2.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ced67f1e34e1a450cdb48eb53ca73b60aa0af21c46b9b35ac3e581cf9f00e31"}, + {file = "psycopg2_binary-2.9.1-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:0f2e04bd2a2ab54fa44ee67fe2d002bb90cee1c0f1cc0ebc3148af7b02034cbd"}, + {file = "psycopg2_binary-2.9.1-cp39-cp39-manylinux_2_24_ppc64le.whl", hash = "sha256:3242b9619de955ab44581a03a64bdd7d5e470cc4183e8fcadd85ab9d3756ce7a"}, + {file = "psycopg2_binary-2.9.1-cp39-cp39-win32.whl", hash = "sha256:0b7dae87f0b729922e06f85f667de7bf16455d411971b2043bbd9577af9d1975"}, + {file = "psycopg2_binary-2.9.1-cp39-cp39-win_amd64.whl", hash 
= "sha256:b4d7679a08fea64573c969f6994a2631908bb2c0e69a7235648642f3d2e39a68"}, +] [[package]] name = "py" @@ -82,14 +248,10 @@ description = "library with cross-python path, ini-parsing, io, code, log facili category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "pyparsing" -version = "2.4.7" -description = "Python parsing module" -category = "dev" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, + {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, +] [[package]] name = "pytest" @@ -98,6 +260,10 @@ description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] [package.dependencies] atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} @@ -119,6 +285,10 @@ description = "Python 2 and 3 compatibility utilities" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] [[package]] name = "sqlalchemy" @@ -127,18 +297,54 @@ description = "Database Abstraction Library" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "SQLAlchemy-1.3.24-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:87a2725ad7d41cd7376373c15fd8bf674e9c33ca56d0b8036add2d634dba372e"}, + {file = "SQLAlchemy-1.3.24-cp27-cp27m-win32.whl", hash = "sha256:f597a243b8550a3a0b15122b14e49d8a7e622ba1c9d29776af741f1845478d79"}, + {file = "SQLAlchemy-1.3.24-cp27-cp27m-win_amd64.whl", hash = "sha256:fc4cddb0b474b12ed7bdce6be1b9edc65352e8ce66bc10ff8cbbfb3d4047dbf4"}, + {file = "SQLAlchemy-1.3.24-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:f1149d6e5c49d069163e58a3196865e4321bad1803d7886e07d8710de392c548"}, + {file = "SQLAlchemy-1.3.24-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:14f0eb5db872c231b20c18b1e5806352723a3a89fb4254af3b3e14f22eaaec75"}, + {file = "SQLAlchemy-1.3.24-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:e98d09f487267f1e8d1179bf3b9d7709b30a916491997137dd24d6ae44d18d79"}, + {file = "SQLAlchemy-1.3.24-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:fc1f2a5a5963e2e73bac4926bdaf7790c4d7d77e8fc0590817880e22dd9d0b8b"}, + {file = "SQLAlchemy-1.3.24-cp35-cp35m-win32.whl", hash = "sha256:f3c5c52f7cb8b84bfaaf22d82cb9e6e9a8297f7c2ed14d806a0f5e4d22e83fb7"}, + {file = "SQLAlchemy-1.3.24-cp35-cp35m-win_amd64.whl", hash = "sha256:0352db1befcbed2f9282e72843f1963860bf0e0472a4fa5cf8ee084318e0e6ab"}, + {file = "SQLAlchemy-1.3.24-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:2ed6343b625b16bcb63c5b10523fd15ed8934e1ed0f772c534985e9f5e73d894"}, + {file = "SQLAlchemy-1.3.24-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:34fcec18f6e4b24b4a5f6185205a04f1eab1e56f8f1d028a2a03694ebcc2ddd4"}, + {file = "SQLAlchemy-1.3.24-cp36-cp36m-manylinux2010_x86_64.whl", hash = 
"sha256:e47e257ba5934550d7235665eee6c911dc7178419b614ba9e1fbb1ce6325b14f"}, + {file = "SQLAlchemy-1.3.24-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:816de75418ea0953b5eb7b8a74933ee5a46719491cd2b16f718afc4b291a9658"}, + {file = "SQLAlchemy-1.3.24-cp36-cp36m-win32.whl", hash = "sha256:26155ea7a243cbf23287f390dba13d7927ffa1586d3208e0e8d615d0c506f996"}, + {file = "SQLAlchemy-1.3.24-cp36-cp36m-win_amd64.whl", hash = "sha256:f03bd97650d2e42710fbe4cf8a59fae657f191df851fc9fc683ecef10746a375"}, + {file = "SQLAlchemy-1.3.24-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:a006d05d9aa052657ee3e4dc92544faae5fcbaafc6128217310945610d862d39"}, + {file = "SQLAlchemy-1.3.24-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1e2f89d2e5e3c7a88e25a3b0e43626dba8db2aa700253023b82e630d12b37109"}, + {file = "SQLAlchemy-1.3.24-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:0d5d862b1cfbec5028ce1ecac06a3b42bc7703eb80e4b53fceb2738724311443"}, + {file = "SQLAlchemy-1.3.24-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:0172423a27fbcae3751ef016663b72e1a516777de324a76e30efa170dbd3dd2d"}, + {file = "SQLAlchemy-1.3.24-cp37-cp37m-win32.whl", hash = "sha256:d37843fb8df90376e9e91336724d78a32b988d3d20ab6656da4eb8ee3a45b63c"}, + {file = "SQLAlchemy-1.3.24-cp37-cp37m-win_amd64.whl", hash = "sha256:c10ff6112d119f82b1618b6dc28126798481b9355d8748b64b9b55051eb4f01b"}, + {file = "SQLAlchemy-1.3.24-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:861e459b0e97673af6cc5e7f597035c2e3acdfb2608132665406cded25ba64c7"}, + {file = "SQLAlchemy-1.3.24-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5de2464c254380d8a6c20a2746614d5a436260be1507491442cf1088e59430d2"}, + {file = "SQLAlchemy-1.3.24-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d375d8ccd3cebae8d90270f7aa8532fe05908f79e78ae489068f3b4eee5994e8"}, + {file = "SQLAlchemy-1.3.24-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:014ea143572fee1c18322b7908140ad23b3994036ef4c0d630110faf942652f8"}, + {file = "SQLAlchemy-1.3.24-cp38-cp38-win32.whl", hash = "sha256:6607ae6cd3a07f8a4c3198ffbf256c261661965742e2b5265a77cd5c679c9bba"}, + {file = "SQLAlchemy-1.3.24-cp38-cp38-win_amd64.whl", hash = "sha256:fcb251305fa24a490b6a9ee2180e5f8252915fb778d3dafc70f9cc3f863827b9"}, + {file = "SQLAlchemy-1.3.24-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:01aa5f803db724447c1d423ed583e42bf5264c597fd55e4add4301f163b0be48"}, + {file = "SQLAlchemy-1.3.24-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:4d0e3515ef98aa4f0dc289ff2eebb0ece6260bbf37c2ea2022aad63797eacf60"}, + {file = "SQLAlchemy-1.3.24-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:bce28277f308db43a6b4965734366f533b3ff009571ec7ffa583cb77539b84d6"}, + {file = "SQLAlchemy-1.3.24-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:8110e6c414d3efc574543109ee618fe2c1f96fa31833a1ff36cc34e968c4f233"}, + {file = "SQLAlchemy-1.3.24-cp39-cp39-win32.whl", hash = "sha256:ee5f5188edb20a29c1cc4a039b074fdc5575337c9a68f3063449ab47757bb064"}, + {file = "SQLAlchemy-1.3.24-cp39-cp39-win_amd64.whl", hash = "sha256:09083c2487ca3c0865dc588e07aeaa25416da3d95f7482c07e92f47e080aa17b"}, + {file = "SQLAlchemy-1.3.24.tar.gz", hash = "sha256:ebbb777cbf9312359b897bf81ba00dae0f5cb69fba2a18265dcc18a6f5ef7519"}, +] [package.extras] mssql = ["pyodbc"] -mssql_pymssql = ["pymssql"] -mssql_pyodbc = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] mysql = ["mysqlclient"] oracle = ["cx-oracle"] postgresql = ["psycopg2"] -postgresql_pg8000 = ["pg8000 (<1.16.6)"] -postgresql_psycopg2binary = ["psycopg2-binary"] 
-postgresql_psycopg2cffi = ["psycopg2cffi"] -pymysql = ["pymysql (<1)", "pymysql"] +postgresql-pg8000 = ["pg8000 (<1.16.6)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +pymysql = ["pymysql", "pymysql (<1)"] [[package]] name = "sqlalchemy-utils" @@ -147,6 +353,10 @@ description = "Various utility functions for SQLAlchemy." category = "dev" optional = false python-versions = "~=3.4" +files = [ + {file = "SQLAlchemy-Utils-0.37.8.tar.gz", hash = "sha256:a6aaee154f798be4e479af0ceffaa5034d35fcf6f40707c0947d21bde64e05e5"}, + {file = "SQLAlchemy_Utils-0.37.8-py3-none-any.whl", hash = "sha256:b1bf67d904fed16b16ef1dc07f03e5e93a6b23899f920f6b41c09be45fbb85f2"}, +] [package.dependencies] six = "*" @@ -162,8 +372,8 @@ intervals = ["intervals (>=0.7.1)"] password = ["passlib (>=1.6,<2.0)"] pendulum = ["pendulum (>=2.0.5)"] phone = ["phonenumbers (>=5.9.2)"] -test = ["pytest (>=2.7.1)", "Pygments (>=1.2)", "Jinja2 (>=2.3)", "docutils (>=0.10)", "flexmock (>=0.9.7)", "mock (==2.0.0)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pg8000 (>=1.12.4)", "pytz (>=2014.2)", "python-dateutil (>=2.6)", "pymysql", "flake8 (>=2.4.0)", "isort (>=4.2.2)", "pyodbc", "backports.zoneinfo"] -test_all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "anyjson (>=0.3.3)", "arrow (>=0.3.4)", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", "intervals (>=0.7.1)", "isort (>=4.2.2)", "mock (==2.0.0)", "passlib (>=1.6,<2.0)", "pendulum (>=2.0.5)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (>=2.7.1)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)", "backports.zoneinfo"] +test = ["Jinja2 (>=2.3)", "Pygments (>=1.2)", "backports.zoneinfo", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "isort (>=4.2.2)", "mock (==2.0.0)", "pg8000 (>=1.12.4)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (>=2.7.1)", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] +test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "anyjson (>=0.3.3)", "arrow (>=0.3.4)", "backports.zoneinfo", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", "intervals (>=0.7.1)", "isort (>=4.2.2)", "mock (==2.0.0)", "passlib (>=1.6,<2.0)", "pendulum (>=2.0.5)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (>=2.7.1)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] timezone = ["python-dateutil"] url = ["furl (>=0.4.1)"] @@ -174,132 +384,40 @@ description = "Python Library for Tom's Obvious, Minimal Language" category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = 
"sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.9.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, +] [extras] mysql = ["mysql-connector-python-rf"] postgresql = ["psycopg2-binary"] [metadata] -lock-version = "1.1" +lock-version = "2.0" python-versions = "^3.8" -content-hash = "5f162c6b7078d723ec7afa904e0d98703ec83d5222cebbbe1922cafce02f0df3" - -[metadata.files] -atomicwrites = [ - {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, - {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, -] -attrs = [ - {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, - {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, -] -colorama = [ - {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, - {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, -] -iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, -] -mysql-connector-python-rf = [ - {file = "mysql-connector-python-rf-2.2.2.tar.gz", hash = "sha256:f8a19ef79d2e28ad28c740e3b757d60ff6077e1a08c357eb31dfa6839aaaf294"}, -] -packaging = [ - {file = "packaging-21.0-py3-none-any.whl", hash = "sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14"}, - {file = "packaging-21.0.tar.gz", hash = "sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7"}, -] -pluggy = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, -] -psycopg2-binary = [ - {file = "psycopg2-binary-2.9.1.tar.gz", hash = "sha256:b0221ca5a9837e040ebf61f48899926b5783668b7807419e4adae8175a31f773"}, - {file = "psycopg2_binary-2.9.1-cp36-cp36m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:c250a7ec489b652c892e4f0a5d122cc14c3780f9f643e1a326754aedf82d9a76"}, - {file = "psycopg2_binary-2.9.1-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aef9aee84ec78af51107181d02fe8773b100b01c5dfde351184ad9223eab3698"}, - {file = "psycopg2_binary-2.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123c3fb684e9abfc47218d3784c7b4c47c8587951ea4dd5bc38b6636ac57f616"}, - {file = "psycopg2_binary-2.9.1-cp36-cp36m-manylinux_2_24_aarch64.whl", hash = "sha256:995fc41ebda5a7a663a254a1dcac52638c3e847f48307b5416ee373da15075d7"}, - {file = "psycopg2_binary-2.9.1-cp36-cp36m-manylinux_2_24_ppc64le.whl", hash = 
"sha256:fbb42a541b1093385a2d8c7eec94d26d30437d0e77c1d25dae1dcc46741a385e"}, - {file = "psycopg2_binary-2.9.1-cp36-cp36m-win32.whl", hash = "sha256:20f1ab44d8c352074e2d7ca67dc00843067788791be373e67a0911998787ce7d"}, - {file = "psycopg2_binary-2.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f6fac64a38f6768e7bc7b035b9e10d8a538a9fadce06b983fb3e6fa55ac5f5ce"}, - {file = "psycopg2_binary-2.9.1-cp37-cp37m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:1e3a362790edc0a365385b1ac4cc0acc429a0c0d662d829a50b6ce743ae61b5a"}, - {file = "psycopg2_binary-2.9.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f8559617b1fcf59a9aedba2c9838b5b6aa211ffedecabca412b92a1ff75aac1a"}, - {file = "psycopg2_binary-2.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a36c7eb6152ba5467fb264d73844877be8b0847874d4822b7cf2d3c0cb8cdcb0"}, - {file = "psycopg2_binary-2.9.1-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:2f62c207d1740b0bde5c4e949f857b044818f734a3d57f1d0d0edc65050532ed"}, - {file = "psycopg2_binary-2.9.1-cp37-cp37m-manylinux_2_24_ppc64le.whl", hash = "sha256:cfc523edecddaef56f6740d7de1ce24a2fdf94fd5e704091856a201872e37f9f"}, - {file = "psycopg2_binary-2.9.1-cp37-cp37m-win32.whl", hash = "sha256:1e85b74cbbb3056e3656f1cc4781294df03383127a8114cbc6531e8b8367bf1e"}, - {file = "psycopg2_binary-2.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:1473c0215b0613dd938db54a653f68251a45a78b05f6fc21af4326f40e8360a2"}, - {file = "psycopg2_binary-2.9.1-cp38-cp38-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:35c4310f8febe41f442d3c65066ca93cccefd75013df3d8c736c5b93ec288140"}, - {file = "psycopg2_binary-2.9.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c13d72ed6af7fd2c8acbd95661cf9477f94e381fce0792c04981a8283b52917"}, - {file = "psycopg2_binary-2.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14db1752acdd2187d99cb2ca0a1a6dfe57fc65c3281e0f20e597aac8d2a5bd90"}, - {file = "psycopg2_binary-2.9.1-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:aed4a9a7e3221b3e252c39d0bf794c438dc5453bc2963e8befe9d4cd324dff72"}, - {file = "psycopg2_binary-2.9.1-cp38-cp38-manylinux_2_24_ppc64le.whl", hash = "sha256:da113b70f6ec40e7d81b43d1b139b9db6a05727ab8be1ee559f3a69854a69d34"}, - {file = "psycopg2_binary-2.9.1-cp38-cp38-win32.whl", hash = "sha256:4235f9d5ddcab0b8dbd723dca56ea2922b485ea00e1dafacf33b0c7e840b3d32"}, - {file = "psycopg2_binary-2.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:988b47ac70d204aed01589ed342303da7c4d84b56c2f4c4b8b00deda123372bf"}, - {file = "psycopg2_binary-2.9.1-cp39-cp39-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:7360647ea04db2e7dff1648d1da825c8cf68dc5fbd80b8fb5b3ee9f068dcd21a"}, - {file = "psycopg2_binary-2.9.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca86db5b561b894f9e5f115d6a159fff2a2570a652e07889d8a383b5fae66eb4"}, - {file = "psycopg2_binary-2.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ced67f1e34e1a450cdb48eb53ca73b60aa0af21c46b9b35ac3e581cf9f00e31"}, - {file = "psycopg2_binary-2.9.1-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:0f2e04bd2a2ab54fa44ee67fe2d002bb90cee1c0f1cc0ebc3148af7b02034cbd"}, - {file = "psycopg2_binary-2.9.1-cp39-cp39-manylinux_2_24_ppc64le.whl", hash = 
"sha256:3242b9619de955ab44581a03a64bdd7d5e470cc4183e8fcadd85ab9d3756ce7a"}, - {file = "psycopg2_binary-2.9.1-cp39-cp39-win32.whl", hash = "sha256:0b7dae87f0b729922e06f85f667de7bf16455d411971b2043bbd9577af9d1975"}, - {file = "psycopg2_binary-2.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:b4d7679a08fea64573c969f6994a2631908bb2c0e69a7235648642f3d2e39a68"}, -] -py = [ - {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, - {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, -] -pyparsing = [ - {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, - {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, -] -pytest = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, -] -six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] -sqlalchemy = [ - {file = "SQLAlchemy-1.3.24-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:87a2725ad7d41cd7376373c15fd8bf674e9c33ca56d0b8036add2d634dba372e"}, - {file = "SQLAlchemy-1.3.24-cp27-cp27m-win32.whl", hash = "sha256:f597a243b8550a3a0b15122b14e49d8a7e622ba1c9d29776af741f1845478d79"}, - {file = "SQLAlchemy-1.3.24-cp27-cp27m-win_amd64.whl", hash = "sha256:fc4cddb0b474b12ed7bdce6be1b9edc65352e8ce66bc10ff8cbbfb3d4047dbf4"}, - {file = "SQLAlchemy-1.3.24-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:f1149d6e5c49d069163e58a3196865e4321bad1803d7886e07d8710de392c548"}, - {file = "SQLAlchemy-1.3.24-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:14f0eb5db872c231b20c18b1e5806352723a3a89fb4254af3b3e14f22eaaec75"}, - {file = "SQLAlchemy-1.3.24-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:e98d09f487267f1e8d1179bf3b9d7709b30a916491997137dd24d6ae44d18d79"}, - {file = "SQLAlchemy-1.3.24-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:fc1f2a5a5963e2e73bac4926bdaf7790c4d7d77e8fc0590817880e22dd9d0b8b"}, - {file = "SQLAlchemy-1.3.24-cp35-cp35m-win32.whl", hash = "sha256:f3c5c52f7cb8b84bfaaf22d82cb9e6e9a8297f7c2ed14d806a0f5e4d22e83fb7"}, - {file = "SQLAlchemy-1.3.24-cp35-cp35m-win_amd64.whl", hash = "sha256:0352db1befcbed2f9282e72843f1963860bf0e0472a4fa5cf8ee084318e0e6ab"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:2ed6343b625b16bcb63c5b10523fd15ed8934e1ed0f772c534985e9f5e73d894"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:34fcec18f6e4b24b4a5f6185205a04f1eab1e56f8f1d028a2a03694ebcc2ddd4"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:e47e257ba5934550d7235665eee6c911dc7178419b614ba9e1fbb1ce6325b14f"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:816de75418ea0953b5eb7b8a74933ee5a46719491cd2b16f718afc4b291a9658"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-win32.whl", hash = "sha256:26155ea7a243cbf23287f390dba13d7927ffa1586d3208e0e8d615d0c506f996"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-win_amd64.whl", hash = 
"sha256:f03bd97650d2e42710fbe4cf8a59fae657f191df851fc9fc683ecef10746a375"}, - {file = "SQLAlchemy-1.3.24-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:a006d05d9aa052657ee3e4dc92544faae5fcbaafc6128217310945610d862d39"}, - {file = "SQLAlchemy-1.3.24-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1e2f89d2e5e3c7a88e25a3b0e43626dba8db2aa700253023b82e630d12b37109"}, - {file = "SQLAlchemy-1.3.24-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:0d5d862b1cfbec5028ce1ecac06a3b42bc7703eb80e4b53fceb2738724311443"}, - {file = "SQLAlchemy-1.3.24-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:0172423a27fbcae3751ef016663b72e1a516777de324a76e30efa170dbd3dd2d"}, - {file = "SQLAlchemy-1.3.24-cp37-cp37m-win32.whl", hash = "sha256:d37843fb8df90376e9e91336724d78a32b988d3d20ab6656da4eb8ee3a45b63c"}, - {file = "SQLAlchemy-1.3.24-cp37-cp37m-win_amd64.whl", hash = "sha256:c10ff6112d119f82b1618b6dc28126798481b9355d8748b64b9b55051eb4f01b"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:861e459b0e97673af6cc5e7f597035c2e3acdfb2608132665406cded25ba64c7"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5de2464c254380d8a6c20a2746614d5a436260be1507491442cf1088e59430d2"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d375d8ccd3cebae8d90270f7aa8532fe05908f79e78ae489068f3b4eee5994e8"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:014ea143572fee1c18322b7908140ad23b3994036ef4c0d630110faf942652f8"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-win32.whl", hash = "sha256:6607ae6cd3a07f8a4c3198ffbf256c261661965742e2b5265a77cd5c679c9bba"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-win_amd64.whl", hash = "sha256:fcb251305fa24a490b6a9ee2180e5f8252915fb778d3dafc70f9cc3f863827b9"}, - {file = "SQLAlchemy-1.3.24-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:01aa5f803db724447c1d423ed583e42bf5264c597fd55e4add4301f163b0be48"}, - {file = "SQLAlchemy-1.3.24-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:4d0e3515ef98aa4f0dc289ff2eebb0ece6260bbf37c2ea2022aad63797eacf60"}, - {file = "SQLAlchemy-1.3.24-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:bce28277f308db43a6b4965734366f533b3ff009571ec7ffa583cb77539b84d6"}, - {file = "SQLAlchemy-1.3.24-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:8110e6c414d3efc574543109ee618fe2c1f96fa31833a1ff36cc34e968c4f233"}, - {file = "SQLAlchemy-1.3.24-cp39-cp39-win32.whl", hash = "sha256:ee5f5188edb20a29c1cc4a039b074fdc5575337c9a68f3063449ab47757bb064"}, - {file = "SQLAlchemy-1.3.24-cp39-cp39-win_amd64.whl", hash = "sha256:09083c2487ca3c0865dc588e07aeaa25416da3d95f7482c07e92f47e080aa17b"}, - {file = "SQLAlchemy-1.3.24.tar.gz", hash = "sha256:ebbb777cbf9312359b897bf81ba00dae0f5cb69fba2a18265dcc18a6f5ef7519"}, -] -sqlalchemy-utils = [ - {file = "SQLAlchemy-Utils-0.37.8.tar.gz", hash = "sha256:a6aaee154f798be4e479af0ceffaa5034d35fcf6f40707c0947d21bde64e05e5"}, - {file = "SQLAlchemy_Utils-0.37.8-py3-none-any.whl", hash = "sha256:b1bf67d904fed16b16ef1dc07f03e5e93a6b23899f920f6b41c09be45fbb85f2"}, -] -toml = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] +content-hash = "9657d7d746a17107b50ab54a082568af8eb8340057c36141ff1dd90e6c3a3e48" diff --git a/pyproject.toml b/pyproject.toml index ee63bfb..086f6ad 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ 
-17,6 +17,9 @@ psycopg2-binary = { version = "2.9.1", optional = true } pytest = "^6.2.5" sqlalchemy-utils = "^0.37.8" +[tool.poetry.group.dev.dependencies] +black = "^24.2.0" + [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" diff --git a/sqlalchemy_filters/filters.py b/sqlalchemy_filters/filters.py index 1bb3310..99fb14b 100644 --- a/sqlalchemy_filters/filters.py +++ b/sqlalchemy_filters/filters.py @@ -17,12 +17,12 @@ ) BooleanFunction = namedtuple( - 'BooleanFunction', ('key', 'sqlalchemy_fn', 'only_one_arg') + "BooleanFunction", ("key", "sqlalchemy_fn", "only_one_arg") ) BOOLEAN_FUNCTIONS = [ - BooleanFunction('or', or_, False), - BooleanFunction('and', and_, False), - BooleanFunction('not', not_, True), + BooleanFunction("or", or_, False), + BooleanFunction("and", and_, False), + BooleanFunction("not", not_, True), ] """ Sqlalchemy boolean functions that can be parsed from the filter definition. @@ -31,36 +31,36 @@ class Operator(object): OPERATORS = { - 'is_null': lambda f: f.is_(None), - 'is_not_null': lambda f: f.isnot(None), - '==': lambda f, a: f == a, - 'eq': lambda f, a: f == a, - '!=': lambda f, a: f != a, - 'ne': lambda f, a: f != a, - '>': lambda f, a: f > a, - 'gt': lambda f, a: f > a, - '<': lambda f, a: f < a, - 'lt': lambda f, a: f < a, - '>=': lambda f, a: f >= a, - 'ge': lambda f, a: f >= a, - '<=': lambda f, a: f <= a, - 'le': lambda f, a: f <= a, - 'like': lambda f, a: f.like(a), - 'ilike': lambda f, a: f.ilike(a), - 'not_ilike': lambda f, a: ~f.ilike(a), - 'in': lambda f, a: f.in_(a), - 'not_in': lambda f, a: ~f.in_(a), - 'any': lambda f, a: f.any(a), - 'not_any': lambda f, a: func.not_(f.any(a)), - 'in_set': lambda f, a: func.find_in_set(a, f), + "is_null": lambda f: f.is_(None), + "is_not_null": lambda f: f.isnot(None), + "==": lambda f, a: f == a, + "eq": lambda f, a: f == a, + "!=": lambda f, a: f != a, + "ne": lambda f, a: f != a, + ">": lambda f, a: f > a, + "gt": lambda f, a: f > a, + "<": lambda f, a: f < a, + "lt": lambda f, a: f < a, + ">=": lambda f, a: f >= a, + "ge": lambda f, a: f >= a, + "<=": lambda f, a: f <= a, + "le": lambda f, a: f <= a, + "like": lambda f, a: f.like(a), + "ilike": lambda f, a: f.ilike(a), + "not_ilike": lambda f, a: ~f.ilike(a), + "in": lambda f, a: f.in_(a), + "not_in": lambda f, a: ~f.in_(a), + "any": lambda f, a: f.any(a), + "not_any": lambda f, a: func.not_(f.any(a)), + "in_set": lambda f, a: func.find_in_set(a, f), } def __init__(self, operator=None): if not operator: - operator = '==' + operator = "==" if operator not in self.OPERATORS: - raise BadFilterFormat('Operator `{}` not valid.'.format(operator)) + raise BadFilterFormat("Operator `{}` not valid.".format(operator)) self.operator = operator self.function = self.OPERATORS[operator] @@ -73,26 +73,30 @@ def __init__(self, filter_spec): self.filter_spec = filter_spec try: - filter_spec['field'] + filter_spec["field"] except KeyError: - raise BadFilterFormat('`field` is a mandatory filter attribute.') + raise BadFilterFormat("`field` is a mandatory filter attribute.") except TypeError: raise BadFilterFormat( - 'Filter spec `{}` should be a dictionary.'.format(filter_spec) + "Filter spec `{}` should be a dictionary.".format(filter_spec) ) - self.operator = Operator(filter_spec.get('op')) - self.value = filter_spec.get('value') - value_present = True if 'value' in filter_spec else False + self.operator = Operator(filter_spec.get("op")) + self.value = filter_spec.get("value") + value_present = True if "value" in filter_spec else 
False if not value_present and self.operator.arity == 2: - raise BadFilterFormat('`value` must be provided.') + raise BadFilterFormat("`value` must be provided.") def get_named_models(self, model): - field = self.filter_spec['field'] - operator = self.filter_spec['op'] if 'op' in self.filter_spec else None + field = self.filter_spec["field"] + operator = self.filter_spec["op"] if "op" in self.filter_spec else None models = get_relationship_models(model, field) - return (list(), models) if should_filter_outer_join_relationship(operator) else (models, list()) + return ( + (list(), models) + if should_filter_outer_join_relationship(operator) + else (models, list()) + ) def format_for_sqlalchemy(self, query, default_model): filter_spec = self.filter_spec @@ -104,7 +108,7 @@ def format_for_sqlalchemy(self, query, default_model): function = operator.function arity = operator.arity - field_name = self.filter_spec['field'] + field_name = self.filter_spec["field"] field = Field(model, field_name) sqlalchemy_field = field.get_sqlalchemy_field() @@ -133,28 +137,26 @@ def get_named_models(self, base_model): return models_inner_join, models_outer_join def format_for_sqlalchemy(self, query, default_model): - return self.function(*[ - filter.format_for_sqlalchemy(query, default_model) - for filter in self.filters - ]) + return self.function( + *[ + filter.format_for_sqlalchemy(query, default_model) + for filter in self.filters + ] + ) def _is_iterable_filter(filter_spec): - """ `filter_spec` may be a list of nested filter specs, or a dict. - """ - return ( - isinstance(filter_spec, Iterable) and - not isinstance(filter_spec, (string_types, dict)) + """`filter_spec` may be a list of nested filter specs, or a dict.""" + return isinstance(filter_spec, Iterable) and not isinstance( + filter_spec, (string_types, dict) ) def build_filters(filter_spec): - """ Recursively process `filter_spec` """ + """Recursively process `filter_spec`""" if _is_iterable_filter(filter_spec): - return list(chain.from_iterable( - build_filters(item) for item in filter_spec - )) + return list(chain.from_iterable(build_filters(item) for item in filter_spec)) if isinstance(filter_spec, dict): # Check if filter spec defines a boolean function. 
@@ -166,18 +168,16 @@ def build_filters(filter_spec): if not _is_iterable_filter(fn_args): raise BadFilterFormat( - '`{}` value must be an iterable across the function ' - 'arguments'.format(boolean_function.key) + "`{}` value must be an iterable across the function " + "arguments".format(boolean_function.key) ) if boolean_function.only_one_arg and len(fn_args) != 1: raise BadFilterFormat( - '`{}` must have one argument'.format( - boolean_function.key - ) + "`{}` must have one argument".format(boolean_function.key) ) if not boolean_function.only_one_arg and len(fn_args) < 1: raise BadFilterFormat( - '`{}` must have one or more arguments'.format( + "`{}` must have one or more arguments".format( boolean_function.key ) ) @@ -247,8 +247,7 @@ def apply_filters(model, query, filter_spec, do_auto_join=True): query = auto_join(query, inner_join_models, outer_join_models) sqlalchemy_filters = [ - filter.format_for_sqlalchemy(query, model) - for filter in filters + filter.format_for_sqlalchemy(query, model) for filter in filters ] if sqlalchemy_filters: diff --git a/sqlalchemy_filters/models.py b/sqlalchemy_filters/models.py index aa8291c..4b7882e 100644 --- a/sqlalchemy_filters/models.py +++ b/sqlalchemy_filters/models.py @@ -11,7 +11,7 @@ def sqlalchemy_version_lt(version): """compares sqla version < version""" - return tuple(sqlalchemy_version.split('.')) < tuple(version.split('.')) + return tuple(sqlalchemy_version.split(".")) < tuple(version.split(".")) class Field(object): @@ -25,9 +25,7 @@ def get_sqlalchemy_field(self): if sqlalchemy_field is None: raise FieldNotFound( - 'Model {} has no column `{}`.'.format( - self.model, self.field_name - ) + "Model {} has no column `{}`.".format(self.model, self.field_name) ) # If it's a hybrid method, then we call it so that we can work with @@ -39,11 +37,11 @@ def get_sqlalchemy_field(self): def _is_hybrid_property(orm_descriptor): - return orm_descriptor.extension_type == symbol('HYBRID_PROPERTY') + return orm_descriptor.extension_type == symbol("HYBRID_PROPERTY") def _is_hybrid_method(orm_descriptor): - return orm_descriptor.extension_type == symbol('HYBRID_METHOD') + return orm_descriptor.extension_type == symbol("HYBRID_METHOD") def get_relationship_models(model, field): @@ -55,7 +53,9 @@ def get_relationship_models(model, field): # Find all relationships. 
for i in range(1, len(parts)): - if (column := find_nested_relationship_model(inspect(model), parts[0:i])) is not None: + if ( + column := find_nested_relationship_model(inspect(model), parts[0:i]) + ) is not None: relationships.append(column.class_attribute) return relationships @@ -64,12 +64,12 @@ def get_relationship_models(model, field): def should_filter_outer_join_relationship(operator): - return operator == 'is_null' + return operator == "is_null" def should_sort_outer_join_relationship(models): for rel_model in models: - if rel_model.prop.direction == symbol('ONETOMANY'): + if rel_model.prop.direction == symbol("ONETOMANY"): return True elif any(column.nullable for column in rel_model.prop.local_columns): return True @@ -81,7 +81,11 @@ def find_nested_relationship_model(mapper, field): if (part := parts[0]) in mapper.relationships: related_field = mapper.relationships[part] - return find_nested_relationship_model(related_field.mapper, ".".join(parts[1::])) if len(parts) > 1 else related_field + return ( + find_nested_relationship_model(related_field.mapper, ".".join(parts[1::])) + if len(parts) > 1 + else related_field + ) else: return None @@ -95,24 +99,38 @@ def get_nested_column(model, field): mapper = inspect(model) orm_descriptors = mapper.all_orm_descriptors hybrid_fields = [ - key for key, item in orm_descriptors.items() + key + for key, item in orm_descriptors.items() if _is_hybrid_property(item) or _is_hybrid_method(item) ] # Search in own model fields if len(parts) == 1: - if field in mapper.columns or field in mapper.composites or field in hybrid_fields: + if ( + field in mapper.columns + or field in mapper.composites + or field in hybrid_fields + ): return getattr(model, field) else: return None # Search in relationships. if (part := parts[0]) in mapper.relationships: - return get_nested_column(getattr(model, part).property.entity.class_, ".".join(parts[1::])) + return get_nested_column( + getattr(model, part).property.entity.class_, ".".join(parts[1::]) + ) else: return None - - + + +def get_model_class_by_name(registry, name): + """Return the model class matching `name` in the given `registry`.""" + for cls in registry.values(): + if getattr(cls, "__name__", None) == name: + return cls + + def get_model_from_table(table): # pragma: no_cover_sqlalchemy_lt_1_4 """Resolve model class from table object""" @@ -132,17 +150,19 @@ def get_query_models(query): :returns: A dictionary with all the models included in the query. 
""" - models = [col_desc['entity'] for col_desc in query.column_descriptions if col_desc['entity']] + models = [ + col_desc["entity"] + for col_desc in query.column_descriptions + if col_desc["entity"] + ] # account joined entities - if sqlalchemy_version_lt('1.4'): # pragma: no_cover_sqlalchemy_gte_1_4 + if sqlalchemy_version_lt("1.4"): # pragma: no_cover_sqlalchemy_gte_1_4 models.extend(mapper.class_ for mapper in query._join_entities) else: # pragma: no_cover_sqlalchemy_lt_1_4 try: models.extend( - mapper.class_ - for mapper - in query._compile_state()._join_entities + mapper.class_ for mapper in query._compile_state()._join_entities ) except InvalidRequestError: # query might not contain columns yet, hence cannot be compiled @@ -154,11 +174,11 @@ def get_query_models(query): # account also query.select_from entities model_class = None - if sqlalchemy_version_lt('1.4'): # pragma: no_cover_sqlalchemy_gte_1_4 + if sqlalchemy_version_lt("1.4"): # pragma: no_cover_sqlalchemy_gte_1_4 if query._select_from_entity: model_class = ( query._select_from_entity - if sqlalchemy_version_lt('1.1') + if sqlalchemy_version_lt("1.1") else query._select_from_entity.class_ ) else: # pragma: no_cover_sqlalchemy_lt_1_4 @@ -171,7 +191,7 @@ def get_query_models(query): def get_model_from_spec(spec, query, default_model=None): - """ Determine the model to which a spec applies on a given query. + """Determine the model to which a spec applies on a given query. A spec that does not specify a model may be applied to a query that contains a single model. Otherwise the spec must specify the model to @@ -196,15 +216,13 @@ def get_model_from_spec(spec, query, default_model=None): """ models = get_query_models(query) if not models: - raise BadQuery('The query does not contain any models.') + raise BadQuery("The query does not contain any models.") - model_name = spec.get('model') + model_name = spec.get("model") if model_name is not None: models = [v for (k, v) in models.items() if k == model_name] if not models: - raise BadSpec( - 'The query does not contain model `{}`.'.format(model_name) - ) + raise BadSpec("The query does not contain model `{}`.".format(model_name)) model = models[0] else: if len(models) == 1: @@ -218,20 +236,19 @@ def get_model_from_spec(spec, query, default_model=None): def get_default_model(query): - """ Return the singular model from `query`, or `None` if `query` contains + """Return the singular model from `query`, or `None` if `query` contains multiple models. """ query_models = get_query_models(query).values() if len(query_models) == 1: - default_model, = iter(query_models) + (default_model,) = iter(query_models) else: default_model = None return default_model def auto_join(query, inner_join_relationships, outer_join_relationships): - """ Automatically join models to `query` if they're not already present. 
- """ + """Automatically join models to `query` if they're not already present.""" for relationship in outer_join_relationships: query = join_relationship(query, relationship, True) diff --git a/sqlalchemy_filters/pagination.py b/sqlalchemy_filters/pagination.py index 6f63994..6609f83 100644 --- a/sqlalchemy_filters/pagination.py +++ b/sqlalchemy_filters/pagination.py @@ -55,8 +55,7 @@ def apply_pagination(query, page_number=None, page_size=None): num_pages = _calculate_num_pages(page_number, page_size, total_results) Pagination = namedtuple( - 'Pagination', - ['page_number', 'page_size', 'num_pages', 'total_results'] + "Pagination", ["page_number", "page_size", "num_pages", "total_results"] ) return query, Pagination(page_number, page_size, num_pages, total_results) @@ -64,9 +63,7 @@ def apply_pagination(query, page_number=None, page_size=None): def _limit(query, page_size): if page_size is not None: if page_size < 0: - raise InvalidPage( - 'Page size should not be negative: {}'.format(page_size) - ) + raise InvalidPage("Page size should not be negative: {}".format(page_size)) query = query.limit(page_size) @@ -76,9 +73,7 @@ def _limit(query, page_size): def _offset(query, page_number, page_size): if page_number is not None: if page_number < 1: - raise InvalidPage( - 'Page number should be positive: {}'.format(page_number) - ) + raise InvalidPage("Page number should be positive: {}".format(page_number)) query = query.offset((page_number - 1) * page_size) diff --git a/sqlalchemy_filters/sorting.py b/sqlalchemy_filters/sorting.py index 680d46e..b4409e0 100644 --- a/sqlalchemy_filters/sorting.py +++ b/sqlalchemy_filters/sorting.py @@ -9,8 +9,8 @@ should_sort_outer_join_relationship, ) -SORT_ASCENDING = 'asc' -SORT_DESCENDING = 'desc' +SORT_ASCENDING = "asc" +SORT_DESCENDING = "desc" class Sort(object): @@ -19,30 +19,32 @@ def __init__(self, sort_spec): self.sort_spec = sort_spec try: - field_name = sort_spec['field'] - direction = sort_spec['direction'] + field_name = sort_spec["field"] + direction = sort_spec["direction"] except KeyError: - raise BadSortFormat( - '`field` and `direction` are mandatory attributes.' 
- ) + raise BadSortFormat("`field` and `direction` are mandatory attributes.") except TypeError: raise BadSortFormat( - 'Sort spec `{}` should be a dictionary.'.format(sort_spec) + "Sort spec `{}` should be a dictionary.".format(sort_spec) ) if direction not in [SORT_ASCENDING, SORT_DESCENDING]: - raise BadSortFormat('Direction `{}` not valid.'.format(direction)) + raise BadSortFormat("Direction `{}` not valid.".format(direction)) self.field_name = field_name self.direction = direction - self.nullsfirst = sort_spec.get('nullsfirst') - self.nullslast = sort_spec.get('nullslast') + self.nullsfirst = sort_spec.get("nullsfirst") + self.nullslast = sort_spec.get("nullslast") def get_named_models(self, model): - field = self.sort_spec['field'] + field = self.sort_spec["field"] models = get_relationship_models(model, field) - return (list(), models) if should_sort_outer_join_relationship(models) else (models, list()) + return ( + (list(), models) + if should_sort_outer_join_relationship(models) + else (models, list()) + ) def format_for_sqlalchemy(self, query, default_model): sort_spec = self.sort_spec @@ -123,9 +125,7 @@ def apply_sort(model, query, sort_spec): inner_join_models, outer_join_models = get_named_models(model, sorts) query = auto_join(query, inner_join_models, outer_join_models) - sqlalchemy_sorts = [ - sort.format_for_sqlalchemy(query, model) for sort in sorts - ] + sqlalchemy_sorts = [sort.format_for_sqlalchemy(query, model) for sort in sorts] if sqlalchemy_sorts: query = query.order_by(*sqlalchemy_sorts) diff --git a/test/conftest.py b/test/conftest.py index 99e9be2..dae2b72 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -7,62 +7,59 @@ from test.models import Base, BasePostgresqlSpecific, BaseMysqlSpecific -SQLITE_TEST_DB_URI = 'SQLITE_TEST_DB_URI' -MYSQL_TEST_DB_URI = 'MYSQL_TEST_DB_URI' -POSTGRESQL_TEST_DB_URI = 'POSTGRESQL_TEST_DB_URI' +SQLITE_TEST_DB_URI = "SQLITE_TEST_DB_URI" +MYSQL_TEST_DB_URI = "MYSQL_TEST_DB_URI" +POSTGRESQL_TEST_DB_URI = "POSTGRESQL_TEST_DB_URI" def pytest_addoption(parser): parser.addoption( - '--sqlite-test-db-uri', - action='store', + "--sqlite-test-db-uri", + action="store", dest=SQLITE_TEST_DB_URI, - default='sqlite+pysqlite:///test_sqlalchemy_filters.db', + default="sqlite+pysqlite:///test_sqlalchemy_filters.db", help=( - 'DB uri for testing (e.g. ' + "DB uri for testing (e.g. " '"sqlite+pysqlite:///test_sqlalchemy_filters.db")' - ) + ), ) parser.addoption( - '--mysql-test-db-uri', - action='store', + "--mysql-test-db-uri", + action="store", dest=MYSQL_TEST_DB_URI, default=( - 'mysql+mysqlconnector://root:@localhost:3306' - '/test_sqlalchemy_filters' + "mysql+mysqlconnector://root:@localhost:3306" "/test_sqlalchemy_filters" ), help=( - 'DB uri for testing (e.g. ' + "DB uri for testing (e.g. " '"mysql+mysqlconnector://username:password@localhost:3306' '/test_sqlalchemy_filters")' - ) + ), ) parser.addoption( - '--postgresql-test-db-uri', - action='store', + "--postgresql-test-db-uri", + action="store", dest=POSTGRESQL_TEST_DB_URI, default=( - 'postgresql+psycopg2://postgres:@localhost:5432' - '/test_sqlalchemy_filters?client_encoding=utf8' + "postgresql+psycopg2://postgres:@localhost:5432" + "/test_sqlalchemy_filters?client_encoding=utf8" ), help=( - 'DB uri for testing (e.g. ' + "DB uri for testing (e.g. 
" '"postgresql+psycopg2://username:password@localhost:5432' '/test_sqlalchemy_filters?client_encoding=utf8")' - ) + ), ) -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def config(request): return { SQLITE_TEST_DB_URI: request.config.getoption(SQLITE_TEST_DB_URI), MYSQL_TEST_DB_URI: request.config.getoption(MYSQL_TEST_DB_URI), - POSTGRESQL_TEST_DB_URI: request.config.getoption( - POSTGRESQL_TEST_DB_URI - ), + POSTGRESQL_TEST_DB_URI: request.config.getoption(POSTGRESQL_TEST_DB_URI), } @@ -88,43 +85,40 @@ def test_db_keys(): return test_db_uris -@pytest.fixture(scope='session', params=test_db_keys()) +@pytest.fixture(scope="session", params=test_db_keys()) def db_uri(request, config): return config[request.param] -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def is_postgresql(db_uri): - if 'postgresql' in db_uri: + if "postgresql" in db_uri: return True return False -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def is_mysql(db_uri): - if 'mysql' in db_uri: + if "mysql" in db_uri: return True return False -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def is_sqlite(db_uri): - if 'sqlite' in db_uri: + if "sqlite" in db_uri: return True return False -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def db_engine_options(db_uri, is_postgresql): if is_postgresql: - return dict( - client_encoding='utf8', - connect_args={'client_encoding': 'utf8'} - ) + return dict(client_encoding="utf8", connect_args={"client_encoding": "utf8"}) return {} -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def connection(db_uri, db_engine_options, is_postgresql, is_mysql): create_db(db_uri) engine = create_engine(db_uri, **db_engine_options) @@ -165,12 +159,12 @@ def session(connection, is_postgresql, is_mysql): def create_db(uri): - """Drop the database at ``uri`` and create a brand new one. """ + """Drop the database at ``uri`` and create a brand new one.""" destroy_database(uri) create_database(uri) def destroy_database(uri): - """Destroy the database at ``uri``, if it exists. 
""" + """Destroy the database at ``uri``, if it exists.""" if database_exists(uri): drop_database(uri) diff --git a/test/interface/test_filters.py b/test/interface/test_filters.py index 28c5301..90520c6 100644 --- a/test/interface/test_filters.py +++ b/test/interface/test_filters.py @@ -8,43 +8,36 @@ from sqlalchemy.orm import joinedload from sqlalchemy_filters import apply_filters -from sqlalchemy_filters.exceptions import ( - BadFilterFormat, BadSpec, FieldNotFound -) +from sqlalchemy_filters.exceptions import BadFilterFormat, BadSpec, FieldNotFound from test.models import Foo, Bar, Qux, Corge, Grault, Garply, Point -ARRAY_NOT_SUPPORTED = ( - "ARRAY type and operators supported only by PostgreSQL" -) +ARRAY_NOT_SUPPORTED = "ARRAY type and operators supported only by PostgreSQL" -SET_NOT_SUPPORTED = ( - "SET type and operators supported only by MySQL" -) +SET_NOT_SUPPORTED = "SET type and operators supported only by MySQL" STRING_DATE_TIME_NOT_SUPPORTED = ( - "TODO: String Time / DateTime values currently not working as filters by " - "SQLite" + "TODO: String Time / DateTime values currently not working as filters by " "SQLite" ) @pytest.fixture def multiple_foos_inserted(session, multiple_bars_inserted): - foo_1 = Foo(id=1, bar_id=1, name='name_1', count=50) - foo_2 = Foo(id=2, bar_id=2, name='name_2', count=100) - foo_3 = Foo(id=3, bar_id=3, name='name_1', count=None) - foo_4 = Foo(id=4, bar_id=4, name='name_4', count=150) - foo_5 = Foo(id=5, name='name_1') + foo_1 = Foo(id=1, bar_id=1, name="name_1", count=50) + foo_2 = Foo(id=2, bar_id=2, name="name_2", count=100) + foo_3 = Foo(id=3, bar_id=3, name="name_1", count=None) + foo_4 = Foo(id=4, bar_id=4, name="name_4", count=150) + foo_5 = Foo(id=5, name="name_1") session.add_all([foo_1, foo_2, foo_3, foo_4, foo_5]) session.commit() @pytest.fixture def multiple_bars_inserted(session): - bar_1 = Bar(id=1, name='name_1', count=5) - bar_2 = Bar(id=2, name='name_2', count=10) - bar_3 = Bar(id=3, name='name_1', count=None) - bar_4 = Bar(id=4, name='name_4', count=15) + bar_1 = Bar(id=1, name="name_1", count=5) + bar_2 = Bar(id=2, name="name_2", count=10) + bar_3 = Bar(id=3, name="name_1", count=None) + bar_4 = Bar(id=4, name="name_4", count=15) session.add_all([bar_1, bar_2, bar_3, bar_4]) session.commit() @@ -52,26 +45,36 @@ def multiple_bars_inserted(session): @pytest.fixture def multiple_quxs_inserted(session): qux_1 = Qux( - id=1, name='name_1', count=5, + id=1, + name="name_1", + count=5, created_at=datetime.date(2016, 7, 12), execution_time=datetime.datetime(2016, 7, 12, 1, 5, 9), - expiration_time=datetime.time(1, 5, 9) + expiration_time=datetime.time(1, 5, 9), ) qux_2 = Qux( - id=2, name='name_2', count=10, + id=2, + name="name_2", + count=10, created_at=datetime.date(2016, 7, 13), execution_time=datetime.datetime(2016, 7, 13, 2, 5, 9), - expiration_time=datetime.time(2, 5, 9) + expiration_time=datetime.time(2, 5, 9), ) qux_3 = Qux( - id=3, name='name_1', count=None, - created_at=None, execution_time=None, expiration_time=None + id=3, + name="name_1", + count=None, + created_at=None, + execution_time=None, + expiration_time=None, ) qux_4 = Qux( - id=4, name='name_4', count=15, + id=4, + name="name_4", + count=15, created_at=datetime.date(2016, 7, 14), execution_time=datetime.datetime(2016, 7, 14, 3, 5, 9), - expiration_time=datetime.time(3, 5, 9) + expiration_time=datetime.time(3, 5, 9), ) session.add_all([qux_1, qux_2, qux_3, qux_4]) session.commit() @@ -80,10 +83,10 @@ def multiple_quxs_inserted(session): @pytest.fixture def 
multiple_corges_inserted(session, is_postgresql): if is_postgresql: - corge_1 = Corge(id=1, name='name_1', tags=[]) - corge_2 = Corge(id=2, name='name_2', tags=['foo']) - corge_3 = Corge(id=3, name='name_3', tags=['foo', 'bar']) - corge_4 = Corge(id=4, name='name_4', tags=['bar', 'baz']) + corge_1 = Corge(id=1, name="name_1", tags=[]) + corge_2 = Corge(id=2, name="name_2", tags=["foo"]) + corge_3 = Corge(id=3, name="name_3", tags=["foo", "bar"]) + corge_4 = Corge(id=4, name="name_4", tags=["bar", "baz"]) session.add_all([corge_1, corge_2, corge_3, corge_4]) session.commit() @@ -91,20 +94,20 @@ def multiple_corges_inserted(session, is_postgresql): @pytest.fixture def multiple_graults_inserted(session, is_mysql): if is_mysql: - grault_1 = Grault(id=1, name='name_1', types=["foo"]) - grault_2 = Grault(id=2, name='name_2', types=["foo", "bar"]) - grault_3 = Grault(id=3, name='name_3', types=["foo", "baz"]) - grault_4 = Grault(id=4, name='name_4', types=["foo", "bar", "baz"]) + grault_1 = Grault(id=1, name="name_1", types=["foo"]) + grault_2 = Grault(id=2, name="name_2", types=["foo", "bar"]) + grault_3 = Grault(id=3, name="name_3", types=["foo", "baz"]) + grault_4 = Grault(id=4, name="name_4", types=["foo", "bar", "baz"]) session.add_all([grault_1, grault_2, grault_3, grault_4]) session.commit() @pytest.fixture def multiple_garply_inserted(session, is_mysql): - garply_1 = Garply(id=1, name='name_1', count=1, x=1, y=1) - garply_2 = Garply(id=2, name='name_2', count=2, x=2, y=2) - garply_3 = Garply(id=3, name='name_3', count=3, x=3, y=3) - garply_4 = Garply(id=4, name='name_4', count=4, x=4, y=4) + garply_1 = Garply(id=1, name="name_1", count=1, x=1, y=1) + garply_2 = Garply(id=2, name="name_2", count=2, x=2, y=2) + garply_3 = Garply(id=3, name="name_3", count=3, x=3, y=3) + garply_4 = Garply(id=4, name="name_4", count=4, x=4, y=4) session.add_all([garply_1, garply_2, garply_3, garply_4]) session.commit() @@ -119,7 +122,7 @@ def test_no_filters_provided(self, session): assert query == filtered_query - @pytest.mark.parametrize('filter_', ['some text', 1, '']) + @pytest.mark.parametrize("filter_", ["some text", 1, ""]) def test_wrong_filters_format(self, session, filter_): query = session.query(Bar) filters = [filter_] @@ -127,24 +130,22 @@ def test_wrong_filters_format(self, session, filter_): with pytest.raises(BadFilterFormat) as err: apply_filters(Bar, query, filters) - expected_error = 'Filter spec `{}` should be a dictionary.'.format( - filter_ - ) + expected_error = "Filter spec `{}` should be a dictionary.".format(filter_) assert expected_error == err.value.args[0] def test_invalid_operator(self, session): query = session.query(Bar) - filters = [{'field': 'name', 'op': 'op_not_valid', 'value': 'name_1'}] + filters = [{"field": "name", "op": "op_not_valid", "value": "name_1"}] with pytest.raises(BadFilterFormat) as err: apply_filters(Bar, query, filters) - assert 'Operator `op_not_valid` not valid.' == err.value.args[0] + assert "Operator `op_not_valid` not valid." 
== err.value.args[0] - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_no_operator_provided(self, session): query = session.query(Bar) - filters = [{'field': 'name', 'value': 'name_1'}] + filters = [{"field": "name", "value": "name_1"}] filtered_query = apply_filters(Bar, query, filters) result = filtered_query.all() @@ -155,28 +156,33 @@ def test_no_operator_provided(self, session): def test_no_field_provided(self, session): query = session.query(Bar) - filters = [{'op': '==', 'value': 'name_1'}] + filters = [{"op": "==", "value": "name_1"}] with pytest.raises(BadFilterFormat) as err: apply_filters(Bar, query, filters) - expected_error = '`field` is a mandatory filter attribute.' + expected_error = "`field` is a mandatory filter attribute." assert expected_error == err.value.args[0] # TODO: replace this test once we add the option to compare against # another field def test_no_value_provided(self, session): query = session.query(Bar) - filters = [{'field': 'name', 'op': '==', }] + filters = [ + { + "field": "name", + "op": "==", + } + ] with pytest.raises(BadFilterFormat) as err: apply_filters(Bar, query, filters) - assert '`value` must be provided.' == err.value.args[0] + assert "`value` must be provided." == err.value.args[0] def test_invalid_field(self, session): query = session.query(Bar) - filters = [{'field': 'invalid_field', 'op': '==', 'value': 'name_1'}] + filters = [{"field": "invalid_field", "op": "==", "value": "name_1"}] with pytest.raises(FieldNotFound) as err: apply_filters(Bar, query, filters) @@ -186,33 +192,34 @@ def test_invalid_field(self, session): ) assert expected_error == err.value.args[0] - @pytest.mark.parametrize('attr_name', [ - 'metadata', # model attribute - 'foos', # model relationship - ]) + @pytest.mark.parametrize( + "attr_name", + [ + "metadata", # model attribute + "foos", # model relationship + ], + ) def test_invalid_field_but_valid_model_attribute(self, session, attr_name): query = session.query(Bar) - filters = [{'field': attr_name, 'op': '==', 'value': 'name_1'}] + filters = [{"field": attr_name, "op": "==", "value": "name_1"}] with pytest.raises(FieldNotFound) as err: apply_filters(Bar, query, filters) - expected_error = ( - "Model has no column `{}`.".format( - attr_name - ) + expected_error = "Model has no column `{}`.".format( + attr_name ) assert expected_error == err.value.args[0] class TestAutoJoin: - @pytest.mark.usefixtures('multiple_foos_inserted') + @pytest.mark.usefixtures("multiple_foos_inserted") def test_auto_join(self, session): query = session.query(Foo) filters = [ - {'field': 'name', 'op': '==', 'value': 'name_1'}, - {'field': 'bar.count', 'op': 'is_null'}, + {"field": "name", "op": "==", "value": "name_1"}, + {"field": "bar.count", "op": "is_null"}, ] filtered_query = apply_filters(Foo, query, filters) @@ -223,11 +230,11 @@ def test_auto_join(self, session): assert result[0].bar_id == 3 assert result[0].bar.count is None - @pytest.mark.usefixtures('multiple_foos_inserted') + @pytest.mark.usefixtures("multiple_foos_inserted") def test_auto_outer_join(self, session): query = session.query(Foo) filters = [ - {'field': 'bar.name', 'op': 'is_null'}, + {"field": "bar.name", "op": "is_null"}, ] query = apply_filters(Foo, query, filters) @@ -238,12 +245,12 @@ def test_auto_outer_join(self, session): assert result[0].id == 5 assert result[0].bar_id is None - @pytest.mark.usefixtures('multiple_foos_inserted') + @pytest.mark.usefixtures("multiple_foos_inserted") def 
test_noop_if_query_contains_named_models(self, session): query = session.query(Foo).join(Bar) filters = [ - {'field': 'name', 'op': '==', 'value': 'name_1'}, - {'field': 'bar.count', 'op': 'is_null'}, + {"field": "name", "op": "==", "value": "name_1"}, + {"field": "bar.count", "op": "is_null"}, ] filtered_query = apply_filters(Foo, query, filters) @@ -254,14 +261,14 @@ def test_noop_if_query_contains_named_models(self, session): assert result[0].bar_id == 3 assert result[0].bar.count is None - @pytest.mark.usefixtures('multiple_foos_inserted') + @pytest.mark.usefixtures("multiple_foos_inserted") def test_eager_load(self, session): # behaves as if the joinedload wasn't present query = session.query(Foo).options(joinedload(Foo.bar)) filters = [ - {'field': 'name', 'op': '==', 'value': 'name_1'}, - {'field': 'bar.count', 'op': 'is_null'}, + {"field": "name", "op": "==", "value": "name_1"}, + {"field": "bar.count", "op": "is_null"}, ] filtered_query = apply_filters(Foo, query, filters) @@ -275,10 +282,10 @@ def test_eager_load(self, session): class TestApplyIsNullFilter: - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_filter_field_with_null_values(self, session): query = session.query(Bar) - filters = [{'field': 'count', 'op': 'is_null'}] + filters = [{"field": "count", "op": "is_null"}] filtered_query = apply_filters(Bar, query, filters) result = filtered_query.all() @@ -286,10 +293,10 @@ def test_filter_field_with_null_values(self, session): assert len(result) == 1 assert result[0].id == 3 - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_filter_field_with_no_null_values(self, session): query = session.query(Bar) - filters = [{'field': 'name', 'op': 'is_null'}] + filters = [{"field": "name", "op": "is_null"}] filtered_query = apply_filters(Bar, query, filters) result = filtered_query.all() @@ -299,10 +306,10 @@ def test_filter_field_with_no_null_values(self, session): class TestApplyIsNotNullFilter: - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_filter_field_with_null_values(self, session): query = session.query(Bar) - filters = [{'field': 'count', 'op': 'is_not_null'}] + filters = [{"field": "count", "op": "is_not_null"}] filtered_query = apply_filters(Bar, query, filters) result = filtered_query.all() @@ -312,10 +319,10 @@ def test_filter_field_with_null_values(self, session): assert result[1].id == 2 assert result[2].id == 4 - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_filter_field_with_no_null_values(self, session): query = session.query(Bar) - filters = [{'field': 'name', 'op': 'is_not_null'}] + filters = [{"field": "name", "op": "is_not_null"}] filtered_query = apply_filters(Bar, query, filters) result = filtered_query.all() @@ -329,53 +336,53 @@ def test_filter_field_with_no_null_values(self, session): class TestApplyFiltersMultipleTimes: - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_concatenate_queries(self, session): query = session.query(Bar) - filters = [{'field': 'name', 'op': '==', 'value': 'name_1'}] + filters = [{"field": "name", "op": "==", "value": "name_1"}] filtered_query = apply_filters(Bar, query, filters) result = filtered_query.all() assert len(result) == 2 assert result[0].id == 1 - assert result[0].name == 'name_1' + assert 
result[0].name == "name_1" assert result[1].id == 3 - assert result[1].name == 'name_1' + assert result[1].name == "name_1" - filters = [{'field': 'id', 'op': '==', 'value': 3}] + filters = [{"field": "id", "op": "==", "value": 3}] filtered_query = apply_filters(Bar, filtered_query, filters) result = filtered_query.all() assert len(result) == 1 assert result[0].id == 3 - assert result[0].name == 'name_1' + assert result[0].name == "name_1" class TestApplyFilterWithoutList: - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_a_single_dict_can_be_supplied_as_filters(self, session): query = session.query(Bar) - filters = {'field': 'name', 'op': '==', 'value': 'name_1'} + filters = {"field": "name", "op": "==", "value": "name_1"} filtered_query = apply_filters(Bar, query, filters) result = filtered_query.all() assert len(result) == 2 assert result[0].id == 1 - assert result[0].name == 'name_1' + assert result[0].name == "name_1" assert result[1].id == 3 - assert result[1].name == 'name_1' + assert result[1].name == "name_1" class TestApplyFilterOnFieldBasedQuery: - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_apply_filter_on_single_field_query(self, session): query = session.query(Bar.id) - filters = [{'field': 'name', 'op': '==', 'value': 'name_1'}] + filters = [{"field": "name", "op": "==", "value": "name_1"}] filtered_query = apply_filters(Bar, query, filters) result = filtered_query.all() @@ -384,10 +391,10 @@ def test_apply_filter_on_single_field_query(self, session): assert result[0] == (1,) assert result[1] == (3,) - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_apply_filter_on_aggregate_query(self, session): query = session.query(func.count(Bar.id)) - filters = [{'field': 'name', 'op': '==', 'value': 'name_1'}] + filters = [{"field": "name", "op": "==", "value": "name_1"}] filtered_query = apply_filters(Bar, query, filters) result = filtered_query.all() @@ -398,37 +405,36 @@ def test_apply_filter_on_aggregate_query(self, session): class TestApplyEqualToFilter: - @pytest.mark.parametrize('operator', ['==', 'eq']) - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.parametrize("operator", ["==", "eq"]) + @pytest.mark.usefixtures("multiple_bars_inserted") def test_one_filter_applied_to_a_single_model(self, session, operator): query = session.query(Bar) - filters = [{'field': 'name', 'op': operator, 'value': 'name_1'}] + filters = [{"field": "name", "op": operator, "value": "name_1"}] filtered_query = apply_filters(Bar, query, filters) result = filtered_query.all() assert len(result) == 2 assert result[0].id == 1 - assert result[0].name == 'name_1' + assert result[0].name == "name_1" assert result[1].id == 3 - assert result[1].name == 'name_1' + assert result[1].name == "name_1" @pytest.mark.parametrize( - 'filters', [ + "filters", + [ [ # filters using `==` in a list - {'field': 'name', 'op': '==', 'value': 'name_1'}, - {'field': 'id', 'op': '==', 'value': 3} + {"field": "name", "op": "==", "value": "name_1"}, + {"field": "id", "op": "==", "value": 3}, ], ( # filters using `eq` in a tuple - {'field': 'name', 'op': 'eq', 'value': 'name_1'}, - {'field': 'id', 'op': 'eq', 'value': 3} - ) - ] + {"field": "name", "op": "eq", "value": "name_1"}, + {"field": "id", "op": "eq", "value": 3}, + ), + ], ) - @pytest.mark.usefixtures('multiple_bars_inserted') - def 
test_multiple_filters_applied_to_a_single_model( - self, session, filters - ): + @pytest.mark.usefixtures("multiple_bars_inserted") + def test_multiple_filters_applied_to_a_single_model(self, session, filters): query = session.query(Bar) filtered_query = apply_filters(Bar, query, filters) @@ -436,35 +442,33 @@ def test_multiple_filters_applied_to_a_single_model( assert len(result) == 1 assert result[0].id == 3 - assert result[0].name == 'name_1' + assert result[0].name == "name_1" class TestApplyNotEqualToFilter: - @pytest.mark.parametrize('operator', ['!=', 'ne']) - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.parametrize("operator", ["!=", "ne"]) + @pytest.mark.usefixtures("multiple_bars_inserted") def test_one_filter_applied_to_a_single_model(self, session, operator): query = session.query(Bar) - filters = [{'field': 'name', 'op': operator, 'value': 'name_1'}] + filters = [{"field": "name", "op": operator, "value": "name_1"}] filtered_query = apply_filters(Bar, query, filters) result = filtered_query.all() assert len(result) == 2 assert result[0].id == 2 - assert result[0].name == 'name_2' + assert result[0].name == "name_2" assert result[1].id == 4 - assert result[1].name == 'name_4' + assert result[1].name == "name_4" - @pytest.mark.parametrize('operator', ['!=', 'ne']) - @pytest.mark.usefixtures('multiple_bars_inserted') - def test_multiple_filters_applied_to_a_single_model( - self, session, operator - ): + @pytest.mark.parametrize("operator", ["!=", "ne"]) + @pytest.mark.usefixtures("multiple_bars_inserted") + def test_multiple_filters_applied_to_a_single_model(self, session, operator): query = session.query(Bar) filters = [ - {'field': 'name', 'op': operator, 'value': 'name_2'}, - {'field': 'id', 'op': operator, 'value': 3} + {"field": "name", "op": operator, "value": "name_2"}, + {"field": "id", "op": operator, "value": 3}, ] filtered_query = apply_filters(Bar, query, filters) @@ -472,18 +476,18 @@ def test_multiple_filters_applied_to_a_single_model( assert len(result) == 2 assert result[0].id == 1 - assert result[0].name == 'name_1' + assert result[0].name == "name_1" assert result[1].id == 4 - assert result[1].name == 'name_4' + assert result[1].name == "name_4" class TestApplyGreaterThanFilter: - @pytest.mark.parametrize('operator', ['>', 'gt']) - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.parametrize("operator", [">", "gt"]) + @pytest.mark.usefixtures("multiple_bars_inserted") def test_one_filter_applied_to_a_single_model(self, session, operator): query = session.query(Bar) - filters = [{'field': 'count', 'op': operator, 'value': '5'}] + filters = [{"field": "count", "op": operator, "value": "5"}] filtered_query = apply_filters(Bar, query, filters) result = filtered_query.all() @@ -492,15 +496,13 @@ def test_one_filter_applied_to_a_single_model(self, session, operator): assert result[0].id == 2 assert result[1].id == 4 - @pytest.mark.parametrize('operator', ['>', 'gt']) - @pytest.mark.usefixtures('multiple_bars_inserted') - def test_multiple_filters_applied_to_a_single_model( - self, session, operator - ): + @pytest.mark.parametrize("operator", [">", "gt"]) + @pytest.mark.usefixtures("multiple_bars_inserted") + def test_multiple_filters_applied_to_a_single_model(self, session, operator): query = session.query(Bar) filters = [ - {'field': 'count', 'op': operator, 'value': '5'}, - {'field': 'id', 'op': operator, 'value': 2}, + {"field": "count", "op": operator, "value": "5"}, + {"field": "id", "op": operator, "value": 2}, ] 
filtered_query = apply_filters(Bar, query, filters) @@ -512,11 +514,11 @@ def test_multiple_filters_applied_to_a_single_model( class TestApplyLessThanFilter: - @pytest.mark.parametrize('operator', ['<', 'lt']) - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.parametrize("operator", ["<", "lt"]) + @pytest.mark.usefixtures("multiple_bars_inserted") def test_one_filter_applied_to_a_single_model(self, session, operator): query = session.query(Bar) - filters = [{'field': 'count', 'op': operator, 'value': '7'}] + filters = [{"field": "count", "op": operator, "value": "7"}] filtered_query = apply_filters(Bar, query, filters) result = filtered_query.all() @@ -524,15 +526,13 @@ def test_one_filter_applied_to_a_single_model(self, session, operator): assert len(result) == 1 assert result[0].id == 1 - @pytest.mark.parametrize('operator', ['<', 'lt']) - @pytest.mark.usefixtures('multiple_bars_inserted') - def test_multiple_filters_applied_to_a_single_model( - self, session, operator - ): + @pytest.mark.parametrize("operator", ["<", "lt"]) + @pytest.mark.usefixtures("multiple_bars_inserted") + def test_multiple_filters_applied_to_a_single_model(self, session, operator): query = session.query(Bar) filters = [ - {'field': 'count', 'op': operator, 'value': '7'}, - {'field': 'id', 'op': operator, 'value': 1}, + {"field": "count", "op": operator, "value": "7"}, + {"field": "id", "op": operator, "value": 1}, ] filtered_query = apply_filters(Bar, query, filters) @@ -543,11 +543,11 @@ def test_multiple_filters_applied_to_a_single_model( class TestApplyGreaterOrEqualThanFilter: - @pytest.mark.parametrize('operator', ['>=', 'ge']) - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.parametrize("operator", [">=", "ge"]) + @pytest.mark.usefixtures("multiple_bars_inserted") def test_one_filter_applied_to_a_single_model(self, session, operator): query = session.query(Bar) - filters = [{'field': 'count', 'op': operator, 'value': '5'}] + filters = [{"field": "count", "op": operator, "value": "5"}] filtered_query = apply_filters(Bar, query, filters) result = filtered_query.all() @@ -557,15 +557,13 @@ def test_one_filter_applied_to_a_single_model(self, session, operator): assert result[1].id == 2 assert result[2].id == 4 - @pytest.mark.parametrize('operator', ['>=', 'ge']) - @pytest.mark.usefixtures('multiple_bars_inserted') - def test_multiple_filters_applied_to_a_single_model( - self, session, operator - ): + @pytest.mark.parametrize("operator", [">=", "ge"]) + @pytest.mark.usefixtures("multiple_bars_inserted") + def test_multiple_filters_applied_to_a_single_model(self, session, operator): query = session.query(Bar) filters = [ - {'field': 'count', 'op': operator, 'value': '5'}, - {'field': 'id', 'op': operator, 'value': 4}, + {"field": "count", "op": operator, "value": "5"}, + {"field": "id", "op": operator, "value": 4}, ] filtered_query = apply_filters(Bar, query, filters) @@ -577,11 +575,11 @@ def test_multiple_filters_applied_to_a_single_model( class TestApplyLessOrEqualThanFilter: - @pytest.mark.parametrize('operator', ['<=', 'le']) - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.parametrize("operator", ["<=", "le"]) + @pytest.mark.usefixtures("multiple_bars_inserted") def test_one_filter_applied_to_a_single_model(self, session, operator): query = session.query(Bar) - filters = [{'field': 'count', 'op': operator, 'value': '15'}] + filters = [{"field": "count", "op": operator, "value": "15"}] filtered_query = apply_filters(Bar, query, filters) result = 
filtered_query.all() @@ -591,15 +589,13 @@ def test_one_filter_applied_to_a_single_model(self, session, operator): assert result[1].id == 2 assert result[2].id == 4 - @pytest.mark.parametrize('operator', ['<=', 'le']) - @pytest.mark.usefixtures('multiple_bars_inserted') - def test_multiple_filters_applied_to_a_single_model( - self, session, operator - ): + @pytest.mark.parametrize("operator", ["<=", "le"]) + @pytest.mark.usefixtures("multiple_bars_inserted") + def test_multiple_filters_applied_to_a_single_model(self, session, operator): query = session.query(Bar) filters = [ - {'field': 'count', 'op': operator, 'value': '15'}, - {'field': 'id', 'op': operator, 'value': 1}, + {"field": "count", "op": operator, "value": "15"}, + {"field": "id", "op": operator, "value": 1}, ] filtered_query = apply_filters(Bar, query, filters) @@ -611,10 +607,10 @@ def test_multiple_filters_applied_to_a_single_model( class TestApplyLikeFilter: - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_one_filter_applied_to_a_single_model(self, session): query = session.query(Bar) - filters = [{'field': 'name', 'op': 'like', 'value': '%me_1'}] + filters = [{"field": "name", "op": "like", "value": "%me_1"}] filtered_query = apply_filters(Bar, query, filters) result = filtered_query.all() @@ -626,10 +622,10 @@ def test_one_filter_applied_to_a_single_model(self, session): class TestApplyILikeFilter: - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_one_filter_applied_to_a_single_model(self, session): query = session.query(Bar) - filters = [{'field': 'name', 'op': 'ilike', 'value': '%ME_1'}] + filters = [{"field": "name", "op": "ilike", "value": "%ME_1"}] filtered_query = apply_filters(Bar, query, filters) result = filtered_query.all() @@ -641,10 +637,10 @@ def test_one_filter_applied_to_a_single_model(self, session): class TestApplyNotILikeFilter: - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_one_filter_applied_to_a_single_model(self, session): query = session.query(Bar) - filters = [{'field': 'name', 'op': 'not_ilike', 'value': '%ME_1'}] + filters = [{"field": "name", "op": "not_ilike", "value": "%ME_1"}] filtered_query = apply_filters(Bar, query, filters) result = filtered_query.all() @@ -656,20 +652,20 @@ def test_one_filter_applied_to_a_single_model(self, session): class TestApplyInFilter: - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_field_not_in_value_list(self, session): query = session.query(Bar) - filters = [{'field': 'count', 'op': 'in', 'value': [1, 2, 3]}] + filters = [{"field": "count", "op": "in", "value": [1, 2, 3]}] filtered_query = apply_filters(Bar, query, filters) result = filtered_query.all() assert len(result) == 0 - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_field_in_value_list(self, session): query = session.query(Bar) - filters = [{'field': 'count', 'op': 'in', 'value': [15, 2, 3]}] + filters = [{"field": "count", "op": "in", "value": [15, 2, 3]}] filtered_query = apply_filters(Bar, query, filters) result = filtered_query.all() @@ -680,10 +676,10 @@ def test_field_in_value_list(self, session): class TestApplyNotInFilter: - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_field_not_in_value_list(self, 
session): query = session.query(Bar) - filters = [{'field': 'count', 'op': 'not_in', 'value': [1, 2, 3]}] + filters = [{"field": "count", "op": "not_in", "value": [1, 2, 3]}] filtered_query = apply_filters(Bar, query, filters) result = filtered_query.all() @@ -693,10 +689,10 @@ def test_field_not_in_value_list(self, session): assert result[1].id == 2 assert result[2].id == 4 - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_field_in_value_list(self, session): query = session.query(Bar) - filters = [{'field': 'count', 'op': 'not_in', 'value': [15, 2, 3]}] + filters = [{"field": "count", "op": "not_in", "value": [15, 2, 3]}] filtered_query = apply_filters(Bar, query, filters) result = filtered_query.all() @@ -709,21 +705,23 @@ def test_field_in_value_list(self, session): class TestApplyFindInSetFilter: @pytest.mark.parametrize( - 'value,result_count,result_ids', + "value,result_count,result_ids", [ - ('none', 0, []), - ('foo', 4, [1, 2, 3, 4]), - ('bar', 2, [2, 4]), - ('baz', 2, [3, 4]), - ] + ("none", 0, []), + ("foo", 4, [1, 2, 3, 4]), + ("bar", 2, [2, 4]), + ("baz", 2, [3, 4]), + ], ) - @pytest.mark.usefixtures('multiple_graults_inserted') - def test_field_in_value_list(self, session, is_mysql, value, result_count, result_ids): + @pytest.mark.usefixtures("multiple_graults_inserted") + def test_field_in_value_list( + self, session, is_mysql, value, result_count, result_ids + ): if not is_mysql: pytest.skip(SET_NOT_SUPPORTED) query = session.query(Grault) - filters = [{'field': 'types', 'op': 'in_set', 'value': value}] + filters = [{"field": "types", "op": "in_set", "value": value}] filtered_query = apply_filters(Grault, query, filters) result = filtered_query.all() @@ -736,16 +734,16 @@ def test_field_in_value_list(self, session, is_mysql, value, result_count, resul class TestCompositeField: @pytest.mark.parametrize( - 'value,result_count,result_ids', + "value,result_count,result_ids", [ (Point(0, 0), 0, []), (Point(3, 3), 1, [3]), - ] + ], ) @pytest.mark.usefixtures("multiple_garply_inserted") def test_filter_composite_field(self, session, value, result_count, result_ids): query = session.query(Garply) - filters = [{'field': 'points', 'op': '==', 'value': value}] + filters = [{"field": "points", "op": "==", "value": value}] filtered_query = apply_filters(Garply, query, filters) result = filtered_query.all() @@ -758,20 +756,12 @@ def test_filter_composite_field(self, session, value, result_count, result_ids): class TestDateFields: @pytest.mark.parametrize( - 'value', - [ - datetime.date(2016, 7, 14), - datetime.date(2016, 7, 14).isoformat() - ] + "value", [datetime.date(2016, 7, 14), datetime.date(2016, 7, 14).isoformat()] ) - @pytest.mark.usefixtures('multiple_quxs_inserted') + @pytest.mark.usefixtures("multiple_quxs_inserted") def test_filter_date_equality(self, session, value): query = session.query(Qux) - filters = [{ - 'field': 'created_at', - 'op': '==', - 'value': value - }] + filters = [{"field": "created_at", "op": "==", "value": value}] filtered_query = apply_filters(Qux, query, filters) result = filtered_query.all() @@ -780,20 +770,12 @@ def test_filter_date_equality(self, session, value): assert result[0].created_at == datetime.date(2016, 7, 14) @pytest.mark.parametrize( - 'value', - [ - datetime.date(2016, 7, 13), - datetime.date(2016, 7, 13).isoformat() - ] + "value", [datetime.date(2016, 7, 13), datetime.date(2016, 7, 13).isoformat()] ) - @pytest.mark.usefixtures('multiple_quxs_inserted') + 
@pytest.mark.usefixtures("multiple_quxs_inserted") def test_filter_multiple_dates(self, session, value): query = session.query(Qux) - filters = [{ - 'field': 'created_at', - 'op': '>=', - 'value': value - }] + filters = [{"field": "created_at", "op": ">=", "value": value}] filtered_query = apply_filters(Qux, query, filters) result = filtered_query.all() @@ -802,10 +784,10 @@ def test_filter_multiple_dates(self, session, value): assert result[0].created_at == datetime.date(2016, 7, 13) assert result[1].created_at == datetime.date(2016, 7, 14) - @pytest.mark.usefixtures('multiple_quxs_inserted') + @pytest.mark.usefixtures("multiple_quxs_inserted") def test_null_date(self, session): query = session.query(Qux) - filters = [{'field': 'created_at', 'op': 'is_null'}] + filters = [{"field": "created_at", "op": "is_null"}] filtered_query = apply_filters(Qux, query, filters) result = filtered_query.all() @@ -817,19 +799,16 @@ def test_null_date(self, session): class TestTimeFields: @pytest.mark.parametrize( - 'value', - [ - datetime.time(3, 5, 9), - datetime.time(3, 5, 9).isoformat() # '03:05:09' - ] + "value", + [datetime.time(3, 5, 9), datetime.time(3, 5, 9).isoformat()], # '03:05:09' ) - @pytest.mark.usefixtures('multiple_quxs_inserted') + @pytest.mark.usefixtures("multiple_quxs_inserted") def test_filter_time_equality(self, session, is_sqlite, value): if isinstance(value, string_types) and is_sqlite: pytest.skip(STRING_DATE_TIME_NOT_SUPPORTED) query = session.query(Qux) - filters = [{'field': 'expiration_time', 'op': '==', 'value': value}] + filters = [{"field": "expiration_time", "op": "==", "value": value}] filtered_query = apply_filters(Qux, query, filters) result = filtered_query.all() @@ -838,23 +817,16 @@ def test_filter_time_equality(self, session, is_sqlite, value): assert result[0].expiration_time == datetime.time(3, 5, 9) @pytest.mark.parametrize( - 'value', - [ - datetime.time(2, 5, 9), - datetime.time(2, 5, 9).isoformat() # '02:05:09' - ] + "value", + [datetime.time(2, 5, 9), datetime.time(2, 5, 9).isoformat()], # '02:05:09' ) - @pytest.mark.usefixtures('multiple_quxs_inserted') + @pytest.mark.usefixtures("multiple_quxs_inserted") def test_filter_multiple_times(self, session, is_sqlite, value): if isinstance(value, string_types) and is_sqlite: pytest.skip(STRING_DATE_TIME_NOT_SUPPORTED) query = session.query(Qux) - filters = [{ - 'field': 'expiration_time', - 'op': '>=', - 'value': value - }] + filters = [{"field": "expiration_time", "op": ">=", "value": value}] filtered_query = apply_filters(Qux, query, filters) result = filtered_query.all() @@ -863,10 +835,10 @@ def test_filter_multiple_times(self, session, is_sqlite, value): assert result[0].expiration_time == datetime.time(2, 5, 9) assert result[1].expiration_time == datetime.time(3, 5, 9) - @pytest.mark.usefixtures('multiple_quxs_inserted') + @pytest.mark.usefixtures("multiple_quxs_inserted") def test_null_time(self, session): query = session.query(Qux) - filters = [{'field': 'expiration_time', 'op': 'is_null'}] + filters = [{"field": "expiration_time", "op": "is_null"}] filtered_query = apply_filters(Qux, query, filters) result = filtered_query.all() @@ -878,68 +850,54 @@ def test_null_time(self, session): class TestDateTimeFields: @pytest.mark.parametrize( - 'value', + "value", [ datetime.datetime(2016, 7, 14, 3, 5, 9), # '2016-07-14T03:05:09' - datetime.datetime(2016, 7, 14, 3, 5, 9).isoformat() - ] + datetime.datetime(2016, 7, 14, 3, 5, 9).isoformat(), + ], ) - @pytest.mark.usefixtures('multiple_quxs_inserted') + 
@pytest.mark.usefixtures("multiple_quxs_inserted") def test_filter_datetime_equality(self, session, is_sqlite, value): if isinstance(value, string_types) and is_sqlite: pytest.skip(STRING_DATE_TIME_NOT_SUPPORTED) query = session.query(Qux) - filters = [{ - 'field': 'execution_time', - 'op': '==', - 'value': value - }] + filters = [{"field": "execution_time", "op": "==", "value": value}] filtered_query = apply_filters(Qux, query, filters) result = filtered_query.all() assert len(result) == 1 - assert result[0].execution_time == datetime.datetime( - 2016, 7, 14, 3, 5, 9 - ) + assert result[0].execution_time == datetime.datetime(2016, 7, 14, 3, 5, 9) @pytest.mark.parametrize( - 'value', + "value", [ datetime.datetime(2016, 7, 13, 2, 5, 9), # '2016-07-13T02:05:09' - datetime.datetime(2016, 7, 13, 2, 5, 9).isoformat() - ] + datetime.datetime(2016, 7, 13, 2, 5, 9).isoformat(), + ], ) - @pytest.mark.usefixtures('multiple_quxs_inserted') + @pytest.mark.usefixtures("multiple_quxs_inserted") def test_filter_multiple_datetimes(self, session, is_sqlite, value): if isinstance(value, string_types) and is_sqlite: pytest.skip(STRING_DATE_TIME_NOT_SUPPORTED) query = session.query(Qux) - filters = [{ - 'field': 'execution_time', - 'op': '>=', - 'value': value - }] + filters = [{"field": "execution_time", "op": ">=", "value": value}] filtered_query = apply_filters(Qux, query, filters) result = filtered_query.all() assert len(result) == 2 - assert result[0].execution_time == datetime.datetime( - 2016, 7, 13, 2, 5, 9 - ) - assert result[1].execution_time == datetime.datetime( - 2016, 7, 14, 3, 5, 9 - ) + assert result[0].execution_time == datetime.datetime(2016, 7, 13, 2, 5, 9) + assert result[1].execution_time == datetime.datetime(2016, 7, 14, 3, 5, 9) - @pytest.mark.usefixtures('multiple_quxs_inserted') + @pytest.mark.usefixtures("multiple_quxs_inserted") def test_null_datetime(self, session): query = session.query(Qux) - filters = [{'field': 'execution_time', 'op': 'is_null'}] + filters = [{"field": "execution_time", "op": "is_null"}] filtered_query = apply_filters(Qux, query, filters) result = filtered_query.all() @@ -950,14 +908,16 @@ def test_null_datetime(self, session): class TestApplyBooleanFunctions: - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_or(self, session): query = session.query(Bar) filters = [ - {'or': [ - {'field': 'id', 'op': '==', 'value': 1}, - {'field': 'id', 'op': '==', 'value': 3}, - ]}, + { + "or": [ + {"field": "id", "op": "==", "value": 1}, + {"field": "id", "op": "==", "value": 3}, + ] + }, ] filtered_query = apply_filters(Bar, query, filters) @@ -967,13 +927,15 @@ def test_or(self, session): assert result[0].id == 1 assert result[1].id == 3 - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_or_with_one_arg(self, session): query = session.query(Bar) filters = [ - {'or': [ - {'field': 'id', 'op': '==', 'value': 1}, - ]}, + { + "or": [ + {"field": "id", "op": "==", "value": 1}, + ] + }, ] filtered_query = apply_filters(Bar, query, filters) @@ -982,15 +944,17 @@ def test_or_with_one_arg(self, session): assert len(result) == 1 assert result[0].id == 1 - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_or_with_three_args(self, session): query = session.query(Bar) filters = [ - {'or': [ - {'field': 'id', 'op': '==', 'value': 1}, - {'field': 'id', 'op': '==', 'value': 3}, - {'field': 'id', 
'op': '==', 'value': 4}, - ]}, + { + "or": [ + {"field": "id", "op": "==", "value": 1}, + {"field": "id", "op": "==", "value": 3}, + {"field": "id", "op": "==", "value": 4}, + ] + }, ] filtered_query = apply_filters(Bar, query, filters) @@ -1002,39 +966,33 @@ def test_or_with_three_args(self, session): assert result[2].id == 4 @pytest.mark.parametrize( - ('or_args', 'expected_error'), [ - ( - [], - '`or` must have one or more arguments' - ), - ( - {}, - '`or` value must be an iterable across the function arguments' - ), - ( - 'hello', - '`or` value must be an iterable across the function arguments' - ), - ] + ("or_args", "expected_error"), + [ + ([], "`or` must have one or more arguments"), + ({}, "`or` value must be an iterable across the function arguments"), + ("hello", "`or` value must be an iterable across the function arguments"), + ], ) - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_or_with_bad_format(self, session, or_args, expected_error): query = session.query(Bar) - filters = [{'or': or_args}] + filters = [{"or": or_args}] with pytest.raises(BadFilterFormat) as exc: apply_filters(Bar, query, filters) assert expected_error in str(exc) - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_and(self, session): query = session.query(Bar) filters = [ - {'and': [ - {'field': 'id', 'op': '<=', 'value': 2}, - {'field': 'count', 'op': '>=', 'value': 6}, - ]}, + { + "and": [ + {"field": "id", "op": "<=", "value": 2}, + {"field": "count", "op": ">=", "value": 6}, + ] + }, ] filtered_query = apply_filters(Bar, query, filters) @@ -1043,13 +1001,15 @@ def test_and(self, session): assert len(result) == 1 assert result[0].id == 2 - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_and_with_one_arg(self, session): query = session.query(Bar) filters = [ - {'and': [ - {'field': 'id', 'op': '==', 'value': 3}, - ]}, + { + "and": [ + {"field": "id", "op": "==", "value": 3}, + ] + }, ] filtered_query = apply_filters(Bar, query, filters) @@ -1058,15 +1018,17 @@ def test_and_with_one_arg(self, session): assert len(result) == 1 assert result[0].id == 3 - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_and_with_three_args(self, session): query = session.query(Bar) filters = [ - {'and': [ - {'field': 'id', 'op': '<=', 'value': 3}, - {'field': 'name', 'op': '==', 'value': 'name_1'}, - {'field': 'count', 'op': 'is_not_null'}, - ]}, + { + "and": [ + {"field": "id", "op": "<=", "value": 3}, + {"field": "name", "op": "==", "value": "name_1"}, + {"field": "count", "op": "is_not_null"}, + ] + }, ] filtered_query = apply_filters(Bar, query, filters) @@ -1076,38 +1038,32 @@ def test_and_with_three_args(self, session): assert result[0].id == 1 @pytest.mark.parametrize( - ('and_args', 'expected_error'), [ - ( - [], - '`and` must have one or more arguments' - ), - ( - {}, - '`and` value must be an iterable across the function arguments' - ), - ( - 'hello', - '`and` value must be an iterable across the function arguments' - ), - ] + ("and_args", "expected_error"), + [ + ([], "`and` must have one or more arguments"), + ({}, "`and` value must be an iterable across the function arguments"), + ("hello", "`and` value must be an iterable across the function arguments"), + ], ) - @pytest.mark.usefixtures('multiple_bars_inserted') + 
@pytest.mark.usefixtures("multiple_bars_inserted") def test_and_with_bad_format(self, session, and_args, expected_error): query = session.query(Bar) - filters = [{'and': and_args}] + filters = [{"and": and_args}] with pytest.raises(BadFilterFormat) as exc: apply_filters(Bar, query, filters) assert expected_error in str(exc) - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_not(self, session): query = session.query(Bar) filters = [ - {'not': [ - {'field': 'id', 'op': '==', 'value': 3}, - ]}, + { + "not": [ + {"field": "id", "op": "==", "value": 3}, + ] + }, ] filtered_query = apply_filters(Bar, query, filters) @@ -1119,53 +1075,43 @@ def test_not(self, session): assert result[2].id == 4 @pytest.mark.parametrize( - ('not_args', 'expected_error'), [ - ( - [{'field': 'id', 'op': '==', 'value': 1}, - {'field': 'id', 'op': '==', 'value': 2}], - '`not` must have one argument' - ), - ( - [], - '`not` must have one argument' - ), - ( - {}, - '`not` value must be an iterable across the function arguments' - ), + ("not_args", "expected_error"), + [ ( - 'hello', - '`not` value must be an iterable across the function arguments' + [ + {"field": "id", "op": "==", "value": 1}, + {"field": "id", "op": "==", "value": 2}, + ], + "`not` must have one argument", ), - ] + ([], "`not` must have one argument"), + ({}, "`not` value must be an iterable across the function arguments"), + ("hello", "`not` value must be an iterable across the function arguments"), + ], ) - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_not_with_bad_format(self, session, not_args, expected_error): query = session.query(Bar) - filters = [{'not': not_args}] + filters = [{"not": not_args}] with pytest.raises(BadFilterFormat) as exc: apply_filters(Bar, query, filters) assert expected_error in str(exc) - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_complex(self, session): query = session.query(Bar) filters = [ { - 'and': [ + "and": [ { - 'or': [ - {'field': 'id', 'op': '==', 'value': 2}, - {'field': 'id', 'op': '==', 'value': 3}, - ] - }, - { - 'not': [ - {'field': 'name', 'op': '==', 'value': 'name_2'} + "or": [ + {"field": "id", "op": "==", "value": 2}, + {"field": "id", "op": "==", "value": 3}, ] }, + {"not": [{"field": "name", "op": "==", "value": "name_2"}]}, ], } ] @@ -1176,23 +1122,19 @@ def test_complex(self, session): assert len(result) == 1 assert result[0].id == 3 - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_complex_using_tuples(self, session): query = session.query(Bar) filters = ( { - 'and': ( - { - 'or': ( - {'field': 'id', 'op': '==', 'value': 2}, - {'field': 'id', 'op': '==', 'value': 3}, - ) - }, + "and": ( { - 'not': ( - {'field': 'name', 'op': '==', 'value': 'name_2'}, + "or": ( + {"field": "id", "op": "==", "value": 2}, + {"field": "id", "op": "==", "value": 3}, ) }, + {"not": ({"field": "name", "op": "==", "value": "name_2"},)}, ), }, ) @@ -1206,13 +1148,13 @@ def test_complex_using_tuples(self, session): class TestApplyArrayFilters: - @pytest.mark.usefixtures('multiple_corges_inserted') + @pytest.mark.usefixtures("multiple_corges_inserted") def test_any_value_in_array(self, session, is_postgresql): if not is_postgresql: pytest.skip(ARRAY_NOT_SUPPORTED) query = session.query(Corge) - filters = [{'field': 'tags', 'op': 'any', 'value': 'foo'}] + filters = 
[{"field": "tags", "op": "any", "value": "foo"}] filtered_query = apply_filters(Corge, query, filters) result = filtered_query.all() @@ -1221,13 +1163,13 @@ def test_any_value_in_array(self, session, is_postgresql): assert result[0].id == 2 assert result[1].id == 3 - @pytest.mark.usefixtures('multiple_corges_inserted') + @pytest.mark.usefixtures("multiple_corges_inserted") def test_not_any_values_in_array(self, session, is_postgresql): if not is_postgresql: pytest.skip(ARRAY_NOT_SUPPORTED) query = session.query(Corge) - filters = [{'field': 'tags', 'op': 'not_any', 'value': 'foo'}] + filters = [{"field": "tags", "op": "not_any", "value": "foo"}] filtered_query = apply_filters(Corge, query, filters) result = filtered_query.all() @@ -1239,47 +1181,35 @@ def test_not_any_values_in_array(self, session, is_postgresql): class TestHybridAttributes: - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") @pytest.mark.parametrize( - ('field, expected_error'), + ("field, expected_error"), [ - ('foos', "Model has no column `foos`."), + ("foos", "Model has no column `foos`."), ( - '__mapper__', + "__mapper__", "Model has no column `__mapper__`.", ), ( - 'not_valid', + "not_valid", "Model has no column `not_valid`.", ), - ] + ], ) def test_orm_descriptors_not_valid_hybrid_attributes( self, session, field, expected_error ): query = session.query(Bar) - filters = [ - { - 'field': field, - 'op': '==', - 'value': 100 - } - ] + filters = [{"field": field, "op": "==", "value": 100}] with pytest.raises(FieldNotFound) as exc: apply_filters(Bar, query, filters) assert expected_error in str(exc) - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_filter_by_hybrid_properties(self, session): query = session.query(Bar) - filters = [ - { - 'field': 'count_square', - 'op': '==', - 'value': 100 - } - ] + filters = [{"field": "count_square", "op": "==", "value": 100}] filtered_query = apply_filters(Bar, query, filters) result = filtered_query.all() diff --git a/test/interface/test_pagination.py b/test/interface/test_pagination.py index 3ac17b9..42993ea 100644 --- a/test/interface/test_pagination.py +++ b/test/interface/test_pagination.py @@ -11,7 +11,7 @@ Pagination = namedtuple( - 'Pagination', ['page_number', 'page_size', 'num_pages', 'total_results'] + "Pagination", ["page_number", "page_size", "num_pages", "total_results"] ) @@ -19,99 +19,114 @@ class TestPaginationFixtures(object): @pytest.fixture def multiple_bars_inserted(self, session): - bar_1 = Bar(id=1, name='name_1', count=5) - bar_2 = Bar(id=2, name='name_2', count=10) - bar_3 = Bar(id=3, name='name_1', count=None) - bar_4 = Bar(id=4, name='name_4', count=15) - bar_5 = Bar(id=5, name='name_5', count=17) - bar_6 = Bar(id=6, name='name_5', count=17) - bar_7 = Bar(id=7, name='name_7', count=None) - bar_8 = Bar(id=8, name='name_8', count=18) - session.add_all( - [bar_1, bar_2, bar_3, bar_4, bar_5, bar_6, bar_7, bar_8] - ) + bar_1 = Bar(id=1, name="name_1", count=5) + bar_2 = Bar(id=2, name="name_2", count=10) + bar_3 = Bar(id=3, name="name_1", count=None) + bar_4 = Bar(id=4, name="name_4", count=15) + bar_5 = Bar(id=5, name="name_5", count=17) + bar_6 = Bar(id=6, name="name_5", count=17) + bar_7 = Bar(id=7, name="name_7", count=None) + bar_8 = Bar(id=8, name="name_8", count=18) + session.add_all([bar_1, bar_2, bar_3, bar_4, bar_5, bar_6, bar_7, bar_8]) session.commit() class TestWrongPagination(TestPaginationFixtures): @pytest.mark.parametrize( - 
'page_number, page_size', + "page_number, page_size", [ - (-2, None), (-2, 0), (-2, 1), (-2, 2), - (-1, None), (-1, 0), (-1, 1), (-1, 2), - (0, None), (0, 0), (0, 1), (-0, 2), - ] + (-2, None), + (-2, 0), + (-2, 1), + (-2, 2), + (-1, None), + (-1, 0), + (-1, 1), + (-1, 2), + (0, None), + (0, 0), + (0, 1), + (-0, 2), + ], ) - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_wrong_page_number(self, session, page_number, page_size): query = session.query(Bar) with pytest.raises(InvalidPage) as err: apply_pagination(query, page_number, page_size) - expected_error = 'Page number should be positive: {}'.format( - page_number - ) + expected_error = "Page number should be positive: {}".format(page_number) assert error_value(err) == expected_error @pytest.mark.parametrize( - 'page_number, page_size', + "page_number, page_size", [ - (-2, None), (-2, 0), (-2, 1), (-2, 2), - (-1, None), (-1, 0), (-1, 1), (-1, 2), - (0, None), (0, 0), (0, 1), (-0, 2), - ] + (-2, None), + (-2, 0), + (-2, 1), + (-2, 2), + (-1, None), + (-1, 0), + (-1, 1), + (-1, 2), + (0, None), + (0, 0), + (0, 1), + (-0, 2), + ], ) - def test_wrong_page_number_with_no_results( - self, session, page_number, page_size - ): + def test_wrong_page_number_with_no_results(self, session, page_number, page_size): query = session.query(Bar) with pytest.raises(InvalidPage) as err: apply_pagination(query, page_number, page_size) - expected_error = 'Page number should be positive: {}'.format( - page_number - ) + expected_error = "Page number should be positive: {}".format(page_number) assert error_value(err) == expected_error @pytest.mark.parametrize( - 'page_number, page_size', + "page_number, page_size", [ - (None, -2), (-1, -2), (0, -2), (1, -2), (2, -2), - (None, -1), (-1, -1), (0, -1), (1, -1), (2, -1), - ] + (None, -2), + (-1, -2), + (0, -2), + (1, -2), + (2, -2), + (None, -1), + (-1, -1), + (0, -1), + (1, -1), + (2, -1), + ], ) - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_wrong_page_size(self, session, page_number, page_size): query = session.query(Bar) with pytest.raises(InvalidPage) as err: apply_pagination(query, page_number, page_size) - expected_error = 'Page size should not be negative: {}'.format( - page_size - ) + expected_error = "Page size should not be negative: {}".format(page_size) assert error_value(err) == expected_error class TestNoPaginationProvided(TestPaginationFixtures): - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_no_pagination_info_provided(self, session): query = session.query(Bar) page_size = None page_number = None - paginated_query, pagination = apply_pagination( - query, page_number, page_size - ) + paginated_query, pagination = apply_pagination(query, page_number, page_size) assert query == paginated_query - assert Pagination( - page_number=1, page_size=8, num_pages=1, total_results=8 - ) == pagination + assert ( + Pagination(page_number=1, page_size=8, num_pages=1, total_results=8) + == pagination + ) result = paginated_query.all() @@ -122,20 +137,19 @@ def test_no_pagination_info_provided(self, session): class TestNoPageNumberProvided(TestPaginationFixtures): - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_page_size_greater_than_total_records(self, session): query = session.query(Bar) page_size = 5000 page_number = None - paginated_query, 
pagination = apply_pagination( - query, page_number, page_size - ) + paginated_query, pagination = apply_pagination(query, page_number, page_size) assert query != paginated_query - assert Pagination( - page_number=1, page_size=8, num_pages=1, total_results=8 - ) == pagination + assert ( + Pagination(page_number=1, page_size=8, num_pages=1, total_results=8) + == pagination + ) result = paginated_query.all() @@ -143,20 +157,19 @@ def test_page_size_greater_than_total_records(self, session): for i in range(8): assert result[i].id == i + 1 - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_page_size_provided(self, session): query = session.query(Bar) page_size = 2 page_number = None - paginated_query, pagination = apply_pagination( - query, page_number, page_size - ) + paginated_query, pagination = apply_pagination(query, page_number, page_size) assert query != paginated_query - assert Pagination( - page_number=1, page_size=2, num_pages=4, total_results=8 - ) == pagination + assert ( + Pagination(page_number=1, page_size=2, num_pages=4, total_results=8) + == pagination + ) result = paginated_query.all() @@ -167,20 +180,19 @@ def test_page_size_provided(self, session): class TestNoPageSizeProvided(TestPaginationFixtures): - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_first_page(self, session): query = session.query(Bar) page_size = None page_number = 1 - paginated_query, pagination = apply_pagination( - query, page_number, page_size - ) + paginated_query, pagination = apply_pagination(query, page_number, page_size) assert query != paginated_query - assert Pagination( - page_number=1, page_size=8, num_pages=1, total_results=8 - ) == pagination + assert ( + Pagination(page_number=1, page_size=8, num_pages=1, total_results=8) + == pagination + ) result = paginated_query.all() @@ -188,20 +200,21 @@ def test_first_page(self, session): for i in range(8): assert result[i].id == i + 1 - @pytest.mark.parametrize('page_number', [2, 3, 4]) - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.parametrize("page_number", [2, 3, 4]) + @pytest.mark.usefixtures("multiple_bars_inserted") def test_page_number_greater_than_one(self, session, page_number): query = session.query(Bar) page_size = None - paginated_query, pagination = apply_pagination( - query, page_number, page_size - ) + paginated_query, pagination = apply_pagination(query, page_number, page_size) assert query != paginated_query - assert Pagination( - page_number=page_number, page_size=8, num_pages=1, total_results=8 - ) == pagination + assert ( + Pagination( + page_number=page_number, page_size=8, num_pages=1, total_results=8 + ) + == pagination + ) result = paginated_query.all() @@ -210,57 +223,56 @@ def test_page_number_greater_than_one(self, session, page_number): class TestApplyPagination(TestPaginationFixtures): - @pytest.mark.parametrize('page_number', [1, 2, 3]) - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.parametrize("page_number", [1, 2, 3]) + @pytest.mark.usefixtures("multiple_bars_inserted") def test_page_size_zero(self, session, page_number): query = session.query(Bar) page_size = 0 - paginated_query, pagination = apply_pagination( - query, page_number, page_size - ) + paginated_query, pagination = apply_pagination(query, page_number, page_size) assert query != paginated_query - assert Pagination( - page_number=page_number, page_size=0, num_pages=0, total_results=8 - ) 
== pagination + assert ( + Pagination( + page_number=page_number, page_size=0, num_pages=0, total_results=8 + ) + == pagination + ) result = paginated_query.all() assert len(result) == 0 - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_page_size_zero_and_no_page_number_provided(self, session): query = session.query(Bar) page_size = 0 page_number = None - paginated_query, pagination = apply_pagination( - query, page_number, page_size - ) + paginated_query, pagination = apply_pagination(query, page_number, page_size) assert query != paginated_query - assert Pagination( - page_number=1, page_size=0, num_pages=0, total_results=8 - ) == pagination + assert ( + Pagination(page_number=1, page_size=0, num_pages=0, total_results=8) + == pagination + ) result = paginated_query.all() assert len(result) == 0 - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_page_number_and_page_size_provided(self, session): query = session.query(Bar) page_size = 2 page_number = 3 - paginated_query, pagination = apply_pagination( - query, page_number, page_size - ) + paginated_query, pagination = apply_pagination(query, page_number, page_size) assert query != paginated_query - assert Pagination( - page_number=3, page_size=2, num_pages=4, total_results=8 - ) == pagination + assert ( + Pagination(page_number=3, page_size=2, num_pages=4, total_results=8) + == pagination + ) result = paginated_query.all() @@ -268,61 +280,58 @@ def test_page_number_and_page_size_provided(self, session): assert result[0].id == 5 assert result[1].id == 6 - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_get_individual_record(self, session): query = session.query(Bar) page_size = 1 page_number = 5 - paginated_query, pagination = apply_pagination( - query, page_number, page_size - ) + paginated_query, pagination = apply_pagination(query, page_number, page_size) assert query != paginated_query - assert Pagination( - page_number=5, page_size=1, num_pages=8, total_results=8 - ) == pagination + assert ( + Pagination(page_number=5, page_size=1, num_pages=8, total_results=8) + == pagination + ) result = paginated_query.all() assert len(result) == 1 assert result[0].id == 5 - @pytest.mark.parametrize('page_number', [5, 6, 7]) - @pytest.mark.usefixtures('multiple_bars_inserted') - def test_page_number_greater_than_number_of_pages( - self, session, page_number - ): + @pytest.mark.parametrize("page_number", [5, 6, 7]) + @pytest.mark.usefixtures("multiple_bars_inserted") + def test_page_number_greater_than_number_of_pages(self, session, page_number): query = session.query(Bar) page_size = 2 - paginated_query, pagination = apply_pagination( - query, page_number, page_size - ) + paginated_query, pagination = apply_pagination(query, page_number, page_size) assert query != paginated_query - assert Pagination( - page_number=page_number, page_size=2, num_pages=4, total_results=8 - ) == pagination + assert ( + Pagination( + page_number=page_number, page_size=2, num_pages=4, total_results=8 + ) + == pagination + ) result = paginated_query.all() assert len(result) == 0 - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_last_complete_page(self, session): query = session.query(Bar) page_size = 2 page_number = 4 - paginated_query, pagination = apply_pagination( - query, page_number, page_size - ) + 
paginated_query, pagination = apply_pagination(query, page_number, page_size) assert query != paginated_query - assert Pagination( - page_number=4, page_size=2, num_pages=4, total_results=8 - ) == pagination + assert ( + Pagination(page_number=4, page_size=2, num_pages=4, total_results=8) + == pagination + ) result = paginated_query.all() @@ -330,20 +339,19 @@ def test_last_complete_page(self, session): assert result[0].id == 7 assert result[1].id == 8 - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_last_incomplete_page(self, session): query = session.query(Bar) page_size = 5 page_number = 2 - paginated_query, pagination = apply_pagination( - query, page_number, page_size - ) + paginated_query, pagination = apply_pagination(query, page_number, page_size) assert query != paginated_query - assert Pagination( - page_number=2, page_size=5, num_pages=2, total_results=8 - ) == pagination + assert ( + Pagination(page_number=2, page_size=5, num_pages=2, total_results=8) + == pagination + ) result = paginated_query.all() @@ -352,20 +360,19 @@ def test_last_incomplete_page(self, session): assert result[1].id == 7 assert result[2].id == 8 - @pytest.mark.usefixtures('multiple_bars_inserted') + @pytest.mark.usefixtures("multiple_bars_inserted") def test_get_first_page(self, session): query = session.query(Bar) page_size = 2 page_number = 1 - paginated_query, pagination = apply_pagination( - query, page_number, page_size - ) + paginated_query, pagination = apply_pagination(query, page_number, page_size) assert query != paginated_query - assert Pagination( - page_number=1, page_size=2, num_pages=4, total_results=8 - ) == pagination + assert ( + Pagination(page_number=1, page_size=2, num_pages=4, total_results=8) + == pagination + ) result = paginated_query.all() @@ -381,14 +388,13 @@ def test_page_size_and_page_number_provided(self, session): page_size = 2 page_number = 1 - paginated_query, pagination = apply_pagination( - query, page_number, page_size - ) + paginated_query, pagination = apply_pagination(query, page_number, page_size) assert query != paginated_query - assert Pagination( - page_number=1, page_size=2, num_pages=0, total_results=0 - ) == pagination + assert ( + Pagination(page_number=1, page_size=2, num_pages=0, total_results=0) + == pagination + ) result = paginated_query.all() diff --git a/test/interface/test_sorting.py b/test/interface/test_sorting.py index 694b67c..6ada3a1 100644 --- a/test/interface/test_sorting.py +++ b/test/interface/test_sorting.py @@ -20,45 +20,43 @@ @pytest.fixture def multiple_foos_inserted(session): - foo_1 = Foo(id=1, bar_id=1, name='name_1', count=1) - foo_2 = Foo(id=2, bar_id=2, name='name_2', count=1) - foo_3 = Foo(id=3, bar_id=3, name='name_1', count=1) - foo_4 = Foo(id=4, bar_id=4, name='name_4', count=1) - foo_5 = Foo(id=5, bar_id=5, name='name_1', count=2) - foo_6 = Foo(id=6, bar_id=6, name='name_4', count=2) - foo_7 = Foo(id=7, bar_id=7, name='name_1', count=2) - foo_8 = Foo(id=8, bar_id=8, name='name_5', count=2) + foo_1 = Foo(id=1, bar_id=1, name="name_1", count=1) + foo_2 = Foo(id=2, bar_id=2, name="name_2", count=1) + foo_3 = Foo(id=3, bar_id=3, name="name_1", count=1) + foo_4 = Foo(id=4, bar_id=4, name="name_4", count=1) + foo_5 = Foo(id=5, bar_id=5, name="name_1", count=2) + foo_6 = Foo(id=6, bar_id=6, name="name_4", count=2) + foo_7 = Foo(id=7, bar_id=7, name="name_1", count=2) + foo_8 = Foo(id=8, bar_id=8, name="name_5", count=2) session.add_all([foo_1, foo_2, foo_3, foo_4, 
foo_5, foo_6, foo_7, foo_8]) session.commit() @pytest.fixture def multiple_bars_with_no_nulls_inserted(session): - bar_1 = Bar(id=1, name='name_1', count=5) - bar_2 = Bar(id=2, name='name_2', count=10) - bar_3 = Bar(id=3, name='name_1', count=3) - bar_4 = Bar(id=4, name='name_4', count=12) - bar_5 = Bar(id=5, name='name_1', count=2) - bar_6 = Bar(id=6, name='name_4', count=15) - bar_7 = Bar(id=7, name='name_1', count=2) - bar_8 = Bar(id=8, name='name_5', count=1) + bar_1 = Bar(id=1, name="name_1", count=5) + bar_2 = Bar(id=2, name="name_2", count=10) + bar_3 = Bar(id=3, name="name_1", count=3) + bar_4 = Bar(id=4, name="name_4", count=12) + bar_5 = Bar(id=5, name="name_1", count=2) + bar_6 = Bar(id=6, name="name_4", count=15) + bar_7 = Bar(id=7, name="name_1", count=2) + bar_8 = Bar(id=8, name="name_5", count=1) session.add_all([bar_1, bar_2, bar_3, bar_4, bar_5, bar_6, bar_7, bar_8]) session.commit() @pytest.fixture def multiple_bars_with_nulls_inserted(session): - bar_1 = Bar(id=1, name='name_1', count=5) - bar_2 = Bar(id=2, name='name_2', count=20) - bar_3 = Bar(id=3, name='name_1', count=None) - bar_4 = Bar(id=4, name='name_4', count=10) - bar_5 = Bar(id=5, name='name_1', count=40) - bar_6 = Bar(id=6, name='name_4', count=None) - bar_7 = Bar(id=7, name='name_1', count=30) - bar_8 = Bar(id=8, name='name_5', count=50) - session.add_all( - [bar_1, bar_2, bar_3, bar_4, bar_5, bar_6, bar_7, bar_8] - ) + bar_1 = Bar(id=1, name="name_1", count=5) + bar_2 = Bar(id=2, name="name_2", count=20) + bar_3 = Bar(id=3, name="name_1", count=None) + bar_4 = Bar(id=4, name="name_4", count=10) + bar_5 = Bar(id=5, name="name_1", count=40) + bar_6 = Bar(id=6, name="name_4", count=None) + bar_7 = Bar(id=7, name="name_1", count=30) + bar_8 = Bar(id=8, name="name_5", count=50) + session.add_all([bar_1, bar_2, bar_3, bar_4, bar_5, bar_6, bar_7, bar_8]) session.commit() @@ -72,7 +70,7 @@ def test_no_sort_provided(self, session): assert query == filtered_query - @pytest.mark.parametrize('sort', ['some text', 1, []]) + @pytest.mark.parametrize("sort", ["some text", 1, []]) def test_wrong_sort_format(self, session, sort): query = session.query(Bar) order_by = [sort] @@ -80,22 +78,22 @@ def test_wrong_sort_format(self, session, sort): with pytest.raises(BadSortFormat) as err: apply_sort(Bar, query, order_by) - expected_error = 'Sort spec `{}` should be a dictionary.'.format(sort) + expected_error = "Sort spec `{}` should be a dictionary.".format(sort) assert expected_error == error_value(err) def test_field_not_provided(self, session): query = session.query(Bar) - order_by = [{'direction': 'asc'}] + order_by = [{"direction": "asc"}] with pytest.raises(BadSortFormat) as err: apply_sort(Bar, query, order_by) - expected_error = '`field` and `direction` are mandatory attributes.' + expected_error = "`field` and `direction` are mandatory attributes." assert expected_error == error_value(err) def test_invalid_field(self, session): query = session.query(Bar) - order_by = [{'field': 'invalid_field', 'direction': 'asc'}] + order_by = [{"field": "invalid_field", "direction": "asc"}] with pytest.raises(FieldNotFound) as err: apply_sort(Bar, query, order_by) @@ -107,27 +105,26 @@ def test_invalid_field(self, session): def test_direction_not_provided(self, session): query = session.query(Bar) - order_by = [{'field': 'name'}] + order_by = [{"field": "name"}] with pytest.raises(BadSortFormat) as err: apply_sort(Bar, query, order_by) - expected_error = '`field` and `direction` are mandatory attributes.' 
+ expected_error = "`field` and `direction` are mandatory attributes." assert expected_error == error_value(err) def test_invalid_direction(self, session): query = session.query(Bar) - order_by = [{'field': 'name', 'direction': 'invalid_direction'}] + order_by = [{"field": "name", "direction": "invalid_direction"}] with pytest.raises(BadSortFormat) as err: apply_sort(Bar, query, order_by) - expected_error = 'Direction `invalid_direction` not valid.' + expected_error = "Direction `invalid_direction` not valid." assert expected_error == error_value(err) class TestSortApplied(object): - """Tests that results are sorted only according to the provided filters. @@ -142,97 +139,104 @@ class TestSortApplied(object): or last. """ - @pytest.mark.usefixtures('multiple_bars_with_no_nulls_inserted') + @pytest.mark.usefixtures("multiple_bars_with_no_nulls_inserted") def test_single_sort_field_asc(self, session): query = session.query(Bar) - order_by = [{'field': 'name', 'direction': 'asc'}] + order_by = [{"field": "name", "direction": "asc"}] sorted_query = apply_sort(Bar, query, order_by) results = sorted_query.all() assert [result.name for result in results] == [ - 'name_1', 'name_1', 'name_1', 'name_1', - 'name_2', - 'name_4', 'name_4', - 'name_5', + "name_1", + "name_1", + "name_1", + "name_1", + "name_2", + "name_4", + "name_4", + "name_5", ] - @pytest.mark.usefixtures('multiple_bars_with_no_nulls_inserted') + @pytest.mark.usefixtures("multiple_bars_with_no_nulls_inserted") def test_single_sort_field_desc(self, session): query = session.query(Bar) - order_by = [{'field': 'name', 'direction': 'desc'}] + order_by = [{"field": "name", "direction": "desc"}] sorted_query = apply_sort(Bar, query, order_by) results = sorted_query.all() assert [result.name for result in results] == [ - 'name_5', - 'name_4', 'name_4', - 'name_2', - 'name_1', 'name_1', 'name_1', 'name_1', + "name_5", + "name_4", + "name_4", + "name_2", + "name_1", + "name_1", + "name_1", + "name_1", ] - @pytest.mark.usefixtures('multiple_bars_with_no_nulls_inserted') + @pytest.mark.usefixtures("multiple_bars_with_no_nulls_inserted") def test_multiple_sort_fields(self, session): query = session.query(Bar) order_by = [ - {'field': 'name', 'direction': 'asc'}, - {'field': 'count', 'direction': 'desc'}, - {'field': 'id', 'direction': 'desc'}, + {"field": "name", "direction": "asc"}, + {"field": "count", "direction": "desc"}, + {"field": "id", "direction": "desc"}, ] sorted_query = apply_sort(Bar, query, order_by) results = sorted_query.all() - assert [ - (result.name, result.count, result.id) for result in results - ] == [ - ('name_1', 5, 1), - ('name_1', 3, 3), - ('name_1', 2, 7), - ('name_1', 2, 5), - ('name_2', 10, 2), - ('name_4', 15, 6), - ('name_4', 12, 4), - ('name_5', 1, 8), + assert [(result.name, result.count, result.id) for result in results] == [ + ("name_1", 5, 1), + ("name_1", 3, 3), + ("name_1", 2, 7), + ("name_1", 2, 5), + ("name_2", 10, 2), + ("name_4", 15, 6), + ("name_4", 12, 4), + ("name_5", 1, 8), ] def test_multiple_models(self, session): - bar_1 = Bar(id=1, name='name_1', count=15) - bar_2 = Bar(id=2, name='name_2', count=10) - bar_3 = Bar(id=3, name='name_1', count=20) - bar_4 = Bar(id=4, name='name_1', count=10) + bar_1 = Bar(id=1, name="name_1", count=15) + bar_2 = Bar(id=2, name="name_2", count=10) + bar_3 = Bar(id=3, name="name_1", count=20) + bar_4 = Bar(id=4, name="name_1", count=10) qux_1 = Qux( - id=1, name='name_1', count=15, + id=1, + name="name_1", + count=15, created_at=datetime.date(2016, 7, 12), - 
execution_time=datetime.datetime(2016, 7, 12, 1, 5, 9) + execution_time=datetime.datetime(2016, 7, 12, 1, 5, 9), ) qux_2 = Qux( - id=2, name='name_2', count=10, + id=2, + name="name_2", + count=10, created_at=datetime.date(2016, 7, 13), - execution_time=datetime.datetime(2016, 7, 13, 2, 5, 9) - ) - qux_3 = Qux( - id=3, name='name_1', count=10, - created_at=None, execution_time=None + execution_time=datetime.datetime(2016, 7, 13, 2, 5, 9), ) + qux_3 = Qux(id=3, name="name_1", count=10, created_at=None, execution_time=None) qux_4 = Qux( - id=4, name='name_1', count=20, + id=4, + name="name_1", + count=20, created_at=datetime.date(2016, 7, 14), - execution_time=datetime.datetime(2016, 7, 14, 3, 5, 9) + execution_time=datetime.datetime(2016, 7, 14, 3, 5, 9), ) - session.add_all( - [bar_1, bar_2, bar_3, bar_4, qux_1, qux_2, qux_3, qux_4] - ) + session.add_all([bar_1, bar_2, bar_3, bar_4, qux_1, qux_2, qux_3, qux_4]) session.commit() query = session.query(Bar).join(Qux, Bar.id == Qux.id) order_by = [ - {'model': 'Bar', 'field': 'name', 'direction': 'asc'}, - {'model': 'Qux', 'field': 'count', 'direction': 'asc'}, + {"model": "Bar", "field": "name", "direction": "asc"}, + {"model": "Qux", "field": "count", "direction": "asc"}, ] sorted_query = apply_sort(Bar, query, order_by) @@ -245,28 +249,40 @@ def test_multiple_models(self, session): assert results[3].id == 2 def test_nullable_relationships(self, session): - bar_1 = Bar(id=1, name='name_1', count=5) - bar_2 = Bar(id=2, name='name_2', count=20) - bar_3 = Bar(id=3, name='name_1', count=None) - bar_4 = Bar(id=4, name='name_4', count=10) - foo_1 = Foo(id=1, bar_id=1, name='name_1', count=1) - foo_2 = Foo(id=2, bar_id=2, name='name_2', count=1) - foo_3 = Foo(id=3, bar_id=3, name='name_1', count=1) - foo_4 = Foo(id=4, bar_id=4, name='name_4', count=1) - foo_5 = Foo(id=5, bar_id=None, name='name_1', count=2) - foo_6 = Foo(id=6, bar_id=None, name='name_4', count=2) - foo_7 = Foo(id=7, bar_id=None, name='name_2', count=2) - foo_8 = Foo(id=8, bar_id=None, name='name_5', count=2) - session.add_all([ - bar_1, bar_2, bar_3, bar_4, - foo_1, foo_2, foo_3, foo_4, foo_5, foo_6, foo_7, foo_8, - ]) + bar_1 = Bar(id=1, name="name_1", count=5) + bar_2 = Bar(id=2, name="name_2", count=20) + bar_3 = Bar(id=3, name="name_1", count=None) + bar_4 = Bar(id=4, name="name_4", count=10) + foo_1 = Foo(id=1, bar_id=1, name="name_1", count=1) + foo_2 = Foo(id=2, bar_id=2, name="name_2", count=1) + foo_3 = Foo(id=3, bar_id=3, name="name_1", count=1) + foo_4 = Foo(id=4, bar_id=4, name="name_4", count=1) + foo_5 = Foo(id=5, bar_id=None, name="name_1", count=2) + foo_6 = Foo(id=6, bar_id=None, name="name_4", count=2) + foo_7 = Foo(id=7, bar_id=None, name="name_2", count=2) + foo_8 = Foo(id=8, bar_id=None, name="name_5", count=2) + session.add_all( + [ + bar_1, + bar_2, + bar_3, + bar_4, + foo_1, + foo_2, + foo_3, + foo_4, + foo_5, + foo_6, + foo_7, + foo_8, + ] + ) session.commit() query = session.query(Foo) sort_spec = [ - {'field': 'bar.count', 'direction': 'desc'}, - {'field': 'name', 'direction': 'asc'}, + {"field": "bar.count", "direction": "desc"}, + {"field": "name", "direction": "asc"}, ] sorted_query = apply_sort(Foo, query, sort_spec) results = sorted_query.all() @@ -276,318 +292,324 @@ def test_nullable_relationships(self, session): assert [ (result.id, result.bar.count, result.name) for result in results_with_bar ] == [ - (2, 20, 'name_2'), - (4, 10, 'name_4'), - (1, 5, 'name_1'), - (3, None, 'name_1'), + (2, 20, "name_2"), + (4, 10, "name_4"), + (1, 5, "name_1"), + 
(3, None, "name_1"), ] assert [ (result.id, result.bar, result.name) for result in results_without_bar ] == [ - (5, None, 'name_1'), - (7, None, 'name_2'), - (6, None, 'name_4'), - (8, None, 'name_5'), + (5, None, "name_1"), + (7, None, "name_2"), + (6, None, "name_4"), + (8, None, "name_5"), ] - @pytest.mark.usefixtures('multiple_bars_with_no_nulls_inserted') + @pytest.mark.usefixtures("multiple_bars_with_no_nulls_inserted") def test_a_single_dict_can_be_supplied_as_sort_spec(self, session): query = session.query(Bar) - sort_spec = {'field': 'name', 'direction': 'desc'} + sort_spec = {"field": "name", "direction": "desc"} sorted_query = apply_sort(Bar, query, sort_spec) results = sorted_query.all() assert [result.name for result in results] == [ - 'name_5', - 'name_4', 'name_4', - 'name_2', - 'name_1', 'name_1', 'name_1', 'name_1', + "name_5", + "name_4", + "name_4", + "name_2", + "name_1", + "name_1", + "name_1", + "name_1", ] class TestAutoJoin: @pytest.mark.usefixtures( - 'multiple_bars_with_no_nulls_inserted', - 'multiple_foos_inserted' + "multiple_bars_with_no_nulls_inserted", "multiple_foos_inserted" ) def test_auto_join(self, session): query = session.query(Foo) order_by = [ - {'field': 'count', 'direction': 'desc'}, - {'field': 'bar.name', 'direction': 'asc'}, - {'field': 'id', 'direction': 'asc'}, + {"field": "count", "direction": "desc"}, + {"field": "bar.name", "direction": "asc"}, + {"field": "id", "direction": "asc"}, ] sorted_query = apply_sort(Foo, query, order_by) results = sorted_query.all() - assert [ - (result.count, result.bar.name, result.id) for result in results - ] == [ - (2, 'name_1', 5), - (2, 'name_1', 7), - (2, 'name_4', 6), - (2, 'name_5', 8), - (1, 'name_1', 1), - (1, 'name_1', 3), - (1, 'name_2', 2), - (1, 'name_4', 4), + assert [(result.count, result.bar.name, result.id) for result in results] == [ + (2, "name_1", 5), + (2, "name_1", 7), + (2, "name_4", 6), + (2, "name_5", 8), + (1, "name_1", 1), + (1, "name_1", 3), + (1, "name_2", 2), + (1, "name_4", 4), ] @pytest.mark.usefixtures( - 'multiple_bars_with_no_nulls_inserted', - 'multiple_foos_inserted' + "multiple_bars_with_no_nulls_inserted", "multiple_foos_inserted" ) def test_noop_if_query_contains_named_models(self, session): query = session.query(Foo).join(Bar) order_by = [ - {'model': 'Foo', 'field': 'count', 'direction': 'desc'}, - {'model': 'Bar', 'field': 'name', 'direction': 'asc'}, - {'model': 'Foo', 'field': 'id', 'direction': 'asc'}, + {"model": "Foo", "field": "count", "direction": "desc"}, + {"model": "Bar", "field": "name", "direction": "asc"}, + {"model": "Foo", "field": "id", "direction": "asc"}, ] sorted_query = apply_sort(Foo, query, order_by) results = sorted_query.all() - assert [ - (result.count, result.bar.name, result.id) for result in results - ] == [ - (2, 'name_1', 5), - (2, 'name_1', 7), - (2, 'name_4', 6), - (2, 'name_5', 8), - (1, 'name_1', 1), - (1, 'name_1', 3), - (1, 'name_2', 2), - (1, 'name_4', 4), + assert [(result.count, result.bar.name, result.id) for result in results] == [ + (2, "name_1", 5), + (2, "name_1", 7), + (2, "name_4", 6), + (2, "name_5", 8), + (1, "name_1", 1), + (1, "name_1", 3), + (1, "name_2", 2), + (1, "name_4", 4), ] @pytest.mark.usefixtures( - 'multiple_bars_with_no_nulls_inserted', - 'multiple_foos_inserted' + "multiple_bars_with_no_nulls_inserted", "multiple_foos_inserted" ) def test_eager_load(self, session): # behaves as if the joinedload wasn't present query = session.query(Foo).options(joinedload(Foo.bar)) order_by = [ - {'field': 'count', 
'direction': 'desc'}, - {'field': 'bar.name', 'direction': 'asc'}, - {'field': 'id', 'direction': 'asc'}, + {"field": "count", "direction": "desc"}, + {"field": "bar.name", "direction": "asc"}, + {"field": "id", "direction": "asc"}, ] sorted_query = apply_sort(Foo, query, order_by) results = sorted_query.all() - assert [ - (result.count, result.bar.name, result.id) for result in results - ] == [ - (2, 'name_1', 5), - (2, 'name_1', 7), - (2, 'name_4', 6), - (2, 'name_5', 8), - (1, 'name_1', 1), - (1, 'name_1', 3), - (1, 'name_2', 2), - (1, 'name_4', 4), + assert [(result.count, result.bar.name, result.id) for result in results] == [ + (2, "name_1", 5), + (2, "name_1", 7), + (2, "name_4", 6), + (2, "name_5", 8), + (1, "name_1", 1), + (1, "name_1", 3), + (1, "name_2", 2), + (1, "name_4", 4), ] class TestSortNullsFirst(object): - """Tests `nullsfirst`. This is currently not supported by MySQL and SQLite. Only tested for PostgreSQL. """ - @pytest.mark.usefixtures('multiple_bars_with_nulls_inserted') + @pytest.mark.usefixtures("multiple_bars_with_nulls_inserted") def test_single_sort_field_asc_nulls_first(self, session, is_postgresql): if not is_postgresql: pytest.skip(NULLSFIRST_NOT_SUPPORTED) query = session.query(Bar) - order_by = [ - {'field': 'count', 'direction': 'asc', 'nullsfirst': True} - ] + order_by = [{"field": "count", "direction": "asc", "nullsfirst": True}] sorted_query = apply_sort(Bar, query, order_by) results = sorted_query.all() assert [result.count for result in results] == [ - None, None, 5, 10, 20, 30, 40, 50, + None, + None, + 5, + 10, + 20, + 30, + 40, + 50, ] - @pytest.mark.usefixtures('multiple_bars_with_nulls_inserted') + @pytest.mark.usefixtures("multiple_bars_with_nulls_inserted") def test_single_sort_field_desc_nulls_first(self, session, is_postgresql): if not is_postgresql: pytest.skip(NULLSFIRST_NOT_SUPPORTED) query = session.query(Bar) - order_by = [ - {'field': 'count', 'direction': 'desc', 'nullsfirst': True} - ] + order_by = [{"field": "count", "direction": "desc", "nullsfirst": True}] sorted_query = apply_sort(Bar, query, order_by) results = sorted_query.all() assert [result.count for result in results] == [ - None, None, 50, 40, 30, 20, 10, 5, - ] - - @pytest.mark.usefixtures('multiple_bars_with_nulls_inserted') - def test_multiple_sort_fields_asc_nulls_first( - self, session, is_postgresql - ): + None, + None, + 50, + 40, + 30, + 20, + 10, + 5, + ] + + @pytest.mark.usefixtures("multiple_bars_with_nulls_inserted") + def test_multiple_sort_fields_asc_nulls_first(self, session, is_postgresql): if not is_postgresql: pytest.skip(NULLSFIRST_NOT_SUPPORTED) query = session.query(Bar) order_by = [ - {'field': 'name', 'direction': 'asc'}, - {'field': 'count', 'direction': 'asc', 'nullsfirst': True}, + {"field": "name", "direction": "asc"}, + {"field": "count", "direction": "asc", "nullsfirst": True}, ] sorted_query = apply_sort(Bar, query, order_by) results = sorted_query.all() assert [(result.name, result.count) for result in results] == [ - ('name_1', None), - ('name_1', 5), - ('name_1', 30), - ('name_1', 40), - ('name_2', 20), - ('name_4', None), - ('name_4', 10), - ('name_5', 50), - ] - - @pytest.mark.usefixtures('multiple_bars_with_nulls_inserted') - def test_multiple_sort_fields_desc_nulls_first( - self, session, is_postgresql - ): + ("name_1", None), + ("name_1", 5), + ("name_1", 30), + ("name_1", 40), + ("name_2", 20), + ("name_4", None), + ("name_4", 10), + ("name_5", 50), + ] + + @pytest.mark.usefixtures("multiple_bars_with_nulls_inserted") + def 
test_multiple_sort_fields_desc_nulls_first(self, session, is_postgresql): if not is_postgresql: pytest.skip(NULLSFIRST_NOT_SUPPORTED) query = session.query(Bar) order_by = [ - {'field': 'name', 'direction': 'asc'}, - {'field': 'count', 'direction': 'desc', 'nullsfirst': True}, + {"field": "name", "direction": "asc"}, + {"field": "count", "direction": "desc", "nullsfirst": True}, ] sorted_query = apply_sort(Bar, query, order_by) results = sorted_query.all() assert [(result.name, result.count) for result in results] == [ - ('name_1', None), - ('name_1', 40), - ('name_1', 30), - ('name_1', 5), - ('name_2', 20), - ('name_4', None), - ('name_4', 10), - ('name_5', 50), + ("name_1", None), + ("name_1", 40), + ("name_1", 30), + ("name_1", 5), + ("name_2", 20), + ("name_4", None), + ("name_4", 10), + ("name_5", 50), ] class TestSortNullsLast(object): - """Tests `nullslast`. This is currently not supported by MySQL and SQLite. Only tested for PostgreSQL. """ - @pytest.mark.usefixtures('multiple_bars_with_nulls_inserted') + @pytest.mark.usefixtures("multiple_bars_with_nulls_inserted") def test_single_sort_field_asc_nulls_last(self, session, is_postgresql): if not is_postgresql: pytest.skip(NULLSLAST_NOT_SUPPORTED) query = session.query(Bar) - order_by = [ - {'field': 'count', 'direction': 'asc', 'nullslast': True} - ] + order_by = [{"field": "count", "direction": "asc", "nullslast": True}] sorted_query = apply_sort(Bar, query, order_by) results = sorted_query.all() assert [result.count for result in results] == [ - 5, 10, 20, 30, 40, 50, None, None, + 5, + 10, + 20, + 30, + 40, + 50, + None, + None, ] - @pytest.mark.usefixtures('multiple_bars_with_nulls_inserted') + @pytest.mark.usefixtures("multiple_bars_with_nulls_inserted") def test_single_sort_field_desc_nulls_last(self, session, is_postgresql): if not is_postgresql: pytest.skip(NULLSLAST_NOT_SUPPORTED) query = session.query(Bar) - order_by = [ - {'field': 'count', 'direction': 'desc', 'nullslast': True} - ] + order_by = [{"field": "count", "direction": "desc", "nullslast": True}] sorted_query = apply_sort(Bar, query, order_by) results = sorted_query.all() assert [result.count for result in results] == [ - 50, 40, 30, 20, 10, 5, None, None, + 50, + 40, + 30, + 20, + 10, + 5, + None, + None, ] - @pytest.mark.usefixtures('multiple_bars_with_nulls_inserted') + @pytest.mark.usefixtures("multiple_bars_with_nulls_inserted") def test_multiple_sort_fields_asc_nulls_last(self, session, is_postgresql): if not is_postgresql: pytest.skip(NULLSLAST_NOT_SUPPORTED) query = session.query(Bar) order_by = [ - {'field': 'name', 'direction': 'asc'}, - {'field': 'count', 'direction': 'asc', 'nullslast': True}, + {"field": "name", "direction": "asc"}, + {"field": "count", "direction": "asc", "nullslast": True}, ] sorted_query = apply_sort(Bar, query, order_by) results = sorted_query.all() assert [(result.name, result.count) for result in results] == [ - ('name_1', 5), - ('name_1', 30), - ('name_1', 40), - ('name_1', None), - ('name_2', 20), - ('name_4', 10), - ('name_4', None), - ('name_5', 50), - ] - - @pytest.mark.usefixtures('multiple_bars_with_nulls_inserted') - def test_multiple_sort_fields_desc_nulls_last( - self, session, is_postgresql - ): + ("name_1", 5), + ("name_1", 30), + ("name_1", 40), + ("name_1", None), + ("name_2", 20), + ("name_4", 10), + ("name_4", None), + ("name_5", 50), + ] + + @pytest.mark.usefixtures("multiple_bars_with_nulls_inserted") + def test_multiple_sort_fields_desc_nulls_last(self, session, is_postgresql): if not is_postgresql: 
pytest.skip(NULLSLAST_NOT_SUPPORTED) query = session.query(Bar) order_by = [ - {'field': 'name', 'direction': 'asc'}, - {'field': 'count', 'direction': 'desc', 'nullslast': True}, + {"field": "name", "direction": "asc"}, + {"field": "count", "direction": "desc", "nullslast": True}, ] sorted_query = apply_sort(Bar, query, order_by) results = sorted_query.all() assert [(result.name, result.count) for result in results] == [ - ('name_1', 40), - ('name_1', 30), - ('name_1', 5), - ('name_1', None), - ('name_2', 20), - ('name_4', 10), - ('name_4', None), - ('name_5', 50), + ("name_1", 40), + ("name_1", 30), + ("name_1", 5), + ("name_1", None), + ("name_2", 20), + ("name_4", 10), + ("name_4", None), + ("name_5", 50), ] class TestSortHybridAttributes(object): - """Tests that results are sorted only according to the provided filters. @@ -602,50 +624,78 @@ class TestSortHybridAttributes(object): or last. """ - @pytest.mark.usefixtures('multiple_bars_with_no_nulls_inserted') + @pytest.mark.usefixtures("multiple_bars_with_no_nulls_inserted") def test_single_sort_hybrid_property_asc(self, session): query = session.query(Bar) - order_by = [{'field': 'count_square', 'direction': 'asc'}] + order_by = [{"field": "count_square", "direction": "asc"}] sorted_query = apply_sort(Bar, query, order_by) results = sorted_query.all() assert [result.count_square for result in results] == [ - 1, 4, 4, 9, 25, 100, 144, 225 + 1, + 4, + 4, + 9, + 25, + 100, + 144, + 225, ] - @pytest.mark.usefixtures('multiple_bars_with_no_nulls_inserted') + @pytest.mark.usefixtures("multiple_bars_with_no_nulls_inserted") def test_single_sort_hybrid_property_desc(self, session): query = session.query(Bar) - order_by = [{'field': 'count_square', 'direction': 'desc'}] + order_by = [{"field": "count_square", "direction": "desc"}] sorted_query = apply_sort(Bar, query, order_by) results = sorted_query.all() assert [result.count_square for result in results] == [ - 225, 144, 100, 25, 9, 4, 4, 1 + 225, + 144, + 100, + 25, + 9, + 4, + 4, + 1, ] - @pytest.mark.usefixtures('multiple_bars_with_no_nulls_inserted') + @pytest.mark.usefixtures("multiple_bars_with_no_nulls_inserted") def test_single_sort_hybrid_method_asc(self, session): query = session.query(Bar) - order_by = [{'field': 'three_times_count', 'direction': 'asc'}] + order_by = [{"field": "three_times_count", "direction": "asc"}] sorted_query = apply_sort(Bar, query, order_by) results = sorted_query.all() assert [result.three_times_count() for result in results] == [ - 3, 6, 6, 9, 15, 30, 36, 45 + 3, + 6, + 6, + 9, + 15, + 30, + 36, + 45, ] - @pytest.mark.usefixtures('multiple_bars_with_no_nulls_inserted') + @pytest.mark.usefixtures("multiple_bars_with_no_nulls_inserted") def test_single_sort_hybrid_method_desc(self, session): query = session.query(Bar) - order_by = [{'field': 'three_times_count', 'direction': 'desc'}] + order_by = [{"field": "three_times_count", "direction": "desc"}] sorted_query = apply_sort(Bar, query, order_by) results = sorted_query.all() assert [result.three_times_count() for result in results] == [ - 45, 36, 30, 15, 9, 6, 6, 3 + 45, + 36, + 30, + 15, + 9, + 6, + 6, + 3, ] diff --git a/test/models.py b/test/models.py index 88f43e4..5c346f7 100644 --- a/test/models.py +++ b/test/models.py @@ -1,8 +1,6 @@ # -*- coding: utf-8 -*- -from sqlalchemy import ( - Column, Date, DateTime, ForeignKey, Integer, String, Time -) +from sqlalchemy import Column, Date, DateTime, ForeignKey, Integer, String, Time from sqlalchemy.dialects.mysql import SET from 
sqlalchemy.dialects.postgresql import ARRAY from sqlalchemy.ext.declarative import declarative_base @@ -49,28 +47,28 @@ def __ne__(self, other): class Foo(Base): - __tablename__ = 'foo' + __tablename__ = "foo" - bar_id = Column(Integer, ForeignKey('bar.id'), nullable=True) - bar = relationship('Bar', back_populates='foos') + bar_id = Column(Integer, ForeignKey("bar.id"), nullable=True) + bar = relationship("Bar", back_populates="foos") class Bar(Base): - __tablename__ = 'bar' - foos = relationship('Foo', back_populates='bar') + __tablename__ = "bar" + foos = relationship("Foo", back_populates="bar") class Baz(Base): - __tablename__ = 'baz' + __tablename__ = "baz" - qux_id = Column(Integer, ForeignKey('qux.id'), nullable=True) + qux_id = Column(Integer, ForeignKey("qux.id"), nullable=True) class Qux(Base): - __tablename__ = 'qux' + __tablename__ = "qux" created_at = Column(Date) execution_time = Column(DateTime) @@ -79,21 +77,21 @@ class Qux(Base): class Corge(BasePostgresqlSpecific): - __tablename__ = 'corge' + __tablename__ = "corge" tags = Column(ARRAY(String, dimensions=1)) class Grault(BaseMysqlSpecific): - __tablename__ = 'grault' + __tablename__ = "grault" types = Column(SET("foo", "bar", "baz"), nullable=True) class Garply(Base): - __tablename__ = 'garply' + __tablename__ = "garply" x = Column(Integer) y = Column(Integer) From 4c05c6e2ec98c011487d7cd626a77bcfccc90738 Mon Sep 17 00:00:00 2001 From: Ron Rademaker Date: Wed, 14 Feb 2024 15:26:37 +0000 Subject: [PATCH 29/30] Add poe for testing --- poetry.lock | 33 ++++++++++++++++++++++++++++++++- pyproject.toml | 4 ++++ 2 files changed, 36 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index 7cec1f2..e2152f6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -151,6 +151,18 @@ files = [ {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] +[[package]] +name = "pastel" +version = "0.2.1" +description = "Bring colors to your terminal." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pastel-0.2.1-py2.py3-none-any.whl", hash = "sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364"}, + {file = "pastel-0.2.1.tar.gz", hash = "sha256:e6581ac04e973cac858828c6202c1e1e81fee1dc7de7683f3e1ffe0bfd8a573d"}, +] + [[package]] name = "pathspec" version = "0.12.1" @@ -195,6 +207,25 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "poethepoet" +version = "0.24.4" +description = "A task runner that works well with poetry." 
+category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "poethepoet-0.24.4-py3-none-any.whl", hash = "sha256:fb4ea35d7f40fe2081ea917d2e4102e2310fda2cde78974050ca83896e229075"}, + {file = "poethepoet-0.24.4.tar.gz", hash = "sha256:ff4220843a87c888cbcb5312c8905214701d0af60ac7271795baa8369b428fef"}, +] + +[package.dependencies] +pastel = ">=0.2.1,<0.3.0" +tomli = ">=1.2.2" + +[package.extras] +poetry-plugin = ["poetry (>=1.0,<2.0)"] + [[package]] name = "psycopg2-binary" version = "2.9.1" @@ -420,4 +451,4 @@ postgresql = ["psycopg2-binary"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "9657d7d746a17107b50ab54a082568af8eb8340057c36141ff1dd90e6c3a3e48" +content-hash = "10119d2d258aafafa20a1668800237efb8929308b371b9ebea506c623a501ce4" diff --git a/pyproject.toml b/pyproject.toml index 086f6ad..2751ea7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,6 +19,7 @@ sqlalchemy-utils = "^0.37.8" [tool.poetry.group.dev.dependencies] black = "^24.2.0" +poethepoet = "^0.24.4" [build-system] requires = ["poetry-core>=1.0.0"] @@ -27,3 +28,6 @@ build-backend = "poetry.core.masonry.api" [tool.poetry.extras] mysql = ["mysql-connector-python-rf"] postgresql = ["psycopg2-binary"] + +[tool.poe.tasks] +test = "pytest test" \ No newline at end of file From c23fc82e9b2215cb3dd9a1be7ffff2d5882f0f01 Mon Sep 17 00:00:00 2001 From: Ron Rademaker Date: Wed, 14 Feb 2024 15:27:13 +0000 Subject: [PATCH 30/30] Run tests using GA --- .github/workflows/tests.yml | 76 +++++-------------------------------- 1 file changed, 10 insertions(+), 66 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 9d24827..4427881 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,70 +1,14 @@ -name: Tests CI +name: Pull request checks + on: - - push - - pull_request + pull_request: jobs: - tests: - name: ${{ matrix.tox }} - runs-on: ubuntu-20.04 - - services: - mariadb: - image: mariadb:10 - ports: - - 3306:3306 - env: - MYSQL_ALLOW_EMPTY_PASSWORD: yes - options: --health-cmd="mysqladmin ping" --health-interval=5s --health-timeout=2s --health-retries=3 - - postgres: - image: postgres - ports: - - 5432:5432 - env: - POSTGRES_USER: postgres - POSTGRES_HOST_AUTH_METHOD: trust - POSTGRES_DB: test_sqlalchemy_filters - POSTGRES_INITDB_ARGS: "--encoding=UTF8 --lc-collate=en_US.utf8 --lc-ctype=en_US.utf8" - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - - - strategy: - fail-fast: false - matrix: - include: - # sqlalchemylatest (i.e. 
> 2.0.0) is not yet supported - # for any version of python - - - {python: '3.7', tox: "py37-sqlalchemy1.0"} - - {python: '3.7', tox: "py37-sqlalchemy1.1"} - - {python: '3.7', tox: "py37-sqlalchemy1.2"} - - {python: '3.7', tox: "py37-sqlalchemy1.3"} - - {python: '3.7', tox: "py37-sqlalchemy1.4"} - - - {python: '3.8', tox: "py38-sqlalchemy1.0"} - - {python: '3.8', tox: "py38-sqlalchemy1.1"} - - {python: '3.8', tox: "py38-sqlalchemy1.2"} - - {python: '3.8', tox: "py38-sqlalchemy1.3"} - - {python: '3.8', tox: "py38-sqlalchemy1.4"} - - - {python: '3.9', tox: "py39-sqlalchemy1.0"} - - {python: '3.9', tox: "py39-sqlalchemy1.1"} - - {python: '3.9', tox: "py39-sqlalchemy1.2"} - - {python: '3.9', tox: "py39-sqlalchemy1.3"} - - {python: '3.9', tox: "py39-sqlalchemy1.4"} - - # python3.10 with sqlalchemy <= 1.1 errors with: - # AttributeError: module 'collections' has no attribute 'MutableMapping' - - {python: '3.10', tox: "py310-sqlalchemy1.2"} - - {python: '3.10', tox: "py310-sqlalchemy1.3"} - - {python: '3.10', tox: "py310-sqlalchemy1.4"} - + ci: + runs-on: self-hosted steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python }} - - - run: pip install tox~=3.28 - - run: tox -e ${{ matrix.tox }} + - name: Install project and dependencies + uses: actions/checkout@v4 + - name: Run tests + uses: Harborn-digital/github-action-poetry-poe-task@python3.11 + \ No newline at end of file
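
The last two patches replace the tox matrix entirely: tests are now driven by the Poe the Poet `test` task (`pytest test`) defined in pyproject.toml, and the workflow delegates dependency installation and test execution to the external composite action Harborn-digital/github-action-poetry-poe-task@python3.11 on a self-hosted runner. That action's contents are not part of this series, so the following is only a sketch of one plausible GitHub-hosted equivalent; the assumption that the action amounts to "set up Poetry, install the project, run `poe test`", as well as the choice of Python 3.11 and the mysql/postgresql extras, are illustrative and not taken from the patches. The MariaDB/PostgreSQL service containers from the earlier tox-based workflow are omitted here but would still be needed for the database-backed tests.

# Hypothetical GitHub-hosted equivalent of the self-hosted job in PATCH 30/30.
# Assumption: the composite action wraps Poetry setup, `poetry install`,
# and `poe test`; nothing in this series confirms its exact behaviour.
name: Pull request checks
on:
  pull_request:

jobs:
  ci:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: "3.11"
      - name: Install Poetry and project dependencies
        run: |
          pip install poetry
          poetry install --extras "mysql postgresql"
      - name: Run tests
        # Runs the [tool.poe.tasks] "test" task added in PATCH 29/30 (pytest test).
        run: poetry run poe test

Locally, the same suite can be run with `poetry install` followed by `poetry run poe test`, since poethepoet is declared in the dev dependency group.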