inner json[b] filtering support (WIP) #49

Open · wants to merge 2 commits into base: main
38 changes: 35 additions & 3 deletions fastapi_jsonapi/data_layers/filtering/sqlalchemy.py
@@ -1,11 +1,13 @@
"""Helper to create sqlalchemy filters according to filter querystring parameter"""
from typing import Any, List, Tuple, Type, Union
from typing import Any, List, Optional, Tuple, Type, Union

from pydantic import BaseModel
from pydantic.fields import ModelField
from sqlalchemy import and_, not_, or_
from sqlalchemy.orm import InstrumentedAttribute, aliased
from sqlalchemy.sql.elements import BinaryExpression
from sqlalchemy.sql.schema import Column
from sqlalchemy.sql.sqltypes import JSON

from fastapi_jsonapi.data_layers.shared import create_filters_or_sorts
from fastapi_jsonapi.data_typing import TypeModel, TypeSchema
@@ -91,7 +93,16 @@ def create_filter(self, schema_field: ModelField, model_column, operator, value)
raise InvalidType(detail=", ".join(errors))
return getattr(model_column, self.operator)(clear_value)

def resolve(self) -> FilterAndJoins: # noqa: PLR0911
def _is_json_column(self, column_name: str) -> bool:
column: Optional[Column] = self.model.__table__.columns.get(column_name)

if column is None:
return False

return isinstance(column.type, JSON)

# TODO: refactor and remove ignore PLR0911, PLR0912
def resolve(self) -> FilterAndJoins: # noqa: PLR0911, PLR0912
"""Create filter for a particular node of the filter tree"""
if "or" in self.filter_:
return self._create_filters(type_filter="or")
@@ -114,12 +125,22 @@ def resolve(self) -> FilterAndJoins: # noqa: PLR0911
operator=operator,
)

# TODO: check if relationship or inner schema
# TODO: create base schema `BaseJsonModel(BaseModel)`? reuse:
# https://github.com/AdCombo/combojsonapi/blob/45a43cf28c6496195c6e6762955db16f9a813b2f/combojsonapi/postgresql_jsonb/plugin.py#L103-L120

if SPLIT_REL in self.filter_.get("name", ""):
current_rel_or_column_name, *rel_names = self.filter_["name"].split(SPLIT_REL)
value = {
"name": SPLIT_REL.join(self.filter_["name"].split(SPLIT_REL)[1:]),
"name": SPLIT_REL.join(rel_names),
"op": operator,
"val": value,
}

is_json_filter = self._is_json_column(current_rel_or_column_name)
if is_json_filter:
return self._json_inner_filtering(value)

return self._relationship_filtering(value)

if isinstance(value, dict):
@@ -160,6 +181,13 @@ def _relationship_filtering(self, value):
joins.extend(new_joins)
return filters, joins

def _json_inner_filtering(self, value):
# TODO!! Upgrade Node usage :thinking:
node = Node(self.related_model, value, self.related_schema)
filters, new_joins = node.resolve()
# joins.extend(new_joins)
return filters, []

def _create_filters(self, type_filter: str) -> FilterAndJoins:
"""
Create an `or` or `and` filter
@@ -215,6 +243,10 @@ def column(self) -> InstrumentedAttribute:

model_field = get_model_field(self.schema, field)

is_json_field = hasattr(self.model, "type") and isinstance(self.model.type, JSON)
if is_json_field:
return self.model.op("->>")(model_field)

try:
return getattr(self.model, model_field)
except AttributeError:
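A note on the new code path above: when a dotted filter name such as meta.spam_and_eggs starts with a JSON column instead of a relationship, resolve() now routes it through _json_inner_filtering(), and the column property applies the ->> operator so the inner key is compared as text. A minimal sketch of the expression this path is expected to build, written directly against SQLAlchemy (illustrative only; the real code goes through Node and create_filter, and "foo" is a placeholder value):

from sqlalchemy import select

from tests.models import UserBio

# filter: {"name": "meta.spam_and_eggs", "op": "ilike", "val": "foo"}
inner_text = UserBio.meta.op("->>")("spam_and_eggs")   # roughly: meta ->> 'spam_and_eggs'
stmt = select(UserBio).where(inner_text.ilike("foo"))  # roughly: WHERE (meta ->> 'spam_and_eggs') ILIKE 'foo'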
19 changes: 18 additions & 1 deletion fastapi_jsonapi/utils/sqla.py
@@ -1,5 +1,11 @@
from typing import Type

# from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.orm.attributes import InstrumentedAttribute

# from sqlalchemy import JSON
from sqlalchemy.sql.sqltypes import JSON

from fastapi_jsonapi.data_typing import TypeModel


@@ -22,4 +28,15 @@ class ComputerSchema(pydantic_base):
:param relation_name:
:return:
"""
return getattr(cls, relation_name).property.mapper.class_
related_column: InstrumentedAttribute = getattr(cls, relation_name)
# TODO: any flags for JSON / JSONB?
# TODO: or any plugins to add support for JSON / JSONB, etc?
# TODO: https://github.com/AdCombo/combojsonapi/blob/45a43cf28c6496195c6e6762955db16f9a813b2f/combojsonapi/postgresql_jsonb/plugin.py#L103-L120

column_is_json = hasattr(related_column, "type") and isinstance(related_column.type, JSON)
if column_is_json:
# return related_column.op("->>")
return related_column

related_property = related_column.property
return related_property.mapper.class_
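The helper above (its signature sits above the visible hunk, so its name is not shown here) used to assume the attribute named by relation_name is always a relationship and returned the related mapper class; with this change it returns the column itself when the attribute is a JSON column. A rough illustration of both outcomes using the test models, assuming UserBio.user is a regular relationship (a sketch, not part of the patch):

from sqlalchemy.sql.sqltypes import JSON

from tests.models import UserBio

meta_attr = getattr(UserBio, "meta")       # InstrumentedAttribute over a JSON column
print(isinstance(meta_attr.type, JSON))    # True -> the helper now returns the column itself

user_attr = getattr(UserBio, "user")       # InstrumentedAttribute over a relationship
print(user_attr.property.mapper.class_)    # related model class, the pre-existing behaviour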
5 changes: 5 additions & 0 deletions tests/common.py
@@ -8,3 +8,8 @@ def sqla_uri():
db_dir = Path(__file__).resolve().parent
testing_db_url = f"sqlite+aiosqlite:///{db_dir}/db.sqlite3"
return testing_db_url


db_uri = sqla_uri()
IS_POSTGRES = "postgres" in db_uri
IS_SQLITE = "sqlite" in db_uri
1 change: 1 addition & 0 deletions tests/fixtures/db_connection.py
@@ -30,6 +30,7 @@ async def async_session_dependency():
async def async_engine():
engine = create_async_engine(
url=make_url(sqla_uri()),
# TODO: env var
echo=False,
# echo=True,
)
7 changes: 4 additions & 3 deletions tests/models.py
@@ -6,7 +6,7 @@
from sqlalchemy.orm import declared_attr, relationship
from sqlalchemy.types import CHAR, TypeDecorator

from tests.common import sqla_uri
from tests.common import IS_POSTGRES, IS_SQLITE, sqla_uri


class Base:
@@ -74,6 +74,7 @@ class UserBio(AutoIdMixin, Base):
birth_city: str = Column(String, nullable=False, default="", server_default="")
favourite_movies: str = Column(String, nullable=False, default="", server_default="")
keys_to_ids_list: Dict[str, List[int]] = Column(JSON)
meta: Dict[str, str] = Column(JSON, server_default="{}")

user_id = Column(Integer, ForeignKey("users.id"), nullable=False, unique=True)
user = relationship(
@@ -267,10 +268,10 @@ def python_type(self):


db_uri = sqla_uri()
if "postgres" in db_uri:
if IS_POSTGRES:
# noinspection PyPep8Naming
from sqlalchemy.dialects.postgresql import UUID as UUIDType
elif "sqlite" in db_uri:
elif IS_SQLITE:
UUIDType = CustomUUIDType
else:
msg = "unsupported dialect (custom uuid?)"
2 changes: 0 additions & 2 deletions tests/test_api/test_api_sqla_with_includes.py
@@ -39,8 +39,6 @@

pytestmark = mark.asyncio

logging.basicConfig(level=logging.DEBUG)


def association_key(data: dict):
return data["type"], data["id"]
Empty file.
@@ -0,0 +1,101 @@
import pytest
import simplejson as json
from fastapi import FastAPI, status
from httpx import AsyncClient
from pydantic import BaseModel
from sqlalchemy.ext.asyncio import AsyncSession

from tests.common import IS_POSTGRES
from tests.fixtures.app import build_app_custom
from tests.misc.utils import fake
from tests.models import (
UserBio,
)
from tests.schemas import UserBioAttributesBaseSchema

pytestmark = pytest.mark.asyncio


class UserBioMeta(BaseModel):
spam_and_eggs: str


class UserBioJsonMetaSchema(UserBioAttributesBaseSchema):
meta: UserBioMeta


@pytest.mark.skipif(not IS_POSTGRES, reason="only for pg (for now)")
class TestPostgresFilterByInnerSchema:
"""
Todo:
----
Create tests for fields:
- json
- jsonb
"""

@pytest.fixture()
def resource_type(self) -> str:
return "user_bio_custom_json_meta"

@pytest.fixture()
def app(self, resource_type):
app = build_app_custom(
model=UserBio,
schema=UserBioJsonMetaSchema,
resource_type=resource_type,
path=f"/{resource_type}",
)
return app

async def test_filter_inner_json_field(
self,
app: FastAPI,
resource_type: str,
client: AsyncClient,
async_session: AsyncSession,
user_1_bio: UserBio,
user_2_bio: UserBio,
):
# Declared as UserBioMeta.spam_and_eggs
some_key = "spam_and_eggs"
# todo: use a sentence and take a part of it to check ilike with %{part}%
value_1 = fake.word()
value_2 = fake.word()
assert value_1 != value_2
assert user_1_bio.id != user_2_bio.id

await async_session.refresh(user_1_bio)
await async_session.refresh(user_2_bio)

# re-assign the whole meta dict: SQLAlchemy doesn't track in-place mutations of plain JSON columns
user_1_bio.meta = {some_key: value_1}
user_2_bio.meta = {some_key: value_2}
await async_session.commit()

filter_inner = {
"name": f"meta.{some_key}",
"op": "ilike",
"val": value_1,
}
query_params = {
"filter": json.dumps(
[
filter_inner,
],
),
}
url = app.url_path_for(f"get_{resource_type}_list")
res = await client.get(url, params=query_params)
assert res.status_code == status.HTTP_200_OK, res.text
assert res.json() == {
"data": [
{
"id": str(user_1_bio.id),
"type": resource_type,
"attributes": UserBioJsonMetaSchema.from_orm(user_1_bio).dict(),
},
],
"jsonapi": {"version": "1.0"},
"meta": {"count": 1, "totalPages": 1},
}
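For readers skimming the test: the filter travels as a JSON-encoded filter query parameter, and on PostgreSQL the resolved predicate should come out roughly as sketched below (an approximation, not SQL captured from a real run; "some-word" is a placeholder):

import simplejson as json

filter_inner = {"name": "meta.spam_and_eggs", "op": "ilike", "val": "some-word"}
params = {"filter": json.dumps([filter_inner])}
# GET /user_bio_custom_json_meta?filter=[{"name": "meta.spam_and_eggs", "op": "ilike", "val": "some-word"}]
# expected predicate, roughly: (meta ->> 'spam_and_eggs') ILIKE 'some-word'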
3 changes: 0 additions & 3 deletions tests/test_atomic/test_create_objects.py
@@ -1,4 +1,3 @@
import logging
from typing import Callable

import pytest
@@ -26,8 +25,6 @@

pytestmark = mark.asyncio

logging.basicConfig(level=logging.DEBUG)


def random_sentence() -> str:
return fake.sentence()[:COLUMN_CHARACTERS_LIMIT]
3 changes: 0 additions & 3 deletions tests/test_atomic/test_delete_objects.py
@@ -1,4 +1,3 @@
import logging
from typing import Awaitable, Callable

from httpx import AsyncClient
@@ -13,8 +12,6 @@

pytestmark = mark.asyncio

logging.basicConfig(level=logging.DEBUG)


class TestAtomicDeleteObjects:
async def test_delete_two_objects(
4 changes: 0 additions & 4 deletions tests/test_atomic/test_update_objects.py
@@ -1,5 +1,3 @@
import logging

import pytest
from httpx import AsyncClient
from sqlalchemy.ext.asyncio import AsyncSession
@@ -11,8 +9,6 @@

pytestmark = pytest.mark.asyncio

logging.basicConfig(level=logging.DEBUG)


class TestAtomicUpdateObjects:
async def test_update_two_objects(