🌿 Fern Regeneration -- September 17, 2024 (#196)
* SDK regeneration

* update python version

---------

Co-authored-by: fern-api <115122769+fern-api[bot]@users.noreply.github.com>
Co-authored-by: David Konigsberg <[email protected]>
fern-api[bot] and davidkonigsberg authored Sep 17, 2024
1 parent d617569 commit d627398
Showing 34 changed files with 518 additions and 145 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/ci.yml
@@ -14,7 +14,7 @@ jobs:
       - name: Set up python
         uses: actions/setup-python@v5
         with:
-          python-version: 3.8
+          python-version: 3.11
       - name: Bootstrap poetry
         run: |
           curl -sSL https://install.python-poetry.org | python - -y --version 1.5.1
@@ -31,7 +31,7 @@ jobs:
       - name: Set up python
         uses: actions/setup-python@v5
         with:
-          python-version: 3.8
+          python-version: 3.11
       - name: Bootstrap poetry
         run: |
           curl -sSL https://install.python-poetry.org | python - -y --version 1.5.1
@@ -76,7 +76,7 @@ jobs:
       - name: Set up python
         uses: actions/setup-python@v5
         with:
-          python-version: 3.8
+          python-version: 3.11
       - name: Bootstrap poetry
         run: |
           curl -sSL https://install.python-poetry.org | python - -y --version 1.5.1

13 changes: 9 additions & 4 deletions src/sayari/attributes/client.py
@@ -5,6 +5,7 @@
 from .types.add_attribute import AddAttribute
 from ..core.request_options import RequestOptions
 from .types.attribute_response import AttributeResponse
+from ..core.serialization import convert_and_respect_annotation_metadata
 from ..core.pydantic_utilities import parse_obj_as
 from ..shared_errors.errors.bad_request import BadRequest
 from ..shared_errors.types.bad_request_response import BadRequestResponse
@@ -82,7 +83,7 @@ def post_attribute(
         _response = self._client_wrapper.httpx_client.request(
             "v1/attribute",
             method="POST",
-            json=request,
+            json=convert_and_respect_annotation_metadata(object_=request, annotation=AddAttribute, direction="write"),
             request_options=request_options,
             omit=OMIT,
         )
@@ -217,7 +218,9 @@ def patch_attribute(
         _response = self._client_wrapper.httpx_client.request(
             f"v1/attribute/{jsonable_encoder(attribute_id)}",
             method="PATCH",
-            json=request,
+            json=convert_and_respect_annotation_metadata(
+                object_=request, annotation=UpdateAttribute, direction="write"
+            ),
             request_options=request_options,
             omit=OMIT,
         )
@@ -502,7 +505,7 @@ async def main() -> None:
         _response = await self._client_wrapper.httpx_client.request(
             "v1/attribute",
             method="POST",
-            json=request,
+            json=convert_and_respect_annotation_metadata(object_=request, annotation=AddAttribute, direction="write"),
             request_options=request_options,
             omit=OMIT,
         )
@@ -645,7 +648,9 @@ async def main() -> None:
         _response = await self._client_wrapper.httpx_client.request(
             f"v1/attribute/{jsonable_encoder(attribute_id)}",
             method="PATCH",
-            json=request,
+            json=convert_and_respect_annotation_metadata(
+                object_=request, annotation=UpdateAttribute, direction="write"
+            ),
             request_options=request_options,
             omit=OMIT,
         )

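The request bodies above now pass through convert_and_respect_annotation_metadata before being sent, so Python-side field names are translated to their wire aliases at the HTTP boundary. A rough, illustrative sketch of that idea follows; FieldMetadata, AddAttributeExample, and to_wire_names are hypothetical stand-ins for this note, not the SDK's actual helper.

import typing

import typing_extensions


class FieldMetadata:
    """Hypothetical marker carrying a field's wire-format alias."""

    def __init__(self, *, alias: str) -> None:
        self.alias = alias


class AddAttributeExample(typing_extensions.TypedDict):
    # In this made-up example, `attribute_type` is serialized as `type` on the wire.
    attribute_type: typing_extensions.Annotated[str, FieldMetadata(alias="type")]
    value: str


def to_wire_names(object_: typing.Mapping[str, typing.Any], annotation: type) -> typing.Dict[str, typing.Any]:
    """Rename keys to their annotated aliases; keys without metadata pass through unchanged."""
    hints = typing_extensions.get_type_hints(annotation, include_extras=True)
    converted: typing.Dict[str, typing.Any] = {}
    for key, value in object_.items():
        alias = key
        for meta in getattr(hints.get(key), "__metadata__", ()):
            if isinstance(meta, FieldMetadata):
                alias = meta.alias
        converted[alias] = value
    return converted


print(to_wire_names({"attribute_type": "name", "value": "Acme GmbH"}, AddAttributeExample))
# -> {'type': 'name', 'value': 'Acme GmbH'}
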
3 changes: 2 additions & 1 deletion src/sayari/core/__init__.py
@@ -3,7 +3,7 @@
 from .api_error import ApiError
 from .client_wrapper import AsyncClientWrapper, BaseClientWrapper, SyncClientWrapper
 from .datetime_utils import serialize_datetime
-from .file import File, convert_file_dict_to_httpx_tuples
+from .file import File, convert_file_dict_to_httpx_tuples, with_content_type
 from .http_client import AsyncHttpClient, HttpClient
 from .jsonable_encoder import jsonable_encoder
 from .pydantic_utilities import (
@@ -43,4 +43,5 @@
     "universal_field_validator",
     "universal_root_validator",
     "update_forward_refs",
+    "with_content_type",
 ]
41 changes: 30 additions & 11 deletions src/sayari/core/file.py
@@ -1,30 +1,30 @@
 # This file was auto-generated by Fern from our API Definition.
 
-import typing
+from typing import IO, Dict, List, Mapping, Optional, Tuple, Union, cast
 
 # File typing inspired by the flexibility of types within the httpx library
 # https://github.com/encode/httpx/blob/master/httpx/_types.py
-FileContent = typing.Union[typing.IO[bytes], bytes, str]
-File = typing.Union[
+FileContent = Union[IO[bytes], bytes, str]
+File = Union[
     # file (or bytes)
     FileContent,
     # (filename, file (or bytes))
-    typing.Tuple[typing.Optional[str], FileContent],
+    Tuple[Optional[str], FileContent],
     # (filename, file (or bytes), content_type)
-    typing.Tuple[typing.Optional[str], FileContent, typing.Optional[str]],
+    Tuple[Optional[str], FileContent, Optional[str]],
     # (filename, file (or bytes), content_type, headers)
-    typing.Tuple[
-        typing.Optional[str],
+    Tuple[
+        Optional[str],
         FileContent,
-        typing.Optional[str],
-        typing.Mapping[str, str],
+        Optional[str],
+        Mapping[str, str],
     ],
 ]


 def convert_file_dict_to_httpx_tuples(
-    d: typing.Dict[str, typing.Union[File, typing.List[File]]],
-) -> typing.List[typing.Tuple[str, File]]:
+    d: Dict[str, Union[File, List[File]]],
+) -> List[Tuple[str, File]]:
     """
     The format we use is a list of tuples, where the first element is the
     name of the file and the second is the file object. Typically HTTPX wants
@@ -41,3 +41,22 @@ def convert_file_dict_to_httpx_tuples
         else:
             httpx_tuples.append((key, file_like))
     return httpx_tuples
+
+
+def with_content_type(*, file: File, content_type: str) -> File:
+    """ """
+    if isinstance(file, tuple):
+        if len(file) == 2:
+            filename, content = cast(Tuple[Optional[str], FileContent], file)  # type: ignore
+            return (filename, content, content_type)
+        elif len(file) == 3:
+            filename, content, _ = cast(Tuple[Optional[str], FileContent, Optional[str]], file)  # type: ignore
+            return (filename, content, content_type)
+        elif len(file) == 4:
+            filename, content, _, headers = cast(  # type: ignore
+                Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], file
+            )
+            return (filename, content, content_type, headers)
+        else:
+            raise ValueError(f"Unexpected tuple length: {len(file)}")
+    return (None, file, content_type)

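For context on the new helper: with_content_type normalizes any of the accepted File shapes into a tuple that carries an explicit content type. A small usage sketch follows; the file names and contents are made up, and the import assumes the package root shown in this diff.

from sayari.core import with_content_type

# Bare bytes gain a content type and a None filename.
print(with_content_type(file=b"col_a,col_b\n1,2\n", content_type="text/csv"))
# -> (None, b'col_a,col_b\n1,2\n', 'text/csv')

# A (filename, content) pair keeps its filename and gains the content type.
print(with_content_type(file=("report.csv", b"col_a,col_b\n1,2\n"), content_type="text/csv"))
# -> ('report.csv', b'col_a,col_b\n1,2\n', 'text/csv')
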
129 changes: 98 additions & 31 deletions src/sayari/core/pydantic_utilities.py
@@ -10,6 +10,7 @@
 import pydantic
 
 from .datetime_utils import serialize_datetime
+from .serialization import convert_and_respect_annotation_metadata
 
 IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")
 
@@ -56,11 +57,12 @@


 def parse_obj_as(type_: typing.Type[T], object_: typing.Any) -> T:
+    dealiased_object = convert_and_respect_annotation_metadata(object_=object_, annotation=type_, direction="read")
     if IS_PYDANTIC_V2:
         adapter = pydantic.TypeAdapter(type_)  # type: ignore # Pydantic v2
-        return adapter.validate_python(object_)
+        return adapter.validate_python(dealiased_object)
     else:
-        return pydantic.parse_obj_as(type_, object_)
+        return pydantic.parse_obj_as(type_, dealiased_object)


 def to_jsonable_with_fallback(
@@ -75,11 +77,40 @@ def to_jsonable_with_fallback(
 
 
 class UniversalBaseModel(pydantic.BaseModel):
-    class Config:
-        populate_by_name = True
-        smart_union = True
-        allow_population_by_field_name = True
-        json_encoders = {dt.datetime: serialize_datetime}
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(
+            # Allow fields begining with `model_` to be used in the model
+            protected_namespaces=(),
+        )  # type: ignore # Pydantic v2
+
+        @pydantic.model_serializer(mode="wrap", when_used="json")  # type: ignore # Pydantic v2
+        def serialize_model(self, handler: pydantic.SerializerFunctionWrapHandler) -> typing.Any:  # type: ignore # Pydantic v2
+            serialized = handler(self)
+            data = {k: serialize_datetime(v) if isinstance(v, dt.datetime) else v for k, v in serialized.items()}
+            return data
+
+    else:
+
+        class Config:
+            smart_union = True
+            json_encoders = {dt.datetime: serialize_datetime}
+
+    @classmethod
+    def model_construct(
+        cls: type[Model], _fields_set: typing.Optional[typing.Set[str]] = None, **values: typing.Any
+    ) -> Model:
+        dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read")
+        return cls.construct(_fields_set, **dealiased_object)
+
+    @classmethod
+    def construct(
+        cls: type[Model], _fields_set: typing.Optional[typing.Set[str]] = None, **values: typing.Any
+    ) -> Model:
+        dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read")
+        if IS_PYDANTIC_V2:
+            return super().model_construct(_fields_set, **dealiased_object)  # type: ignore # Pydantic v2
+        else:
+            return super().construct(_fields_set, **dealiased_object)
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {
@@ -97,30 +128,66 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
         Override the default dict method to `exclude_unset` by default. This function patches
         `exclude_unset` to work include fields within non-None default values.
         """
-        _fields_set = self.__fields_set__
-
-        fields = _get_model_fields(self.__class__)
-        for name, field in fields.items():
-            if name not in _fields_set:
-                default = _get_field_default(field)
-
-                # If the default values are non-null act like they've been set
-                # This effectively allows exclude_unset to work like exclude_none where
-                # the latter passes through intentionally set none values.
-                if default != None:
-                    _fields_set.add(name)
-
-        kwargs_with_defaults_exclude_unset: typing.Any = {
-            "by_alias": True,
-            "exclude_unset": True,
-            "include": _fields_set,
-            **kwargs,
-        }
-
+        # Note: the logic here is multi-plexed given the levers exposed in Pydantic V1 vs V2
+        # Pydantic V1's .dict can be extremely slow, so we do not want to call it twice.
+        #
+        # We'd ideally do the same for Pydantic V2, but it shells out to a library to serialize models
+        # that we have less control over, and this is less intrusive than custom serializers for now.
         if IS_PYDANTIC_V2:
-            return super().model_dump(**kwargs_with_defaults_exclude_unset)  # type: ignore # Pydantic v2
+            kwargs_with_defaults_exclude_unset: typing.Any = {
+                **kwargs,
+                "by_alias": True,
+                "exclude_unset": True,
+                "exclude_none": False,
+            }
+            kwargs_with_defaults_exclude_none: typing.Any = {
+                **kwargs,
+                "by_alias": True,
+                "exclude_none": True,
+                "exclude_unset": False,
+            }
+            dict_dump = deep_union_pydantic_dicts(
+                super().model_dump(**kwargs_with_defaults_exclude_unset),  # type: ignore # Pydantic v2
+                super().model_dump(**kwargs_with_defaults_exclude_none),  # type: ignore # Pydantic v2
+            )
+
         else:
-            return super().dict(**kwargs_with_defaults_exclude_unset)
+            _fields_set = self.__fields_set__
+
+            fields = _get_model_fields(self.__class__)
+            for name, field in fields.items():
+                if name not in _fields_set:
+                    default = _get_field_default(field)
+
+                    # If the default values are non-null act like they've been set
+                    # This effectively allows exclude_unset to work like exclude_none where
+                    # the latter passes through intentionally set none values.
+                    if default != None:
+                        _fields_set.add(name)
+
+            kwargs_with_defaults_exclude_unset_include_fields: typing.Any = {
+                "by_alias": True,
+                "exclude_unset": True,
+                "include": _fields_set,
+                **kwargs,
+            }
+
+            dict_dump = super().dict(**kwargs_with_defaults_exclude_unset_include_fields)
+
+        return convert_and_respect_annotation_metadata(object_=dict_dump, annotation=self.__class__, direction="write")
+
+
+def deep_union_pydantic_dicts(
+    source: typing.Dict[str, typing.Any], destination: typing.Dict[str, typing.Any]
+) -> typing.Dict[str, typing.Any]:
+    for key, value in source.items():
+        if isinstance(value, dict):
+            node = destination.setdefault(key, {})
+            deep_union_pydantic_dicts(value, node)
+        else:
+            destination[key] = value
+
+    return destination
 
 
 if IS_PYDANTIC_V2:
@@ -147,11 +214,11 @@ def encode_by_type(o: typing.Any) -> typing.Any:
                 return encoder(o)
 
 
-def update_forward_refs(model: typing.Type["Model"]) -> None:
+def update_forward_refs(model: typing.Type["Model"], **localns: typing.Any) -> None:
     if IS_PYDANTIC_V2:
         model.model_rebuild(raise_errors=False)  # type: ignore # Pydantic v2
     else:
-        model.update_forward_refs()
+        model.update_forward_refs(**localns)
 
 
 # Mirrors Pydantic's internal typing
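To make the new Pydantic v2 branch of dict() concrete: the model is dumped twice, once with exclude_unset and once with exclude_none, and the two results are merged so that defaulted-but-unset fields survive while explicitly set None values still pass through. A small illustration of the merge helper with made-up dumps; the import path mirrors the module shown in this diff.

from sayari.core.pydantic_utilities import deep_union_pydantic_dicts

exclude_unset_dump = {"name": "Acme GmbH", "address": {"city": "Berlin"}}
exclude_none_dump = {"name": "Acme GmbH", "address": {"city": "Berlin", "country": "DE"}, "risk": "low"}

# Keys from the first dump are written into the second, recursing through nested dicts,
# so the merged result keeps "country" and "risk" alongside the explicitly set fields.
print(deep_union_pydantic_dicts(exclude_unset_dump, exclude_none_dump))
# -> {'name': 'Acme GmbH', 'address': {'city': 'Berlin', 'country': 'DE'}, 'risk': 'low'}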