From c4c3140b3b89f5a7d5c299b3c940da0d0a4771b7 Mon Sep 17 00:00:00 2001 From: dphuang2 Date: Thu, 3 Oct 2024 12:10:25 -0700 Subject: [PATCH] fix --- .../main/resources/python/schemas.handlebars | 69 +- .../schemas.py | 1452 +++++------------ ...string-integer-path-parameter.test.ts.snap | 172 ++ 3 files changed, 632 insertions(+), 1061 deletions(-) create mode 100644 generator/konfig-integration-tests/tests/__snapshots__/python-one-of-uuid-string-integer-path-parameter.test.ts.snap diff --git a/generator/konfig-generator-api/src/main/resources/python/schemas.handlebars b/generator/konfig-generator-api/src/main/resources/python/schemas.handlebars index 7a608ef64..687f69905 100644 --- a/generator/konfig-generator-api/src/main/resources/python/schemas.handlebars +++ b/generator/konfig-generator-api/src/main/resources/python/schemas.handlebars @@ -968,6 +968,40 @@ class StrBase(ValidatorBase): return super()._validate_oapg(arg, validation_metadata=validation_metadata) +class IntBase: + @property + def as_int_oapg(self) -> int: + try: + return self._as_int + except AttributeError: + self._as_int = int(self) + return self._as_int + + @classmethod + def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata): + if isinstance(arg, decimal.Decimal): + + denominator = arg.as_integer_ratio()[-1] + if denominator != 1: + raise ApiValueError( + "Invalid value '{}' for type integer at {}".format(arg, validation_metadata.path_to_item) + ) + + @classmethod + def _validate_oapg( + cls, + arg, + validation_metadata: ValidationMetadata, + ): + """ + IntBase _validate_oapg + TODO what about types = (int, number) -> IntBase, NumberBase? We could drop int and keep number only + """ + if cls._types and int not in cls._types: cls._types.add(int) + cls.__validate_format(arg, validation_metadata=validation_metadata) + return super()._validate_oapg(arg, validation_metadata=validation_metadata) + + class UUIDBase: @property @functools.lru_cache() @@ -2011,6 +2045,7 @@ class ComposedSchema( ComposedBase, DictBase, ListBase, + IntBase, NumberBase, StrBase, BoolBase, @@ -2073,40 +2108,6 @@ class NumberSchema( return super().__new__(cls, arg, **kwargs) -class IntBase: - @property - def as_int_oapg(self) -> int: - try: - return self._as_int - except AttributeError: - self._as_int = int(self) - return self._as_int - - @classmethod - def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata): - if isinstance(arg, decimal.Decimal): - - denominator = arg.as_integer_ratio()[-1] - if denominator != 1: - raise ApiValueError( - "Invalid value '{}' for type integer at {}".format(arg, validation_metadata.path_to_item) - ) - - @classmethod - def _validate_oapg( - cls, - arg, - validation_metadata: ValidationMetadata, - ): - """ - IntBase _validate_oapg - TODO what about types = (int, number) -> IntBase, NumberBase? 
We could drop int and keep number only - """ - if cls._types and int not in cls._types: cls._types.add(int) - cls.__validate_format(arg, validation_metadata=validation_metadata) - return super()._validate_oapg(arg, validation_metadata=validation_metadata) - - class IntSchema(IntBase, NumberBase, Schema, IntMixin): @classmethod diff --git a/generator/konfig-integration-tests/sdks/python-one-of-uuid-string-integer-path-parameter/python/python_one_of_uuid_string_integer_path_parameter/schemas.py b/generator/konfig-integration-tests/sdks/python-one-of-uuid-string-integer-path-parameter/python/python_one_of_uuid_string_integer_path_parameter/schemas.py index 4a52b7200..ba01d9bf7 100644 --- a/generator/konfig-integration-tests/sdks/python-one-of-uuid-string-integer-path-parameter/python/python_one_of_uuid_string_integer_path_parameter/schemas.py +++ b/generator/konfig-integration-tests/sdks/python-one-of-uuid-string-integer-path-parameter/python/python_one_of_uuid_string_integer_path_parameter/schemas.py @@ -32,32 +32,21 @@ from python_one_of_uuid_string_integer_path_parameter.configuration import ( Configuration, ) -from python_one_of_uuid_string_integer_path_parameter.exceptions import ( - SchemaValidationError, -) +from python_one_of_uuid_string_integer_path_parameter.exceptions import SchemaValidationError from python_one_of_uuid_string_integer_path_parameter.exceptions import render_path -from python_one_of_uuid_string_integer_path_parameter.validation_metadata import ( - ValidationMetadata, -) -from python_one_of_uuid_string_integer_path_parameter.exceptions import ( - AnyOfValidationError, -) -from python_one_of_uuid_string_integer_path_parameter.exceptions import ( - MissingRequiredPropertiesError, -) +from python_one_of_uuid_string_integer_path_parameter.validation_metadata import ValidationMetadata +from python_one_of_uuid_string_integer_path_parameter.exceptions import AnyOfValidationError +from python_one_of_uuid_string_integer_path_parameter.exceptions import MissingRequiredPropertiesError Primitive: typing_extensions.TypeAlias = typing.Union[int, float, bool, str] - class Unset(object): """ An instance of this class is set as the default value for object type(dict) properties that are optional When a property has an unset value, that property will not be assigned in the dict """ - pass - unset = Unset() none_type = type(None) @@ -73,14 +62,12 @@ class FileIO(io.FileIO): def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader]): if isinstance(arg, (io.FileIO, io.BufferedReader)): if arg.closed: - raise ApiValueError( - "Invalid file state; file is closed and must be open" - ) + raise ApiValueError('Invalid file state; file is closed and must be open') arg.close() inst = super(FileIO, cls).__new__(cls, arg.name) super(FileIO, inst).__init__(arg.name) return inst - raise ApiValueError("FileIO must be passed arg which contains the open file") + raise ApiValueError('FileIO must be passed arg which contains the open file') def __init__(self, arg: typing.Union[io.FileIO, io.BufferedReader]): pass @@ -105,7 +92,6 @@ class Singleton: Enums and singletons are the same The same instance is returned for a given key of (cls, arg) """ - _instances = {} def __new__(cls, arg: typing.Any, **kwargs): @@ -129,12 +115,12 @@ def __new__(cls, arg: typing.Any, **kwargs): def __repr__(self): if isinstance(self, NoneClass): - return f"<{self.__class__.__name__}: None>" + return f'<{self.__class__.__name__}: None>' elif isinstance(self, BoolClass): if bool(self): - return 
f"<{self.__class__.__name__}: True>" - return f"<{self.__class__.__name__}: False>" - return f"<{self.__class__.__name__}: {super().__repr__()}>" + return f'<{self.__class__.__name__}: True>' + return f'<{self.__class__.__name__}: False>' + return f'<{self.__class__.__name__}: {super().__repr__()}>' class classproperty: @@ -169,7 +155,7 @@ def __bool__(self) -> bool: for key, instance in self._instances.items(): if self is instance: return bool(key[1]) - raise ValueError("Unable to find the boolean value of this instance") + raise ValueError('Unable to find the boolean value of this instance') def __str__(self) -> str: return str(bool(self)) @@ -182,40 +168,31 @@ class MetaOapgTyped: inclusive_minimum: typing.Union[int, float] max_items: int min_items: int - discriminator: typing.Dict[str, typing.Dict[str, typing.Type["Schema"]]] + discriminator: typing.Dict[str, typing.Dict[str, typing.Type['Schema']]] x_konfig_strip: bool + class properties: # to hold object properties pass - additional_properties: typing.Optional[typing.Type["Schema"]] + additional_properties: typing.Optional[typing.Type['Schema']] max_properties: int min_properties: int - all_of: typing.Callable[[], typing.List[typing.Type["Schema"]]] - one_of: typing.Callable[[], typing.List[typing.Type["Schema"]]] - any_of: typing.Callable[[], typing.List[typing.Type["Schema"]]] - not_schema: typing.Type["Schema"] + all_of: typing.Callable[[], typing.List[typing.Type['Schema']]] + one_of: typing.Callable[[], typing.List[typing.Type['Schema']]] + any_of: typing.Callable[[], typing.List[typing.Type['Schema']]] + not_schema: typing.Type['Schema'] max_length: int min_length: int - items: typing.Type["Schema"] + items: typing.Type['Schema'] class Schema: """ the base class of all swagger/openapi schemas/models """ - - __inheritable_primitive_types_set = { - decimal.Decimal, - str, - tuple, - frozendict.frozendict, - FileIO, - bytes, - BoolClass, - NoneClass, - } + __inheritable_primitive_types_set = {decimal.Decimal, str, tuple, frozendict.frozendict, FileIO, bytes, BoolClass, NoneClass} _types: typing.Set[typing.Type] MetaOapg = MetaOapgTyped @@ -230,9 +207,7 @@ def __get_valid_classes_phrase(input_classes): return "is one of [{0}]".format(", ".join(all_class_names)) @staticmethod - def _get_class_oapg( - item_cls: typing.Union[types.FunctionType, staticmethod, typing.Type["Schema"]] - ) -> typing.Type["Schema"]: + def _get_class_oapg(item_cls: typing.Union[types.FunctionType, staticmethod, typing.Type['Schema']]) -> typing.Type['Schema']: if isinstance(item_cls, types.FunctionType): # referenced schema return item_cls() @@ -259,14 +234,11 @@ def __type_error_message( if key_type: key_or_value = "key" valid_classes_phrase = cls.__get_valid_classes_phrase(valid_classes) - msg = ( - "Invalid type. Required {0} type {1} and " - 'passed type was {2} for "{3}"'.format( - key_or_value, - valid_classes_phrase, - type(var_value).__name__, - var_name, - ) + msg = "Invalid type. 
Required {0} type {1} and " "passed type was {2} for \"{3}\"".format( + key_or_value, + valid_classes_phrase, + type(var_value).__name__, + var_name, ) return msg @@ -291,20 +263,7 @@ def _validate_oapg( cls, arg, validation_metadata: ValidationMetadata, - ) -> typing.Dict[ - typing.Tuple[typing.Union[str, int], ...], - typing.Set[ - typing.Union[ - "Schema", - str, - decimal.Decimal, - BoolClass, - NoneClass, - frozendict.frozendict, - tuple, - ] - ], - ]: + ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]: """ Schema _validate_oapg All keyword validation except for type checking was done in calling stack frames @@ -333,17 +292,7 @@ def _validate_oapg( @staticmethod def _process_schema_classes_oapg( - schema_classes: typing.Set[ - typing.Union[ - "Schema", - str, - decimal.Decimal, - BoolClass, - NoneClass, - frozendict.frozendict, - tuple, - ] - ] + schema_classes: typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]] ): """ Processes and mutates schema_classes @@ -363,8 +312,10 @@ def _process_schema_classes_oapg( @classmethod def __get_new_cls( - cls, arg, validation_metadata: ValidationMetadata - ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type["Schema"]]: + cls, + arg, + validation_metadata: ValidationMetadata + ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']]: """ Make a new dynamic class and return an instance of that class We are making an instance of cls, but instead of making cls @@ -388,9 +339,7 @@ def __get_new_cls( if validation_metadata.validated_path_to_schemas: update(_path_to_schemas, validation_metadata.validated_path_to_schemas) if not validation_metadata.validation_ran_earlier(cls): - other_path_to_schemas = cls._validate_oapg( - arg, validation_metadata=validation_metadata - ) + other_path_to_schemas = cls._validate_oapg(arg, validation_metadata=validation_metadata) update(_path_to_schemas, other_path_to_schemas) # loop through it make a new class for each entry # do not modify the returned result because it is cached and we would be modifying the cached value @@ -406,21 +355,15 @@ def __get_new_cls( """ cls._process_schema_classes_oapg(schema_classes) enum_schema = any( - issubclass(this_cls, EnumBase) for this_cls in schema_classes - ) - inheritable_primitive_type = schema_classes.intersection( - cls.__inheritable_primitive_types_set - ) + issubclass(this_cls, EnumBase) for this_cls in schema_classes) + inheritable_primitive_type = schema_classes.intersection(cls.__inheritable_primitive_types_set) chosen_schema_classes = schema_classes - inheritable_primitive_type suffix = tuple(inheritable_primitive_type) if enum_schema and suffix[0] not in {NoneClass, BoolClass}: suffix = (Singleton,) + suffix - used_classes = ( - tuple(sorted(chosen_schema_classes, key=lambda a_cls: a_cls.__name__)) - + suffix - ) - mfg_cls = get_new_class(class_name="DynamicSchema", bases=used_classes) + used_classes = tuple(sorted(chosen_schema_classes, key=lambda a_cls: a_cls.__name__)) + suffix + mfg_cls = get_new_class(class_name='DynamicSchema', bases=used_classes) path_to_schemas[path] = mfg_cls return path_to_schemas @@ -430,9 +373,7 @@ def _get_new_instance_without_conversion_oapg( cls, arg: typing.Any, path_to_item: typing.Tuple[typing.Union[str, int], ...], - path_to_schemas: typing.Dict[ - typing.Tuple[typing.Union[str, int], ...], typing.Type["Schema"] - 
], + path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']] ): # We have a Dynamic class and we are making an instance of it if issubclass(cls, frozendict.frozendict) and issubclass(cls, DictBase): @@ -467,16 +408,16 @@ def from_openapi_data_oapg( decimal.Decimal, bool, None, - "Schema", + 'Schema', dict, frozendict.frozendict, tuple, list, io.FileIO, io.BufferedReader, - bytes, + bytes ], - _configuration: typing.Optional[Configuration], + _configuration: typing.Optional[Configuration] ): """ Schema from_openapi_data_oapg @@ -485,14 +426,13 @@ def from_openapi_data_oapg( validated_path_to_schemas = {} arg = cast_to_allowed_types(arg, from_server, validated_path_to_schemas) validation_metadata = ValidationMetadata( - from_server=from_server, - configuration=_configuration, - validated_path_to_schemas=validated_path_to_schemas, - ) + from_server=from_server, configuration=_configuration, validated_path_to_schemas=validated_path_to_schemas) path_to_schemas = cls.__get_new_cls(arg, validation_metadata) new_cls = path_to_schemas[validation_metadata.path_to_item] new_inst = new_cls._get_new_instance_without_conversion_oapg( - arg, validation_metadata.path_to_item, path_to_schemas + arg, + validation_metadata.path_to_item, + path_to_schemas ) return new_inst @@ -509,41 +449,7 @@ def __get_input_dict(*args, **kwargs) -> frozendict.frozendict: def __remove_unsets(kwargs): return {key: val for key, val in kwargs.items() if val is not unset} - def __new__( - cls, - *args: typing.Union[ - dict, - frozendict.frozendict, - list, - tuple, - decimal.Decimal, - float, - int, - str, - date, - datetime, - bool, - None, - "Schema", - ], - _configuration: typing.Optional[Configuration] = None, - **kwargs: typing.Union[ - dict, - frozendict.frozendict, - list, - tuple, - decimal.Decimal, - float, - int, - str, - date, - datetime, - bool, - None, - "Schema", - Unset, - ], - ): + def __new__(cls, *args: typing.Union[dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema'], _configuration: typing.Optional[Configuration] = None, **kwargs: typing.Union[dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema', Unset]): """ Schema __new__ @@ -558,7 +464,9 @@ def __new__( """ _kwargs = cls.__remove_unsets(kwargs) if not args and not _kwargs: - raise TypeError("No input given. args or kwargs must be given.") + raise TypeError( + 'No input given. args or kwargs must be given.' 
+ ) if not _kwargs and args and not isinstance(args[0], dict): _arg = args[0] else: @@ -566,53 +474,25 @@ def __new__( _from_server = False _validated_path_to_schemas = {} _arg = cast_to_allowed_types( - _arg, _from_server, _validated_path_to_schemas, schema=cls - ) + _arg, _from_server, _validated_path_to_schemas, schema=cls) _validation_metadata = ValidationMetadata( - configuration=_configuration, - from_server=_from_server, - validated_path_to_schemas=_validated_path_to_schemas, - ) + configuration=_configuration, from_server=_from_server, validated_path_to_schemas=_validated_path_to_schemas) _path_to_schemas = cls.__get_new_cls(_arg, _validation_metadata) _new_cls = _path_to_schemas[_validation_metadata.path_to_item] return _new_cls._get_new_instance_without_conversion_oapg( - _arg, _validation_metadata.path_to_item, _path_to_schemas + _arg, + _validation_metadata.path_to_item, + _path_to_schemas ) def __init__( self, *args: typing.Union[ - dict, - frozendict.frozendict, - list, - tuple, - decimal.Decimal, - float, - int, - str, - date, - datetime, - bool, - None, - "Schema", - ], + dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema'], _configuration: typing.Optional[Configuration] = None, **kwargs: typing.Union[ - dict, - frozendict.frozendict, - list, - tuple, - decimal.Decimal, - float, - int, - str, - date, - datetime, - bool, - None, - "Schema", - Unset, - ], + dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema', Unset + ] ): """ this is needed to fix 'Unexpected argument' warning in pycharm @@ -622,7 +502,6 @@ def __init__( """ pass - """ import itertools data_types = ('None', 'FrozenDict', 'Tuple', 'Str', 'Decimal', 'Bool') @@ -653,462 +532,278 @@ def __init__( BoolMixin = BoolClass BytesMixin = bytes FileMixin = FileIO - # qty 2 class NumberMixin(decimal.Decimal, int): pass - class BinaryMixin(bytes, FileIO): pass - class NoneFrozenDictMixin(NoneClass, frozendict.frozendict): pass - class NoneTupleMixin(NoneClass, tuple): pass - class NoneStrMixin(NoneClass, str): pass - class NoneDecimalMixin(NoneClass, decimal.Decimal): pass - class NoneBoolMixin(NoneClass, BoolClass): pass - class FrozenDictTupleMixin(frozendict.frozendict, tuple): pass - class FrozenDictStrMixin(frozendict.frozendict, str): pass - class FrozenDictDecimalMixin(frozendict.frozendict, decimal.Decimal): pass - class FrozenDictBoolMixin(frozendict.frozendict, BoolClass): pass - class TupleStrMixin(tuple, str): pass - class TupleDecimalMixin(tuple, decimal.Decimal): pass - class TupleBoolMixin(tuple, BoolClass): pass - class StrDecimalMixin(str, decimal.Decimal): pass - class StrBoolMixin(str, BoolClass): pass - class DecimalBoolMixin(decimal.Decimal, BoolClass): pass - # qty 3 class NoneFrozenDictTupleMixin(NoneClass, frozendict.frozendict, tuple): pass - class NoneFrozenDictStrMixin(NoneClass, frozendict.frozendict, str): pass - class NoneFrozenDictDecimalMixin(NoneClass, frozendict.frozendict, decimal.Decimal): pass - class NoneFrozenDictBoolMixin(NoneClass, frozendict.frozendict, BoolClass): pass - class NoneTupleStrMixin(NoneClass, tuple, str): pass - class NoneTupleDecimalMixin(NoneClass, tuple, decimal.Decimal): pass - class NoneTupleBoolMixin(NoneClass, tuple, BoolClass): pass - class NoneStrDecimalMixin(NoneClass, str, decimal.Decimal): pass - class NoneStrBoolMixin(NoneClass, str, BoolClass): pass - class NoneDecimalBoolMixin(NoneClass, decimal.Decimal, BoolClass): pass - class 
FrozenDictTupleStrMixin(frozendict.frozendict, tuple, str): pass - class FrozenDictTupleDecimalMixin(frozendict.frozendict, tuple, decimal.Decimal): pass - class FrozenDictTupleBoolMixin(frozendict.frozendict, tuple, BoolClass): pass - class FrozenDictStrDecimalMixin(frozendict.frozendict, str, decimal.Decimal): pass - class FrozenDictStrBoolMixin(frozendict.frozendict, str, BoolClass): pass - class FrozenDictDecimalBoolMixin(frozendict.frozendict, decimal.Decimal, BoolClass): pass - class TupleStrDecimalMixin(tuple, str, decimal.Decimal): pass - class TupleStrBoolMixin(tuple, str, BoolClass): pass - class TupleDecimalBoolMixin(tuple, decimal.Decimal, BoolClass): pass - class StrDecimalBoolMixin(str, decimal.Decimal, BoolClass): pass - # qty 4 class NoneFrozenDictTupleStrMixin(NoneClass, frozendict.frozendict, tuple, str): pass - - class NoneFrozenDictTupleDecimalMixin( - NoneClass, frozendict.frozendict, tuple, decimal.Decimal - ): + class NoneFrozenDictTupleDecimalMixin(NoneClass, frozendict.frozendict, tuple, decimal.Decimal): pass - - class NoneFrozenDictTupleBoolMixin( - NoneClass, frozendict.frozendict, tuple, BoolClass - ): + class NoneFrozenDictTupleBoolMixin(NoneClass, frozendict.frozendict, tuple, BoolClass): pass - - class NoneFrozenDictStrDecimalMixin( - NoneClass, frozendict.frozendict, str, decimal.Decimal - ): + class NoneFrozenDictStrDecimalMixin(NoneClass, frozendict.frozendict, str, decimal.Decimal): pass - class NoneFrozenDictStrBoolMixin(NoneClass, frozendict.frozendict, str, BoolClass): pass - - class NoneFrozenDictDecimalBoolMixin( - NoneClass, frozendict.frozendict, decimal.Decimal, BoolClass - ): + class NoneFrozenDictDecimalBoolMixin(NoneClass, frozendict.frozendict, decimal.Decimal, BoolClass): pass - class NoneTupleStrDecimalMixin(NoneClass, tuple, str, decimal.Decimal): pass - class NoneTupleStrBoolMixin(NoneClass, tuple, str, BoolClass): pass - class NoneTupleDecimalBoolMixin(NoneClass, tuple, decimal.Decimal, BoolClass): pass - class NoneStrDecimalBoolMixin(NoneClass, str, decimal.Decimal, BoolClass): pass - - class FrozenDictTupleStrDecimalMixin( - frozendict.frozendict, tuple, str, decimal.Decimal - ): + class FrozenDictTupleStrDecimalMixin(frozendict.frozendict, tuple, str, decimal.Decimal): pass - class FrozenDictTupleStrBoolMixin(frozendict.frozendict, tuple, str, BoolClass): pass - - class FrozenDictTupleDecimalBoolMixin( - frozendict.frozendict, tuple, decimal.Decimal, BoolClass - ): + class FrozenDictTupleDecimalBoolMixin(frozendict.frozendict, tuple, decimal.Decimal, BoolClass): pass - - class FrozenDictStrDecimalBoolMixin( - frozendict.frozendict, str, decimal.Decimal, BoolClass - ): + class FrozenDictStrDecimalBoolMixin(frozendict.frozendict, str, decimal.Decimal, BoolClass): pass - class TupleStrDecimalBoolMixin(tuple, str, decimal.Decimal, BoolClass): pass - # qty 5 - class NoneFrozenDictTupleStrDecimalMixin( - NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal - ): + class NoneFrozenDictTupleStrDecimalMixin(NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal): pass - - class NoneFrozenDictTupleStrBoolMixin( - NoneClass, frozendict.frozendict, tuple, str, BoolClass - ): + class NoneFrozenDictTupleStrBoolMixin(NoneClass, frozendict.frozendict, tuple, str, BoolClass): pass - - class NoneFrozenDictTupleDecimalBoolMixin( - NoneClass, frozendict.frozendict, tuple, decimal.Decimal, BoolClass - ): + class NoneFrozenDictTupleDecimalBoolMixin(NoneClass, frozendict.frozendict, tuple, decimal.Decimal, BoolClass): pass - - class 
NoneFrozenDictStrDecimalBoolMixin( - NoneClass, frozendict.frozendict, str, decimal.Decimal, BoolClass - ): + class NoneFrozenDictStrDecimalBoolMixin(NoneClass, frozendict.frozendict, str, decimal.Decimal, BoolClass): pass - - class NoneTupleStrDecimalBoolMixin( - NoneClass, tuple, str, decimal.Decimal, BoolClass - ): + class NoneTupleStrDecimalBoolMixin(NoneClass, tuple, str, decimal.Decimal, BoolClass): pass - - class FrozenDictTupleStrDecimalBoolMixin( - frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass - ): + class FrozenDictTupleStrDecimalBoolMixin(frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass): pass - # qty 6 - class NoneFrozenDictTupleStrDecimalBoolMixin( - NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass - ): + class NoneFrozenDictTupleStrDecimalBoolMixin(NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass): pass - # qty 9 - class NoneFrozenDictTupleStrIntDecimalBoolFileBytesMixin( - NoneClass, - frozendict.frozendict, - tuple, - str, - int, - decimal.Decimal, - BoolClass, - FileIO, - bytes, - ): + class NoneFrozenDictTupleStrIntDecimalBoolFileBytesMixin(NoneClass, frozendict.frozendict, tuple, str, int, decimal.Decimal, BoolClass, FileIO, bytes): pass - else: # qty 1 class NoneMixin: _types = {NoneClass} - class FrozenDictMixin: _types = {frozendict.frozendict} - class TupleMixin: _types = {tuple} - class StrMixin: _types = {str} - class DecimalMixin: _types = {decimal.Decimal} - class IntMixin: _types = {int} - class BoolMixin: _types = {BoolClass} - class BytesMixin: _types = {bytes} - class FileMixin: _types = {FileIO} - # qty 2 class NumberMixin: _types = {decimal.Decimal, int} - class BinaryMixin: _types = {bytes, FileIO} - class NoneFrozenDictMixin: _types = {NoneClass, frozendict.frozendict} - class NoneTupleMixin: _types = {NoneClass, tuple} - class NoneStrMixin: _types = {NoneClass, str} - class NoneDecimalMixin: _types = {NoneClass, decimal.Decimal} - class NoneBoolMixin: _types = {NoneClass, BoolClass} - class FrozenDictTupleMixin: _types = {frozendict.frozendict, tuple} - class FrozenDictStrMixin: _types = {frozendict.frozendict, str} - class FrozenDictDecimalMixin: _types = {frozendict.frozendict, decimal.Decimal} - class FrozenDictBoolMixin: _types = {frozendict.frozendict, BoolClass} - class TupleStrMixin: _types = {tuple, str} - class TupleDecimalMixin: _types = {tuple, decimal.Decimal} - class TupleBoolMixin: _types = {tuple, BoolClass} - class StrDecimalMixin: _types = {str, decimal.Decimal} - class StrBoolMixin: _types = {str, BoolClass} - class DecimalBoolMixin: _types = {decimal.Decimal, BoolClass} - # qty 3 class NoneFrozenDictTupleMixin: _types = {NoneClass, frozendict.frozendict, tuple} - class NoneFrozenDictStrMixin: _types = {NoneClass, frozendict.frozendict, str} - class NoneFrozenDictDecimalMixin: _types = {NoneClass, frozendict.frozendict, decimal.Decimal} - class NoneFrozenDictBoolMixin: _types = {NoneClass, frozendict.frozendict, BoolClass} - class NoneTupleStrMixin: _types = {NoneClass, tuple, str} - class NoneTupleDecimalMixin: _types = {NoneClass, tuple, decimal.Decimal} - class NoneTupleBoolMixin: _types = {NoneClass, tuple, BoolClass} - class NoneStrDecimalMixin: _types = {NoneClass, str, decimal.Decimal} - class NoneStrBoolMixin: _types = {NoneClass, str, BoolClass} - class NoneDecimalBoolMixin: _types = {NoneClass, decimal.Decimal, BoolClass} - class FrozenDictTupleStrMixin: _types = {frozendict.frozendict, tuple, str} - class FrozenDictTupleDecimalMixin: _types = 
{frozendict.frozendict, tuple, decimal.Decimal} - class FrozenDictTupleBoolMixin: _types = {frozendict.frozendict, tuple, BoolClass} - class FrozenDictStrDecimalMixin: _types = {frozendict.frozendict, str, decimal.Decimal} - class FrozenDictStrBoolMixin: _types = {frozendict.frozendict, str, BoolClass} - class FrozenDictDecimalBoolMixin: _types = {frozendict.frozendict, decimal.Decimal, BoolClass} - class TupleStrDecimalMixin: _types = {tuple, str, decimal.Decimal} - class TupleStrBoolMixin: _types = {tuple, str, BoolClass} - class TupleDecimalBoolMixin: _types = {tuple, decimal.Decimal, BoolClass} - class StrDecimalBoolMixin: _types = {str, decimal.Decimal, BoolClass} - # qty 4 class NoneFrozenDictTupleStrMixin: _types = {NoneClass, frozendict.frozendict, tuple, str} - class NoneFrozenDictTupleDecimalMixin: _types = {NoneClass, frozendict.frozendict, tuple, decimal.Decimal} - class NoneFrozenDictTupleBoolMixin: _types = {NoneClass, frozendict.frozendict, tuple, BoolClass} - class NoneFrozenDictStrDecimalMixin: _types = {NoneClass, frozendict.frozendict, str, decimal.Decimal} - class NoneFrozenDictStrBoolMixin: _types = {NoneClass, frozendict.frozendict, str, BoolClass} - class NoneFrozenDictDecimalBoolMixin: _types = {NoneClass, frozendict.frozendict, decimal.Decimal, BoolClass} - class NoneTupleStrDecimalMixin: _types = {NoneClass, tuple, str, decimal.Decimal} - class NoneTupleStrBoolMixin: _types = {NoneClass, tuple, str, BoolClass} - class NoneTupleDecimalBoolMixin: _types = {NoneClass, tuple, decimal.Decimal, BoolClass} - class NoneStrDecimalBoolMixin: _types = {NoneClass, str, decimal.Decimal, BoolClass} - class FrozenDictTupleStrDecimalMixin: _types = {frozendict.frozendict, tuple, str, decimal.Decimal} - class FrozenDictTupleStrBoolMixin: _types = {frozendict.frozendict, tuple, str, BoolClass} - class FrozenDictTupleDecimalBoolMixin: _types = {frozendict.frozendict, tuple, decimal.Decimal, BoolClass} - class FrozenDictStrDecimalBoolMixin: _types = {frozendict.frozendict, str, decimal.Decimal, BoolClass} - class TupleStrDecimalBoolMixin: _types = {tuple, str, decimal.Decimal, BoolClass} - # qty 5 class NoneFrozenDictTupleStrDecimalMixin: _types = {NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal} - class NoneFrozenDictTupleStrBoolMixin: _types = {NoneClass, frozendict.frozendict, tuple, str, BoolClass} - class NoneFrozenDictTupleDecimalBoolMixin: _types = {NoneClass, frozendict.frozendict, tuple, decimal.Decimal, BoolClass} - class NoneFrozenDictStrDecimalBoolMixin: _types = {NoneClass, frozendict.frozendict, str, decimal.Decimal, BoolClass} - class NoneTupleStrDecimalBoolMixin: _types = {NoneClass, tuple, str, decimal.Decimal, BoolClass} - class FrozenDictTupleStrDecimalBoolMixin: _types = {frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass} - # qty 6 class NoneFrozenDictTupleStrDecimalBoolMixin: - _types = { - NoneClass, - frozendict.frozendict, - tuple, - str, - decimal.Decimal, - BoolClass, - } - + _types = {NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass} # qty 9 class NoneFrozenDictTupleStrIntDecimalBoolFileBytesMixin: - _types = { - NoneClass, - frozendict.frozendict, - tuple, - str, - int, - decimal.Decimal, - BoolClass, - FileIO, - bytes, - } + _types = {NoneClass, frozendict.frozendict, tuple, str, int, decimal.Decimal, BoolClass, FileIO, bytes} class ValidatorBase: @@ -1125,16 +820,12 @@ def _is_json_validation_enabled_oapg(schema_keyword, configuration=None): configuration (Configuration): the configuration class. 
""" - return ( - configuration is None - or not hasattr(configuration, "_disabled_client_side_validations") - or schema_keyword not in configuration._disabled_client_side_validations - ) + return (configuration is None or + not hasattr(configuration, '_disabled_client_side_validations') or + schema_keyword not in configuration._disabled_client_side_validations) @staticmethod - def _raise_validation_errror_message_oapg( - value, constraint_msg, constraint_value, path_to_item, additional_txt="" - ): + def _raise_validation_errror_message_oapg(value, constraint_msg, constraint_value, path_to_item, additional_txt=""): raise ApiValueError( "Invalid value `{value}`, {constraint_msg} `{constraint_value}`{additional_txt} at {path_to_item}".format( value=value, @@ -1152,20 +843,7 @@ def _validate_oapg( cls, arg, validation_metadata: ValidationMetadata, - ) -> typing.Dict[ - typing.Tuple[typing.Union[str, int], ...], - typing.Set[ - typing.Union[ - "Schema", - str, - decimal.Decimal, - BoolClass, - NoneClass, - frozendict.frozendict, - tuple, - ] - ], - ]: + ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]: """ EnumBase _validate_oapg Validates that arg is in the enum's allowed values @@ -1173,73 +851,28 @@ def _validate_oapg( try: cls.MetaOapg.enum_value_to_name[arg] except KeyError: - raise ApiValueError( - "Invalid value {} passed in to {}, allowed_values={}".format( - arg, cls, cls.MetaOapg.enum_value_to_name.keys() - ) - ) + raise ApiValueError("Invalid value {} passed in to {}, allowed_values={}".format(arg, cls, cls.MetaOapg.enum_value_to_name.keys())) return super()._validate_oapg(arg, validation_metadata=validation_metadata) class BoolBase: def is_true_oapg(self) -> bool: """ - A replacement for x is True - True if the instance is a BoolClass True Singleton - """ - if not issubclass(self.__class__, BoolClass): - return False - return bool(self) - - def is_false_oapg(self) -> bool: - """ - A replacement for x is False - True if the instance is a BoolClass False Singleton - """ - if not issubclass(self.__class__, BoolClass): - return False - return bool(self) is False - - -class IntBase: - @property - def as_int_oapg(self) -> int: - try: - return self._as_int - except AttributeError: - self._as_int = int(self) - return self._as_int - - @classmethod - def __validate_format( - cls, - arg: typing.Optional[decimal.Decimal], - validation_metadata: ValidationMetadata, - ): - if isinstance(arg, decimal.Decimal): - - denominator = arg.as_integer_ratio()[-1] - if denominator != 1: - raise ApiValueError( - "Invalid value '{}' for type integer at {}".format( - arg, validation_metadata.path_to_item - ) - ) + A replacement for x is True + True if the instance is a BoolClass True Singleton + """ + if not issubclass(self.__class__, BoolClass): + return False + return bool(self) - @classmethod - def _validate_oapg( - cls, - arg, - validation_metadata: ValidationMetadata, - ): + def is_false_oapg(self) -> bool: """ - IntBase _validate_oapg - TODO what about types = (int, number) -> IntBase, NumberBase? 
We could drop int and keep number only + A replacement for x is False + True if the instance is a BoolClass False Singleton """ - if cls._types and int not in cls._types: - cls._types.add(int) - cls.__validate_format(arg, validation_metadata=validation_metadata) - return super()._validate_oapg(arg, validation_metadata=validation_metadata) + if not issubclass(self.__class__, BoolClass): + return False + return bool(self) is False class NoneBase: @@ -1262,73 +895,68 @@ def as_str_oapg(self) -> str: @property def as_date_oapg(self) -> date: - raise Exception("not implemented") + raise Exception('not implemented') @property def as_datetime_oapg(self) -> datetime: - raise Exception("not implemented") + raise Exception('not implemented') @property def as_decimal_oapg(self) -> decimal.Decimal: - raise Exception("not implemented") + raise Exception('not implemented') @property def as_uuid_oapg(self) -> uuid.UUID: - raise Exception("not implemented") + raise Exception('not implemented') @classmethod - def __check_str_validations(cls, arg: str, validation_metadata: ValidationMetadata): - if not hasattr(cls, "MetaOapg"): + def __check_str_validations( + cls, + arg: str, + validation_metadata: ValidationMetadata + ): + if not hasattr(cls, 'MetaOapg'): return - if ( - cls._is_json_validation_enabled_oapg( - "maxLength", validation_metadata.configuration - ) - and hasattr(cls.MetaOapg, "max_length") - and len(arg) > cls.MetaOapg.max_length - ): + if (cls._is_json_validation_enabled_oapg('maxLength', validation_metadata.configuration) and + hasattr(cls.MetaOapg, 'max_length') and + len(arg) > cls.MetaOapg.max_length): cls._raise_validation_errror_message_oapg( value=arg, constraint_msg="length must be less than or equal to", constraint_value=cls.MetaOapg.max_length, - path_to_item=validation_metadata.path_to_item, + path_to_item=validation_metadata.path_to_item ) - if ( - cls._is_json_validation_enabled_oapg( - "minLength", validation_metadata.configuration - ) - and hasattr(cls.MetaOapg, "min_length") - and len(arg) < cls.MetaOapg.min_length - ): + if (cls._is_json_validation_enabled_oapg('minLength', validation_metadata.configuration) and + hasattr(cls.MetaOapg, 'min_length') and + len(arg) < cls.MetaOapg.min_length): cls._raise_validation_errror_message_oapg( value=arg, constraint_msg="length must be greater than or equal to", constraint_value=cls.MetaOapg.min_length, - path_to_item=validation_metadata.path_to_item, + path_to_item=validation_metadata.path_to_item ) - if cls._is_json_validation_enabled_oapg( - "pattern", validation_metadata.configuration - ) and hasattr(cls.MetaOapg, "regex"): + if (cls._is_json_validation_enabled_oapg('pattern', validation_metadata.configuration) and + hasattr(cls.MetaOapg, 'regex')): for regex_dict in cls.MetaOapg.regex: - flags = regex_dict.get("flags", 0) - if not re.search(regex_dict["pattern"], arg, flags=flags): + flags = regex_dict.get('flags', 0) + if not re.search(regex_dict['pattern'], arg, flags=flags): if flags != 0: # Don't print the regex flags if the flags are not # specified in the OAS document. 
cls._raise_validation_errror_message_oapg( value=arg, constraint_msg="must match regular expression", - constraint_value=regex_dict["pattern"], + constraint_value=regex_dict['pattern'], path_to_item=validation_metadata.path_to_item, - additional_txt=" with flags=`{}`".format(flags), + additional_txt=" with flags=`{}`".format(flags) ) cls._raise_validation_errror_message_oapg( value=arg, constraint_msg="must match regular expression", - constraint_value=regex_dict["pattern"], - path_to_item=validation_metadata.path_to_item, + constraint_value=regex_dict['pattern'], + path_to_item=validation_metadata.path_to_item ) @classmethod @@ -1336,31 +964,52 @@ def _validate_oapg( cls, arg, validation_metadata: ValidationMetadata, - ) -> typing.Dict[ - typing.Tuple[typing.Union[str, int], ...], - typing.Set[ - typing.Union[ - "Schema", - str, - decimal.Decimal, - BoolClass, - NoneClass, - frozendict.frozendict, - tuple, - ] - ], - ]: + ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]: """ StrBase _validate_oapg Validates that validations pass """ if isinstance(arg, str): - if hasattr(cls.MetaOapg, "x_konfig_strip") and cls.MetaOapg.x_konfig_strip: + if hasattr(cls.MetaOapg, 'x_konfig_strip') and cls.MetaOapg.x_konfig_strip: arg = arg.strip() cls.__check_str_validations(arg, validation_metadata) return super()._validate_oapg(arg, validation_metadata=validation_metadata) +class IntBase: + @property + def as_int_oapg(self) -> int: + try: + return self._as_int + except AttributeError: + self._as_int = int(self) + return self._as_int + + @classmethod + def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata): + if isinstance(arg, decimal.Decimal): + + denominator = arg.as_integer_ratio()[-1] + if denominator != 1: + raise ApiValueError( + "Invalid value '{}' for type integer at {}".format(arg, validation_metadata.path_to_item) + ) + + @classmethod + def _validate_oapg( + cls, + arg, + validation_metadata: ValidationMetadata, + ): + """ + IntBase _validate_oapg + TODO what about types = (int, number) -> IntBase, NumberBase? 
We could drop int and keep number only + """ + if cls._types and int not in cls._types: cls._types.add(int) + cls.__validate_format(arg, validation_metadata=validation_metadata) + return super()._validate_oapg(arg, validation_metadata=validation_metadata) + + class UUIDBase: @property @functools.lru_cache() @@ -1368,18 +1017,14 @@ def as_uuid_oapg(self) -> uuid.UUID: return uuid.UUID(self) @classmethod - def __validate_format( - cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata - ): + def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata): if isinstance(arg, str): try: uuid.UUID(arg) return True except ValueError: raise ApiValueError( - "Invalid value '{}' for type UUID at {}".format( - arg, validation_metadata.path_to_item - ) + "Invalid value '{}' for type UUID at {}".format(arg, validation_metadata.path_to_item) ) @classmethod @@ -1401,17 +1046,17 @@ class CustomIsoparser(isoparser): def parse_isodatetime(self, dt_str): components, pos = self._parse_isodate(dt_str) if len(dt_str) > pos: - if self._sep is None or dt_str[pos : pos + 1] == self._sep: - components += self._parse_isotime(dt_str[pos + 1 :]) + if self._sep is None or dt_str[pos:pos + 1] == self._sep: + components += self._parse_isotime(dt_str[pos + 1:]) else: - raise ValueError("String contains unknown ISO components") + raise ValueError('String contains unknown ISO components') if len(components) > 3 and components[3] == 24: components[3] = 0 return datetime(*components) + timedelta(days=1) if len(components) <= 3: - raise ValueError("Value is not a datetime") + raise ValueError('Value is not a datetime') return datetime(*components) @@ -1420,10 +1065,10 @@ def parse_isodate(self, datestr): components, pos = self._parse_isodate(datestr) if len(datestr) > pos: - raise ValueError("String contains invalid time components") + raise ValueError('String contains invalid time components') if len(components) > 3: - raise ValueError("String contains invalid time components") + raise ValueError('String contains invalid time components') return date(*components) @@ -1438,9 +1083,7 @@ def as_date_oapg(self) -> date: return DEFAULT_ISOPARSER.parse_isodate(self) @classmethod - def __validate_format( - cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata - ): + def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata): if isinstance(arg, str): try: DEFAULT_ISOPARSER.parse_isodate(arg) @@ -1448,9 +1091,7 @@ def __validate_format( except ValueError: raise ApiValueError( "Value does not conform to the required ISO-8601 date format. " - "Invalid value '{}' for type date at {}".format( - arg, validation_metadata.path_to_item - ) + "Invalid value '{}' for type date at {}".format(arg, validation_metadata.path_to_item) ) @classmethod @@ -1473,9 +1114,7 @@ def as_datetime_oapg(self) -> datetime: return DEFAULT_ISOPARSER.parse_isodatetime(self) @classmethod - def __validate_format( - cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata - ): + def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata): if isinstance(arg, str): try: DEFAULT_ISOPARSER.parse_isodatetime(arg) @@ -1483,9 +1122,7 @@ def __validate_format( except ValueError: raise ApiValueError( "Value does not conform to the required ISO-8601 datetime format. 
" - "Invalid value '{}' for type datetime at {}".format( - arg, validation_metadata.path_to_item - ) + "Invalid value '{}' for type datetime at {}".format(arg, validation_metadata.path_to_item) ) @classmethod @@ -1514,9 +1151,7 @@ def as_decimal_oapg(self) -> decimal.Decimal: return decimal.Decimal(self) @classmethod - def __validate_format( - cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata - ): + def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata): if isinstance(arg, str): try: decimal.Decimal(arg) @@ -1524,9 +1159,7 @@ def __validate_format( except decimal.InvalidOperation: raise ApiValueError( "Value cannot be converted to a decimal. " - "Invalid value '{}' for type decimal at {}".format( - arg, validation_metadata.path_to_item - ) + "Invalid value '{}' for type decimal at {}".format(arg, validation_metadata.path_to_item) ) @classmethod @@ -1561,7 +1194,7 @@ def as_int_oapg(self) -> int: if self.as_tuple().exponent < 0: # this could be represented as an integer but should be represented as a float # because that's what it was serialized from - raise ApiValueError(f"{self} is not an integer") + raise ApiValueError(f'{self} is not an integer') self._as_int = int(self) return self._as_int @@ -1571,93 +1204,79 @@ def as_float_oapg(self) -> float: return self._as_float except AttributeError: if self.as_tuple().exponent >= 0: - raise ApiValueError(f"{self} is not an float") + raise ApiValueError(f'{self} is not an float') self._as_float = float(self) return self._as_float @classmethod - def __check_numeric_validations(cls, arg, validation_metadata: ValidationMetadata): - if not hasattr(cls, "MetaOapg"): + def __check_numeric_validations( + cls, + arg, + validation_metadata: ValidationMetadata + ): + if not hasattr(cls, 'MetaOapg'): return - if cls._is_json_validation_enabled_oapg( - "multipleOf", validation_metadata.configuration - ) and hasattr(cls.MetaOapg, "multiple_of"): + if cls._is_json_validation_enabled_oapg('multipleOf', + validation_metadata.configuration) and hasattr(cls.MetaOapg, 'multiple_of'): multiple_of_value = cls.MetaOapg.multiple_of - if not (float(arg) / multiple_of_value).is_integer(): + if (not (float(arg) / multiple_of_value).is_integer()): # Note 'multipleOf' will be as good as the floating point arithmetic. 
cls._raise_validation_errror_message_oapg( value=arg, constraint_msg="value must be a multiple of", constraint_value=multiple_of_value, - path_to_item=validation_metadata.path_to_item, + path_to_item=validation_metadata.path_to_item ) checking_max_or_min_values = any( - hasattr(cls.MetaOapg, validation_key) - for validation_key in { - "exclusive_maximum", - "inclusive_maximum", - "exclusive_minimum", - "inclusive_minimum", + hasattr(cls.MetaOapg, validation_key) for validation_key in { + 'exclusive_maximum', + 'inclusive_maximum', + 'exclusive_minimum', + 'inclusive_minimum', } ) if not checking_max_or_min_values: return - if ( - cls._is_json_validation_enabled_oapg( - "exclusiveMaximum", validation_metadata.configuration - ) - and hasattr(cls.MetaOapg, "exclusive_maximum") - and arg >= cls.MetaOapg.exclusive_maximum - ): + if (cls._is_json_validation_enabled_oapg('exclusiveMaximum', validation_metadata.configuration) and + hasattr(cls.MetaOapg, 'exclusive_maximum') and + arg >= cls.MetaOapg.exclusive_maximum): cls._raise_validation_errror_message_oapg( value=arg, constraint_msg="must be a value less than", constraint_value=cls.MetaOapg.exclusive_maximum, - path_to_item=validation_metadata.path_to_item, + path_to_item=validation_metadata.path_to_item ) - if ( - cls._is_json_validation_enabled_oapg( - "maximum", validation_metadata.configuration - ) - and hasattr(cls.MetaOapg, "inclusive_maximum") - and arg > cls.MetaOapg.inclusive_maximum - ): + if (cls._is_json_validation_enabled_oapg('maximum', validation_metadata.configuration) and + hasattr(cls.MetaOapg, 'inclusive_maximum') and + arg > cls.MetaOapg.inclusive_maximum): cls._raise_validation_errror_message_oapg( value=arg, constraint_msg="must be a value less than or equal to", constraint_value=cls.MetaOapg.inclusive_maximum, - path_to_item=validation_metadata.path_to_item, + path_to_item=validation_metadata.path_to_item ) - if ( - cls._is_json_validation_enabled_oapg( - "exclusiveMinimum", validation_metadata.configuration - ) - and hasattr(cls.MetaOapg, "exclusive_minimum") - and arg <= cls.MetaOapg.exclusive_minimum - ): + if (cls._is_json_validation_enabled_oapg('exclusiveMinimum', validation_metadata.configuration) and + hasattr(cls.MetaOapg, 'exclusive_minimum') and + arg <= cls.MetaOapg.exclusive_minimum): cls._raise_validation_errror_message_oapg( value=arg, constraint_msg="must be a value greater than", constraint_value=cls.MetaOapg.exclusive_maximum, - path_to_item=validation_metadata.path_to_item, + path_to_item=validation_metadata.path_to_item ) - if ( - cls._is_json_validation_enabled_oapg( - "minimum", validation_metadata.configuration - ) - and hasattr(cls.MetaOapg, "inclusive_minimum") - and arg < cls.MetaOapg.inclusive_minimum - ): + if (cls._is_json_validation_enabled_oapg('minimum', validation_metadata.configuration) and + hasattr(cls.MetaOapg, 'inclusive_minimum') and + arg < cls.MetaOapg.inclusive_minimum): cls._raise_validation_errror_message_oapg( value=arg, constraint_msg="must be a value greater than or equal to", constraint_value=cls.MetaOapg.inclusive_minimum, - path_to_item=validation_metadata.path_to_item, + path_to_item=validation_metadata.path_to_item ) @classmethod @@ -1665,20 +1284,7 @@ def _validate_oapg( cls, arg, validation_metadata: ValidationMetadata, - ) -> typing.Dict[ - typing.Tuple[typing.Union[str, int], ...], - typing.Set[ - typing.Union[ - "Schema", - str, - decimal.Decimal, - BoolClass, - NoneClass, - frozendict.frozendict, - tuple, - ] - ], - ]: + ) -> 
typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]: """ NumberBase _validate_oapg Validates that validations pass @@ -1708,71 +1314,58 @@ def __validate_items(cls, list_items, validation_metadata: ValidationMetadata): # if we have definitions for an items schema, use it # otherwise accept anything - item_cls = getattr(cls.MetaOapg, "items", UnsetAnyTypeSchema) + item_cls = getattr(cls.MetaOapg, 'items', UnsetAnyTypeSchema) item_cls = cls._get_class_oapg(item_cls) path_to_schemas = {} for i, value in enumerate(list_items): item_validation_metadata = ValidationMetadata( from_server=validation_metadata.from_server, configuration=validation_metadata.configuration, - path_to_item=validation_metadata.path_to_item + (i,), - validated_path_to_schemas=validation_metadata.validated_path_to_schemas, + path_to_item=validation_metadata.path_to_item+(i,), + validated_path_to_schemas=validation_metadata.validated_path_to_schemas ) if item_validation_metadata.validation_ran_earlier(item_cls): continue other_path_to_schemas = item_cls._validate_oapg( - value, validation_metadata=item_validation_metadata - ) + value, validation_metadata=item_validation_metadata) update(path_to_schemas, other_path_to_schemas) return path_to_schemas @classmethod - def __check_tuple_validations(cls, arg, validation_metadata: ValidationMetadata): - if not hasattr(cls, "MetaOapg"): + def __check_tuple_validations( + cls, arg, + validation_metadata: ValidationMetadata): + if not hasattr(cls, 'MetaOapg'): return - if ( - cls._is_json_validation_enabled_oapg( - "maxItems", validation_metadata.configuration - ) - and hasattr(cls.MetaOapg, "max_items") - and len(arg) > cls.MetaOapg.max_items - ): + if (cls._is_json_validation_enabled_oapg('maxItems', validation_metadata.configuration) and + hasattr(cls.MetaOapg, 'max_items') and + len(arg) > cls.MetaOapg.max_items): cls._raise_validation_errror_message_oapg( value=arg, constraint_msg="number of items must be less than or equal to", constraint_value=cls.MetaOapg.max_items, - path_to_item=validation_metadata.path_to_item, + path_to_item=validation_metadata.path_to_item ) - if ( - cls._is_json_validation_enabled_oapg( - "minItems", validation_metadata.configuration - ) - and hasattr(cls.MetaOapg, "min_items") - and len(arg) < cls.MetaOapg.min_items - ): + if (cls._is_json_validation_enabled_oapg('minItems', validation_metadata.configuration) and + hasattr(cls.MetaOapg, 'min_items') and + len(arg) < cls.MetaOapg.min_items): cls._raise_validation_errror_message_oapg( value=arg, constraint_msg="number of items must be greater than or equal to", constraint_value=cls.MetaOapg.min_items, - path_to_item=validation_metadata.path_to_item, + path_to_item=validation_metadata.path_to_item ) - if ( - cls._is_json_validation_enabled_oapg( - "uniqueItems", validation_metadata.configuration - ) - and hasattr(cls.MetaOapg, "unique_items") - and cls.MetaOapg.unique_items - and arg - ): + if (cls._is_json_validation_enabled_oapg('uniqueItems', validation_metadata.configuration) and + hasattr(cls.MetaOapg, 'unique_items') and cls.MetaOapg.unique_items and arg): unique_items = set(arg) if len(arg) > len(unique_items): cls._raise_validation_errror_message_oapg( value=arg, constraint_msg="duplicate items were found, and the tuple must not contain duplicates because", - constraint_value="unique_items==True", - path_to_item=validation_metadata.path_to_item, + constraint_value='unique_items==True', + 
path_to_item=validation_metadata.path_to_item ) @classmethod @@ -1798,9 +1391,7 @@ def _validate_oapg( """ if isinstance(arg, tuple): cls.__check_tuple_validations(arg, validation_metadata) - _path_to_schemas = super()._validate_oapg( - arg, validation_metadata=validation_metadata - ) + _path_to_schemas = super()._validate_oapg(arg, validation_metadata=validation_metadata) if not isinstance(arg, tuple): return _path_to_schemas updated_vm = ValidationMetadata( @@ -1808,33 +1399,31 @@ def _validate_oapg( from_server=validation_metadata.from_server, path_to_item=validation_metadata.path_to_item, seen_classes=validation_metadata.seen_classes | frozenset({cls}), - validated_path_to_schemas=validation_metadata.validated_path_to_schemas, - ) - other_path_to_schemas = cls.__validate_items( - arg, validation_metadata=updated_vm + validated_path_to_schemas=validation_metadata.validated_path_to_schemas ) + other_path_to_schemas = cls.__validate_items(arg, validation_metadata=updated_vm) update(_path_to_schemas, other_path_to_schemas) return _path_to_schemas @classmethod def _get_items_oapg( - cls: "Schema", + cls: 'Schema', arg: typing.List[typing.Any], path_to_item: typing.Tuple[typing.Union[str, int], ...], - path_to_schemas: typing.Dict[ - typing.Tuple[typing.Union[str, int], ...], typing.Type["Schema"] - ], + path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']] ): - """ + ''' ListBase _get_items_oapg - """ + ''' cast_items = [] for i, value in enumerate(arg): item_path_to_item = path_to_item + (i,) item_cls = path_to_schemas[item_path_to_item] new_value = item_cls._get_new_instance_without_conversion_oapg( - value, item_path_to_item, path_to_schemas + value, + item_path_to_item, + path_to_schemas ) cast_items.append(new_value) @@ -1845,26 +1434,20 @@ class Discriminable: MetaOapg: MetaOapgTyped @classmethod - def _ensure_discriminator_value_present_oapg( - cls, disc_property_name: str, validation_metadata: ValidationMetadata, *args - ): + def _ensure_discriminator_value_present_oapg(cls, disc_property_name: str, validation_metadata: ValidationMetadata, *args): if not args or args and disc_property_name not in args[0]: # The input data does not contain the discriminator property raise ApiValueError( "Cannot deserialize input data due to missing discriminator. 
" - "The discriminator property '{}' is missing at path: {}".format( - disc_property_name, validation_metadata.path_to_item - ) + "The discriminator property '{}' is missing at path: {}".format(disc_property_name, validation_metadata.path_to_item) ) @classmethod - def get_discriminated_class_oapg( - cls, disc_property_name: str, disc_payload_value: str - ): + def get_discriminated_class_oapg(cls, disc_property_name: str, disc_payload_value: str): """ Used in schemas with discriminators """ - if not hasattr(cls.MetaOapg, "discriminator"): + if not hasattr(cls.MetaOapg, 'discriminator'): return None disc = cls.MetaOapg.discriminator() if disc_property_name not in disc: @@ -1872,37 +1455,31 @@ def get_discriminated_class_oapg( discriminated_cls = disc[disc_property_name].get(disc_payload_value) if discriminated_cls is not None: return discriminated_cls - if not hasattr(cls, "MetaOapg"): + if not hasattr(cls, 'MetaOapg'): return None elif not ( - hasattr(cls.MetaOapg, "all_of") - or hasattr(cls.MetaOapg, "one_of") - or hasattr(cls.MetaOapg, "any_of") + hasattr(cls.MetaOapg, 'all_of') or + hasattr(cls.MetaOapg, 'one_of') or + hasattr(cls.MetaOapg, 'any_of') ): return None # TODO stop traveling if a cycle is hit - if hasattr(cls.MetaOapg, "all_of"): + if hasattr(cls.MetaOapg, 'all_of'): for allof_cls in cls.MetaOapg.all_of(): discriminated_cls = allof_cls.get_discriminated_class_oapg( - disc_property_name=disc_property_name, - disc_payload_value=disc_payload_value, - ) + disc_property_name=disc_property_name, disc_payload_value=disc_payload_value) if discriminated_cls is not None: return discriminated_cls - if hasattr(cls.MetaOapg, "one_of"): + if hasattr(cls.MetaOapg, 'one_of'): for oneof_cls in cls.MetaOapg.one_of(): discriminated_cls = oneof_cls.get_discriminated_class_oapg( - disc_property_name=disc_property_name, - disc_payload_value=disc_payload_value, - ) + disc_property_name=disc_property_name, disc_payload_value=disc_payload_value) if discriminated_cls is not None: return discriminated_cls - if hasattr(cls.MetaOapg, "any_of"): + if hasattr(cls.MetaOapg, 'any_of'): for anyof_cls in cls.MetaOapg.any_of(): discriminated_cls = anyof_cls.get_discriminated_class_oapg( - disc_property_name=disc_property_name, - disc_payload_value=disc_payload_value, - ) + disc_property_name=disc_property_name, disc_payload_value=disc_payload_value) if discriminated_cls is not None: return discriminated_cls return None @@ -1932,12 +1509,10 @@ def __validate_arg_presence(cls, arg, validation_metadata: ValidationMetadata): """ seen_required_properties = set() invalid_arguments = [] - required_property_names = getattr(cls.MetaOapg, "required", set()) - additional_properties = getattr( - cls.MetaOapg, "additional_properties", UnsetAnyTypeSchema - ) - properties = getattr(cls.MetaOapg, "properties", {}) - property_annotations = getattr(properties, "__annotations__", {}) + required_property_names = getattr(cls.MetaOapg, 'required', set()) + additional_properties = getattr(cls.MetaOapg, 'additional_properties', UnsetAnyTypeSchema) + properties = getattr(cls.MetaOapg, 'properties', {}) + property_annotations = getattr(properties, '__annotations__', {}) for property_name in arg: if property_name in required_property_names: seen_required_properties.add(property_name) @@ -1947,9 +1522,7 @@ def __validate_arg_presence(cls, arg, validation_metadata: ValidationMetadata): continue else: invalid_arguments.append(property_name) - missing_required_arguments = list( - required_property_names - seen_required_properties - ) + 
missing_required_arguments = list(required_property_names - seen_required_properties) if missing_required_arguments: missing_required_arguments.sort() raise MissingRequiredPropertiesError( @@ -1957,16 +1530,8 @@ def __validate_arg_presence(cls, arg, validation_metadata: ValidationMetadata): cls.__name__, len(missing_required_arguments), "ies" if len(missing_required_arguments) > 1 else "y", - ( - " at '{}'".format( - ".".join( - [str(i) for i in validation_metadata.path_to_item[1:]] - ) - ) - if len(validation_metadata.path_to_item) > 1 - else "" - ), - missing_required_arguments, + " at '{}'".format('.'.join([str(i) for i in validation_metadata.path_to_item[1:]])) if len(validation_metadata.path_to_item) > 1 else "", + missing_required_arguments ) ) if invalid_arguments: @@ -1976,7 +1541,7 @@ def __validate_arg_presence(cls, arg, validation_metadata: ValidationMetadata): cls.__name__, len(invalid_arguments), "s" if len(invalid_arguments) > 1 else "", - invalid_arguments, + invalid_arguments ) ) @@ -1995,14 +1560,12 @@ def __validate_args(cls, arg, validation_metadata: ValidationMetadata): ApiTypeError - for missing required arguments, or for invalid properties """ path_to_schemas = {} - additional_properties = getattr( - cls.MetaOapg, "additional_properties", UnsetAnyTypeSchema - ) - properties = getattr(cls.MetaOapg, "properties", {}) - property_annotations = getattr(properties, "__annotations__", {}) + additional_properties = getattr(cls.MetaOapg, 'additional_properties', UnsetAnyTypeSchema) + properties = getattr(cls.MetaOapg, 'properties', {}) + property_annotations = getattr(properties, '__annotations__', {}) validation_errors = [] for property_name, value in arg.items(): - path_to_item = validation_metadata.path_to_item + (property_name,) + path_to_item = validation_metadata.path_to_item+(property_name,) if property_name in property_annotations: schema = property_annotations[property_name] elif additional_properties is not NotAnyTypeSchema: @@ -2016,24 +1579,20 @@ def __validate_args(cls, arg, validation_metadata: ValidationMetadata): continue schema = additional_properties else: - raise ApiTypeError( - "Unable to find schema for value={} in class={} at path_to_item={}".format( - value, cls, validation_metadata.path_to_item + (property_name,) - ) - ) + raise ApiTypeError('Unable to find schema for value={} in class={} at path_to_item={}'.format( + value, cls, validation_metadata.path_to_item+(property_name,) + )) schema = cls._get_class_oapg(schema) arg_validation_metadata = ValidationMetadata( from_server=validation_metadata.from_server, configuration=validation_metadata.configuration, path_to_item=path_to_item, - validated_path_to_schemas=validation_metadata.validated_path_to_schemas, + validated_path_to_schemas=validation_metadata.validated_path_to_schemas ) if arg_validation_metadata.validation_ran_earlier(schema): continue try: - other_path_to_schemas = schema._validate_oapg( - value, validation_metadata=arg_validation_metadata - ) + other_path_to_schemas = schema._validate_oapg(value, validation_metadata=arg_validation_metadata) update(path_to_schemas, other_path_to_schemas) except (ApiTypeError, ApiValueError, MissingRequiredPropertiesError) as e: validation_errors.append(e) @@ -2042,35 +1601,31 @@ def __validate_args(cls, arg, validation_metadata: ValidationMetadata): return path_to_schemas @classmethod - def __check_dict_validations(cls, arg, validation_metadata: ValidationMetadata): - if not hasattr(cls, "MetaOapg"): + def __check_dict_validations( + cls, + arg, + 
validation_metadata: ValidationMetadata + ): + if not hasattr(cls, 'MetaOapg'): return - if ( - cls._is_json_validation_enabled_oapg( - "maxProperties", validation_metadata.configuration - ) - and hasattr(cls.MetaOapg, "max_properties") - and len(arg) > cls.MetaOapg.max_properties - ): + if (cls._is_json_validation_enabled_oapg('maxProperties', validation_metadata.configuration) and + hasattr(cls.MetaOapg, 'max_properties') and + len(arg) > cls.MetaOapg.max_properties): cls._raise_validation_errror_message_oapg( value=arg, constraint_msg="number of properties must be less than or equal to", constraint_value=cls.MetaOapg.max_properties, - path_to_item=validation_metadata.path_to_item, + path_to_item=validation_metadata.path_to_item ) - if ( - cls._is_json_validation_enabled_oapg( - "minProperties", validation_metadata.configuration - ) - and hasattr(cls.MetaOapg, "min_properties") - and len(arg) < cls.MetaOapg.min_properties - ): + if (cls._is_json_validation_enabled_oapg('minProperties', validation_metadata.configuration) and + hasattr(cls.MetaOapg, 'min_properties') and + len(arg) < cls.MetaOapg.min_properties): cls._raise_validation_errror_message_oapg( value=arg, constraint_msg="number of properties must be greater than or equal to", constraint_value=cls.MetaOapg.min_properties, - path_to_item=validation_metadata.path_to_item, + path_to_item=validation_metadata.path_to_item ) @classmethod @@ -2096,15 +1651,11 @@ def _validate_oapg( """ if isinstance(arg, frozendict.frozendict): cls.__check_dict_validations(arg, validation_metadata) - _path_to_schemas = super()._validate_oapg( - arg, validation_metadata=validation_metadata - ) + _path_to_schemas = super()._validate_oapg(arg, validation_metadata=validation_metadata) if not isinstance(arg, frozendict.frozendict): return _path_to_schemas cls.__validate_arg_presence(arg, validation_metadata) - other_path_to_schemas = cls.__validate_args( - arg, validation_metadata=validation_metadata - ) + other_path_to_schemas = cls.__validate_args(arg, validation_metadata=validation_metadata) update(_path_to_schemas, other_path_to_schemas) try: discriminator = cls.MetaOapg.discriminator() @@ -2112,19 +1663,16 @@ def _validate_oapg( return _path_to_schemas # discriminator exists disc_prop_name = list(discriminator.keys())[0] - cls._ensure_discriminator_value_present_oapg( - disc_prop_name, validation_metadata, arg - ) + cls._ensure_discriminator_value_present_oapg(disc_prop_name, validation_metadata, arg) discriminated_cls = cls.get_discriminated_class_oapg( - disc_property_name=disc_prop_name, disc_payload_value=arg[disc_prop_name] - ) + disc_property_name=disc_prop_name, disc_payload_value=arg[disc_prop_name]) if discriminated_cls is None: raise ApiValueError( "Invalid discriminator value was passed in to {}.{} Only the values {} are allowed at {}".format( cls.__name__, disc_prop_name, list(discriminator[disc_prop_name].keys()), - validation_metadata.path_to_item + (disc_prop_name,), + validation_metadata.path_to_item + (disc_prop_name,) ) ) updated_vm = ValidationMetadata( @@ -2132,13 +1680,11 @@ def _validate_oapg( from_server=validation_metadata.from_server, path_to_item=validation_metadata.path_to_item, seen_classes=validation_metadata.seen_classes | frozenset({cls}), - validated_path_to_schemas=validation_metadata.validated_path_to_schemas, + validated_path_to_schemas=validation_metadata.validated_path_to_schemas ) if updated_vm.validation_ran_earlier(discriminated_cls): return _path_to_schemas - other_path_to_schemas = 
discriminated_cls._validate_oapg( - arg, validation_metadata=updated_vm - ) + other_path_to_schemas = discriminated_cls._validate_oapg(arg, validation_metadata=updated_vm) update(_path_to_schemas, other_path_to_schemas) return _path_to_schemas @@ -2147,9 +1693,7 @@ def _get_properties_oapg( cls, arg: typing.Dict[str, typing.Any], path_to_item: typing.Tuple[typing.Union[str, int], ...], - path_to_schemas: typing.Dict[ - typing.Tuple[typing.Union[str, int], ...], typing.Type["Schema"] - ], + path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']] ): """ DictBase _get_properties_oapg, this is how properties are set @@ -2161,7 +1705,9 @@ def _get_properties_oapg( property_path_to_item = path_to_item + (property_name_js,) property_cls = path_to_schemas[property_path_to_item] new_value = property_cls._get_new_instance_without_conversion_oapg( - value, property_path_to_item, path_to_schemas + value, + property_path_to_item, + path_to_schemas ) dict_items[property_name_js] = new_value @@ -2169,9 +1715,7 @@ def _get_properties_oapg( def __setattr__(self, name: str, value: typing.Any): if not isinstance(self, FileIO): - raise AttributeError( - "property setting not supported on immutable instances" - ) + raise AttributeError('property setting not supported on immutable instances') def __getattr__(self, name: str): """ @@ -2198,7 +1742,7 @@ def __getitem__(self, name: str): return super().__getattr__(name) return super().__getitem__(name) - def get_item_oapg(self, name: str) -> typing.Union["AnyTypeSchema", Unset]: + def get_item_oapg(self, name: str) -> typing.Union['AnyTypeSchema', Unset]: # dict_instance[name] accessor if not isinstance(self, frozendict.frozendict): raise NotImplementedError() @@ -2209,51 +1753,12 @@ def get_item_oapg(self, name: str) -> typing.Union["AnyTypeSchema", Unset]: def cast_to_allowed_types( - arg: typing.Union[ - str, - date, - datetime, - uuid.UUID, - decimal.Decimal, - int, - float, - None, - dict, - frozendict.frozendict, - list, - tuple, - bytes, - Schema, - io.FileIO, - io.BufferedReader, - ], + arg: typing.Union[str, date, datetime, uuid.UUID, decimal.Decimal, int, float, None, dict, frozendict.frozendict, list, tuple, bytes, Schema, io.FileIO, io.BufferedReader], from_server: bool, - validated_path_to_schemas: typing.Dict[ - typing.Tuple[typing.Union[str, int], ...], - typing.Set[ - typing.Union[ - "Schema", - str, - decimal.Decimal, - BoolClass, - NoneClass, - frozendict.frozendict, - tuple, - ] - ], - ], - path_to_item: typing.Tuple[typing.Union[str, int], ...] = tuple(["args[0]"]), + validated_path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]], + path_to_item: typing.Tuple[typing.Union[str, int], ...] 
= tuple(['args[0]']), schema: Schema = None, -) -> typing.Union[ - frozendict.frozendict, - tuple, - decimal.Decimal, - str, - bytes, - BoolClass, - NoneClass, - FileIO, -]: +) -> typing.Union[frozendict.frozendict, tuple, decimal.Decimal, str, bytes, BoolClass, NoneClass, FileIO]: """ Casts the input payload arg into the allowed types The input validated_path_to_schemas is mutated by running this function @@ -2277,10 +1782,7 @@ def cast_to_allowed_types( if isinstance(arg, Schema): # store the already run validations schema_classes = set() - source_schema_was_unset = ( - len(arg.__class__.__bases__) == 2 - and UnsetAnyTypeSchema in arg.__class__.__bases__ - ) + source_schema_was_unset = len(arg.__class__.__bases__) == 2 and UnsetAnyTypeSchema in arg.__class__.__bases__ if not source_schema_was_unset: """ Do not include UnsetAnyTypeSchema and its base class because @@ -2294,20 +1796,11 @@ def cast_to_allowed_types( schema_classes.add(cls) validated_path_to_schemas[path_to_item] = schema_classes - type_error = ApiTypeError( - f"Invalid type. Required value type is str and passed type was {type(arg)} at {path_to_item}" - ) + type_error = ApiTypeError(f"Invalid type. Required value type is str and passed type was {type(arg)} at {path_to_item}") if isinstance(arg, str): return str(arg) elif isinstance(arg, (dict, frozendict.frozendict)): - return frozendict.frozendict( - { - key: cast_to_allowed_types( - val, from_server, validated_path_to_schemas, path_to_item + (key,) - ) - for key, val in arg.items() - } - ) + return frozendict.frozendict({key: cast_to_allowed_types(val, from_server, validated_path_to_schemas, path_to_item + (key,)) for key, val in arg.items()}) elif isinstance(arg, (bool, BoolClass)): """ this check must come before isinstance(arg, (int, float)) @@ -2323,17 +1816,10 @@ def cast_to_allowed_types( if decimal_from_float.as_integer_ratio()[1] == 1: # 9.0 -> Decimal('9.0') # 3.4028234663852886e+38 -> Decimal('340282346638528859811704183484516925440.0') - return decimal.Decimal(str(decimal_from_float) + ".0") + return decimal.Decimal(str(decimal_from_float)+'.0') return decimal_from_float elif isinstance(arg, (tuple, list)): - return tuple( - [ - cast_to_allowed_types( - item, from_server, validated_path_to_schemas, path_to_item + (i,) - ) - for i, item in enumerate(arg) - ] - ) + return tuple([cast_to_allowed_types(item, from_server, validated_path_to_schemas, path_to_item + (i,)) for i, item in enumerate(arg)]) elif isinstance(arg, (none_type, NoneClass)): return NoneClass.NONE elif isinstance(arg, (date, datetime)): @@ -2345,7 +1831,7 @@ def cast_to_allowed_types( if schema is DateTimeSchema: return arg.isoformat() if schema is DateSchema: - return arg.strftime("%Y-%m-%d") + return arg.strftime('%Y-%m-%d') raise type_error elif isinstance(arg, uuid.UUID): if not from_server: @@ -2358,15 +1844,8 @@ def cast_to_allowed_types( elif isinstance(arg, (io.FileIO, io.BufferedReader)): return FileIO(arg) elif isinstance(arg, pydantic.BaseModel): - return cast_to_allowed_types( - {k: v for k, v in dict(arg).items() if v is not None}, - from_server, - validated_path_to_schemas, - path_to_item, - ) - raise ValueError( - "Invalid type passed in got input={} type={}".format(arg, type(arg)) - ) + return cast_to_allowed_types({ k:v for k, v in dict(arg).items() if v is not None }, from_server, validated_path_to_schemas, path_to_item) + raise ValueError('Invalid type passed in got input={} type={}'.format(arg, type(arg))) class ComposedBase(Discriminable): @@ -2377,9 +1856,7 @@ def 
__get_allof_classes(cls, arg, validation_metadata: ValidationMetadata): for allof_cls in cls.MetaOapg.all_of(): if validation_metadata.validation_ran_earlier(allof_cls): continue - other_path_to_schemas = allof_cls._validate_oapg( - arg, validation_metadata=validation_metadata - ) + other_path_to_schemas = allof_cls._validate_oapg(arg, validation_metadata=validation_metadata) update(path_to_schemas, other_path_to_schemas) return path_to_schemas @@ -2400,9 +1877,7 @@ def __get_oneof_class( oneof_classes.append(oneof_cls) continue try: - path_to_schemas = oneof_cls._validate_oapg( - arg, validation_metadata=validation_metadata - ) + path_to_schemas = oneof_cls._validate_oapg(arg, validation_metadata=validation_metadata) except (ApiValueError, ApiTypeError) as ex: if discriminated_cls is not None and oneof_cls is discriminated_cls: raise ex @@ -2416,16 +1891,17 @@ def __get_oneof_class( elif len(oneof_classes) > 1: raise ApiValueError( "Invalid inputs given to generate an instance of {}. Multiple " - "oneOf schemas {} matched the inputs, but a max of one is allowed.".format( - cls, oneof_classes - ) + "oneOf schemas {} matched the inputs, but a max of one is allowed.".format(cls, oneof_classes) ) # exactly one class matches return path_to_schemas @classmethod def __get_anyof_classes( - cls, arg, discriminated_cls, validation_metadata: ValidationMetadata + cls, + arg, + discriminated_cls, + validation_metadata: ValidationMetadata ): anyof_classes = [] exceptions: typing.List[typing.Union[ApiTypeError, ApiValueError]] = [] @@ -2436,9 +1912,7 @@ def __get_anyof_classes( continue try: - other_path_to_schemas = anyof_cls._validate_oapg( - arg, validation_metadata=validation_metadata - ) + other_path_to_schemas = anyof_cls._validate_oapg(arg, validation_metadata=validation_metadata) except (ApiValueError, ApiTypeError) as ex: if discriminated_cls is not None and anyof_cls is discriminated_cls: raise ex @@ -2455,20 +1929,7 @@ def _validate_oapg( cls, arg, validation_metadata: ValidationMetadata, - ) -> typing.Dict[ - typing.Tuple[typing.Union[str, int], ...], - typing.Set[ - typing.Union[ - "Schema", - str, - decimal.Decimal, - BoolClass, - NoneClass, - frozendict.frozendict, - tuple, - ] - ], - ]: + ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]: """ ComposedBase _validate_oapg We return dynamic classes of different bases depending upon the inputs @@ -2485,33 +1946,27 @@ def _validate_oapg( ApiTypeError: when the input type is not in the list of allowed spec types """ # validation checking on types, validations, and enums - path_to_schemas = super()._validate_oapg( - arg, validation_metadata=validation_metadata - ) + path_to_schemas = super()._validate_oapg(arg, validation_metadata=validation_metadata) updated_vm = ValidationMetadata( configuration=validation_metadata.configuration, from_server=validation_metadata.from_server, path_to_item=validation_metadata.path_to_item, seen_classes=validation_metadata.seen_classes | frozenset({cls}), - validated_path_to_schemas=validation_metadata.validated_path_to_schemas, + validated_path_to_schemas=validation_metadata.validated_path_to_schemas ) # process composed schema discriminator = None - if hasattr(cls, "MetaOapg") and hasattr(cls.MetaOapg, "discriminator"): + if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'discriminator'): discriminator = cls.MetaOapg.discriminator() discriminated_cls = None if discriminator and arg and 
isinstance(arg, frozendict.frozendict): disc_property_name = list(discriminator.keys())[0] - cls._ensure_discriminator_value_present_oapg( - disc_property_name, updated_vm, arg - ) + cls._ensure_discriminator_value_present_oapg(disc_property_name, updated_vm, arg) # get discriminated_cls by looking at the dict in the current class discriminated_cls = cls.get_discriminated_class_oapg( - disc_property_name=disc_property_name, - disc_payload_value=arg[disc_property_name], - ) + disc_property_name=disc_property_name, disc_payload_value=arg[disc_property_name]) if discriminated_cls is None: raise ApiValueError( "Invalid discriminator value '{}' was passed in to {}.{} Only the values {} are allowed at {}".format( @@ -2519,27 +1974,29 @@ def _validate_oapg( cls.__name__, disc_property_name, list(discriminator[disc_property_name].keys()), - updated_vm.path_to_item + (disc_property_name,), + updated_vm.path_to_item + (disc_property_name,) ) ) - if hasattr(cls, "MetaOapg") and hasattr(cls.MetaOapg, "all_of"): - other_path_to_schemas = cls.__get_allof_classes( - arg, validation_metadata=updated_vm - ) + if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'all_of'): + other_path_to_schemas = cls.__get_allof_classes(arg, validation_metadata=updated_vm) update(path_to_schemas, other_path_to_schemas) - if hasattr(cls, "MetaOapg") and hasattr(cls.MetaOapg, "one_of"): + if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'one_of'): other_path_to_schemas = cls.__get_oneof_class( - arg, discriminated_cls=discriminated_cls, validation_metadata=updated_vm + arg, + discriminated_cls=discriminated_cls, + validation_metadata=updated_vm ) update(path_to_schemas, other_path_to_schemas) - if hasattr(cls, "MetaOapg") and hasattr(cls.MetaOapg, "any_of"): + if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'any_of'): other_path_to_schemas = cls.__get_anyof_classes( - arg, discriminated_cls=discriminated_cls, validation_metadata=updated_vm + arg, + discriminated_cls=discriminated_cls, + validation_metadata=updated_vm ) update(path_to_schemas, other_path_to_schemas) not_cls = None - if hasattr(cls, "MetaOapg") and hasattr(cls.MetaOapg, "not_schema"): + if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'not_schema'): not_cls = cls.MetaOapg.not_schema not_cls = cls._get_class_oapg(not_cls) if not_cls: @@ -2555,17 +2012,13 @@ def _validate_oapg( raise not_exception try: - other_path_to_schemas = not_cls._validate_oapg( - arg, validation_metadata=updated_vm - ) + other_path_to_schemas = not_cls._validate_oapg(arg, validation_metadata=updated_vm) except (ApiValueError, ApiTypeError): pass if other_path_to_schemas: raise not_exception - if discriminated_cls is not None and not updated_vm.validation_ran_earlier( - discriminated_cls - ): + if discriminated_cls is not None and not updated_vm.validation_ran_earlier(discriminated_cls): if discriminated_cls not in path_to_schemas[updated_vm.path_to_item]: raise ApiValueError("Could not find discriminator in value") return path_to_schemas @@ -2582,82 +2035,67 @@ class ComposedSchema( BoolBase, NoneBase, Schema, - NoneFrozenDictTupleStrDecimalBoolMixin, + NoneFrozenDictTupleStrDecimalBoolMixin ): @classmethod - def from_openapi_data_oapg( - cls, - *args: typing.Any, - _configuration: typing.Optional[Configuration] = None, - **kwargs, - ): + def from_openapi_data_oapg(cls, *args: typing.Any, _configuration: typing.Optional[Configuration] = None, **kwargs): if not args: if not kwargs: - raise ApiTypeError( - "{} is missing required input data in args or kwargs".format( - 
cls.__name__ - ) - ) - args = (kwargs,) + raise ApiTypeError('{} is missing required input data in args or kwargs'.format(cls.__name__)) + args = (kwargs, ) return super().from_openapi_data_oapg(args[0], _configuration=_configuration) -class ListSchema(ListBase, Schema, TupleMixin): +class ListSchema( + ListBase, + Schema, + TupleMixin +): @classmethod - def from_openapi_data_oapg( - cls, - arg: typing.List[typing.Any], - _configuration: typing.Optional[Configuration] = None, - ): + def from_openapi_data_oapg(cls, arg: typing.List[typing.Any], _configuration: typing.Optional[Configuration] = None): return super().from_openapi_data_oapg(arg, _configuration=_configuration) - def __new__( - cls, - arg: typing.Union[typing.List[typing.Any], typing.Tuple[typing.Any]], - **kwargs: Configuration, - ): + def __new__(cls, arg: typing.Union[typing.List[typing.Any], typing.Tuple[typing.Any]], **kwargs: Configuration): return super().__new__(cls, arg, **kwargs) -class NoneSchema(NoneBase, Schema, NoneMixin): +class NoneSchema( + NoneBase, + Schema, + NoneMixin +): @classmethod - def from_openapi_data_oapg( - cls, arg: None, _configuration: typing.Optional[Configuration] = None - ): + def from_openapi_data_oapg(cls, arg: None, _configuration: typing.Optional[Configuration] = None): return super().from_openapi_data_oapg(arg, _configuration=_configuration) def __new__(cls, arg: None, **kwargs: Configuration): return super().__new__(cls, arg, **kwargs) -class NumberSchema(NumberBase, Schema, NumberMixin): +class NumberSchema( + NumberBase, + Schema, + NumberMixin +): """ This is used for type: number with no format Both integers AND floats are accepted """ @classmethod - def from_openapi_data_oapg( - cls, - arg: typing.Union[int, float], - _configuration: typing.Optional[Configuration] = None, - ): + def from_openapi_data_oapg(cls, arg: typing.Union[int, float], _configuration: typing.Optional[Configuration] = None): return super().from_openapi_data_oapg(arg, _configuration=_configuration) - def __new__( - cls, arg: typing.Union[decimal.Decimal, int, float], **kwargs: Configuration - ): + def __new__(cls, arg: typing.Union[decimal.Decimal, int, float], **kwargs: Configuration): return super().__new__(cls, arg, **kwargs) class IntSchema(IntBase, NumberBase, Schema, IntMixin): @classmethod - def from_openapi_data_oapg( - cls, arg: int, _configuration: typing.Optional[Configuration] = None - ): + def from_openapi_data_oapg(cls, arg: int, _configuration: typing.Optional[Configuration] = None): return super().from_openapi_data_oapg(arg, _configuration=_configuration) def __new__(cls, arg: typing.Union[decimal.Decimal, int], **kwargs: Configuration): @@ -2669,17 +2107,11 @@ class Int32Base: __inclusive_maximum = decimal.Decimal(2147483647) @classmethod - def __validate_format( - cls, - arg: typing.Optional[decimal.Decimal], - validation_metadata: ValidationMetadata, - ): + def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata): if isinstance(arg, decimal.Decimal) and arg.as_tuple().exponent == 0: if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum: raise ApiValueError( - "Invalid value '{}' for type int32 at {}".format( - arg, validation_metadata.path_to_item - ) + "Invalid value '{}' for type int32 at {}".format(arg, validation_metadata.path_to_item) ) @classmethod @@ -2695,7 +2127,10 @@ def _validate_oapg( return super()._validate_oapg(arg, validation_metadata=validation_metadata) -class Int32Schema(Int32Base, IntSchema): +class Int32Schema( + 
Int32Base, + IntSchema +): pass @@ -2704,17 +2139,11 @@ class Int64Base: __inclusive_maximum = decimal.Decimal(9223372036854775807) @classmethod - def __validate_format( - cls, - arg: typing.Optional[decimal.Decimal], - validation_metadata: ValidationMetadata, - ): + def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata): if isinstance(arg, decimal.Decimal) and arg.as_tuple().exponent == 0: if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum: raise ApiValueError( - "Invalid value '{}' for type int64 at {}".format( - arg, validation_metadata.path_to_item - ) + "Invalid value '{}' for type int64 at {}".format(arg, validation_metadata.path_to_item) ) @classmethod @@ -2730,26 +2159,23 @@ def _validate_oapg( return super()._validate_oapg(arg, validation_metadata=validation_metadata) -class Int64Schema(Int64Base, IntSchema): +class Int64Schema( + Int64Base, + IntSchema +): pass class Float32Base: - __inclusive_minimum = decimal.Decimal(-3.4028234663852886e38) - __inclusive_maximum = decimal.Decimal(3.4028234663852886e38) + __inclusive_minimum = decimal.Decimal(-3.4028234663852886e+38) + __inclusive_maximum = decimal.Decimal(3.4028234663852886e+38) @classmethod - def __validate_format( - cls, - arg: typing.Optional[decimal.Decimal], - validation_metadata: ValidationMetadata, - ): + def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata): if isinstance(arg, decimal.Decimal): if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum: raise ApiValueError( - "Invalid value '{}' for type float at {}".format( - arg, validation_metadata.path_to_item - ) + "Invalid value '{}' for type float at {}".format(arg, validation_metadata.path_to_item) ) @classmethod @@ -2765,31 +2191,26 @@ def _validate_oapg( return super()._validate_oapg(arg, validation_metadata=validation_metadata) -class Float32Schema(Float32Base, NumberSchema): +class Float32Schema( + Float32Base, + NumberSchema +): @classmethod - def from_openapi_data_oapg( - cls, arg: float, _configuration: typing.Optional[Configuration] = None - ): + def from_openapi_data_oapg(cls, arg: float, _configuration: typing.Optional[Configuration] = None): return super().from_openapi_data_oapg(arg, _configuration=_configuration) class Float64Base: - __inclusive_minimum = decimal.Decimal(-1.7976931348623157e308) - __inclusive_maximum = decimal.Decimal(1.7976931348623157e308) + __inclusive_minimum = decimal.Decimal(-1.7976931348623157E+308) + __inclusive_maximum = decimal.Decimal(1.7976931348623157E+308) @classmethod - def __validate_format( - cls, - arg: typing.Optional[decimal.Decimal], - validation_metadata: ValidationMetadata, - ): + def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata): if isinstance(arg, decimal.Decimal): if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum: raise ApiValueError( - "Invalid value '{}' for type double at {}".format( - arg, validation_metadata.path_to_item - ) + "Invalid value '{}' for type double at {}".format(arg, validation_metadata.path_to_item) ) @classmethod @@ -2804,18 +2225,22 @@ def _validate_oapg( cls.__validate_format(arg, validation_metadata=validation_metadata) return super()._validate_oapg(arg, validation_metadata=validation_metadata) - -class Float64Schema(Float64Base, NumberSchema): +class Float64Schema( + Float64Base, + NumberSchema +): @classmethod - def from_openapi_data_oapg( - cls, arg: float, _configuration: 
typing.Optional[Configuration] = None - ): + def from_openapi_data_oapg(cls, arg: float, _configuration: typing.Optional[Configuration] = None): # todo check format return super().from_openapi_data_oapg(arg, _configuration=_configuration) -class StrSchema(StrBase, Schema, StrMixin): +class StrSchema( + StrBase, + Schema, + StrMixin +): """ date + datetime string types must inherit from this class That is because one can validate a str payload as both: @@ -2824,14 +2249,10 @@ class StrSchema(StrBase, Schema, StrMixin): """ @classmethod - def from_openapi_data_oapg( - cls, arg: str, _configuration: typing.Optional[Configuration] = None - ) -> "StrSchema": + def from_openapi_data_oapg(cls, arg: str, _configuration: typing.Optional[Configuration] = None) -> 'StrSchema': return super().from_openapi_data_oapg(arg, _configuration=_configuration) - def __new__( - cls, arg: typing.Union[str, date, datetime, uuid.UUID], **kwargs: Configuration - ): + def __new__(cls, arg: typing.Union[str, date, datetime, uuid.UUID], **kwargs: Configuration): return super().__new__(cls, arg, **kwargs) @@ -2867,16 +2288,21 @@ def __new__(cls, arg: str, **kwargs: Configuration): return super().__new__(cls, arg, **kwargs) -class BytesSchema(Schema, BytesMixin): +class BytesSchema( + Schema, + BytesMixin +): """ this class will subclass bytes and is immutable """ - def __new__(cls, arg: bytes, **kwargs: Configuration): return super(Schema, cls).__new__(cls, arg) -class FileSchema(Schema, FileMixin): +class FileSchema( + Schema, + FileMixin +): """ This class is NOT immutable Dynamic classes are built using it for example when AnyType allows in binary data @@ -2894,9 +2320,7 @@ class FileSchema(Schema, FileMixin): - to be able to preserve file name info """ - def __new__( - cls, arg: typing.Union[io.FileIO, io.BufferedReader], **kwargs: Configuration - ): + def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader], **kwargs: Configuration): return super(Schema, cls).__new__(cls, arg) @@ -2904,7 +2328,12 @@ class BinaryBase: pass -class BinarySchema(ComposedBase, BinaryBase, Schema, BinaryMixin): +class BinarySchema( + ComposedBase, + BinaryBase, + Schema, + BinaryMixin +): class MetaOapg: @staticmethod def one_of(): @@ -2913,20 +2342,18 @@ def one_of(): FileSchema, ] - def __new__( - cls, - arg: typing.Union[io.FileIO, io.BufferedReader, bytes], - **kwargs: Configuration, - ): + def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader, bytes], **kwargs: Configuration): return super().__new__(cls, arg) -class BoolSchema(BoolBase, Schema, BoolMixin): +class BoolSchema( + BoolBase, + Schema, + BoolMixin +): @classmethod - def from_openapi_data_oapg( - cls, arg: bool, _configuration: typing.Optional[Configuration] = None - ): + def from_openapi_data_oapg(cls, arg: bool, _configuration: typing.Optional[Configuration] = None): return super().from_openapi_data_oapg(arg, _configuration=_configuration) def __new__(cls, arg: bool, **kwargs: ValidationMetadata): @@ -2941,7 +2368,7 @@ class AnyTypeSchema( BoolBase, NoneBase, Schema, - NoneFrozenDictTupleStrIntDecimalBoolFileBytesMixin, + NoneFrozenDictTupleStrIntDecimalBoolFileBytesMixin ): # Python representation of a schema defined as true or {} pass @@ -2968,7 +2395,7 @@ def __new__( cls, *args, _configuration: typing.Optional[Configuration] = None, - ) -> "NotAnyTypeSchema": + ) -> 'NotAnyTypeSchema': return super().__new__( cls, *args, @@ -2976,55 +2403,26 @@ def __new__( ) -class DictSchema(DictBase, Schema, FrozenDictMixin): +class DictSchema( + DictBase, + 
Schema, + FrozenDictMixin +): @classmethod - def from_openapi_data_oapg( - cls, - arg: typing.Dict[str, typing.Any], - _configuration: typing.Optional[Configuration] = None, - ): + def from_openapi_data_oapg(cls, arg: typing.Dict[str, typing.Any], _configuration: typing.Optional[Configuration] = None): return super().from_openapi_data_oapg(arg, _configuration=_configuration) - def __new__( - cls, - *args: typing.Union[dict, frozendict.frozendict], - **kwargs: typing.Union[ - dict, - frozendict.frozendict, - list, - tuple, - decimal.Decimal, - float, - int, - str, - date, - datetime, - bool, - None, - bytes, - Schema, - Unset, - ValidationMetadata, - ], - ): + def __new__(cls, *args: typing.Union[dict, frozendict.frozendict], **kwargs: typing.Union[dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, bytes, Schema, Unset, ValidationMetadata]): return super().__new__(cls, *args, **kwargs) -schema_type_classes = { - NoneSchema, - DictSchema, - ListSchema, - NumberSchema, - StrSchema, - BoolSchema, - AnyTypeSchema, -} +schema_type_classes = {NoneSchema, DictSchema, ListSchema, NumberSchema, StrSchema, BoolSchema, AnyTypeSchema} @functools.lru_cache() def get_new_class( class_name: str, - bases: typing.Tuple[typing.Type[typing.Union[Schema, typing.Any]], ...], + bases: typing.Tuple[typing.Type[typing.Union[Schema, typing.Any]], ...] ) -> typing.Type[Schema]: """ Returns a new class that is made with the subclass bases diff --git a/generator/konfig-integration-tests/tests/__snapshots__/python-one-of-uuid-string-integer-path-parameter.test.ts.snap b/generator/konfig-integration-tests/tests/__snapshots__/python-one-of-uuid-string-integer-path-parameter.test.ts.snap new file mode 100644 index 000000000..8e559bf40 --- /dev/null +++ b/generator/konfig-integration-tests/tests/__snapshots__/python-one-of-uuid-string-integer-path-parameter.test.ts.snap @@ -0,0 +1,172 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[`python-one-of-uuid-string-integer-path-parameter 1`] = ` +"# python-one-of-uuid-string-integer-path-parameter + +A simple API based for testing python-one-of-uuid-string-integer-path-parameter. 
+ + +[![PyPI](https://img.shields.io/badge/PyPI-v1.0.0-blue)](https://pypi.org/project/python-one-of-uuid-string-integer-path-parameter/1.0.0) +[![README.md](https://img.shields.io/badge/README-Click%20Here-green)](https://github.com/konfig-dev/konfig/tree/main/python#readme) +[![More Info](https://img.shields.io/badge/More%20Info-Click%20Here-orange)](http://example.com/support) + +## Table of Contents + + + +- [Requirements](#requirements) +- [Installation](#installation) +- [Getting Started](#getting-started) +- [Async](#async) +- [Raw HTTP Response](#raw-http-response) +- [Reference](#reference) + * [\`pythononeofuuidstringintegerpathparameterclient.test.fetch\`](#pythononeofuuidstringintegerpathparameterclienttestfetch) + + + +## Requirements + +Python >=3.7 + +## Installation + +\`\`\`sh +pip install python-one-of-uuid-string-integer-path-parameter==1.0.0 +\`\`\` + +## Getting Started + +\`\`\`python +from pprint import pprint +from python_one_of_uuid_string_integer_path_parameter import ( + PythonOneOfUuidStringIntegerPathParameterClient, + ApiException, +) + +pythononeofuuidstringintegerpathparameterclient = ( + PythonOneOfUuidStringIntegerPathParameterClient( + api_key="YOUR_API_KEY", + ) +) + +try: + # Fetches a JSON value based on input parameter + fetch_response = pythononeofuuidstringintegerpathparameterclient.test.fetch( + id=None, + ) +except ApiException as e: + print("Exception when calling TestApi.fetch: %s\\n" % e) + pprint(e.body) + pprint(e.headers) + pprint(e.status) + pprint(e.reason) + pprint(e.round_trip_time) +\`\`\` + +## Async + +\`async\` support is available by prepending \`a\` to any method. + +\`\`\`python +import asyncio +from pprint import pprint +from python_one_of_uuid_string_integer_path_parameter import ( + PythonOneOfUuidStringIntegerPathParameterClient, + ApiException, +) + +pythononeofuuidstringintegerpathparameterclient = ( + PythonOneOfUuidStringIntegerPathParameterClient( + api_key="YOUR_API_KEY", + ) +) + + +async def main(): + try: + # Fetches a JSON value based on input parameter + fetch_response = ( + await pythononeofuuidstringintegerpathparameterclient.test.afetch( + id=None, + ) + ) + except ApiException as e: + print("Exception when calling TestApi.fetch: %s\\n" % e) + pprint(e.body) + pprint(e.headers) + pprint(e.status) + pprint(e.reason) + pprint(e.round_trip_time) + + +asyncio.run(main()) +\`\`\` + +## Raw HTTP Response + +To access raw HTTP response values, use the \`.raw\` namespace. + +\`\`\`python +from pprint import pprint +from python_one_of_uuid_string_integer_path_parameter import ( + PythonOneOfUuidStringIntegerPathParameterClient, + ApiException, +) + +pythononeofuuidstringintegerpathparameterclient = ( + PythonOneOfUuidStringIntegerPathParameterClient( + api_key="YOUR_API_KEY", + ) +) + +try: + # Fetches a JSON value based on input parameter + fetch_response = pythononeofuuidstringintegerpathparameterclient.test.raw.fetch( + id=None, + ) + pprint(fetch_response.headers) + pprint(fetch_response.status) + pprint(fetch_response.round_trip_time) +except ApiException as e: + print("Exception when calling TestApi.fetch: %s\\n" % e) + pprint(e.body) + pprint(e.headers) + pprint(e.status) + pprint(e.reason) + pprint(e.round_trip_time) +\`\`\` + + +## Reference +### \`pythononeofuuidstringintegerpathparameterclient.test.fetch\` + +Provide an input parameter to receive a JSON value with properties. 
+ +#### 🛠️ Usage + +\`\`\`python +fetch_response = pythononeofuuidstringintegerpathparameterclient.test.fetch( + id=None, +) +\`\`\` + +#### ⚙️ Parameters + +##### id: Union[\`int\`, \`str\`] + + +The bucketId, projectId, or documentId to be searched. The document or documents within the specified container will be compared to the query, and relevant information will be extracted. + +#### 🌐 Endpoint + +\`/simple-endpoint/{id}\` \`get\` + +[🔙 **Back to Table of Contents**](#table-of-contents) + +--- + + +## Author +This Python package is automatically generated by [Konfig](https://konfigthis.com) +" +`;
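
Below is a minimal usage sketch, separate from the patch and from the generated snapshot above, showing how the documented `test.fetch` method and its `id: Union[int, str]` path parameter could be exercised against the `/simple-endpoint/{id}` route. The concrete id values are illustrative assumptions, not taken from the spec, and this assumes a reachable API behind the generated client.

```python
# Hedged sketch: exercises the oneOf uuid/string/integer `id` path parameter
# through the generated client documented in the snapshot README.
# Assumptions: the API is reachable and the literal id values below are
# placeholders chosen for illustration only.
from pprint import pprint

from python_one_of_uuid_string_integer_path_parameter import (
    PythonOneOfUuidStringIntegerPathParameterClient,
    ApiException,
)

client = PythonOneOfUuidStringIntegerPathParameterClient(
    api_key="YOUR_API_KEY",
)

# Integer, plain string, and UUID-formatted string ids are all documented
# forms of the `id` parameter for `/simple-endpoint/{id}`.
for candidate_id in (123, "document-abc", "9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d"):
    try:
        fetch_response = client.test.fetch(id=candidate_id)
        pprint(fetch_response)
    except ApiException as e:
        print("Exception when calling TestApi.fetch with id=%r: %s" % (candidate_id, e.reason))
```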