diff --git a/.github/workflows/RavenClient.yml b/.github/workflows/RavenClient.yml index 33b609c9..1e4d486e 100644 --- a/.github/workflows/RavenClient.yml +++ b/.github/workflows/RavenClient.yml @@ -75,6 +75,9 @@ jobs: - name: Install embedded RavenDB run: pip install ravendb-embedded + - name: Install Pydantic + run: pip install pydantic + - name: Run certifi script run: python ./.github/workflows/add_ca.py diff --git a/ravendb/documents/commands/batches.py b/ravendb/documents/commands/batches.py index 79d3f8ec..322f1a2b 100644 --- a/ravendb/documents/commands/batches.py +++ b/ravendb/documents/commands/batches.py @@ -199,7 +199,7 @@ def set_response(self, response: str, from_cache: bool) -> None: "Got None response from the server after doing a batch, something is very wrong." " Probably a garbled response." ) - self.result = Utils.initialize_object(json.loads(response), self._result_class, True) + self.result = BatchCommandResult.from_json(json.loads(response)) class ClusterWideBatchCommand(SingleNodeBatchCommand): diff --git a/ravendb/documents/operations/compare_exchange/compare_exchange_value_result_parser.py b/ravendb/documents/operations/compare_exchange/compare_exchange_value_result_parser.py index da54ff36..53ee8748 100644 --- a/ravendb/documents/operations/compare_exchange/compare_exchange_value_result_parser.py +++ b/ravendb/documents/operations/compare_exchange/compare_exchange_value_result_parser.py @@ -1,5 +1,5 @@ import json -from typing import Dict, Type, TypeVar, Optional +from typing import Dict, Type, TypeVar, Optional, Any, Union from ravendb.primitives import constants from ravendb.documents.conventions import DocumentConventions @@ -60,11 +60,11 @@ def get_single_value( key: str = item.get("Key") index: int = item.get("Index") - raw: dict = item.get("Value") + raw: Dict[str, Union[Any, Dict[str, Any]]] = item.get("Value") if not raw: return CompareExchangeValue(key, index, None) metadata = None - bjro = 
raw.get(constants.Documents.Metadata.KEY) + bjro: Dict[str, Any] = raw.get(constants.Documents.Metadata.KEY) if bjro: metadata = ( MetadataAsDictionary(bjro) diff --git a/ravendb/documents/operations/operation.py b/ravendb/documents/operations/operation.py index 868198d9..32271f54 100644 --- a/ravendb/documents/operations/operation.py +++ b/ravendb/documents/operations/operation.py @@ -51,9 +51,7 @@ def wait_for_completion(self) -> None: raise OperationCancelledException() elif operation_status == "Faulted": result = status.get("Result") - exception_result: OperationExceptionResult = Utils.initialize_object( - result, OperationExceptionResult, True - ) + exception_result = OperationExceptionResult.from_json(result) schema = ExceptionDispatcher.ExceptionSchema( self.__request_executor.url, exception_result.type, exception_result.message, exception_result.error ) diff --git a/ravendb/documents/queries/utils.py b/ravendb/documents/queries/utils.py index 64e0e5eb..24755e7c 100644 --- a/ravendb/documents/queries/utils.py +++ b/ravendb/documents/queries/utils.py @@ -35,7 +35,7 @@ def write(self, obj: object): elif "__str__" in obj.__class__.__dict__: self.__buffer.append(str(obj)) else: - self.__buffer.append(str(Utils.dictionarize(obj))) + self.__buffer.append(str(Utils.object_to_dict_for_hash_calculator(obj))) def write_parameters(self, qp: "Parameters") -> None: if qp is None: diff --git a/ravendb/documents/session/document_session_operations/in_memory_document_session_operations.py b/ravendb/documents/session/document_session_operations/in_memory_document_session_operations.py index 3c212ca2..0620890a 100644 --- a/ravendb/documents/session/document_session_operations/in_memory_document_session_operations.py +++ b/ravendb/documents/session/document_session_operations/in_memory_document_session_operations.py @@ -599,6 +599,10 @@ def before_query_invoke(self, before_query_event_args: BeforeQueryEventArgs): def documents_by_id(self): return self._documents_by_id + 
@property + def included_documents_by_id(self): + return self._included_documents_by_id + @property def deleted_entities(self): return self._deleted_entities diff --git a/ravendb/documents/session/entity_to_json.py b/ravendb/documents/session/entity_to_json.py index cf17e4f5..48fb30bf 100644 --- a/ravendb/documents/session/entity_to_json.py +++ b/ravendb/documents/session/entity_to_json.py @@ -172,15 +172,8 @@ def convert_to_entity( if "from_json" in object_type.__dict__ and inspect.ismethod(object_type.from_json): # By custom defined 'from_json' serializer class method entity = object_type.from_json(document_deepcopy) - elif is_projection: - entity = DynamicStructure(**document_deepcopy) - entity.__class__ = object_type - try: - entity = Utils.initialize_object(document_deepcopy, object_type) - except TypeError as e: - raise InvalidOperationException("Probably projection error", e) else: - entity = Utils.convert_json_dict_to_object(document_deepcopy, object_type) + entity = Utils.convert_json_dict_to_object(document_deepcopy, object_type, is_projection) EntityToJsonUtils.invoke_after_conversion_to_entity_event(session, key, object_type, document_deepcopy) @@ -295,10 +288,15 @@ def determine_object_type( # Passed type is not a type from metadata, neither there's no inheritance - probably projection elif object_type_from_user is not object_type_from_metadata: - if not all([name in object_type_from_metadata.__dict__ for name in object_type_from_user.__dict__]): - # Document from database and object_type from user aren't compatible + # Check if types are compatible + incompatible_fields = Utils.check_valid_projection(object_type_from_user, object_type_from_metadata) + if incompatible_fields: raise exceptions.InvalidOperationException( - f"Cannot covert document from type {object_type_from_metadata} to {object_type_from_user}" + f"Invalid projection. 
Cannot convert document " + f"from type '{object_type_from_metadata.__name__}' " + f"to type '{object_type_from_user.__name__}'. " + f"Type '{object_type_from_user.__name__}' instance has fields {incompatible_fields} " + f"that aren't on '{object_type_from_metadata.__name__}'." ) # Projection diff --git a/ravendb/documents/session/operations/load_operation.py b/ravendb/documents/session/operations/load_operation.py index 3835d519..ec84d406 100644 --- a/ravendb/documents/session/operations/load_operation.py +++ b/ravendb/documents/session/operations/load_operation.py @@ -129,11 +129,11 @@ def __get_document(self, object_type: Type[_T], key: str) -> _T: if self._session.is_deleted(key): return Utils.get_default_value(object_type) - doc = self._session._documents_by_id.get(key) + doc = self._session.documents_by_id.get(key) if doc is not None: return self._session.track_entity_document_info(object_type, doc) - doc = self._session._included_documents_by_id.get(key) + doc = self._session.included_documents_by_id.get(key) if doc is not None: return self._session.track_entity_document_info(object_type, doc) diff --git a/ravendb/http/request_executor.py b/ravendb/http/request_executor.py index 4bb5d587..5bf77642 100644 --- a/ravendb/http/request_executor.py +++ b/ravendb/http/request_executor.py @@ -438,11 +438,7 @@ def __run(errors: list): topology = Topology( self._topology_etag, - ( - self.topology_nodes - if self.topology_nodes - else list(map(lambda url_val: ServerNode(url_val, self._database_name, "!"), initial_urls)) - ), + (self.topology_nodes or [ServerNode(url, self._database_name, "!") for url in initial_urls]), ) self._node_selector = NodeSelector(topology, self._thread_pool_executor) diff --git a/ravendb/http/server_node.py b/ravendb/http/server_node.py index 7c19aef1..6398ff70 100644 --- a/ravendb/http/server_node.py +++ b/ravendb/http/server_node.py @@ -1,5 +1,5 @@ from enum import Enum -from typing import Optional, TYPE_CHECKING +from typing import Optional, 
TYPE_CHECKING, Any, Dict if TYPE_CHECKING: from ravendb.http.topology import ClusterTopology @@ -26,8 +26,17 @@ def __init__( self.database = database self.cluster_tag = cluster_tag self.server_role = server_role - self.__last_server_version_check = 0 - self.__last_server_version: str = None + self._last_server_version_check = 0 + self._last_server_version: Optional[str] = None + + @classmethod + def from_json(cls, json_dict: Dict[str, Any]) -> "ServerNode": + return cls( + json_dict["Url"], + json_dict["Database"], + json_dict["ClusterTag"], + ServerNode.Role(json_dict["ServerRole"]) if "ServerRole" in json_dict else None, + ) def __eq__(self, other) -> bool: if self == other: @@ -45,7 +54,7 @@ def __hash__(self) -> int: @property def last_server_version(self) -> str: - return self.__last_server_version + return self._last_server_version @classmethod def create_from(cls, topology: "ClusterTopology"): @@ -64,16 +73,16 @@ def create_from(cls, topology: "ClusterTopology"): return nodes def should_update_server_version(self) -> bool: - if self.last_server_version is None or self.__last_server_version_check > 100: + if self.last_server_version is None or self._last_server_version_check > 100: return True - self.__last_server_version_check += 1 + self._last_server_version_check += 1 return False def update_server_version(self, server_version: str): - self.__last_server_version = server_version - self.__last_server_version_check = 0 + self._last_server_version = server_version + self._last_server_version_check = 0 def discard_server_version(self) -> None: - self.__last_server_version_check = None - self.__last_server_version_check = 0 + self._last_server_version = None + self._last_server_version_check = 0 diff --git a/ravendb/http/topology.py b/ravendb/http/topology.py index fd2dd78c..0ab6cbec 100644 --- a/ravendb/http/topology.py +++ b/ravendb/http/topology.py @@ -6,7 +6,7 @@ import uuid from abc import abstractmethod from concurrent.futures import 
ThreadPoolExecutor -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING, Optional, Any from typing import Union, List, Dict from ravendb.exceptions.exceptions import ( @@ -25,6 +25,10 @@ def __init__(self, etag: int, nodes: List[ServerNode]): self.etag = etag self.nodes = nodes + @classmethod + def from_json(cls, json_dict: Dict[str, Any]) -> "Topology": + return cls(json_dict["Etag"], [ServerNode.from_json(node_json_dict) for node_json_dict in json_dict["Nodes"]]) + class ClusterTopology: def __init__(self): diff --git a/ravendb/json/result.py b/ravendb/json/result.py index f82015ee..9adcbe50 100644 --- a/ravendb/json/result.py +++ b/ravendb/json/result.py @@ -1,10 +1,14 @@ -from typing import List, Any, Dict +from typing import List, Any, Dict, Optional class BatchCommandResult: - def __init__(self, results, transaction_index): - self.results: [None, list] = results - self.transaction_index: [None, int] = transaction_index + def __init__(self, results: Optional[List[Dict]], transaction_index: Optional[int]): + self.results = results + self.transaction_index = transaction_index + + @classmethod + def from_json(cls, json_dict: Dict[str, Any]) -> "BatchCommandResult": + return cls(json_dict["Results"], json_dict["TransactionIndex"] if "TransactionIndex" in json_dict else None) class JsonArrayResult: diff --git a/ravendb/serverwide/commands.py b/ravendb/serverwide/commands.py index 5d8a5477..6169104f 100644 --- a/ravendb/serverwide/commands.py +++ b/ravendb/serverwide/commands.py @@ -13,7 +13,7 @@ from ravendb.tools.utils import Utils -class GetDatabaseTopologyCommand(RavenCommand): +class GetDatabaseTopologyCommand(RavenCommand[Topology]): def __init__(self, debug_tag: Optional[str] = None, application_identifier: Optional[uuid.UUID] = None): super().__init__(Topology) self.__debug_tag = debug_tag @@ -33,13 +33,7 @@ def create_request(self, node: ServerNode) -> requests.Request: def set_response(self, response: str, from_cache: bool) -> 
None: if response is None: return - - # todo: that's pretty bad way to do that, replace with initialization function that take nested object types - self.result: Topology = Utils.initialize_object(json.loads(response), self._result_class, True) - node_list = [] - for node in self.result.nodes: - node_list.append(Utils.initialize_object(node, ServerNode, True)) - self.result.nodes = node_list + self.result = Topology.from_json(json.loads(response)) class GetClusterTopologyCommand(RavenCommand[ClusterTopologyResponse]): diff --git a/ravendb/tests/issue_tests/test_RDBC_855.py b/ravendb/tests/issue_tests/test_RDBC_855.py new file mode 100644 index 00000000..ecc8d4e9 --- /dev/null +++ b/ravendb/tests/issue_tests/test_RDBC_855.py @@ -0,0 +1,22 @@ +from datetime import datetime + +from pydantic import BaseModel + +from ravendb.tests.test_base import TestBase + + +class User(BaseModel): + name: str = None + birthday: datetime = None + Id: str = None + + +class TestRDBC855(TestBase): + def test_storing_pydantic_objects(self): + with self.store.open_session() as session: + session.store(User(name="Josh", birthday=datetime(1999, 1, 1), Id="users/51")) + session.save_changes() + + with self.store.open_session() as session: + user = session.load("users/51", User) + self.assertEqual("Josh", user.name) diff --git a/ravendb/tests/jvm_migrated_tests/client_tests/indexing_tests/counters_tests/test_basic_counters_indexes_strong_syntax.py b/ravendb/tests/jvm_migrated_tests/client_tests/indexing_tests/counters_tests/test_basic_counters_indexes_strong_syntax.py index 166a2704..ff4ce825 100644 --- a/ravendb/tests/jvm_migrated_tests/client_tests/indexing_tests/counters_tests/test_basic_counters_indexes_strong_syntax.py +++ b/ravendb/tests/jvm_migrated_tests/client_tests/indexing_tests/counters_tests/test_basic_counters_indexes_strong_syntax.py @@ -15,7 +15,7 @@ def __init__(self): super(MyCounterIndex, self).__init__() self.map = ( "counters.Companies.HeartRate.Select(counter => new {\n" 
- " heartBeat = counter.Value,\n" + " heart_beat = counter.Value,\n" " name = counter.Name,\n" " user = counter.DocumentId\n" "})" diff --git a/ravendb/tests/jvm_migrated_tests/client_tests/revisions_tests/test_revisions.py b/ravendb/tests/jvm_migrated_tests/client_tests/revisions_tests/test_revisions.py index a81f9435..cf6d248d 100644 --- a/ravendb/tests/jvm_migrated_tests/client_tests/revisions_tests/test_revisions.py +++ b/ravendb/tests/jvm_migrated_tests/client_tests/revisions_tests/test_revisions.py @@ -58,7 +58,7 @@ def test_can_get_revisions_by_change_vector(self): for i in range(10): with self.store.open_session() as session: - user = session.load(id_, Company) + user = session.load(id_, User) user.name = f"Fitzchak{i}" session.save_changes() @@ -134,7 +134,7 @@ def test_collection_case_sensitive_test_1(self): for i in range(10): with self.store.open_session() as session: - user = session.load(id_, Company) + user = session.load(id_, User) user.name = "raven" + str(i) session.save_changes() @@ -159,7 +159,7 @@ def test_collection_case_sensitive_test_2(self): for i in range(10): with self.store.open_session() as session: - user = session.load(id_, Company) + user = session.load(id_, User) user.name = "raven" + str(i) session.save_changes() @@ -284,7 +284,7 @@ def test_can_get_metadata_for_lazily(self): for i in range(10): with self.store.open_session() as session: - user = session.load(id_, Company) + user = session.load(id_, User) user.name = f"Omer{i}" session.save_changes() @@ -319,7 +319,7 @@ def test_can_get_for_lazily(self): for i in range(10): with self.store.open_session() as session: - user = session.load(id_, Company) + user = session.load(id_, User) user.name = f"Omer{i}" session.save_changes() @@ -392,7 +392,7 @@ def test_can_get_revisions_by_change_vectors_lazily(self): for i in range(10): with self.store.open_session() as session: - user = session.load(id_, Company) + user = session.load(id_, User) user.name = f"Omer{i}" session.save_changes() 
diff --git a/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_14006.py b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_14006.py index f52604ac..47272f5e 100644 --- a/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_14006.py +++ b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_14006.py @@ -1,14 +1,11 @@ import time -import unittest from typing import Optional - from ravendb.documents.indexes.abstract_index_creation_tasks import AbstractIndexCreationTask from ravendb.documents.session.loaders.include import QueryIncludeBuilder from ravendb.documents.session.misc import TransactionMode, SessionOptions from ravendb.documents.session.query import QueryStatistics from ravendb.infrastructure.orders import Company, Address, Employee from ravendb.tests.test_base import TestBase -from ravendb.util.util import StartingWithOptions class Companies_ByName(AbstractIndexCreationTask): diff --git a/ravendb/tests/jvm_migrated_tests/other_tests/test_get_topology.py b/ravendb/tests/jvm_migrated_tests/other_tests/test_get_topology.py index 57ddb72d..62873cbc 100644 --- a/ravendb/tests/jvm_migrated_tests/other_tests/test_get_topology.py +++ b/ravendb/tests/jvm_migrated_tests/other_tests/test_get_topology.py @@ -1,3 +1,4 @@ +from ravendb import ServerNode from ravendb.serverwide.commands import GetDatabaseTopologyCommand from ravendb.tests.test_base import TestBase @@ -18,4 +19,4 @@ def test_get_topology(self): self.assertEqual(server_node.url, self.store.urls[0]) self.assertEqual(server_node.database, self.store.database) self.assertEqual(server_node.cluster_tag, "A") - self.assertEqual(server_node.server_role, "Member") + self.assertEqual(server_node.server_role, ServerNode.Role.MEMBER) diff --git a/ravendb/tools/custom_decoder.py b/ravendb/tools/custom_decoder.py deleted file mode 100644 index 61ab391f..00000000 --- a/ravendb/tools/custom_decoder.py +++ /dev/null @@ -1,49 +0,0 @@ -from ravendb.tools.utils import Utils -import json - - 
-class JsonDecoder(json.JSONDecoder): - """ - This custom JsonDecoder can add to json.loads function to work with ravendb mappers solution. - just use it like this json.loads(YOUR_OBJECT, cls=JsonDecoder, object_mapper=YOUR_MAPPER) - Note that that last object that returns from the loads function will be a dict. - To get the dict as your custom object you can create it with the return dict or use the parse_json method - """ - - def __init__(self, **kwargs): - self.object_mapper = kwargs.pop("object_mapper", lambda key, value: None) - super(JsonDecoder, self).__init__(object_hook=self.object_hook) - self.step_down = None - - def object_hook(self, dict_object): - if self.step_down is not None: - for key in dict_object: - result = self.object_mapper(key, dict_object[key]) - if result is not None: - dict_object[key] = result - self.step_down = dict_object - return self.step_down - - -def parse_json(json_string, object_type, mappers, convert_to_snake_case=False): - """ - This function will use the custom JsonDecoder and the conventions.mappers to recreate your custom object - in the parse json string state just call this method with the json_string your complete object_type and with your - mappers dict. - the mappers dict must contain in the key the object_type (ex. 
User) and the value will contain a method that get - key, value (the key will be the name of the object property we like to parse and the value - will be the properties of the object) - """ - obj = json.loads(json_string, cls=JsonDecoder, object_mapper=mappers.get(object_type, None)) - - if obj is not None: - try: - obj = object_type(**obj) - except TypeError: - initialize_dict, set_needed = Utils.make_initialize_dict(obj, object_type.__init__, convert_to_snake_case) - o = object_type(**initialize_dict) - if set_needed: - for key, value in obj.items(): - setattr(o, key, value) - obj = o - return obj diff --git a/ravendb/tools/indexqueue.py b/ravendb/tools/indexqueue.py deleted file mode 100644 index 2b17e44c..00000000 --- a/ravendb/tools/indexqueue.py +++ /dev/null @@ -1,61 +0,0 @@ -from time import time as _time -import queue as queue - - -class IndexQueue(queue.Queue, object): - def __init__(self): - super(IndexQueue, self).__init__() - - def peek(self, index=0): - if not self.qsize(): - return None - return self.queue[index] - - def get(self, index=None, block=True, timeout=None): - """Remove and return an item from the queue. - - If optional 'index' is not None the get will return the item in the index place - If optional args 'block' is true and 'timeout' is None (the default), - block if necessary until an item is available. If 'timeout' is - a non-negative number, it blocks at most 'timeout' seconds and raises - the Empty exception if no item was available within that time. - Otherwise ('block' is false), return an item if one is immediately - available, else raise the Empty exception ('timeout' is ignored - in that case). 
- """ - self.not_empty.acquire() - try: - if not block: - if not self._qsize(): - raise queue.Empty - elif timeout is None: - while not self._qsize(): - self.not_empty.wait() - elif timeout < 0: - raise ValueError("'timeout' must be a non-negative number") - else: - endtime = _time() + timeout - while not self._qsize(): - remaining = endtime - _time() - if remaining <= 0.0: - raise queue.Empty - self.not_empty.wait(remaining) - if not index or index == 0: - item = self._get() - else: - item = self._get_with_index(index) - self.not_full.notify() - return item - finally: - self.not_empty.release() - - def _get_with_index(self, index): - value = self.peek(index) - self.queue.remove(value) - return value - - def __lt__(self, other): - return self.__key__() < other.__key__() - - def __len__(self): - return self.queue.__len__() diff --git a/ravendb/tools/projection.py b/ravendb/tools/projection.py deleted file mode 100644 index 17d229a2..00000000 --- a/ravendb/tools/projection.py +++ /dev/null @@ -1,43 +0,0 @@ -def create_entity_with_mapper(dict_obj, mapper, object_type, convert_to_snake_case=None): - """ - This method will create an entity from dict_obj and mapper - In case convert_to_snake_case is empty will convert dict_obj keys to snake_case - convert_to_snake_case can be dictionary with special words you can change ex. 
From -> from_date - """ - from ravendb.tools.utils import Utils - - def parse_dict_rec(data): - try: - if not isinstance(data, dict): - try: - for i in range(len(data)): - data[i] = Utils.initialize_object( - parse_dict_rec(data[i])[0], - object_type, - convert_to_snake_case, - ) - except TypeError: - return data, False - - for key in data: - if isinstance(data[key], dict): - parse_dict_rec(data[key]) - data[key] = mapper(key, data[key]) - if key is None: - pass - elif isinstance(data[key], (tuple, list)): - for item in data[key]: - parse_dict_rec(item) - data[key] = mapper(key, data[key]) - else: - mapper_result = mapper(key, data[key]) - data[key] = mapper_result if mapper_result else data[key] - return data, True - except TypeError as e: - raise TypeError("Can't parse to custom object", e) - - first_parsed, need_to_parse = parse_dict_rec(dict_obj) - # After create a complete dict for our object we need to create the object with the object_type - if first_parsed is not None and need_to_parse: - return Utils.initialize_object(first_parsed, object_type, convert_to_snake_case) - return first_parsed diff --git a/ravendb/tools/utils.py b/ravendb/tools/utils.py index bac00127..17fc301b 100644 --- a/ravendb/tools/utils.py +++ b/ravendb/tools/utils.py @@ -2,23 +2,33 @@ import enum import time -from typing import Optional, Dict, Generic, Tuple, TypeVar, Collection, List, Union, Type, TYPE_CHECKING +from typing import ( + Optional, + Dict, + Generic, + Tuple, + TypeVar, + Collection, + List, + Union, + Type, + Any, + Callable, +) + +import requests from ravendb.primitives import constants -from ravendb.exceptions import exceptions from ravendb.json.metadata_as_dictionary import MetadataAsDictionary -import OpenSSL.crypto try: from collections.abc import Iterable, Sequence except ImportError: from collections import Iterable, Sequence -from ravendb.tools.projection import create_entity_with_mapper from datetime import datetime, timedelta from enum import Enum from threading 
import Timer -from copy import deepcopy import urllib import inspect import json @@ -26,6 +36,7 @@ import re _T = TypeVar("_T") +_T2 = TypeVar("_T2") _TKey = TypeVar("_TKey") _TVal = TypeVar("_TVal") @@ -47,8 +58,6 @@ ":", "\\", } -if TYPE_CHECKING: - from ravendb.documents.conventions import DocumentConventions class TimeUnit(Enum): @@ -75,22 +84,22 @@ def to_nanos(self, duration: int) -> int: return duration * self.value def to_micros(self, duration: int) -> int: - return duration * self.value / 1000 + return duration * self.value // 1000 def to_millis(self, duration: int) -> int: - return duration * self.value / (1000 * 1000) + return duration * self.value // (1000 * 1000) def to_seconds(self, duration: int) -> int: - return duration * self.value / (1000 * 1000 * 1000) + return duration * self.value // (1000 * 1000 * 1000) def to_minutes(self, duration: int) -> int: - return duration * self.value / (1000 * 1000 * 1000 * 60) + return duration * self.value // (1000 * 1000 * 1000 * 60) def to_hours(self, duration: int) -> int: - return duration * self.value / (1000 * 1000 * 1000 * 60 * 60) + return duration * self.value // (1000 * 1000 * 1000 * 60 * 60) def to_days(self, duration: int) -> int: - return duration * self.value / (1000 * 1000 * 1000 * 60 * 60 * 24) + return duration * self.value // (1000 * 1000 * 1000 * 60 * 60 * 24) class Stopwatch: @@ -338,7 +347,7 @@ class Utils(object): @staticmethod def check_if_collection_but_not_str(instance) -> bool: - return isinstance(instance, (Sequence, set)) and not isinstance(instance, (str, bytes, bytearray)) + return isinstance(instance, (list, set, tuple)) @staticmethod def unpack_collection(items: Collection) -> List: @@ -346,14 +355,14 @@ def unpack_collection(items: Collection) -> List: for item in items: if Utils.check_if_collection_but_not_str(item): - results.extend(Utils.__unpack_collection(item)) + results.extend(Utils.unpack_collection(item)) continue results.append(item) return results @staticmethod - def 
quote_key(key, reserved_slash=False, reserved_at=False) -> str: + def quote_key(key: str, reserved_slash: bool = False, reserved_at: bool = False) -> str: reserved = "" if reserved_slash: reserved += "/" @@ -365,44 +374,13 @@ def quote_key(key, reserved_slash=False, reserved_at=False) -> str: return "" @staticmethod - def unpack_iterable(iterable): - for item in iterable: - if isinstance(item, Iterable) and not isinstance(item, str): - for nested in Utils.unpack_iterable(item): - yield nested - else: - yield item - - @staticmethod - def convert_to_snake_case(name): - s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name) - return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower() - - @staticmethod - def database_name_validation(name): - if name is None: - raise ValueError("None name is not valid") - result = re.match(r"([A-Za-z0-9_\-\.]+)", name, re.IGNORECASE) - if not result: - raise exceptions.InvalidOperationException( - 'Database name can only contain only A-Z, a-z, "_", "." or "-" but was: ' + name - ) - - @staticmethod - def first_or_default(iterator, func, default): - for item in iterator: - if func(item): - return item - return default - - @staticmethod - def get_change_vector_from_header(response): + def get_change_vector_from_header(response: requests.Response): header = response.headers.get("ETag", None) if header is not None and header[0] == '"': return header[1 : len(header) - 2] @staticmethod - def import_class(name) -> Optional[Type]: + def import_class(name: str) -> Optional[Type]: components = name.split(".") module_name = ".".join(name.split(".")[:-1]) mod = None @@ -413,7 +391,7 @@ def import_class(name) -> Optional[Type]: return mod @staticmethod - def is_inherit(parent, child): + def is_inherit(parent: Type[Any], child: Type[Any]) -> bool: if child is None or parent is None: return False if parent == child: @@ -422,53 +400,22 @@ def is_inherit(parent, child): return Utils.is_inherit(parent, child.__base__) @staticmethod - def 
fill_with_nested_object_types(entity: object, nested_object_types: Dict[str, type]) -> object: - for key in nested_object_types: - attr = getattr(entity, key) - if attr: - try: - if isinstance(attr, list): - nested_list = [] - for attribute in attr: - nested_list.append(Utils.initialize_object(attribute, nested_object_types[key])) - setattr(entity, key, nested_list) - elif nested_object_types[key] is datetime: - setattr(entity, key, Utils.string_to_datetime(attr)) - elif nested_object_types[key] is timedelta: - setattr(entity, key, Utils.string_to_timedelta(attr)) - else: - setattr( - entity, - key, - Utils.initialize_object(attr, nested_object_types[key]), - ) - except TypeError as e: - print(e) - pass - return entity - - @staticmethod - def initialize_object(obj: dict, object_type: Type[_T], convert_to_snake_case: bool = None) -> _T: - initialize_dict, set_needed = Utils.make_initialize_dict(obj, object_type.__init__, convert_to_snake_case) + def initialize_object(json_dict: Dict[str, Any], object_type: Type[_T]) -> _T: + initialize_dict, should_set_object_fields = Utils.create_initialize_kwargs(json_dict, object_type.__init__) try: - o = object_type(**initialize_dict) + entity = object_type(**initialize_dict) except Exception as e: if "Id" not in initialize_dict: initialize_dict["Id"] = None - o = object_type(**initialize_dict) + entity = object_type(**initialize_dict) else: raise TypeError( f"Couldn't initialize object of type '{object_type.__name__}' using dict '{initialize_dict}'" ) from e - if set_needed: - for key, value in obj.items(): - setattr(o, key, value) - return o - - @staticmethod - def get_field_names(object_type: Type[_T]) -> List[str]: - obj = Utils.initialize_object({}, object_type) - return list(obj.__dict__.keys()) + if should_set_object_fields: + for key, value in json_dict.items(): + setattr(entity, key, value) + return entity @staticmethod def try_get_new_instance(object_type: Type[_T]) -> _T: @@ -483,41 +430,17 @@ def 
try_get_new_instance(object_type: Type[_T]) -> _T: @staticmethod def convert_json_dict_to_object( - json_dict: dict, object_type: Optional[Type[_T]] = None, nested_object_types: Optional[Dict[str, type]] = None + json_dict: Dict[str, Any], object_type: Optional[Type[_T]] = None, projection: bool = False ) -> Union[DynamicStructure, _T]: if object_type == dict: return json_dict - if object_type is None: return DynamicStructure(**json_dict) - if nested_object_types is None: - return Utils.initialize_object(json_dict, object_type, True) - - entity = DynamicStructure(**json_dict) - entity.__class__ = object_type - entity = Utils.initialize_object(json_dict, object_type, True) - if nested_object_types: - Utils.fill_with_nested_object_types(entity, nested_object_types) - Utils.deep_convert_to_snake_case(entity) - return entity - - @staticmethod - def deep_convert_to_snake_case(entity): - if entity is None: - return - if type(entity) in [int, float, bool, str, set, list, tuple, dict]: - return - changes = {} - for key, value in entity.__dict__.items(): - # todo: i dont' like the way we try to convert to snake case content of the object dict - new_key = Utils.convert_to_snake_case(key) - if key != new_key: - changes.update({(key, new_key): value}) # collect the keys that changed (snake case conversion) - Utils.deep_convert_to_snake_case(value) - for keys, value in changes.items(): # erase - entity.__dict__[keys[1]] = value - del entity.__dict__[keys[0]] + try: + return Utils.initialize_object(json_dict, object_type) + except TypeError as e: + raise TypeError(f"Couldn't project results into object type '{object_type.__name__}'") if projection else e @staticmethod def get_object_fields(instance: object) -> Dict[str, object]: @@ -528,7 +451,7 @@ def get_object_fields(instance: object) -> Dict[str, object]: } @staticmethod - def get_class_fields(object_type: type) -> Dict[str, object]: + def get_class_fields(object_type: Type[_T]) -> Dict[str, Any]: try: instance = 
Utils.try_get_new_instance(object_type) except Exception as e: @@ -540,90 +463,13 @@ def get_class_fields(object_type: type) -> Dict[str, object]: return Utils.get_object_fields(instance) @staticmethod - def convert_to_entity( - document: dict, - object_type: Optional[Type[_T]], - conventions: "DocumentConventions", - events, - nested_object_types=None, - ) -> Union[_T, DynamicStructure]: - if document is None: - return None - metadata = document.get("@metadata") - original_document = deepcopy(document) - type_from_metadata = conventions.try_get_type_from_metadata(metadata) - mapper = conventions.mappers.get(object_type, None) - - events.before_conversion_to_entity(document, metadata, type_from_metadata) - - if object_type == dict: - events.after_conversion_to_entity(document, document, metadata) - return document, metadata, original_document - - if type_from_metadata is None: - if object_type is not None: - metadata["Raven-Python-Type"] = "{0}.{1}".format(object_type.__module__, object_type.__name__) - else: # no type defined on document or during load, return a dict - dyn = DynamicStructure(**document) - events.after_conversion_to_entity(dyn, document, metadata) - return dyn, metadata, original_document - else: - object_from_metadata = Utils.import_class(type_from_metadata) - if object_from_metadata is not None: - if object_type is None: - object_type = object_from_metadata - - elif Utils.is_inherit(object_type, object_from_metadata): - mapper = conventions.mappers.get(object_from_metadata, None) or mapper - object_type = object_from_metadata - elif object_type is not object_from_metadata: - # todo: Try to parse if we use projection - raise exceptions.InvalidOperationException( - f"Cannot covert document from type {object_from_metadata} to {object_type}" - ) - - if nested_object_types is None and mapper: - entity = create_entity_with_mapper(document, mapper, object_type) - else: - entity = DynamicStructure(**document) - entity.__class__ = object_type - - entity = 
Utils.initialize_object(document, object_type) - - if nested_object_types: - Utils.fill_with_nested_object_types(entity, nested_object_types) - - if "Id" in entity.__dict__: - entity.Id = metadata.get("@id", None) - events.after_conversion_to_entity(entity, document, metadata) - return entity, metadata, original_document - - @staticmethod - def make_initialize_dict(document, entity_init, convert_to_snake_case=None): - """ - This method will create an entity from document - In case convert_to_snake_case will convert document keys to snake_case - convert_to_snake_case can be dictionary with special words you can change ex. From -> from_date - """ - if convert_to_snake_case: - convert_to_snake_case = {} if convert_to_snake_case is True else convert_to_snake_case - try: - converted_document = {} - for key in document: - converted_key = convert_to_snake_case.get(key, key) - converted_document[converted_key if key == "Id" else Utils.convert_to_snake_case(converted_key)] = ( - document[key] - ) - document = converted_document - except: - pass - - if entity_init is None: - return document - + def create_initialize_kwargs( + document: Dict[str, Any], + object_init_method: Callable[[Dict[str, Any]], None], + ) -> Tuple[Dict[str, Any], bool]: set_needed = False entity_initialize_dict = {} - args, __, keywords, defaults, _, _, _ = inspect.getfullargspec(entity_init) + args, __, keywords, defaults, _, _, _ = inspect.getfullargspec(object_init_method) if (len(args) - 1) > len(document): remainder = len(args) if defaults: @@ -646,7 +492,7 @@ def make_initialize_dict(document, entity_init, convert_to_snake_case=None): return entity_initialize_dict, set_needed @staticmethod - def dict_to_bytes(the_dict): + def dict_to_bytes(the_dict: Dict[str, Any]): json_dict = json.dumps(the_dict) return bytes(json_dict, encoding="utf-8") @@ -790,103 +636,6 @@ def escape_collection_name(collection_name: str): return "".join(buffer) - @staticmethod - def pfx_to_pem(pem_path, pfx_path, 
pfx_password): - """ - @param pem_path: Where to create the pem file - @param pfx_path: The path to the pfx file - @param pfx_password: The password to pfx file - """ - with open(pem_path, "wb") as pem_file: - with open(pfx_path, "rb") as pfx_file: - pfx = pfx_file.read() - p12 = OpenSSL.crypto.load_pkcs12(pfx, pfx_password) - pem_file.write(OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, p12.get_privatekey())) - pem_file.write(OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, p12.get_certificate())) - ca = p12.get_ca_certificates() - if ca is not None: - for cert in ca: - # In python 3.6* we need to save the ca to ?\lib\site-packages\certifi\cacert.pem. - pem_file.write(OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, cert)) - return pem_path - - @staticmethod - def get_cert_file_fingerprint(pem_path): - with open(pem_path, "rb") as pem_file: - cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, pem_file.read()) - return str(cert.digest("sha1")) - - @staticmethod - def pem_to_crt_and_key(pem_path: str) -> Tuple[str, str]: - with open(pem_path, "rb") as file: - content = file.read() - if "BEGIN CERTIFICATE" not in content: - raise ValueError( - f"Invalid file. File stored under the path '{pem_path}' isn't valid .pem certificate. " - f"BEGIN CERTIFICATE header wasn't found." - ) - if "BEGIN RSA PRIVATE KEY" not in content: - raise ValueError( - f"Invalid file. File stored under the path '{pem_path}' isn't valid .pem certificate. " - f"BEGIN RSA PRIVATE KEY header wasn't found." 
- ) - - content = content.decode("utf-8") - crt = content.split("-----BEGIN RSA PRIVATE KEY-----")[0] - key = content.split("-----END CERTIFICATE-----")[1] - - return crt, key - - @staticmethod - def index_of_any(text, any_of): - """ - @param str text: The text we want to check - @param list any_of: list of char - :returns False if text nit - """ - result = -1 - if not text or not any_of: - return result - - any_of = list(set(any_of)) - i = 0 - while i < len(text) and result == -1: - for c in any_of: - if c == text[i]: - result = i - break - i += 1 - return result - - @staticmethod - def contains_any(text, any_of): - """ - @param text: The text we want to check - :type str - @param any_of: list of char - :type list - :returns False if text nit - """ - if not text or not any_of: - return False - - any_of = list(set(any_of)) - for c in any_of: - if c in text: - return True - return False - - @staticmethod - def sort_iterable(iterable, key=None): - """ - This function will take an iterable and try to sort it by the key - if the key is not given will use the sorted default one. 
- return a generator - """ - if not iterable: - return iterable - yield sorted(iterable, key=key) - @staticmethod def json_default(o): if o is None: @@ -925,20 +674,30 @@ def get_default_value(object_type: Type[_T]) -> _T: return None @staticmethod - def dictionarize(obj: object) -> dict: - dictionarized = {"__name__": obj.__class__.__name__} - dictionarized.update(obj.__dict__) + def object_to_dict_for_hash_calculator(obj: object) -> dict: + object_dict = {"__name__": obj.__class__.__name__} + object_dict.update(obj.__dict__) to_update = {} - for k, v in dictionarized.items(): + for k, v in object_dict.items(): if v is not None and not isinstance( v, (bool, float, str, int, bytes, bytearray, list, set, dict, enum.Enum) ): if "__str__" in v.__dict__: to_update.update({k: str(v)}) else: - to_update.update({k: Utils.dictionarize(v)}) - dictionarized.update(to_update) - return dictionarized + to_update.update({k: Utils.object_to_dict_for_hash_calculator(v)}) + object_dict.update(to_update) + return object_dict + + @staticmethod + def check_valid_projection(object_type_from_user: Type[Any], object_type_from_metadata: Type[Any]) -> List[str]: + object_type_from_metadata_fields = Utils.get_class_fields(object_type_from_metadata) + object_type_from_user_fields = Utils.get_class_fields(object_type_from_user) + incompatible_fields = [] + for field_name in object_type_from_user_fields: + if field_name not in object_type_from_metadata_fields: + incompatible_fields.append(field_name) + return incompatible_fields @staticmethod def entity_to_dict(entity, default_method) -> dict: diff --git a/ravendb/util/util.py b/ravendb/util/util.py index 520967c9..084a1f74 100644 --- a/ravendb/util/util.py +++ b/ravendb/util/util.py @@ -1,5 +1,4 @@ import uuid -from typing import Optional class RaftIdGenerator: @@ -10,10 +9,3 @@ def new_id() -> str: @staticmethod def dont_care_id() -> str: return "" - - - class StartingWithOptions: - def __init__(self, start_with: str, start: Optional[int] = 
None, page_size: Optional[int] = None): - self.starts_with = start_with - self.start = start - self.page_size = page_size