From 78e2faf109cb6c7ff17a19be475734d8c2631cc5 Mon Sep 17 00:00:00 2001
From: Ralf Peschke
Date: Mon, 30 Sep 2024 10:58:34 +0200
Subject: [PATCH] just an intermediate state

---
 openslides_backend/action/action_handler.py | 4 +-
 .../database/db_connection_handling.py | 7 +-
 .../sql_read_database_backend_service.py | 359 ++++++++++++++++++
 .../shared/postgresql_backend/__init__.py | 5 +-
 .../pg_connection_handler.py | 25 +-
 .../sql_read_database_backend_service.py | 37 +-
 .../datastore/writer/services.py | 10 +-
 .../services/datastore/adapter.py | 53 +--
 .../services/datastore/cache_adapter.py | 10 +-
 tests/datastore/fixtures.py | 4 +-
 tests/system/base.py | 34 +-
 tests/system/conftest.py | 67 ++--
 12 files changed, 473 insertions(+), 142 deletions(-)
 create mode 100644 openslides_backend/database/sql_read_database_backend_service.py

diff --git a/openslides_backend/action/action_handler.py b/openslides_backend/action/action_handler.py
index 7500e8986..b065ca81e 100644
--- a/openslides_backend/action/action_handler.py
+++ b/openslides_backend/action/action_handler.py
@@ -5,7 +5,7 @@
 
 import fastjsonschema
 
-from openslides_backend.database.db_connection_handling import get_current_os_conn
+from openslides_backend.database.db_connection_handling import get_new_os_conn
 
 from ..shared.exceptions import (
     ActionException,
@@ -105,7 +105,7 @@ def handle_request(
         parsing all actions. In the end it sends everything to the event store.
         """
         with make_span(self.env, "handle request"):
-            with get_current_os_conn() as db_connection:
+            with get_new_os_conn() as db_connection:
                 self.db_connection = db_connection
                 self.user_id = user_id
                 self.internal = internal
diff --git a/openslides_backend/database/db_connection_handling.py b/openslides_backend/database/db_connection_handling.py
index f1fa3cd25..f25594663 100644
--- a/openslides_backend/database/db_connection_handling.py
+++ b/openslides_backend/database/db_connection_handling.py
@@ -4,13 +4,15 @@
 
 import psycopg
 import psycopg_pool
-
 from openslides_backend.shared.env import Environment
 from openslides_backend.shared.exceptions import DatabaseException
 
 env = Environment(os.environ)
 conn_string_without_db = f"host='{env.DATABASE_HOST}' port='{env.DATABASE_PORT}' user='{env.DATABASE_USER}' password='{env.PGPASSWORD}' "
 
+def configure_connection(conn: psycopg.Connection) -> None:
+    """Callback, called after the pool has created a new connection."""
+    conn.isolation_level = psycopg.IsolationLevel.SERIALIZABLE
 
 def create_os_conn_pool(open: bool = True) -> psycopg_pool.ConnectionPool:
     """create the global connection pool on the openslides-db"""
@@ -32,6 +34,7 @@ def create_os_conn_pool(open: bool = True) -> psycopg_pool.ConnectionPool:
         max_idle=float(env.DB_POOL_MAX_IDLE),
         reconnect_timeout=float(env.DB_POOL_RECONNECT_TIMEOUT),
         num_workers=int(env.DB_POOL_NUM_WORKERS),
+        configure=configure_connection,
     )
     return os_conn_pool
 
@@ -50,7 +53,7 @@ def get_current_os_conn_pool() -> psycopg_pool.ConnectionPool:
     return os_conn_pool
 
 
-def get_current_os_conn() -> contextlib._GeneratorContextManager[psycopg.Connection]:
+def get_new_os_conn() -> contextlib._GeneratorContextManager[psycopg.Connection]:
     os_conn_pool = get_current_os_conn_pool()
     return os_conn_pool.connection()
 
diff --git a/openslides_backend/database/sql_read_database_backend_service.py b/openslides_backend/database/sql_read_database_backend_service.py
new file mode 100644
index 000000000..26bbcc1fd
--- /dev/null
+++ b/openslides_backend/database/sql_read_database_backend_service.py
@@ -0,0 +1,359 @@
+from collections import defaultdict
+from collections.abc import Iterable
+from textwrap import dedent
+from typing import Any, ContextManager
+
+from openslides_backend.datastore.shared.di import service_as_singleton
+from openslides_backend.datastore.shared.postgresql_backend.apply_list_updates import \
+    apply_fields
+from openslides_backend.datastore.shared.postgresql_backend.connection_handler import \
+    ConnectionHandler
+from openslides_backend.datastore.shared.postgresql_backend.sql_event_types import \
+    EVENT_TYPE
+from openslides_backend.datastore.shared.postgresql_backend.sql_query_helper import \
+    SqlQueryHelper
+from openslides_backend.datastore.shared.services.read_database import (
+    BaseAggregateFilterQueryFieldsParameters, HistoryInformation,
+    MappedFieldsFilterQueryFieldsParameters)
+from openslides_backend.datastore.shared.util import (
+    BadCodingError, DeletedModelsBehaviour, InvalidDatastoreState,
+    ModelDoesNotExist, get_exception_for_deleted_models_behaviour)
+from openslides_backend.datastore.shared.util.mapped_fields import MappedFields
+from openslides_backend.shared.filters import Filter
+from openslides_backend.shared.patterns import (META_DELETED, META_POSITION,
+                                                Collection, Field,
+                                                FullQualifiedId, Id, Position,
+                                                collection_and_id_from_fqid,
+                                                fqid_from_collection_and_id,
+                                                id_from_fqid)
+from openslides_backend.shared.typing import Model
+
+
+@service_as_singleton
+class SqlReadDatabaseBackendService:
+    """
+    Reading data directly from Postgres via SQL
+    """
+
+    connection: ConnectionHandler
+    query_helper: SqlQueryHelper
+
+    def get_context(self) -> ContextManager[None]:
+        return self.connection.get_connection_context()
+
+    def get(
+        self,
+        fqid: FullQualifiedId,
+        mapped_fields: MappedFields | None = None,
+        get_deleted_models: DeletedModelsBehaviour = DeletedModelsBehaviour.NO_DELETED,
+    ) -> Model:
+        models = self.get_many([fqid], mapped_fields, get_deleted_models)
+        try:
+            return models[fqid]
+        except KeyError:
+            raise get_exception_for_deleted_models_behaviour(fqid, get_deleted_models)
+
+    def get_many(
+        self,
+        fqids: Iterable[FullQualifiedId],
+        mapped_fields: MappedFields | None = None,
+        get_deleted_models: DeletedModelsBehaviour = DeletedModelsBehaviour.NO_DELETED,
+    ) -> dict[FullQualifiedId, Model]:
+        if not fqids:
+            return {}
+        if mapped_fields is None:
+            mapped_fields = MappedFields()
+
+        arguments: list[Any] = [list(fqids)]
+        del_cond = self.query_helper.get_deleted_condition(get_deleted_models)
+
+        (
+            mapped_fields_str,
+            mapped_field_args,
+        ) = self.query_helper.build_select_from_mapped_fields(mapped_fields)
+
+        query = f"""
+            select fqid, {mapped_fields_str} from models
+            where fqid = any(%s) {del_cond}"""
+        result = self.connection.query(
+            query, mapped_field_args + arguments, mapped_fields.unique_fields
+        )
+
+        models = self.build_models_from_result(result, mapped_fields)
+        return models
+
+    def get_all(
+        self,
+        collection: Collection,
+        mapped_fields: MappedFields | None = None,
+        get_deleted_models: DeletedModelsBehaviour = DeletedModelsBehaviour.NO_DELETED,
+    ) -> dict[Id, Model]:
+        if mapped_fields is None:
+            mapped_fields = MappedFields()
+        del_cond = self.query_helper.get_deleted_condition(get_deleted_models)
+        (
+            mapped_fields_str,
+            mapped_field_args,
+        ) = self.query_helper.build_select_from_mapped_fields(mapped_fields)
+        query = f"""
+            select fqid as __fqid__, {mapped_fields_str} from models
+            where fqid like %s {del_cond}"""
+        models = self.fetch_models(
+            query,
+            mapped_field_args + [fqid_from_collection_and_id(collection, "%")],
+            mapped_fields.unique_fields,
+            mapped_fields.unique_fields,
+        )
+        return models
+
+    def get_everything(
+        self,
+        get_deleted_models: DeletedModelsBehaviour = DeletedModelsBehaviour.NO_DELETED,
+    ) -> dict[Collection, dict[Id, Model]]:
+        del_cond = self.query_helper.get_deleted_condition(
+            get_deleted_models, prepend_and=False
+        )
+        query = f"""
+            select fqid as __fqid__, data from models
+            {"where " + del_cond if del_cond else ""}"""
+        result = self.connection.query(query, [], [])
+
+        data: dict[Collection, dict[Id, Model]] = defaultdict(dict)
+        for row in result:
+            collection, id = collection_and_id_from_fqid(row["__fqid__"])
+            model = row["data"]
+            model["id"] = id
+            data[collection][id] = model
+
+        return data
+
+    def filter(
+        self, collection: Collection, filter: Filter, mapped_fields: list[Field]
+    ) -> dict[Id, Model]:
+        fields_params = MappedFieldsFilterQueryFieldsParameters(mapped_fields)
+        query, arguments, sql_params = self.query_helper.build_filter_query(
+            collection, filter, fields_params, select_fqid=True
+        )
+        models = self.fetch_models(query, arguments, sql_params, mapped_fields)
+        return models
+
+    def aggregate(
+        self,
+        collection: Collection,
+        filter: Filter,
+        fields_params: BaseAggregateFilterQueryFieldsParameters,
+    ) -> Any:
+        query, arguments, sql_params = self.query_helper.build_filter_query(
+            collection, filter, fields_params
+        )
+        value = self.connection.query(query, arguments, sql_params)
+        return value[0].copy()
+
+    def fetch_models(
+        self,
+        query: str,
+        arguments: list[str],
+        sql_parameters: list[str],
+        mapped_fields: list[str],
+    ) -> dict[int, Model]:
+        """Fetch models for one collection."""
+        result = self.connection.query(query, arguments, sql_parameters)
+        models = {}
+        for row in result:
+            # if there are mapped_fields, we already resolved them in the query and
+            # can just copy all fields. Otherwise we can just use the whole `data` field.
+            if len(mapped_fields) > 0:
+                model = row.copy()
+                del model["__fqid__"]
+            else:
+                model = row["data"]
+            models[id_from_fqid(row["__fqid__"])] = model
+        return models
+
+    def build_models_from_result(
+        self, result, mapped_fields: MappedFields
+    ) -> dict[str, Model]:
+        result_map = {}
+        for row in result:
+            fqid = row["fqid"]
+
+            if mapped_fields.needs_whole_model:
+                # at least one collection needs all fields, so we need to select data
+                row = row["data"]
+
+            if fqid in mapped_fields.per_fqid and len(mapped_fields.per_fqid[fqid]) > 0:
+                model = {}
+                for field in mapped_fields.per_fqid[fqid]:
+                    if row.get(field) is not None:
+                        model[field] = row[field]
+            else:
+                model = row
+            result_map[fqid] = model
+
+        return result_map
+
+    def build_model_ignore_deleted(
+        self, fqid: FullQualifiedId, position: Position | None = None
+    ) -> Model:
+        models = self.build_models_ignore_deleted([fqid], position)
+        try:
+            return models[fqid]
+        except KeyError:
+            raise ModelDoesNotExist(fqid)
+
+    def build_models_ignore_deleted(
+        self, fqids: list[FullQualifiedId], position: Position | None = None
+    ) -> dict[FullQualifiedId, Model]:
+        # Optionally only builds the models up to the specified position.
+        # TODO: There might be a speedup: Get the model from the readdb first.
+        # If the model exists there, read the position from it, use the model
+        # as a starting point in `build_model_from_events` and just fetch all
+        # events after the position.
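+        # The models are rebuilt purely from the event log: the query below
+        # fetches all events per fqid ordered by (position, weight), and
+        # build_model_from_events replays them on top of the initial CREATE.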
+        if position:
+            pos_cond = "and position <= %s"
+            pos_args = [position]
+        else:
+            pos_cond = ""
+            pos_args = []
+
+        query = dedent(
+            f"""\
+            select fqid, type, data, position from events e
+            where fqid = any(%s) {pos_cond}
+            order by position asc, weight asc"""
+        )
+
+        args: list[Any] = [fqids]
+        db_events = self.connection.query(query, args + pos_args)
+
+        events_per_fqid: dict[FullQualifiedId, list[dict[str, Any]]] = defaultdict(list)
+        for event in db_events:
+            events_per_fqid[event["fqid"]].append(event)
+
+        models = {}
+        for fqid, events in events_per_fqid.items():
+            models[fqid] = self.build_model_from_events(events)
+
+        return models
+
+    def build_model_from_events(self, events: list[dict[str, Any]]) -> Model:
+        if not events:
+            raise BadCodingError()
+
+        create_event = events[0]
+        assert create_event["type"] == EVENT_TYPE.CREATE
+        model: Model = {**create_event["data"], META_DELETED: False}
+
+        # apply all subsequent events (update, delete_fields, list update,
+        # delete, restore) on top of the created model
+        for event in events[1:]:
+            if event["type"] == EVENT_TYPE.UPDATE:
+                model.update(event["data"])
+            elif event["type"] == EVENT_TYPE.DELETE_FIELDS:
+                for field in event["data"]:
+                    if field in model:
+                        del model[field]
+            elif event["type"] == EVENT_TYPE.LIST_FIELDS:
+                for field, value in apply_fields(
+                    model, event["data"]["add"], event["data"]["remove"]
+                ).items():
+                    model[field] = value
+            elif event["type"] == EVENT_TYPE.DELETE:
+                model[META_DELETED] = True
+            elif event["type"] == EVENT_TYPE.RESTORE:
+                model[META_DELETED] = False
+            else:
+                raise BadCodingError()
+
+        model[META_POSITION] = events[-1]["position"]
+        return model
+
+    def is_deleted(
+        self, fqid: FullQualifiedId, position: Position | None = None
+    ) -> bool:
+        result = self.get_deleted_status([fqid], position)
+        try:
+            return result[fqid]
+        except KeyError:
+            raise ModelDoesNotExist(fqid)
+
+    def get_deleted_status(
+        self, fqids: list[FullQualifiedId], position: Position | None = None
+    ) -> dict[FullQualifiedId, bool]:
+        if not position:
+            return self.get_deleted_status_from_read_db(fqids)
+        else:
+            return self.get_deleted_status_from_events(fqids, position)
+
+    def get_deleted_status_from_read_db(
+        self, fqids: list[FullQualifiedId]
+    ) -> dict[FullQualifiedId, bool]:
+        query = "select fqid, deleted from models where fqid = any(%s)"
+        result = self.connection.query(query, [fqids])
+        return {row["fqid"]: row["deleted"] for row in result}
+
+    def get_deleted_status_from_events(
+        self, fqids: list[FullQualifiedId], position: Position
+    ) -> dict[FullQualifiedId, bool]:
+        included_types = dedent(
+            f"""\
+            ('{EVENT_TYPE.CREATE}',
+            '{EVENT_TYPE.DELETE}',
+            '{EVENT_TYPE.RESTORE}')"""
+        )
+        query = f"""
+            select fqid, type from (
+                select fqid, max(position) as position from events
+                where type in {included_types} and position <= {position}
+                and fqid = any(%s) group by fqid
+            ) t natural join events order by position asc, weight asc
+            """
+        result = self.connection.query(query, [fqids])
+        return {row["fqid"]: row["type"] == EVENT_TYPE.DELETE for row in result}
+
+    def get_history_information(
+        self, fqids: list[FullQualifiedId]
+    ) -> dict[FullQualifiedId, list[HistoryInformation]]:
+        positions = self.connection.query(
+            """select fqid, position, timestamp, user_id, information from positions natural join events
+            where fqid = any(%s) and information::text <> %s::text order by position asc""",
+            [fqids, self.json(None)],
+        )
+        history_information = defaultdict(list)
+        for position in positions:
+            history_information[position["fqid"]].append(
+                HistoryInformation(
+                    position=position["position"],
+                    timestamp=position["timestamp"].timestamp(),
+                    user_id=position["user_id"],
+                    information=position["information"],
+                )
+            )
+        return history_information
+
+    def is_empty(self) -> bool:
+        return not self.connection.query_single_value(
+            "select exists(select * from positions)", []
+        )
+
+    def get_max_position(self) -> Position:
+        return self.connection.query_single_value(
+            "select max(position) from positions", []
+        )
+
+    def get_current_migration_index(self) -> int:
+        result = self.connection.query(
+            "select min(migration_index), max(migration_index) from positions", []
+        )
+        min_migration_index = result[0]["min"] if result else None
+        max_migration_index = result[0]["max"] if result else None
+        if min_migration_index != max_migration_index:
+            raise InvalidDatastoreState(
+                "The datastore has inconsistent migration indices: "
+                + f"Minimum is {min_migration_index}, maximum is {max_migration_index}."
+            )
+        return max_migration_index or -1
+
+    def json(self, data):
+        return self.connection.to_json(data)
diff --git a/openslides_backend/datastore/shared/postgresql_backend/__init__.py b/openslides_backend/datastore/shared/postgresql_backend/__init__.py
index a7ed71439..de0a0805f 100644
--- a/openslides_backend/datastore/shared/postgresql_backend/__init__.py
+++ b/openslides_backend/datastore/shared/postgresql_backend/__init__.py
@@ -24,7 +24,10 @@ def setup_di():
     from openslides_backend.datastore.shared.services import ReadDatabase
 
     from .pg_connection_handler import PgConnectionHandlerService
-    from .sql_read_database_backend_service import SqlReadDatabaseBackendService
+    from .sql_read_database_backend_service import \
+        SqlReadDatabaseBackendService
+
+
     injector.register(ConnectionHandler, PgConnectionHandlerService)
     injector.register(SqlQueryHelper, SqlQueryHelper)
diff --git a/openslides_backend/datastore/shared/postgresql_backend/pg_connection_handler.py b/openslides_backend/datastore/shared/postgresql_backend/pg_connection_handler.py
index 95f323dc6..a973e8331 100755
--- a/openslides_backend/datastore/shared/postgresql_backend/pg_connection_handler.py
+++ b/openslides_backend/datastore/shared/postgresql_backend/pg_connection_handler.py
@@ -2,18 +2,16 @@
 from functools import wraps
 from time import sleep
 
+from openslides_backend.datastore.shared.di import (injector,
+                                                    service_as_singleton)
+from openslides_backend.datastore.shared.services import (EnvironmentService,
+                                                          ShutdownService)
+from openslides_backend.datastore.shared.util import BadCodingError, logger
 from psycopg import OperationalError, sql
 from psycopg.rows import dict_row
 from psycopg.types.json import Json
 from psycopg_pool import ConnectionPool
 
-from openslides_backend.datastore.shared.di import injector, service_as_singleton
-from openslides_backend.datastore.shared.services import (
-    EnvironmentService,
-    ShutdownService,
-)
-from openslides_backend.datastore.shared.util import BadCodingError, logger
-
 from .connection_handler import DatabaseError
 
 
@@ -101,6 +99,9 @@ def __init__(self, shutdown_service: ShutdownService):
 
     def get_current_connection(self):
         try:
+            # raise Exception(
+            #     f"TOREMOVE get_current_connection for the current thread's connection id:{id(self._storage.connection)}"
+            # )
             return self._storage.connection
         except AttributeError:
             return None
@@ -128,11 +129,17 @@ def get_connection(self):
                 raise BadCodingError(
                     "You cannot start multiple transactions in one thread!"
                 )
-        return self.connection_pool.connection()
+        connection = self.connection_pool.connection()
+        # raise Exception(
+        #     f"TOREMOVE get_connection for the new pool connection id:{id(connection)}"
+        # )
+        return connection
 
     def get_connection_context(self):
         self.connection_pool.open()
-        return ConnectionContext(self)
+        c_ctx = ConnectionContext(self)
+        # raise Exception(f"TOREMOVE get_connection_context for exactly this context id:{id(c_ctx)}")
+        return c_ctx
 
     def to_json(self, data):
         return Json(data)
diff --git a/openslides_backend/datastore/shared/postgresql_backend/sql_read_database_backend_service.py b/openslides_backend/datastore/shared/postgresql_backend/sql_read_database_backend_service.py
index ae08e4509..55dd64768 100644
--- a/openslides_backend/datastore/shared/postgresql_backend/sql_read_database_backend_service.py
+++ b/openslides_backend/datastore/shared/postgresql_backend/sql_read_database_backend_service.py
@@ -5,35 +5,22 @@
 
 from openslides_backend.datastore.shared.di import service_as_singleton
 from openslides_backend.datastore.shared.postgresql_backend import apply_fields
-from openslides_backend.datastore.shared.postgresql_backend.sql_query_helper import (
-    SqlQueryHelper,
-)
+from openslides_backend.datastore.shared.postgresql_backend.sql_query_helper import \
+    SqlQueryHelper
 from openslides_backend.datastore.shared.services.read_database import (
-    BaseAggregateFilterQueryFieldsParameters,
-    HistoryInformation,
-    MappedFieldsFilterQueryFieldsParameters,
-)
+    BaseAggregateFilterQueryFieldsParameters, HistoryInformation,
+    MappedFieldsFilterQueryFieldsParameters)
 from openslides_backend.datastore.shared.util import (
-    BadCodingError,
-    DeletedModelsBehaviour,
-    InvalidDatastoreState,
-    ModelDoesNotExist,
-    get_exception_for_deleted_models_behaviour,
-)
+    BadCodingError, DeletedModelsBehaviour, InvalidDatastoreState,
+    ModelDoesNotExist, get_exception_for_deleted_models_behaviour)
 from openslides_backend.datastore.shared.util.mapped_fields import MappedFields
 from openslides_backend.shared.filters import Filter
-from openslides_backend.shared.patterns import (
-    META_DELETED,
-    META_POSITION,
-    Collection,
-    Field,
-    FullQualifiedId,
-    Id,
-    Position,
-    collection_and_id_from_fqid,
-    fqid_from_collection_and_id,
-    id_from_fqid,
-)
+from openslides_backend.shared.patterns import (META_DELETED, META_POSITION,
+                                                Collection, Field,
+                                                FullQualifiedId, Id, Position,
+                                                collection_and_id_from_fqid,
+                                                fqid_from_collection_and_id,
+                                                id_from_fqid)
 from openslides_backend.shared.typing import Model
 
 from .connection_handler import ConnectionHandler
diff --git a/openslides_backend/datastore/writer/services.py b/openslides_backend/datastore/writer/services.py
index 967be292e..e3586a8e6 100644
--- a/openslides_backend/datastore/writer/services.py
+++ b/openslides_backend/datastore/writer/services.py
@@ -1,11 +1,11 @@
-from openslides_backend.datastore.shared.postgresql_backend import (
-    setup_di as postgresql_setup_di,
-)
-from openslides_backend.datastore.shared.services import setup_di as util_setup_di
+from openslides_backend.datastore.shared.postgresql_backend import \
+    setup_di as postgresql_setup_di
+from openslides_backend.datastore.shared.services import \
+    setup_di as util_setup_di
 from openslides_backend.datastore.writer import setup_di as writer_setup_di
 
 
 def register_services():
-    util_setup_di()
+    util_setup_di()  # EnvironmentService, ShutdownService
     postgresql_setup_di()
     writer_setup_di()
diff --git a/openslides_backend/services/datastore/adapter.py b/openslides_backend/services/datastore/adapter.py
index 6edaaa15a..df1ffbc6e 100644
--- a/openslides_backend/services/datastore/adapter.py
+++ b/openslides_backend/services/datastore/adapter.py
@@ -3,52 +3,37 @@
 from typing import Any, ContextManager
 
 import simplejson as json
-from simplejson.errors import JSONDecodeError
-
 from openslides_backend.datastore.reader.core import (
-    AggregateRequest,
-    FilterRequest,
-    GetAllRequest,
-    GetManyRequest,
-    GetManyRequestPart,
-    GetRequest,
-    HistoryInformationRequest,
-    MinMaxRequest,
-    Reader,
-)
+    AggregateRequest, FilterRequest, GetAllRequest, GetManyRequest,
+    GetManyRequestPart, GetRequest, HistoryInformationRequest, MinMaxRequest,
+    Reader)
 from openslides_backend.datastore.shared.di import injector
-from openslides_backend.datastore.shared.services.read_database import (
-    HistoryInformation,
-)
+from openslides_backend.datastore.shared.services.read_database import \
+    HistoryInformation
 from openslides_backend.datastore.shared.util import DeletedModelsBehaviour
 from openslides_backend.shared.patterns import is_reserved_field
+from simplejson.errors import JSONDecodeError
 
 from ...models.base import model_registry
 from ...shared.exceptions import DatastoreException
 from ...shared.filters import And, Filter, FilterOperator, filter_visitor
 from ...shared.interfaces.collection_field_lock import (
-    CollectionFieldLock,
-    CollectionFieldLockWithFilter,
-)
+    CollectionFieldLock, CollectionFieldLockWithFilter)
 from ...shared.interfaces.env import Env
 from ...shared.interfaces.logging import LoggingModule
 from ...shared.interfaces.write_request import WriteRequest
-from ...shared.patterns import (
-    COLLECTIONFIELD_PATTERN,
-    Collection,
-    CollectionField,
-    FullQualifiedField,
-    FullQualifiedId,
-    collection_and_field_from_collectionfield,
-    collection_and_field_from_fqfield,
-    collectionfield_from_collection_and_field,
-    fqfield_from_fqid_and_field,
-    fqid_from_collection_and_id,
-    is_collectionfield,
-    is_fqfield,
-)
+from ...shared.patterns import (COLLECTIONFIELD_PATTERN, Collection,
+                                CollectionField, FullQualifiedField,
+                                FullQualifiedId,
+                                collection_and_field_from_collectionfield,
+                                collection_and_field_from_fqfield,
+                                collectionfield_from_collection_and_field,
+                                fqfield_from_fqid_and_field,
+                                fqid_from_collection_and_id,
+                                is_collectionfield, is_fqfield)
 from . import commands
-from .handle_datastore_errors import handle_datastore_errors, raise_datastore_error
+from .handle_datastore_errors import (handle_datastore_errors,
+                                      raise_datastore_error)
 from .interface import BaseDatastoreService, Engine, LockResult, PartialModel
 
 MappedFieldsPerFqid = dict[FullQualifiedId, list[str]]
 
@@ -120,7 +105,7 @@ def get(
         self.logger.debug(
             f"Start GET request to datastore with the following data: {request}"
         )
-        response = self.reader.get(request)
+        response = self.reader.get(request,)
         if lock_result:
             instance_position = response.get("meta_position")
             if not isinstance(instance_position, int):
diff --git a/openslides_backend/services/datastore/cache_adapter.py b/openslides_backend/services/datastore/cache_adapter.py
index 1eed81936..004134f74 100644
--- a/openslides_backend/services/datastore/cache_adapter.py
+++ b/openslides_backend/services/datastore/cache_adapter.py
@@ -6,13 +6,9 @@
 
 from ...shared.interfaces.env import Env
 from ...shared.interfaces.logging import LoggingModule
-from ...shared.patterns import (
-    Collection,
-    FullQualifiedId,
-    collection_from_fqid,
-    fqid_from_collection_and_id,
-    id_from_fqid,
-)
+from ...shared.patterns import (Collection, FullQualifiedId,
+                                collection_from_fqid,
+                                fqid_from_collection_and_id, id_from_fqid)
 from ...shared.typing import ModelMap
 from .adapter import DatastoreAdapter
 from .commands import GetManyRequest
diff --git a/tests/datastore/fixtures.py b/tests/datastore/fixtures.py
index 3aa5851c2..27189ac83 100644
--- a/tests/datastore/fixtures.py
+++ b/tests/datastore/fixtures.py
@@ -2,7 +2,7 @@
 
 import pytest
 
-from openslides_backend.database.db_connection_handling import get_current_os_conn
+from openslides_backend.database.db_connection_handling import get_new_os_conn
 from openslides_backend.datastore.shared.di import injector
 from openslides_backend.datastore.shared.postgresql_backend import ALL_TABLES
 
@@ -34,7 +34,7 @@ def reset_di():
 
 @pytest.fixture(scope="session", autouse=True)
 def setup_db_connection():
-    with get_current_os_conn() as db_connection:
+    with get_new_os_conn() as db_connection:
         yield db_connection
 
     # teardown
diff --git a/tests/system/base.py b/tests/system/base.py
index bfe036339..ba76b6d96 100644
--- a/tests/system/base.py
+++ b/tests/system/base.py
@@ -7,35 +7,31 @@
 
 import simplejson as json
 from fastjsonschema.exceptions import JsonSchemaException
-
 from openslides_backend.datastore.reader.services import register_services
 from openslides_backend.datastore.shared.di import injector
 from openslides_backend.datastore.shared.services import ShutdownService
 from openslides_backend.datastore.shared.util import DeletedModelsBehaviour
-from openslides_backend.http.application import OpenSlidesBackendWSGIApplication
+from openslides_backend.http.application import \
+    OpenSlidesBackendWSGIApplication
 from openslides_backend.models.base import Model, model_registry
 from openslides_backend.services.auth.interface import AuthenticationService
 from openslides_backend.services.datastore.interface import DatastoreService
-from openslides_backend.services.datastore.with_database_context import (
-    with_database_context,
-)
+from openslides_backend.services.datastore.with_database_context import \
+    with_database_context
 from openslides_backend.shared.env import Environment
-from openslides_backend.shared.exceptions import ActionException, DatastoreException
+from openslides_backend.shared.exceptions import (ActionException,
+                                                  DatastoreException)
 from openslides_backend.shared.filters import FilterOperator
 from openslides_backend.shared.interfaces.event import Event, EventType
 from openslides_backend.shared.interfaces.write_request import WriteRequest
-from openslides_backend.shared.patterns import (
-    FullQualifiedId,
-    collection_from_fqid,
-    id_from_fqid,
-    is_reserved_field,
-)
-from openslides_backend.shared.util import (
-    EXAMPLE_DATA_FILE,
-    ONE_ORGANIZATION_FQID,
-    ONE_ORGANIZATION_ID,
-    get_initial_data_file,
-)
+from openslides_backend.shared.patterns import (FullQualifiedId,
+                                                collection_from_fqid,
+                                                id_from_fqid,
+                                                is_reserved_field)
+from openslides_backend.shared.util import (EXAMPLE_DATA_FILE,
+                                            ONE_ORGANIZATION_FQID,
+                                            ONE_ORGANIZATION_ID,
+                                            get_initial_data_file)
 from tests.util import AuthData, Client, Response
 
 from .util import TestVoteService
@@ -64,7 +60,7 @@ class BaseSystemTestCase(TestCase):
     init_with_login: bool = True
 
     def setUp(self) -> None:
-        register_services()
+        # register_services()
        self.app = self.get_application()
         self.logger = cast(MagicMock, self.app.logger)
         self.services = self.app.services
diff --git a/tests/system/conftest.py b/tests/system/conftest.py
index 307ba51a8..b676e347a 100644
--- a/tests/system/conftest.py
+++ b/tests/system/conftest.py
@@ -3,19 +3,13 @@
 from unittest.mock import _patch
 
 import pytest
-from psycopg import Connection
-
 from openslides_backend.database.db_connection_handling import (
-    env,
-    get_current_os_conn_pool,
-    os_conn_pool,
-)
+    env, get_current_os_conn_pool, get_new_os_conn)
+from psycopg import Connection
 from tests.mock_auth_login import auth_http_adapter_patch, login_patch
 
-from .conftest_helper import (
-    generate_remove_all_test_functions,
-    generate_sql_for_test_initiation,
-)
+from .conftest_helper import (generate_remove_all_test_functions,
+                              generate_sql_for_test_initiation)
 
 openslides_db = env.DATABASE_NAME
 database_user = env.DATABASE_USER
@@ -26,33 +20,34 @@
 def setup_pytest_session() -> Generator[dict[str, _patch], None, None]:
     """applies the login and auth-service mocker
     truncates all database tables for initialization of tests
     """
-    login_patch.start()
-    auth_http_adapter_patch.start()
-    with get_current_os_conn_pool().connection() as conn:
-        with conn.cursor() as curs:
-            rows = curs.execute(
-                "SELECT schemaname, tablename from pg_tables where schemaname in ('public', 'vote');"
-            ).fetchall()
-            tablenames = tuple(
-                f"{row.get('schemaname', '')}.{row.get('tablename', '')}" for row in rows  # type: ignore
-            )
-            curs.execute(
-                f"TRUNCATE TABLE {','.join(tablenames)} RESTART IDENTITY CASCADE"
-            )
-            curs.execute(generate_sql_for_test_initiation(tablenames))
+    connection_pool = get_current_os_conn_pool()
+    with connection_pool:
+        login_patch.start()
+        auth_http_adapter_patch.start()
+        with get_new_os_conn() as conn:
+            with conn.cursor() as curs:
+                rows = curs.execute(
+                    "SELECT schemaname, tablename from pg_tables where schemaname in ('public', 'vote');"
+                ).fetchall()
+                tablenames = tuple(
+                    f"{row.get('schemaname', '')}.{row.get('tablename', '')}" for row in rows  # type: ignore
+                )
+                curs.execute(
+                    f"TRUNCATE TABLE {','.join(tablenames)} RESTART IDENTITY CASCADE"
+                )
+                curs.execute(generate_sql_for_test_initiation(tablenames))
 
-    # Todo: Load example-data.json as preset. BUT: with this truncate version this is not possible, because they would be truncated
-    yield {
-        "login_patch": login_patch,
-        "auth_http_adapter_patch": auth_http_adapter_patch,
-    }  # auth_mocker
+        yield {
+            "login_patch": login_patch,
+            "auth_http_adapter_patch": auth_http_adapter_patch,
+        }  # auth_mocker
 
-    # teardown session
-    with get_current_os_conn_pool().connection() as conn:
-        with conn.cursor() as curs:
-            curs.execute(generate_remove_all_test_functions())
-    login_patch.stop()
-    auth_http_adapter_patch.stop()
+        # teardown session
+        with get_new_os_conn() as conn:
+            with conn.cursor() as curs:
+                curs.execute(generate_remove_all_test_functions())
+        login_patch.stop()
+        auth_http_adapter_patch.stop()
 
 
 @pytest.fixture(scope="class")
@@ -65,7 +60,7 @@ def auth_mockers(request: Any, setup_pytest_session: Any) -> None:
 
 @pytest.fixture(autouse=True)
 def db_connection() -> Generator[Connection, None, None]:
-    with os_conn_pool.connection() as conn:
+    with get_new_os_conn() as conn:
         yield conn
         with conn.cursor() as curs:
             curs.execute("SELECT truncate_testdata_tables()")
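
Usage sketch (illustration only, not part of the diff above): get_new_os_conn()
checks a connection out of the process-wide pool built by create_os_conn_pool(),
and the pool's configure callback has already switched that connection to
SERIALIZABLE isolation. A reachable Postgres configured through the DATABASE_*
environment variables is assumed, and the queried fqid is a made-up example.

    from openslides_backend.database.db_connection_handling import get_new_os_conn

    # Check a connection out of the pool; configure_connection() has already
    # set isolation_level = SERIALIZABLE on it.
    with get_new_os_conn() as conn:
        with conn.cursor() as curs:
            # "user/1" is only an illustrative fqid.
            curs.execute("select data from models where fqid = %s", ("user/1",))
            row = curs.fetchone()
    # Leaving the block commits (or rolls back on error) and returns the
    # connection to the pool instead of closing it.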