From 5c72c0e468749ca6ae72e24dedba2cf84922030a Mon Sep 17 00:00:00 2001 From: Ali Tavallaie Date: Sun, 16 Jun 2024 00:13:58 +0330 Subject: [PATCH 01/18] feat: adding transaction as decorator --- tembo-pgmq-python/tembo_pgmq_python/queue.py | 264 ++++++++++++------- 1 file changed, 163 insertions(+), 101 deletions(-) diff --git a/tembo-pgmq-python/tembo_pgmq_python/queue.py b/tembo-pgmq-python/tembo_pgmq_python/queue.py index 068e302b..cf3739f7 100644 --- a/tembo-pgmq-python/tembo_pgmq_python/queue.py +++ b/tembo-pgmq-python/tembo_pgmq_python/queue.py @@ -1,8 +1,9 @@ from dataclasses import dataclass, field from datetime import datetime -from typing import Optional, List +from typing import Optional, List, Callable, Union from psycopg.types.json import Jsonb from psycopg_pool import ConnectionPool +import functools import os @@ -25,6 +26,29 @@ class QueueMetrics: scrape_time: datetime +def transaction(func: Callable) -> Callable: + """Decorator to run a method within a database transaction.""" + + @functools.wraps(func) + def wrapper(self, *args, **kwargs): + perform_transaction = kwargs.pop( + "perform_transaction", self.perform_transaction + ) + if perform_transaction: + with self.pool.connection() as conn: + try: + with conn.transaction(): + return func(self, *args, conn=conn, **kwargs) + except Exception as e: + conn.rollback() + raise e + else: + with self.pool.connection() as conn: + return func(self, *args, conn=conn, **kwargs) + + return wrapper + + @dataclass class PGMQueue: """Base class for interacting with a queue""" @@ -39,17 +63,9 @@ class PGMQueue: pool_size: int = 10 kwargs: dict = field(default_factory=dict) pool: ConnectionPool = field(init=False) + perform_transaction: bool = False def __post_init__(self) -> None: - self.host = self.host or "localhost" - self.port = self.port or "5432" - self.database = self.database or "postgres" - self.username = self.username or "postgres" - self.password = self.password or "postgres" - - if not all([self.host, 
self.port, self.database, self.username, self.password]): - raise ValueError("Incomplete database connection information provided.") - conninfo = f""" host={self.host} port={self.port} @@ -58,15 +74,30 @@ def __post_init__(self) -> None: password={self.password} """ self.pool = ConnectionPool(conninfo, open=True, **self.kwargs) + self._initialize_extensions() + def _initialize_extensions(self) -> None: + self._execute_query("create extension if not exists pgmq cascade;") + + def _execute_query( + self, query: str, params: Optional[Union[List, tuple]] = None + ) -> None: + with self.pool.connection() as conn: + conn.execute(query, params) + + def _execute_query_with_result( + self, query: str, params: Optional[Union[List, tuple]] = None + ): with self.pool.connection() as conn: - conn.execute("create extension if not exists pgmq cascade;") + return conn.execute(query, params).fetchall() + @transaction def create_partitioned_queue( self, queue: str, partition_interval: int = 10000, retention_interval: int = 100000, + conn=None, ) -> None: """Create a new queue @@ -81,71 +112,84 @@ def create_partitioned_queue( retention_interval: The number of messages to retain. Messages exceeding this number will be dropped. Defaults to 100,000. 
""" + query = "select pgmq.create(%s, %s::text, %s::text);" + params = [queue, partition_interval, retention_interval] + self._execute_query(query, params) - with self.pool.connection() as conn: - conn.execute( - "select pgmq.create(%s, %s::text, %s::text);", - [queue, partition_interval, retention_interval], - ) - - def create_queue(self, queue: str, unlogged: bool = False) -> None: + @transaction + def create_queue(self, queue: str, unlogged: bool = False, conn=None) -> None: """Create a new queue.""" - with self.pool.connection() as conn: - if unlogged: - conn.execute("select pgmq.create_unlogged(%s);", [queue]) - else: - conn.execute("select pgmq.create(%s);", [queue]) + query = ( + "select pgmq.create_unlogged(%s);" + if unlogged + else "select pgmq.create(%s);" + ) + conn.execute(query, [queue]) # Use the provided connection def validate_queue_name(self, queue_name: str) -> None: """Validate the length of a queue name.""" - with self.pool.connection() as conn: - conn.execute("select pgmq.validate_queue_name(%s);", [queue_name]) + query = "select pgmq.validate_queue_name(%s);" + self._execute_query(query, [queue_name]) - def drop_queue(self, queue: str, partitioned: bool = False) -> bool: + @transaction + def drop_queue(self, queue: str, partitioned: bool = False, conn=None) -> bool: """Drop a queue.""" - with self.pool.connection() as conn: - result = conn.execute("select pgmq.drop_queue(%s, %s);", [queue, partitioned]).fetchone() - return result[0] + query = "select pgmq.drop_queue(%s, %s);" + result = self._execute_query_with_result(query, [queue, partitioned]) + return result[0][0] - def list_queues(self) -> List[str]: + @transaction + def list_queues(self, conn=None) -> List[str]: """List all queues.""" - with self.pool.connection() as conn: - rows = conn.execute("select queue_name from pgmq.list_queues();").fetchall() + query = "select queue_name from pgmq.list_queues();" + rows = self._execute_query_with_result(query) return [row[0] for row in rows] - 
def send(self, queue: str, message: dict, delay: int = 0) -> int: + @transaction + def send(self, queue: str, message: dict, delay: int = 0, conn=None) -> int: """Send a message to a queue.""" - with self.pool.connection() as conn: - result = conn.execute("select * from pgmq.send(%s, %s, %s);", [queue, Jsonb(message), delay]).fetchall() + query = "select * from pgmq.send(%s, %s, %s);" + result = self._execute_query_with_result(query, [queue, Jsonb(message), delay]) return result[0][0] - def send_batch(self, queue: str, messages: List[dict], delay: int = 0) -> List[int]: + @transaction + def send_batch( + self, queue: str, messages: List[dict], delay: int = 0, conn=None + ) -> List[int]: """Send a batch of messages to a queue.""" - with self.pool.connection() as conn: - result = conn.execute( - "select * from pgmq.send_batch(%s, %s, %s);", - [queue, [Jsonb(message) for message in messages], delay], - ).fetchall() + query = "select * from pgmq.send_batch(%s, %s, %s);" + params = [queue, [Jsonb(message) for message in messages], delay] + result = self._execute_query_with_result(query, params) return [message[0] for message in result] - def read(self, queue: str, vt: Optional[int] = None) -> Optional[Message]: + @transaction + def read( + self, queue: str, vt: Optional[int] = None, conn=None + ) -> Optional[Message]: """Read a message from a queue.""" - with self.pool.connection() as conn: - rows = conn.execute("select * from pgmq.read(%s, %s, %s);", [queue, vt or self.vt, 1]).fetchall() - - messages = [Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows] - return messages[0] if len(messages) == 1 else None + query = "select * from pgmq.read(%s, %s, %s);" + rows = self._execute_query_with_result(query, [queue, vt or self.vt, 1]) + messages = [ + Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) + for x in rows + ] + return messages[0] if messages else None - def read_batch(self, queue: str, vt: 
Optional[int] = None, batch_size=1) -> Optional[List[Message]]: + @transaction + def read_batch( + self, queue: str, vt: Optional[int] = None, batch_size=1, conn=None + ) -> Optional[List[Message]]: """Read a batch of messages from a queue.""" - with self.pool.connection() as conn: - rows = conn.execute( - "select * from pgmq.read(%s, %s, %s);", - [queue, vt or self.vt, batch_size], - ).fetchall() - - return [Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows] + query = "select * from pgmq.read(%s, %s, %s);" + rows = self._execute_query_with_result( + query, [queue, vt or self.vt, batch_size] + ) + return [ + Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) + for x in rows + ] + @transaction def read_with_poll( self, queue: str, @@ -153,60 +197,68 @@ def read_with_poll( qty: int = 1, max_poll_seconds: int = 5, poll_interval_ms: int = 100, + conn=None, ) -> Optional[List[Message]]: """Read messages from a queue with polling.""" - with self.pool.connection() as conn: - rows = conn.execute( - "select * from pgmq.read_with_poll(%s, %s, %s, %s, %s);", - [queue, vt or self.vt, qty, max_poll_seconds, poll_interval_ms], - ).fetchall() - - return [Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows] + query = "select * from pgmq.read_with_poll(%s, %s, %s, %s, %s);" + params = [queue, vt or self.vt, qty, max_poll_seconds, poll_interval_ms] + rows = self._execute_query_with_result(query, params) + return [ + Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) + for x in rows + ] - def pop(self, queue: str) -> Message: + @transaction + def pop(self, queue: str, conn=None) -> Message: """Pop a message from a queue.""" - with self.pool.connection() as conn: - rows = conn.execute("select * from pgmq.pop(%s);", [queue]).fetchall() - - messages = [Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows] + query = "select * 
from pgmq.pop(%s);" + rows = self._execute_query_with_result(query, [queue]) + messages = [ + Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) + for x in rows + ] return messages[0] - def delete(self, queue: str, msg_id: int) -> bool: + @transaction + def delete(self, queue: str, msg_id: int, conn=None) -> bool: """Delete a message from a queue.""" - with self.pool.connection() as conn: - row = conn.execute("select pgmq.delete(%s, %s);", [queue, msg_id]).fetchall() - - return row[0][0] + query = "select pgmq.delete(%s, %s);" + result = self._execute_query_with_result(query, [queue, msg_id]) + return result[0][0] - def delete_batch(self, queue: str, msg_ids: List[int]) -> List[int]: + @transaction + def delete_batch(self, queue: str, msg_ids: List[int], conn=None) -> List[int]: """Delete multiple messages from a queue.""" - with self.pool.connection() as conn: - result = conn.execute("select * from pgmq.delete(%s, %s);", [queue, msg_ids]).fetchall() + query = "select * from pgmq.delete(%s, %s);" + result = self._execute_query_with_result(query, [queue, msg_ids]) return [x[0] for x in result] - def archive(self, queue: str, msg_id: int) -> bool: + @transaction + def archive(self, queue: str, msg_id: int, conn=None) -> bool: """Archive a message from a queue.""" - with self.pool.connection() as conn: - row = conn.execute("select pgmq.archive(%s, %s);", [queue, msg_id]).fetchall() - - return row[0][0] + query = "select pgmq.archive(%s, %s);" + result = self._execute_query_with_result(query, [queue, msg_id]) + return result[0][0] - def archive_batch(self, queue: str, msg_ids: List[int]) -> List[int]: + @transaction + def archive_batch(self, queue: str, msg_ids: List[int], conn=None) -> List[int]: """Archive multiple messages from a queue.""" - with self.pool.connection() as conn: - result = conn.execute("select * from pgmq.archive(%s, %s);", [queue, msg_ids]).fetchall() + query = "select * from pgmq.archive(%s, %s);" + result = 
self._execute_query_with_result(query, [queue, msg_ids]) return [x[0] for x in result] - def purge(self, queue: str) -> int: + @transaction + def purge(self, queue: str, conn=None) -> int: """Purge a queue.""" - with self.pool.connection() as conn: - row = conn.execute("select pgmq.purge_queue(%s);", [queue]).fetchall() - - return row[0][0] + query = "select pgmq.purge_queue(%s);" + result = self._execute_query_with_result(query, [queue]) + return result[0][0] - def metrics(self, queue: str) -> QueueMetrics: - with self.pool.connection() as conn: - result = conn.execute("SELECT * FROM pgmq.metrics(%s);", [queue]).fetchone() + @transaction + def metrics(self, queue: str, conn=None) -> QueueMetrics: + """Get metrics for a specific queue.""" + query = "SELECT * FROM pgmq.metrics(%s);" + result = self._execute_query_with_result(query, [queue])[0] return QueueMetrics( queue_name=result[0], queue_length=result[1], @@ -216,9 +268,11 @@ def metrics(self, queue: str) -> QueueMetrics: scrape_time=result[5], ) - def metrics_all(self) -> List[QueueMetrics]: - with self.pool.connection() as conn: - results = conn.execute("SELECT * FROM pgmq.metrics_all();").fetchall() + @transaction + def metrics_all(self, conn=None) -> List[QueueMetrics]: + """Get metrics for all queues.""" + query = "SELECT * FROM pgmq.metrics_all();" + results = self._execute_query_with_result(query) return [ QueueMetrics( queue_name=row[0], @@ -231,13 +285,21 @@ def metrics_all(self) -> List[QueueMetrics]: for row in results ] - def set_vt(self, queue: str, msg_id: int, vt: int) -> Message: + @transaction + def set_vt(self, queue: str, msg_id: int, vt: int, conn=None) -> Message: """Set the visibility timeout for a specific message.""" - with self.pool.connection() as conn: - row = conn.execute("select * from pgmq.set_vt(%s, %s, %s);", [queue, msg_id, vt]).fetchone() - return Message(msg_id=row[0], read_ct=row[1], enqueued_at=row[2], vt=row[3], message=row[4]) + query = "select * from pgmq.set_vt(%s, %s, 
%s);" + result = self._execute_query_with_result(query, [queue, msg_id, vt])[0] + return Message( + msg_id=result[0], + read_ct=result[1], + enqueued_at=result[2], + vt=result[3], + message=result[4], + ) - def detach_archive(self, queue: str) -> None: + @transaction + def detach_archive(self, queue: str, conn=None) -> None: """Detach an archive from a queue.""" - with self.pool.connection() as conn: - conn.execute("select pgmq.detach_archive(%s);", [queue]) + query = "select pgmq.detach_archive(%s);" + self._execute_query(query, [queue]) From 39c4ac661b96bf624e704493a9eb7e1844043063 Mon Sep 17 00:00:00 2001 From: Ali Tavallaie Date: Sun, 16 Jun 2024 00:14:15 +0330 Subject: [PATCH 02/18] feat: test for transactions --- tembo-pgmq-python/tests/test_integration.py | 36 +++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/tembo-pgmq-python/tests/test_integration.py b/tembo-pgmq-python/tests/test_integration.py index d8991c61..4b141029 100644 --- a/tembo-pgmq-python/tests/test_integration.py +++ b/tembo-pgmq-python/tests/test_integration.py @@ -193,6 +193,42 @@ def test_validate_queue_name(self): self.queue.validate_queue_name(invalid_queue_name) self.assertIn("queue name is too long", str(context.exception)) + def test_transaction_create_queue(self): + """Test creating a queue within a transaction.""" + try: + self.queue.create_queue("test_queue_txn", perform_transaction=True) + raise Exception("Intentional failure") + except Exception: + pass + # Verify the queue was not created + queues = self.queue.list_queues() + self.assertNotIn("test_queue_txn", queues) + + def test_transaction_send_and_read_message(self): + """Test sending and reading a message within a transaction.""" + try: + self.queue.send( + self.test_queue, self.test_message, perform_transaction=True + ) + raise Exception("Intentional failure") + except Exception: + pass + # Verify no message was sent + message = self.queue.read(self.test_queue) + self.assertIsNone(message, "No message 
expected in queue") + + def test_transaction_purge_queue(self): + """Test purging a queue within a transaction.""" + self.queue.send(self.test_queue, self.test_message) + try: + self.queue.purge(self.test_queue, perform_transaction=True) + raise Exception("Intentional failure") + except Exception: + pass + # Verify no messages were purged + message = self.queue.read(self.test_queue) + self.assertIsNotNone(message, "Message expected in queue") + class TestPGMQueueWithEnv(BaseTestPGMQueue): @classmethod From 2e7ba205a6f0976c8b16109f799eed7d34ea8358 Mon Sep 17 00:00:00 2001 From: Ali Tavallaie Date: Sun, 16 Jun 2024 01:46:35 +0330 Subject: [PATCH 03/18] add logger --- tembo-pgmq-python/tembo_pgmq_python/queue.py | 140 +++++++++++++------ tembo-pgmq-python/tests/test_integration.py | 34 +++-- 2 files changed, 117 insertions(+), 57 deletions(-) diff --git a/tembo-pgmq-python/tembo_pgmq_python/queue.py b/tembo-pgmq-python/tembo_pgmq_python/queue.py index cf3739f7..9e1f2fbf 100644 --- a/tembo-pgmq-python/tembo_pgmq_python/queue.py +++ b/tembo-pgmq-python/tembo_pgmq_python/queue.py @@ -1,10 +1,21 @@ -from dataclasses import dataclass, field +import functools +import logging +import os from datetime import datetime +from dataclasses import dataclass, field from typing import Optional, List, Callable, Union from psycopg.types.json import Jsonb from psycopg_pool import ConnectionPool -import functools -import os + +logger = logging.getLogger(__name__) +log_filename = datetime.now().strftime("pgmq_debug_%Y%m%d_%H%M%S.log") + +# Configure logging at the start of the script +logging.basicConfig( + filename=log_filename, + level=logging.DEBUG, + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", +) @dataclass @@ -37,13 +48,26 @@ def wrapper(self, *args, **kwargs): if perform_transaction: with self.pool.connection() as conn: try: + logger.debug(f"Transaction started with conn: {conn}") with conn.transaction(): - return func(self, *args, conn=conn, **kwargs) + result = 
func(self, *args, conn=conn, **kwargs) + logger.debug(f"Transaction completed with conn: {conn}") + return result except Exception as e: - conn.rollback() - raise e + logger.error( + f"Transaction failed with exception: {e}, rolling back." + ) + try: + conn.rollback() + logger.debug( + f"Transaction rolled back successfully with conn: {conn}" + ) + except Exception as rollback_exception: + logger.error(f"Rollback failed: {rollback_exception}") + raise else: with self.pool.connection() as conn: + logger.debug(f"Non-transactional execution with conn: {conn}") return func(self, *args, conn=conn, **kwargs) return wrapper @@ -76,20 +100,32 @@ def __post_init__(self) -> None: self.pool = ConnectionPool(conninfo, open=True, **self.kwargs) self._initialize_extensions() - def _initialize_extensions(self) -> None: - self._execute_query("create extension if not exists pgmq cascade;") + def _initialize_extensions(self, conn=None) -> None: + self._execute_query("create extension if not exists pgmq cascade;", conn=conn) def _execute_query( - self, query: str, params: Optional[Union[List, tuple]] = None + self, query: str, params: Optional[Union[List, tuple]] = None, conn=None ) -> None: - with self.pool.connection() as conn: + logger.debug( + f"Executing query: {query} with params: {params} using conn: {conn}" + ) + if conn: conn.execute(query, params) + else: + with self.pool.connection() as conn: + conn.execute(query, params) def _execute_query_with_result( - self, query: str, params: Optional[Union[List, tuple]] = None + self, query: str, params: Optional[Union[List, tuple]] = None, conn=None ): - with self.pool.connection() as conn: + logger.debug( + f"Executing query with result: {query} with params: {params} using conn: {conn}" + ) + if conn: return conn.execute(query, params).fetchall() + else: + with self.pool.connection() as conn: + return conn.execute(query, params).fetchall() @transaction def create_partitioned_queue( @@ -99,57 +135,51 @@ def create_partitioned_queue( 
retention_interval: int = 100000, conn=None, ) -> None: - """Create a new queue - - Note: Partitions are created pg_partman which must be configured in postgresql.conf - Set `pg_partman_bgw.interval` to set the interval for partition creation and deletion. - A value of 10 will create new/delete partitions every 10 seconds. This value should be tuned - according to the volume of messages being sent to the queue. - - Args: - queue: The name of the queue. - partition_interval: The number of messages per partition. Defaults to 10,000. - retention_interval: The number of messages to retain. Messages exceeding this number will be dropped. - Defaults to 100,000. - """ + """Create a new queue""" query = "select pgmq.create(%s, %s::text, %s::text);" params = [queue, partition_interval, retention_interval] - self._execute_query(query, params) + self._execute_query(query, params, conn=conn) @transaction def create_queue(self, queue: str, unlogged: bool = False, conn=None) -> None: """Create a new queue.""" + logger.debug(f"create_queue called with conn: {conn}") query = ( "select pgmq.create_unlogged(%s);" if unlogged else "select pgmq.create(%s);" ) - conn.execute(query, [queue]) # Use the provided connection + self._execute_query(query, [queue], conn=conn) - def validate_queue_name(self, queue_name: str) -> None: + def validate_queue_name(self, queue_name: str, conn=None) -> None: """Validate the length of a queue name.""" query = "select pgmq.validate_queue_name(%s);" - self._execute_query(query, [queue_name]) + self._execute_query(query, [queue_name], conn=conn) @transaction def drop_queue(self, queue: str, partitioned: bool = False, conn=None) -> bool: """Drop a queue.""" + logger.debug(f"drop_queue called with conn: {conn}") query = "select pgmq.drop_queue(%s, %s);" - result = self._execute_query_with_result(query, [queue, partitioned]) + result = self._execute_query_with_result(query, [queue, partitioned], conn=conn) return result[0][0] @transaction def 
list_queues(self, conn=None) -> List[str]: """List all queues.""" + logger.debug(f"list_queues called with conn: {conn}") query = "select queue_name from pgmq.list_queues();" - rows = self._execute_query_with_result(query) + rows = self._execute_query_with_result(query, conn=conn) return [row[0] for row in rows] @transaction def send(self, queue: str, message: dict, delay: int = 0, conn=None) -> int: """Send a message to a queue.""" + logger.debug(f"send called with conn: {conn}") query = "select * from pgmq.send(%s, %s, %s);" - result = self._execute_query_with_result(query, [queue, Jsonb(message), delay]) + result = self._execute_query_with_result( + query, [queue, Jsonb(message), delay], conn=conn + ) return result[0][0] @transaction @@ -157,9 +187,10 @@ def send_batch( self, queue: str, messages: List[dict], delay: int = 0, conn=None ) -> List[int]: """Send a batch of messages to a queue.""" + logger.debug(f"send_batch called with conn: {conn}") query = "select * from pgmq.send_batch(%s, %s, %s);" params = [queue, [Jsonb(message) for message in messages], delay] - result = self._execute_query_with_result(query, params) + result = self._execute_query_with_result(query, params, conn=conn) return [message[0] for message in result] @transaction @@ -167,8 +198,11 @@ def read( self, queue: str, vt: Optional[int] = None, conn=None ) -> Optional[Message]: """Read a message from a queue.""" + logger.debug(f"read called with conn: {conn}") query = "select * from pgmq.read(%s, %s, %s);" - rows = self._execute_query_with_result(query, [queue, vt or self.vt, 1]) + rows = self._execute_query_with_result( + query, [queue, vt or self.vt, 1], conn=conn + ) messages = [ Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows @@ -180,9 +214,10 @@ def read_batch( self, queue: str, vt: Optional[int] = None, batch_size=1, conn=None ) -> Optional[List[Message]]: """Read a batch of messages from a queue.""" + logger.debug(f"read_batch called with 
conn: {conn}") query = "select * from pgmq.read(%s, %s, %s);" rows = self._execute_query_with_result( - query, [queue, vt or self.vt, batch_size] + query, [queue, vt or self.vt, batch_size], conn=conn ) return [ Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) @@ -200,9 +235,10 @@ def read_with_poll( conn=None, ) -> Optional[List[Message]]: """Read messages from a queue with polling.""" + logger.debug(f"read_with_poll called with conn: {conn}") query = "select * from pgmq.read_with_poll(%s, %s, %s, %s, %s);" params = [queue, vt or self.vt, qty, max_poll_seconds, poll_interval_ms] - rows = self._execute_query_with_result(query, params) + rows = self._execute_query_with_result(query, params, conn=conn) return [ Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows @@ -211,8 +247,9 @@ def read_with_poll( @transaction def pop(self, queue: str, conn=None) -> Message: """Pop a message from a queue.""" + logger.debug(f"pop called with conn: {conn}") query = "select * from pgmq.pop(%s);" - rows = self._execute_query_with_result(query, [queue]) + rows = self._execute_query_with_result(query, [queue], conn=conn) messages = [ Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows @@ -222,43 +259,49 @@ def pop(self, queue: str, conn=None) -> Message: @transaction def delete(self, queue: str, msg_id: int, conn=None) -> bool: """Delete a message from a queue.""" + logger.debug(f"delete called with conn: {conn}") query = "select pgmq.delete(%s, %s);" - result = self._execute_query_with_result(query, [queue, msg_id]) + result = self._execute_query_with_result(query, [queue, msg_id], conn=conn) return result[0][0] @transaction def delete_batch(self, queue: str, msg_ids: List[int], conn=None) -> List[int]: """Delete multiple messages from a queue.""" + logger.debug(f"delete_batch called with conn: {conn}") query = "select * from pgmq.delete(%s, %s);" - result = 
self._execute_query_with_result(query, [queue, msg_ids]) + result = self._execute_query_with_result(query, [queue, msg_ids], conn=conn) return [x[0] for x in result] @transaction def archive(self, queue: str, msg_id: int, conn=None) -> bool: """Archive a message from a queue.""" + logger.debug(f"archive called with conn: {conn}") query = "select pgmq.archive(%s, %s);" - result = self._execute_query_with_result(query, [queue, msg_id]) + result = self._execute_query_with_result(query, [queue, msg_id], conn=conn) return result[0][0] @transaction def archive_batch(self, queue: str, msg_ids: List[int], conn=None) -> List[int]: """Archive multiple messages from a queue.""" + logger.debug(f"archive_batch called with conn: {conn}") query = "select * from pgmq.archive(%s, %s);" - result = self._execute_query_with_result(query, [queue, msg_ids]) + result = self._execute_query_with_result(query, [queue, msg_ids], conn=conn) return [x[0] for x in result] @transaction def purge(self, queue: str, conn=None) -> int: """Purge a queue.""" + logger.debug(f"purge called with conn: {conn}") query = "select pgmq.purge_queue(%s);" - result = self._execute_query_with_result(query, [queue]) + result = self._execute_query_with_result(query, [queue], conn=conn) return result[0][0] @transaction def metrics(self, queue: str, conn=None) -> QueueMetrics: """Get metrics for a specific queue.""" + logger.debug(f"metrics called with conn: {conn}") query = "SELECT * FROM pgmq.metrics(%s);" - result = self._execute_query_with_result(query, [queue])[0] + result = self._execute_query_with_result(query, [queue], conn=conn)[0] return QueueMetrics( queue_name=result[0], queue_length=result[1], @@ -271,8 +314,9 @@ def metrics(self, queue: str, conn=None) -> QueueMetrics: @transaction def metrics_all(self, conn=None) -> List[QueueMetrics]: """Get metrics for all queues.""" + logger.debug(f"metrics_all called with conn: {conn}") query = "SELECT * FROM pgmq.metrics_all();" - results = 
self._execute_query_with_result(query) + results = self._execute_query_with_result(query, conn=conn) return [ QueueMetrics( queue_name=row[0], @@ -288,8 +332,11 @@ def metrics_all(self, conn=None) -> List[QueueMetrics]: @transaction def set_vt(self, queue: str, msg_id: int, vt: int, conn=None) -> Message: """Set the visibility timeout for a specific message.""" + logger.debug(f"set_vt called with conn: {conn}") query = "select * from pgmq.set_vt(%s, %s, %s);" - result = self._execute_query_with_result(query, [queue, msg_id, vt])[0] + result = self._execute_query_with_result(query, [queue, msg_id, vt], conn=conn)[ + 0 + ] return Message( msg_id=result[0], read_ct=result[1], @@ -301,5 +348,6 @@ def set_vt(self, queue: str, msg_id: int, vt: int, conn=None) -> Message: @transaction def detach_archive(self, queue: str, conn=None) -> None: """Detach an archive from a queue.""" + logger.debug(f"detach_archive called with conn: {conn}") query = "select pgmq.detach_archive(%s);" - self._execute_query(query, [queue]) + self._execute_query(query, [queue], conn=conn) diff --git a/tembo-pgmq-python/tests/test_integration.py b/tembo-pgmq-python/tests/test_integration.py index 4b141029..d89afab2 100644 --- a/tembo-pgmq-python/tests/test_integration.py +++ b/tembo-pgmq-python/tests/test_integration.py @@ -2,7 +2,6 @@ import time from tembo_pgmq_python import Message, PGMQueue from datetime import datetime, timezone, timedelta -# Function to load environment variables class BaseTestPGMQueue(unittest.TestCase): @@ -200,9 +199,9 @@ def test_transaction_create_queue(self): raise Exception("Intentional failure") except Exception: pass - # Verify the queue was not created - queues = self.queue.list_queues() - self.assertNotIn("test_queue_txn", queues) + finally: + queues = self.queue.list_queues(perform_transaction=False) + self.assertNotIn("test_queue_txn", queues) def test_transaction_send_and_read_message(self): """Test sending and reading a message within a transaction.""" @@ 
-213,21 +212,34 @@ def test_transaction_send_and_read_message(self): raise Exception("Intentional failure") except Exception: pass - # Verify no message was sent - message = self.queue.read(self.test_queue) - self.assertIsNone(message, "No message expected in queue") + finally: + message = self.queue.read(self.test_queue, perform_transaction=False) + self.assertIsNone(message, "No message expected in queue") def test_transaction_purge_queue(self): """Test purging a queue within a transaction.""" - self.queue.send(self.test_queue, self.test_message) + self.queue.send(self.test_queue, self.test_message, perform_transaction=False) try: self.queue.purge(self.test_queue, perform_transaction=True) raise Exception("Intentional failure") except Exception: pass - # Verify no messages were purged - message = self.queue.read(self.test_queue) - self.assertIsNotNone(message, "Message expected in queue") + finally: + message = self.queue.read(self.test_queue, perform_transaction=False) + self.assertIsNotNone(message, "Message expected in queue") + + def test_transaction_rollback(self): + """Test rollback of a transaction.""" + try: + self.queue.send( + self.test_queue, self.test_message, perform_transaction=True + ) + raise Exception("Intentional failure to trigger rollback") + except Exception: + pass + finally: + message = self.queue.read(self.test_queue, perform_transaction=False) + self.assertIsNone(message, "No message expected in queue after rollback") class TestPGMQueueWithEnv(BaseTestPGMQueue): From 18ccefcf26e20a2fa155c34d7d5e64598391dbf2 Mon Sep 17 00:00:00 2001 From: Ali Tavallaie Date: Sun, 16 Jun 2024 01:52:09 +0330 Subject: [PATCH 04/18] chore: linting --- tembo-pgmq-python/tembo_pgmq_python/queue.py | 82 +++++--------------- 1 file changed, 19 insertions(+), 63 deletions(-) diff --git a/tembo-pgmq-python/tembo_pgmq_python/queue.py b/tembo-pgmq-python/tembo_pgmq_python/queue.py index 9e1f2fbf..7f4c8948 100644 --- a/tembo-pgmq-python/tembo_pgmq_python/queue.py +++ 
b/tembo-pgmq-python/tembo_pgmq_python/queue.py @@ -42,9 +42,7 @@ def transaction(func: Callable) -> Callable: @functools.wraps(func) def wrapper(self, *args, **kwargs): - perform_transaction = kwargs.pop( - "perform_transaction", self.perform_transaction - ) + perform_transaction = kwargs.pop("perform_transaction", self.perform_transaction) if perform_transaction: with self.pool.connection() as conn: try: @@ -54,14 +52,10 @@ def wrapper(self, *args, **kwargs): logger.debug(f"Transaction completed with conn: {conn}") return result except Exception as e: - logger.error( - f"Transaction failed with exception: {e}, rolling back." - ) + logger.error(f"Transaction failed with exception: {e}, rolling back.") try: conn.rollback() - logger.debug( - f"Transaction rolled back successfully with conn: {conn}" - ) + logger.debug(f"Transaction rolled back successfully with conn: {conn}") except Exception as rollback_exception: logger.error(f"Rollback failed: {rollback_exception}") raise @@ -103,24 +97,16 @@ def __post_init__(self) -> None: def _initialize_extensions(self, conn=None) -> None: self._execute_query("create extension if not exists pgmq cascade;", conn=conn) - def _execute_query( - self, query: str, params: Optional[Union[List, tuple]] = None, conn=None - ) -> None: - logger.debug( - f"Executing query: {query} with params: {params} using conn: {conn}" - ) + def _execute_query(self, query: str, params: Optional[Union[List, tuple]] = None, conn=None) -> None: + logger.debug(f"Executing query: {query} with params: {params} using conn: {conn}") if conn: conn.execute(query, params) else: with self.pool.connection() as conn: conn.execute(query, params) - def _execute_query_with_result( - self, query: str, params: Optional[Union[List, tuple]] = None, conn=None - ): - logger.debug( - f"Executing query with result: {query} with params: {params} using conn: {conn}" - ) + def _execute_query_with_result(self, query: str, params: Optional[Union[List, tuple]] = None, conn=None): + 
logger.debug(f"Executing query with result: {query} with params: {params} using conn: {conn}") if conn: return conn.execute(query, params).fetchall() else: @@ -144,11 +130,7 @@ def create_partitioned_queue( def create_queue(self, queue: str, unlogged: bool = False, conn=None) -> None: """Create a new queue.""" logger.debug(f"create_queue called with conn: {conn}") - query = ( - "select pgmq.create_unlogged(%s);" - if unlogged - else "select pgmq.create(%s);" - ) + query = "select pgmq.create_unlogged(%s);" if unlogged else "select pgmq.create(%s);" self._execute_query(query, [queue], conn=conn) def validate_queue_name(self, queue_name: str, conn=None) -> None: @@ -177,15 +159,11 @@ def send(self, queue: str, message: dict, delay: int = 0, conn=None) -> int: """Send a message to a queue.""" logger.debug(f"send called with conn: {conn}") query = "select * from pgmq.send(%s, %s, %s);" - result = self._execute_query_with_result( - query, [queue, Jsonb(message), delay], conn=conn - ) + result = self._execute_query_with_result(query, [queue, Jsonb(message), delay], conn=conn) return result[0][0] @transaction - def send_batch( - self, queue: str, messages: List[dict], delay: int = 0, conn=None - ) -> List[int]: + def send_batch(self, queue: str, messages: List[dict], delay: int = 0, conn=None) -> List[int]: """Send a batch of messages to a queue.""" logger.debug(f"send_batch called with conn: {conn}") query = "select * from pgmq.send_batch(%s, %s, %s);" @@ -194,35 +172,21 @@ def send_batch( return [message[0] for message in result] @transaction - def read( - self, queue: str, vt: Optional[int] = None, conn=None - ) -> Optional[Message]: + def read(self, queue: str, vt: Optional[int] = None, conn=None) -> Optional[Message]: """Read a message from a queue.""" logger.debug(f"read called with conn: {conn}") query = "select * from pgmq.read(%s, %s, %s);" - rows = self._execute_query_with_result( - query, [queue, vt or self.vt, 1], conn=conn - ) - messages = [ - 
Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) - for x in rows - ] + rows = self._execute_query_with_result(query, [queue, vt or self.vt, 1], conn=conn) + messages = [Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows] return messages[0] if messages else None @transaction - def read_batch( - self, queue: str, vt: Optional[int] = None, batch_size=1, conn=None - ) -> Optional[List[Message]]: + def read_batch(self, queue: str, vt: Optional[int] = None, batch_size=1, conn=None) -> Optional[List[Message]]: """Read a batch of messages from a queue.""" logger.debug(f"read_batch called with conn: {conn}") query = "select * from pgmq.read(%s, %s, %s);" - rows = self._execute_query_with_result( - query, [queue, vt or self.vt, batch_size], conn=conn - ) - return [ - Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) - for x in rows - ] + rows = self._execute_query_with_result(query, [queue, vt or self.vt, batch_size], conn=conn) + return [Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows] @transaction def read_with_poll( @@ -239,10 +203,7 @@ def read_with_poll( query = "select * from pgmq.read_with_poll(%s, %s, %s, %s, %s);" params = [queue, vt or self.vt, qty, max_poll_seconds, poll_interval_ms] rows = self._execute_query_with_result(query, params, conn=conn) - return [ - Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) - for x in rows - ] + return [Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows] @transaction def pop(self, queue: str, conn=None) -> Message: @@ -250,10 +211,7 @@ def pop(self, queue: str, conn=None) -> Message: logger.debug(f"pop called with conn: {conn}") query = "select * from pgmq.pop(%s);" rows = self._execute_query_with_result(query, [queue], conn=conn) - messages = [ - Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) - for x in 
rows - ] + messages = [Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows] return messages[0] @transaction @@ -334,9 +292,7 @@ def set_vt(self, queue: str, msg_id: int, vt: int, conn=None) -> Message: """Set the visibility timeout for a specific message.""" logger.debug(f"set_vt called with conn: {conn}") query = "select * from pgmq.set_vt(%s, %s, %s);" - result = self._execute_query_with_result(query, [queue, msg_id, vt], conn=conn)[ - 0 - ] + result = self._execute_query_with_result(query, [queue, msg_id, vt], conn=conn)[0] return Message( msg_id=result[0], read_ct=result[1], From 405dd12f55249809e1a9a6d7b9200441de13923d Mon Sep 17 00:00:00 2001 From: Ali Tavallaie Date: Sun, 16 Jun 2024 18:33:25 +0330 Subject: [PATCH 05/18] feat: successfull transaction operation --- tembo-pgmq-python/tembo_pgmq_python/queue.py | 100 +++++++++++++------ 1 file changed, 70 insertions(+), 30 deletions(-) diff --git a/tembo-pgmq-python/tembo_pgmq_python/queue.py b/tembo-pgmq-python/tembo_pgmq_python/queue.py index 7f4c8948..527c9744 100644 --- a/tembo-pgmq-python/tembo_pgmq_python/queue.py +++ b/tembo-pgmq-python/tembo_pgmq_python/queue.py @@ -42,23 +42,25 @@ def transaction(func: Callable) -> Callable: @functools.wraps(func) def wrapper(self, *args, **kwargs): - perform_transaction = kwargs.pop("perform_transaction", self.perform_transaction) + perform_transaction = kwargs.pop( + "perform_transaction", self.perform_transaction + ) if perform_transaction: with self.pool.connection() as conn: - try: - logger.debug(f"Transaction started with conn: {conn}") - with conn.transaction(): - result = func(self, *args, conn=conn, **kwargs) - logger.debug(f"Transaction completed with conn: {conn}") - return result - except Exception as e: - logger.error(f"Transaction failed with exception: {e}, rolling back.") + with conn.transaction() as txn: try: - conn.rollback() - logger.debug(f"Transaction rolled back successfully with conn: {conn}") - except 
Exception as rollback_exception: - logger.error(f"Rollback failed: {rollback_exception}") - raise + logger.debug(f"Transaction started with conn: {conn}") + result = func(self, *args, conn=conn, **kwargs) + txn.commit() + logger.debug(f"Transaction committed with conn: {conn}") + return result + except Exception as e: + logger.error( + f"Transaction failed with exception: {e}, rolling back." + ) + txn.rollback() + logger.debug(f"Transaction rolled back with conn: {conn}") + raise else: with self.pool.connection() as conn: logger.debug(f"Non-transactional execution with conn: {conn}") @@ -97,16 +99,24 @@ def __post_init__(self) -> None: def _initialize_extensions(self, conn=None) -> None: self._execute_query("create extension if not exists pgmq cascade;", conn=conn) - def _execute_query(self, query: str, params: Optional[Union[List, tuple]] = None, conn=None) -> None: - logger.debug(f"Executing query: {query} with params: {params} using conn: {conn}") + def _execute_query( + self, query: str, params: Optional[Union[List, tuple]] = None, conn=None + ) -> None: + logger.debug( + f"Executing query: {query} with params: {params} using conn: {conn}" + ) if conn: conn.execute(query, params) else: with self.pool.connection() as conn: conn.execute(query, params) - def _execute_query_with_result(self, query: str, params: Optional[Union[List, tuple]] = None, conn=None): - logger.debug(f"Executing query with result: {query} with params: {params} using conn: {conn}") + def _execute_query_with_result( + self, query: str, params: Optional[Union[List, tuple]] = None, conn=None + ): + logger.debug( + f"Executing query with result: {query} with params: {params} using conn: {conn}" + ) if conn: return conn.execute(query, params).fetchall() else: @@ -130,7 +140,11 @@ def create_partitioned_queue( def create_queue(self, queue: str, unlogged: bool = False, conn=None) -> None: """Create a new queue.""" logger.debug(f"create_queue called with conn: {conn}") - query = "select 
pgmq.create_unlogged(%s);" if unlogged else "select pgmq.create(%s);" + query = ( + "select pgmq.create_unlogged(%s);" + if unlogged + else "select pgmq.create(%s);" + ) self._execute_query(query, [queue], conn=conn) def validate_queue_name(self, queue_name: str, conn=None) -> None: @@ -159,11 +173,15 @@ def send(self, queue: str, message: dict, delay: int = 0, conn=None) -> int: """Send a message to a queue.""" logger.debug(f"send called with conn: {conn}") query = "select * from pgmq.send(%s, %s, %s);" - result = self._execute_query_with_result(query, [queue, Jsonb(message), delay], conn=conn) + result = self._execute_query_with_result( + query, [queue, Jsonb(message), delay], conn=conn + ) return result[0][0] @transaction - def send_batch(self, queue: str, messages: List[dict], delay: int = 0, conn=None) -> List[int]: + def send_batch( + self, queue: str, messages: List[dict], delay: int = 0, conn=None + ) -> List[int]: """Send a batch of messages to a queue.""" logger.debug(f"send_batch called with conn: {conn}") query = "select * from pgmq.send_batch(%s, %s, %s);" @@ -172,21 +190,35 @@ def send_batch(self, queue: str, messages: List[dict], delay: int = 0, conn=None return [message[0] for message in result] @transaction - def read(self, queue: str, vt: Optional[int] = None, conn=None) -> Optional[Message]: + def read( + self, queue: str, vt: Optional[int] = None, conn=None + ) -> Optional[Message]: """Read a message from a queue.""" logger.debug(f"read called with conn: {conn}") query = "select * from pgmq.read(%s, %s, %s);" - rows = self._execute_query_with_result(query, [queue, vt or self.vt, 1], conn=conn) - messages = [Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows] + rows = self._execute_query_with_result( + query, [queue, vt or self.vt, 1], conn=conn + ) + messages = [ + Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) + for x in rows + ] return messages[0] if messages else None 
@transaction - def read_batch(self, queue: str, vt: Optional[int] = None, batch_size=1, conn=None) -> Optional[List[Message]]: + def read_batch( + self, queue: str, vt: Optional[int] = None, batch_size=1, conn=None + ) -> Optional[List[Message]]: """Read a batch of messages from a queue.""" logger.debug(f"read_batch called with conn: {conn}") query = "select * from pgmq.read(%s, %s, %s);" - rows = self._execute_query_with_result(query, [queue, vt or self.vt, batch_size], conn=conn) - return [Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows] + rows = self._execute_query_with_result( + query, [queue, vt or self.vt, batch_size], conn=conn + ) + return [ + Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) + for x in rows + ] @transaction def read_with_poll( @@ -203,7 +235,10 @@ def read_with_poll( query = "select * from pgmq.read_with_poll(%s, %s, %s, %s, %s);" params = [queue, vt or self.vt, qty, max_poll_seconds, poll_interval_ms] rows = self._execute_query_with_result(query, params, conn=conn) - return [Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows] + return [ + Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) + for x in rows + ] @transaction def pop(self, queue: str, conn=None) -> Message: @@ -211,7 +246,10 @@ def pop(self, queue: str, conn=None) -> Message: logger.debug(f"pop called with conn: {conn}") query = "select * from pgmq.pop(%s);" rows = self._execute_query_with_result(query, [queue], conn=conn) - messages = [Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows] + messages = [ + Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) + for x in rows + ] return messages[0] @transaction @@ -292,7 +330,9 @@ def set_vt(self, queue: str, msg_id: int, vt: int, conn=None) -> Message: """Set the visibility timeout for a specific message.""" logger.debug(f"set_vt called 
with conn: {conn}") query = "select * from pgmq.set_vt(%s, %s, %s);" - result = self._execute_query_with_result(query, [queue, msg_id, vt], conn=conn)[0] + result = self._execute_query_with_result(query, [queue, msg_id, vt], conn=conn)[ + 0 + ] return Message( msg_id=result[0], read_ct=result[1], From fd15e6021f1b6109f2c04e1fe5ea83112e1943fc Mon Sep 17 00:00:00 2001 From: Ali Tavallaie Date: Sun, 16 Jun 2024 18:52:23 +0330 Subject: [PATCH 06/18] chore: linting and formatting --- tembo-pgmq-python/tembo_pgmq_python/queue.py | 78 +++++--------------- 1 file changed, 18 insertions(+), 60 deletions(-) diff --git a/tembo-pgmq-python/tembo_pgmq_python/queue.py b/tembo-pgmq-python/tembo_pgmq_python/queue.py index 527c9744..d7b470fc 100644 --- a/tembo-pgmq-python/tembo_pgmq_python/queue.py +++ b/tembo-pgmq-python/tembo_pgmq_python/queue.py @@ -42,9 +42,7 @@ def transaction(func: Callable) -> Callable: @functools.wraps(func) def wrapper(self, *args, **kwargs): - perform_transaction = kwargs.pop( - "perform_transaction", self.perform_transaction - ) + perform_transaction = kwargs.pop("perform_transaction", self.perform_transaction) if perform_transaction: with self.pool.connection() as conn: with conn.transaction() as txn: @@ -55,9 +53,7 @@ def wrapper(self, *args, **kwargs): logger.debug(f"Transaction committed with conn: {conn}") return result except Exception as e: - logger.error( - f"Transaction failed with exception: {e}, rolling back." 
- ) + logger.error(f"Transaction failed with exception: {e}, rolling back.") txn.rollback() logger.debug(f"Transaction rolled back with conn: {conn}") raise @@ -99,24 +95,16 @@ def __post_init__(self) -> None: def _initialize_extensions(self, conn=None) -> None: self._execute_query("create extension if not exists pgmq cascade;", conn=conn) - def _execute_query( - self, query: str, params: Optional[Union[List, tuple]] = None, conn=None - ) -> None: - logger.debug( - f"Executing query: {query} with params: {params} using conn: {conn}" - ) + def _execute_query(self, query: str, params: Optional[Union[List, tuple]] = None, conn=None) -> None: + logger.debug(f"Executing query: {query} with params: {params} using conn: {conn}") if conn: conn.execute(query, params) else: with self.pool.connection() as conn: conn.execute(query, params) - def _execute_query_with_result( - self, query: str, params: Optional[Union[List, tuple]] = None, conn=None - ): - logger.debug( - f"Executing query with result: {query} with params: {params} using conn: {conn}" - ) + def _execute_query_with_result(self, query: str, params: Optional[Union[List, tuple]] = None, conn=None): + logger.debug(f"Executing query with result: {query} with params: {params} using conn: {conn}") if conn: return conn.execute(query, params).fetchall() else: @@ -140,11 +128,7 @@ def create_partitioned_queue( def create_queue(self, queue: str, unlogged: bool = False, conn=None) -> None: """Create a new queue.""" logger.debug(f"create_queue called with conn: {conn}") - query = ( - "select pgmq.create_unlogged(%s);" - if unlogged - else "select pgmq.create(%s);" - ) + query = "select pgmq.create_unlogged(%s);" if unlogged else "select pgmq.create(%s);" self._execute_query(query, [queue], conn=conn) def validate_queue_name(self, queue_name: str, conn=None) -> None: @@ -173,15 +157,11 @@ def send(self, queue: str, message: dict, delay: int = 0, conn=None) -> int: """Send a message to a queue.""" logger.debug(f"send called with 
conn: {conn}") query = "select * from pgmq.send(%s, %s, %s);" - result = self._execute_query_with_result( - query, [queue, Jsonb(message), delay], conn=conn - ) + result = self._execute_query_with_result(query, [queue, Jsonb(message), delay], conn=conn) return result[0][0] @transaction - def send_batch( - self, queue: str, messages: List[dict], delay: int = 0, conn=None - ) -> List[int]: + def send_batch(self, queue: str, messages: List[dict], delay: int = 0, conn=None) -> List[int]: """Send a batch of messages to a queue.""" logger.debug(f"send_batch called with conn: {conn}") query = "select * from pgmq.send_batch(%s, %s, %s);" @@ -190,35 +170,21 @@ def send_batch( return [message[0] for message in result] @transaction - def read( - self, queue: str, vt: Optional[int] = None, conn=None - ) -> Optional[Message]: + def read(self, queue: str, vt: Optional[int] = None, conn=None) -> Optional[Message]: """Read a message from a queue.""" logger.debug(f"read called with conn: {conn}") query = "select * from pgmq.read(%s, %s, %s);" - rows = self._execute_query_with_result( - query, [queue, vt or self.vt, 1], conn=conn - ) - messages = [ - Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) - for x in rows - ] + rows = self._execute_query_with_result(query, [queue, vt or self.vt, 1], conn=conn) + messages = [Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows] return messages[0] if messages else None @transaction - def read_batch( - self, queue: str, vt: Optional[int] = None, batch_size=1, conn=None - ) -> Optional[List[Message]]: + def read_batch(self, queue: str, vt: Optional[int] = None, batch_size=1, conn=None) -> Optional[List[Message]]: """Read a batch of messages from a queue.""" logger.debug(f"read_batch called with conn: {conn}") query = "select * from pgmq.read(%s, %s, %s);" - rows = self._execute_query_with_result( - query, [queue, vt or self.vt, batch_size], conn=conn - ) - return [ - 
Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) - for x in rows - ] + rows = self._execute_query_with_result(query, [queue, vt or self.vt, batch_size], conn=conn) + return [Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows] @transaction def read_with_poll( @@ -235,10 +201,7 @@ def read_with_poll( query = "select * from pgmq.read_with_poll(%s, %s, %s, %s, %s);" params = [queue, vt or self.vt, qty, max_poll_seconds, poll_interval_ms] rows = self._execute_query_with_result(query, params, conn=conn) - return [ - Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) - for x in rows - ] + return [Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows] @transaction def pop(self, queue: str, conn=None) -> Message: @@ -246,10 +209,7 @@ def pop(self, queue: str, conn=None) -> Message: logger.debug(f"pop called with conn: {conn}") query = "select * from pgmq.pop(%s);" rows = self._execute_query_with_result(query, [queue], conn=conn) - messages = [ - Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) - for x in rows - ] + messages = [Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows] return messages[0] @transaction @@ -330,9 +290,7 @@ def set_vt(self, queue: str, msg_id: int, vt: int, conn=None) -> Message: """Set the visibility timeout for a specific message.""" logger.debug(f"set_vt called with conn: {conn}") query = "select * from pgmq.set_vt(%s, %s, %s);" - result = self._execute_query_with_result(query, [queue, msg_id, vt], conn=conn)[ - 0 - ] + result = self._execute_query_with_result(query, [queue, msg_id, vt], conn=conn)[0] return Message( msg_id=result[0], read_ct=result[1], From bae10b4f1a9e623bb2b4ed093d5415d29f949d68 Mon Sep 17 00:00:00 2001 From: Ali Tavallaie Date: Sun, 16 Jun 2024 19:28:03 +0330 Subject: [PATCH 07/18] feat: adding better logger and optional for verbose --- 
tembo-pgmq-python/tembo_pgmq_python/queue.py | 122 ++++++++++--------- tembo-pgmq-python/tests/test_integration.py | 1 + 2 files changed, 65 insertions(+), 58 deletions(-) diff --git a/tembo-pgmq-python/tembo_pgmq_python/queue.py b/tembo-pgmq-python/tembo_pgmq_python/queue.py index d7b470fc..67671fe5 100644 --- a/tembo-pgmq-python/tembo_pgmq_python/queue.py +++ b/tembo-pgmq-python/tembo_pgmq_python/queue.py @@ -7,16 +7,6 @@ from psycopg.types.json import Jsonb from psycopg_pool import ConnectionPool -logger = logging.getLogger(__name__) -log_filename = datetime.now().strftime("pgmq_debug_%Y%m%d_%H%M%S.log") - -# Configure logging at the start of the script -logging.basicConfig( - filename=log_filename, - level=logging.DEBUG, - format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", -) - @dataclass class Message: @@ -37,34 +27,6 @@ class QueueMetrics: scrape_time: datetime -def transaction(func: Callable) -> Callable: - """Decorator to run a method within a database transaction.""" - - @functools.wraps(func) - def wrapper(self, *args, **kwargs): - perform_transaction = kwargs.pop("perform_transaction", self.perform_transaction) - if perform_transaction: - with self.pool.connection() as conn: - with conn.transaction() as txn: - try: - logger.debug(f"Transaction started with conn: {conn}") - result = func(self, *args, conn=conn, **kwargs) - txn.commit() - logger.debug(f"Transaction committed with conn: {conn}") - return result - except Exception as e: - logger.error(f"Transaction failed with exception: {e}, rolling back.") - txn.rollback() - logger.debug(f"Transaction rolled back with conn: {conn}") - raise - else: - with self.pool.connection() as conn: - logger.debug(f"Non-transactional execution with conn: {conn}") - return func(self, *args, conn=conn, **kwargs) - - return wrapper - - @dataclass class PGMQueue: """Base class for interacting with a queue""" @@ -78,8 +40,11 @@ class PGMQueue: vt: int = 30 pool_size: int = 10 kwargs: dict = 
field(default_factory=dict) + verbose: bool = False + log_filename: Optional[str] = None pool: ConnectionPool = field(init=False) perform_transaction: bool = False + logger: logging.Logger = field(init=False) def __post_init__(self) -> None: conninfo = f""" @@ -90,13 +55,26 @@ def __post_init__(self) -> None: password={self.password} """ self.pool = ConnectionPool(conninfo, open=True, **self.kwargs) + self._initialize_logging() self._initialize_extensions() + def _initialize_logging(self) -> None: + if self.verbose: + log_filename = self.log_filename or datetime.now().strftime("pgmq_debug_%Y%m%d_%H%M%S.log") + logging.basicConfig( + filename=os.path.join(os.getcwd(), log_filename), + level=logging.DEBUG, + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + else: + logging.basicConfig(level=logging.WARNING) + self.logger = logging.getLogger(__name__) + def _initialize_extensions(self, conn=None) -> None: self._execute_query("create extension if not exists pgmq cascade;", conn=conn) def _execute_query(self, query: str, params: Optional[Union[List, tuple]] = None, conn=None) -> None: - logger.debug(f"Executing query: {query} with params: {params} using conn: {conn}") + self.logger.debug(f"Executing query: {query} with params: {params} using conn: {conn}") if conn: conn.execute(query, params) else: @@ -104,13 +82,41 @@ def _execute_query(self, query: str, params: Optional[Union[List, tuple]] = None conn.execute(query, params) def _execute_query_with_result(self, query: str, params: Optional[Union[List, tuple]] = None, conn=None): - logger.debug(f"Executing query with result: {query} with params: {params} using conn: {conn}") + self.logger.debug(f"Executing query with result: {query} with params: {params} using conn: {conn}") if conn: return conn.execute(query, params).fetchall() else: with self.pool.connection() as conn: return conn.execute(query, params).fetchall() + def transaction(func: Callable) -> Callable: + """Decorator to run a method within a 
database transaction.""" + + @functools.wraps(func) + def wrapper(self, *args, **kwargs): + perform_transaction = kwargs.pop("perform_transaction", self.perform_transaction) + if perform_transaction: + with self.pool.connection() as conn: + txn = conn.transaction() + txn.begin() + self.logger.debug(f"Transaction started with conn: {conn}") + try: + result = func(self, *args, conn=conn, **kwargs) + txn.commit() + self.logger.debug(f"Transaction committed with conn: {conn}") + return result + except Exception as e: + txn.rollback() + self.logger.error(f"Transaction failed with exception: {e}, rolling back.") + self.logger.debug(f"Transaction rolled back with conn: {conn}") + raise + else: + with self.pool.connection() as conn: + self.logger.debug(f"Non-transactional execution with conn: {conn}") + return func(self, *args, conn=conn, **kwargs) + + return wrapper + @transaction def create_partitioned_queue( self, @@ -127,7 +133,7 @@ def create_partitioned_queue( @transaction def create_queue(self, queue: str, unlogged: bool = False, conn=None) -> None: """Create a new queue.""" - logger.debug(f"create_queue called with conn: {conn}") + self.logger.debug(f"create_queue called with conn: {conn}") query = "select pgmq.create_unlogged(%s);" if unlogged else "select pgmq.create(%s);" self._execute_query(query, [queue], conn=conn) @@ -139,7 +145,7 @@ def validate_queue_name(self, queue_name: str, conn=None) -> None: @transaction def drop_queue(self, queue: str, partitioned: bool = False, conn=None) -> bool: """Drop a queue.""" - logger.debug(f"drop_queue called with conn: {conn}") + self.logger.debug(f"drop_queue called with conn: {conn}") query = "select pgmq.drop_queue(%s, %s);" result = self._execute_query_with_result(query, [queue, partitioned], conn=conn) return result[0][0] @@ -147,7 +153,7 @@ def drop_queue(self, queue: str, partitioned: bool = False, conn=None) -> bool: @transaction def list_queues(self, conn=None) -> List[str]: """List all queues.""" - 
logger.debug(f"list_queues called with conn: {conn}") + self.logger.debug(f"list_queues called with conn: {conn}") query = "select queue_name from pgmq.list_queues();" rows = self._execute_query_with_result(query, conn=conn) return [row[0] for row in rows] @@ -155,7 +161,7 @@ def list_queues(self, conn=None) -> List[str]: @transaction def send(self, queue: str, message: dict, delay: int = 0, conn=None) -> int: """Send a message to a queue.""" - logger.debug(f"send called with conn: {conn}") + self.logger.debug(f"send called with conn: {conn}") query = "select * from pgmq.send(%s, %s, %s);" result = self._execute_query_with_result(query, [queue, Jsonb(message), delay], conn=conn) return result[0][0] @@ -163,7 +169,7 @@ def send(self, queue: str, message: dict, delay: int = 0, conn=None) -> int: @transaction def send_batch(self, queue: str, messages: List[dict], delay: int = 0, conn=None) -> List[int]: """Send a batch of messages to a queue.""" - logger.debug(f"send_batch called with conn: {conn}") + self.logger.debug(f"send_batch called with conn: {conn}") query = "select * from pgmq.send_batch(%s, %s, %s);" params = [queue, [Jsonb(message) for message in messages], delay] result = self._execute_query_with_result(query, params, conn=conn) @@ -172,7 +178,7 @@ def send_batch(self, queue: str, messages: List[dict], delay: int = 0, conn=None @transaction def read(self, queue: str, vt: Optional[int] = None, conn=None) -> Optional[Message]: """Read a message from a queue.""" - logger.debug(f"read called with conn: {conn}") + self.logger.debug(f"read called with conn: {conn}") query = "select * from pgmq.read(%s, %s, %s);" rows = self._execute_query_with_result(query, [queue, vt or self.vt, 1], conn=conn) messages = [Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows] @@ -181,7 +187,7 @@ def read(self, queue: str, vt: Optional[int] = None, conn=None) -> Optional[Mess @transaction def read_batch(self, queue: str, vt: Optional[int] = 
None, batch_size=1, conn=None) -> Optional[List[Message]]: """Read a batch of messages from a queue.""" - logger.debug(f"read_batch called with conn: {conn}") + self.logger.debug(f"read_batch called with conn: {conn}") query = "select * from pgmq.read(%s, %s, %s);" rows = self._execute_query_with_result(query, [queue, vt or self.vt, batch_size], conn=conn) return [Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows] @@ -197,7 +203,7 @@ def read_with_poll( conn=None, ) -> Optional[List[Message]]: """Read messages from a queue with polling.""" - logger.debug(f"read_with_poll called with conn: {conn}") + self.logger.debug(f"read_with_poll called with conn: {conn}") query = "select * from pgmq.read_with_poll(%s, %s, %s, %s, %s);" params = [queue, vt or self.vt, qty, max_poll_seconds, poll_interval_ms] rows = self._execute_query_with_result(query, params, conn=conn) @@ -206,7 +212,7 @@ def read_with_poll( @transaction def pop(self, queue: str, conn=None) -> Message: """Pop a message from a queue.""" - logger.debug(f"pop called with conn: {conn}") + self.logger.debug(f"pop called with conn: {conn}") query = "select * from pgmq.pop(%s);" rows = self._execute_query_with_result(query, [queue], conn=conn) messages = [Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows] @@ -215,7 +221,7 @@ def pop(self, queue: str, conn=None) -> Message: @transaction def delete(self, queue: str, msg_id: int, conn=None) -> bool: """Delete a message from a queue.""" - logger.debug(f"delete called with conn: {conn}") + self.logger.debug(f"delete called with conn: {conn}") query = "select pgmq.delete(%s, %s);" result = self._execute_query_with_result(query, [queue, msg_id], conn=conn) return result[0][0] @@ -223,7 +229,7 @@ def delete(self, queue: str, msg_id: int, conn=None) -> bool: @transaction def delete_batch(self, queue: str, msg_ids: List[int], conn=None) -> List[int]: """Delete multiple messages from a queue.""" - 
logger.debug(f"delete_batch called with conn: {conn}") + self.logger.debug(f"delete_batch called with conn: {conn}") query = "select * from pgmq.delete(%s, %s);" result = self._execute_query_with_result(query, [queue, msg_ids], conn=conn) return [x[0] for x in result] @@ -231,7 +237,7 @@ def delete_batch(self, queue: str, msg_ids: List[int], conn=None) -> List[int]: @transaction def archive(self, queue: str, msg_id: int, conn=None) -> bool: """Archive a message from a queue.""" - logger.debug(f"archive called with conn: {conn}") + self.logger.debug(f"archive called with conn: {conn}") query = "select pgmq.archive(%s, %s);" result = self._execute_query_with_result(query, [queue, msg_id], conn=conn) return result[0][0] @@ -239,7 +245,7 @@ def archive(self, queue: str, msg_id: int, conn=None) -> bool: @transaction def archive_batch(self, queue: str, msg_ids: List[int], conn=None) -> List[int]: """Archive multiple messages from a queue.""" - logger.debug(f"archive_batch called with conn: {conn}") + self.logger.debug(f"archive_batch called with conn: {conn}") query = "select * from pgmq.archive(%s, %s);" result = self._execute_query_with_result(query, [queue, msg_ids], conn=conn) return [x[0] for x in result] @@ -247,7 +253,7 @@ def archive_batch(self, queue: str, msg_ids: List[int], conn=None) -> List[int]: @transaction def purge(self, queue: str, conn=None) -> int: """Purge a queue.""" - logger.debug(f"purge called with conn: {conn}") + self.logger.debug(f"purge called with conn: {conn}") query = "select pgmq.purge_queue(%s);" result = self._execute_query_with_result(query, [queue], conn=conn) return result[0][0] @@ -255,7 +261,7 @@ def purge(self, queue: str, conn=None) -> int: @transaction def metrics(self, queue: str, conn=None) -> QueueMetrics: """Get metrics for a specific queue.""" - logger.debug(f"metrics called with conn: {conn}") + self.logger.debug(f"metrics called with conn: {conn}") query = "SELECT * FROM pgmq.metrics(%s);" result = 
self._execute_query_with_result(query, [queue], conn=conn)[0] return QueueMetrics( @@ -270,7 +276,7 @@ def metrics(self, queue: str, conn=None) -> QueueMetrics: @transaction def metrics_all(self, conn=None) -> List[QueueMetrics]: """Get metrics for all queues.""" - logger.debug(f"metrics_all called with conn: {conn}") + self.logger.debug(f"metrics_all called with conn: {conn}") query = "SELECT * FROM pgmq.metrics_all();" results = self._execute_query_with_result(query, conn=conn) return [ @@ -288,7 +294,7 @@ def metrics_all(self, conn=None) -> List[QueueMetrics]: @transaction def set_vt(self, queue: str, msg_id: int, vt: int, conn=None) -> Message: """Set the visibility timeout for a specific message.""" - logger.debug(f"set_vt called with conn: {conn}") + self.logger.debug(f"set_vt called with conn: {conn}") query = "select * from pgmq.set_vt(%s, %s, %s);" result = self._execute_query_with_result(query, [queue, msg_id, vt], conn=conn)[0] return Message( @@ -302,6 +308,6 @@ def set_vt(self, queue: str, msg_id: int, vt: int, conn=None) -> Message: @transaction def detach_archive(self, queue: str, conn=None) -> None: """Detach an archive from a queue.""" - logger.debug(f"detach_archive called with conn: {conn}") + self.logger.debug(f"detach_archive called with conn: {conn}") query = "select pgmq.detach_archive(%s);" self._execute_query(query, [queue], conn=conn) diff --git a/tembo-pgmq-python/tests/test_integration.py b/tembo-pgmq-python/tests/test_integration.py index d89afab2..f4b0a66d 100644 --- a/tembo-pgmq-python/tests/test_integration.py +++ b/tembo-pgmq-python/tests/test_integration.py @@ -14,6 +14,7 @@ def setUpClass(cls): username="postgres", password="postgres", database="postgres", + verbose=False, ) # Test database connection first From 2557382dec55847c974ea5573720bce160a9f441 Mon Sep 17 00:00:00 2001 From: Ali Tavallaie Date: Sun, 16 Jun 2024 19:34:47 +0330 Subject: [PATCH 08/18] feat: update readme for transaction --- tembo-pgmq-python/README.md | 48 
+++++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/tembo-pgmq-python/README.md b/tembo-pgmq-python/README.md index 41bd3435..7e59b14e 100644 --- a/tembo-pgmq-python/README.md +++ b/tembo-pgmq-python/README.md @@ -217,5 +217,53 @@ for metrics in all_metrics: print(f"Scrape time: {metrics.scrape_time}") ``` +### Using Transactions +#### Class-level Transactions +You can enable transactions at the class level by setting the `perform_transaction` attribute to `True`. + +```python +queue.perform_transaction = True +``` + +#### Function-level Transactions + +You can also enable transactions at the function level by passing `perform_transaction=True` to the method. + +```python +queue.send("my_queue", {"hello": "world"}, perform_transaction=True) +``` +or +```python +queue = PGMQueue(perform_transaction=True) +``` + +### Verbose Logging + +Enable verbose logging by setting the `verbose` attribute to `True` when initializing the `PGMQueue` object. + +```python +queue = PGMQueue( + host="0.0.0.0", + port="5432", + username="postgres", + password="postgres", + database="postgres", + verbose=True +) +``` + +Optionally, you can specify a custom log filename. 
+ +```python +queue = PGMQueue( + host="0.0.0.0", + port="5432", + username="postgres", + password="postgres", + database="postgres", + verbose=True, + log_filename="my_custom_log.log" +) +``` From 9d37d69802e757b64193cec5f73d4c6a8e4c8126 Mon Sep 17 00:00:00 2001 From: Ali Tavallaie Date: Sun, 15 Sep 2024 22:10:37 +0330 Subject: [PATCH 09/18] feat: support for transaction: - adding support for logging in sync and async functions - adding support for transaction in sync and asyns operations - adding uint tests - separated module for transaction decorators to avoid conflicts in unit tests - updating readme --- tembo-pgmq-python/README.md | 672 ++++++++++-------- .../tembo_pgmq_python/__init__.py | 3 +- .../tembo_pgmq_python/async_queue.py | 500 ++++++++++--- .../tembo_pgmq_python/decorators.py | 84 +++ tembo-pgmq-python/tembo_pgmq_python/queue.py | 110 +-- .../tests/test_async_integration.py | 56 +- tembo-pgmq-python/tests/test_integration.py | 54 +- 7 files changed, 957 insertions(+), 522 deletions(-) create mode 100644 tembo-pgmq-python/tembo_pgmq_python/decorators.py diff --git a/tembo-pgmq-python/README.md b/tembo-pgmq-python/README.md index 36827154..102324aa 100644 --- a/tembo-pgmq-python/README.md +++ b/tembo-pgmq-python/README.md @@ -1,290 +1,382 @@ -# Tembo's Python Client for PGMQ - -## Installation - -Install with `pip` from pypi.org: - -```bash -pip install tembo-pgmq-python -``` - -In order to use async version install with the optional dependecies: - -``` bash -pip install tembo-pgmq-python[async] -``` - - -Dependencies: - -Postgres running the [Tembo PGMQ extension](https://github.com/tembo-io/tembo/tree/main/pgmq). 
- -## Usage - -### Start a Postgres Instance with the Tembo extension installed - -```bash -docker run -d --name postgres -e POSTGRES_PASSWORD=postgres -p 5432:5432 quay.io/tembo/pg16-pgmq:latest -``` - -### Using Environment Variables - -Set environment variables: - -```bash -export PG_HOST=127.0.0.1 -export PG_PORT=5432 -export PG_USERNAME=postgres -export PG_PASSWORD=postgres -export PG_DATABASE=test_db -``` - -Initialize a connection to Postgres using environment variables: - -```python -from tembo_pgmq_python import PGMQueue, Message - -queue = PGMQueue() -``` - -### Note on the async version - -Initialization for the async version requires an explicit call of the initializer: - -``` bash -from tembo_pgmq_python.async_queue import PGMQueue - -async def main(): - queue = PGMQueue() - await queue.init() -``` - -Then, the interface is exactly the same as the sync version. - -### Initialize a connection to Postgres without environment variables - -```python -from tembo_pgmq_python import PGMQueue, Message - -queue = PGMQueue( - host="0.0.0.0", - port="5432", - username="postgres", - password="postgres", - database="postgres" -) -``` - -### Create a queue - -```python -queue.create_queue("my_queue") -``` - -### or a partitioned queue - -```python -queue.create_partitioned_queue("my_partitioned_queue", partition_interval=10000) -``` -### List all queues - -```python -queues = queue.list_queues() -for q in queues: - print(f"Queue name: {q}") -``` - -### Send a message - -```python -msg_id: int = queue.send("my_queue", {"hello": "world"}) -``` - -### Send a batch of messages - -```python -msg_ids: list[int] = queue.send_batch("my_queue", [{"hello": "world"}, {"foo": "bar"}]) -``` - -### Read a message, set it invisible for 30 seconds - -```python -read_message: Message = queue.read("my_queue", vt=30) -print(read_message) -``` - -### Read a batch of messages - -```python -read_messages: list[Message] = queue.read_batch("my_queue", vt=30, batch_size=5) -for message in 
read_messages: - print(message) -``` - -### Read messages with polling - -The `read_with_poll` method allows you to repeatedly check for messages in the queue until either a message is found or the specified polling duration is exceeded. This can be useful in scenarios where you want to wait for new messages to arrive without continuously querying the queue in a tight loop. - -In the following example, the method will check for up to 5 messages in the queue `my_queue`, making the messages invisible for 30 seconds (`vt`), and will poll for a maximum of 5 seconds (`max_poll_seconds`) with intervals of 100 milliseconds (`poll_interval_ms`) between checks. - -```python -read_messages: list[Message] = queue.read_with_poll("my_queue", vt=30, qty=5, max_poll_seconds=5, poll_interval_ms=100) -for message in read_messages: - print(message) -``` - -This method will continue polling until it either finds the specified number of messages (`qty`) or the `max_poll_seconds` duration is reached. The `poll_interval_ms` parameter controls the interval between successive polls, allowing you to avoid hammering the database with continuous queries. - -### Archive the message after we're done with it. 
Archived messages are moved to an archive table - -```python -archived: bool = queue.archive("my_queue", read_message.msg_id) -``` - -### Archive a batch of messages - -```python -archived_ids: list[int] = queue.archive_batch("my_queue", [msg_id1, msg_id2]) -``` - -### Delete a message completely - -```python -read_message: Message = queue.read("my_queue") -deleted: bool = queue.delete("my_queue", read_message.msg_id) -``` - -### Delete a batch of messages - -```python -deleted_ids: list[int] = queue.delete_batch("my_queue", [msg_id1, msg_id2]) -``` - -### Set the visibility timeout (VT) for a specific message - -```python -updated_message: Message = queue.set_vt("my_queue", msg_id, 60) -print(updated_message) -``` - -### Pop a message, deleting it and reading it in one transaction - -```python -popped_message: Message = queue.pop("my_queue") -print(popped_message) -``` - -### Purge all messages from a queue - -```python -purged_count: int = queue.purge("my_queue") -print(f"Purged {purged_count} messages from the queue.") -``` - -### Detach an archive from a queue - -```python -queue.detach_archive("my_queue") -``` - -### Drop a queue - -```python -dropped: bool = queue.drop_queue("my_queue") -print(f"Queue dropped: {dropped}") -``` - -### Validate the length of a queue name - -```python -queue.validate_queue_name("my_queue") -``` - -### Get queue metrics - -The `metrics` method retrieves various statistics for a specific queue, such as the queue length, the age of the newest and oldest messages, the total number of messages, and the time of the metrics scrape. 
- -```python -metrics = queue.metrics("my_queue") -print(f"Metrics: {metrics}") -``` - -### Access individual metrics - -You can access individual metrics directly from the `metrics` method's return value: - -```python -metrics = queue.metrics("my_queue") -print(f"Queue name: {metrics.queue_name}") -print(f"Queue length: {metrics.queue_length}") -print(f"Newest message age (seconds): {metrics.newest_msg_age_sec}") -print(f"Oldest message age (seconds): {metrics.oldest_msg_age_sec}") -print(f"Total messages: {metrics.total_messages}") -print(f"Scrape time: {metrics.scrape_time}") -``` - -### Get metrics for all queues - -The `metrics_all` method retrieves metrics for all queues, allowing you to iterate through each queue's metrics. - -```python -all_metrics = queue.metrics_all() -for metrics in all_metrics: - print(f"Queue name: {metrics.queue_name}") - print(f"Queue length: {metrics.queue_length}") - print(f"Newest message age (seconds): {metrics.newest_msg_age_sec}") - print(f"Oldest message age (seconds): {metrics.oldest_msg_age_sec}") - print(f"Total messages: {metrics.total_messages}") - print(f"Scrape time: {metrics.scrape_time}") -``` - -### Using Transactions - -#### Class-level Transactions - -You can enable transactions at the class level by setting the `perform_transaction` attribute to `True`. - -```python -queue.perform_transaction = True -``` - -#### Function-level Transactions - -You can also enable transactions at the function level by passing `perform_transaction=True` to the method. - -```python -queue.send("my_queue", {"hello": "world"}, perform_transaction=True) -``` -or -```python -queue = PGMQueue(perform_transaction=True) -``` - -### Verbose Logging - -Enable verbose logging by setting the `verbose` attribute to `True` when initializing the `PGMQueue` object. 
- -```python -queue = PGMQueue( - host="0.0.0.0", - port="5432", - username="postgres", - password="postgres", - database="postgres", - verbose=True -) -``` - -Optionally, you can specify a custom log filename. - -```python -queue = PGMQueue( - host="0.0.0.0", - port="5432", - username="postgres", - password="postgres", - database="postgres", - verbose=True, - log_filename="my_custom_log.log" -) -``` + # Tembo's Python Client for PGMQ + + ## Installation + + Install with `pip` from pypi.org: + + ```bash + pip install tembo-pgmq-python + ``` + + To use the async version, install with the optional dependencies: + + ```bash + pip install tembo-pgmq-python[async] + ``` + + Dependencies: + + - Postgres running the [Tembo PGMQ extension](https://github.com/tembo-io/tembo/tree/main/pgmq). + + ## Usage + + ### Start a Postgres Instance with the Tembo extension installed + + ```bash + docker run -d --name postgres -e POSTGRES_PASSWORD=postgres -p 5432:5432 quay.io/tembo/pg16-pgmq:latest + ``` + + ### Using Environment Variables + + Set environment variables: + + ```bash + export PG_HOST=127.0.0.1 + export PG_PORT=5432 + export PG_USERNAME=postgres + export PG_PASSWORD=postgres + export PG_DATABASE=test_db + ``` + + Initialize a connection to Postgres using environment variables: + + ```python + from tembo_pgmq_python import PGMQueue, Message + + queue = PGMQueue() + ``` + + ### Note on the async version + + Initialization for the async version requires an explicit call of the initializer: + + ```python + from tembo_pgmq_python.async_queue import PGMQueue + + async def main(): + queue = PGMQueue() + await queue.init() + ``` + + Then, the interface is exactly the same as the sync version. 
+ + ### Initialize a connection to Postgres without environment variables + + ```python + from tembo_pgmq_python import PGMQueue, Message + + queue = PGMQueue( + host="0.0.0.0", + port="5432", + username="postgres", + password="postgres", + database="postgres" + ) + ``` + + ### Create a queue + + ```python + queue.create_queue("my_queue") + ``` + + ### Or create a partitioned queue + + ```python + queue.create_partitioned_queue("my_partitioned_queue", partition_interval=10000) + ``` + + ### List all queues + + ```python + queues = queue.list_queues() + for q in queues: + print(f"Queue name: {q}") + ``` + + ### Send a message + + ```python + msg_id: int = queue.send("my_queue", {"hello": "world"}) + ``` + + ### Send a batch of messages + + ```python + msg_ids: list[int] = queue.send_batch("my_queue", [{"hello": "world"}, {"foo": "bar"}]) + ``` + + ### Read a message, set it invisible for 30 seconds + + ```python + read_message: Message = queue.read("my_queue", vt=30) + print(read_message) + ``` + + ### Read a batch of messages + + ```python + read_messages: list[Message] = queue.read_batch("my_queue", vt=30, batch_size=5) + for message in read_messages: + print(message) + ``` + + ### Read messages with polling + + The `read_with_poll` method allows you to repeatedly check for messages in the queue until either a message is found or the specified polling duration is exceeded. This can be useful in scenarios where you want to wait for new messages to arrive without continuously querying the queue in a tight loop. + + In the following example, the method will check for up to 5 messages in the queue `my_queue`, making the messages invisible for 30 seconds (`vt`), and will poll for a maximum of 5 seconds (`max_poll_seconds`) with intervals of 100 milliseconds (`poll_interval_ms`) between checks. 
+ + ```python + read_messages: list[Message] = queue.read_with_poll( + "my_queue", vt=30, qty=5, max_poll_seconds=5, poll_interval_ms=100 + ) + for message in read_messages: + print(message) + ``` + + This method will continue polling until it either finds the specified number of messages (`qty`) or the `max_poll_seconds` duration is reached. The `poll_interval_ms` parameter controls the interval between successive polls, allowing you to avoid hammering the database with continuous queries. + + ### Archive the message after we're done with it + + Archived messages are moved to an archive table. + + ```python + archived: bool = queue.archive("my_queue", read_message.msg_id) + ``` + + ### Archive a batch of messages + + ```python + archived_ids: list[int] = queue.archive_batch("my_queue", [msg_id1, msg_id2]) + ``` + + ### Delete a message completely + + ```python + read_message: Message = queue.read("my_queue") + deleted: bool = queue.delete("my_queue", read_message.msg_id) + ``` + + ### Delete a batch of messages + + ```python + deleted_ids: list[int] = queue.delete_batch("my_queue", [msg_id1, msg_id2]) + ``` + + ### Set the visibility timeout (VT) for a specific message + + ```python + updated_message: Message = queue.set_vt("my_queue", msg_id, 60) + print(updated_message) + ``` + + ### Pop a message, deleting it and reading it in one transaction + + ```python + popped_message: Message = queue.pop("my_queue") + print(popped_message) + ``` + + ### Purge all messages from a queue + + ```python + purged_count: int = queue.purge("my_queue") + print(f"Purged {purged_count} messages from the queue.") + ``` + + ### Detach an archive from a queue + + ```python + queue.detach_archive("my_queue") + ``` + + ### Drop a queue + + ```python + dropped: bool = queue.drop_queue("my_queue") + print(f"Queue dropped: {dropped}") + ``` + + ### Validate the length of a queue name + + ```python + queue.validate_queue_name("my_queue") + ``` + + ### Get queue metrics + + The `metrics` 
method retrieves various statistics for a specific queue, such as the queue length, the age of the newest and oldest messages, the total number of messages, and the time of the metrics scrape. + + ```python + metrics = queue.metrics("my_queue") + print(f"Metrics: {metrics}") + ``` + + ### Access individual metrics + + You can access individual metrics directly from the `metrics` method's return value: + + ```python + metrics = queue.metrics("my_queue") + print(f"Queue name: {metrics.queue_name}") + print(f"Queue length: {metrics.queue_length}") + print(f"Newest message age (seconds): {metrics.newest_msg_age_sec}") + print(f"Oldest message age (seconds): {metrics.oldest_msg_age_sec}") + print(f"Total messages: {metrics.total_messages}") + print(f"Scrape time: {metrics.scrape_time}") + ``` + + ### Get metrics for all queues + + The `metrics_all` method retrieves metrics for all queues, allowing you to iterate through each queue's metrics. + + ```python + all_metrics = queue.metrics_all() + for metrics in all_metrics: + print(f"Queue name: {metrics.queue_name}") + print(f"Queue length: {metrics.queue_length}") + print(f"Newest message age (seconds): {metrics.newest_msg_age_sec}") + print(f"Oldest message age (seconds): {metrics.oldest_msg_age_sec}") + print(f"Total messages: {metrics.total_messages}") + print(f"Scrape time: {metrics.scrape_time}") + ``` + + ### Optional Logging Configuration + + You can enable verbose logging and specify a custom log filename. + + ```python + queue = PGMQueue( + host="0.0.0.0", + port="5432", + username="postgres", + password="postgres", + database="postgres", + verbose=True, + log_filename="my_custom_log.log" + ) + ``` + + ### Using Transactions + + To perform multiple operations within a single transaction, you can use the `@transaction` decorator from the `tembo_pgmq_python.decorators` module. This ensures that all operations within the transaction either complete successfully or are rolled back if an error occurs. 
+ + First, import the `transaction` decorator: + + ```python + from tembo_pgmq_python.decorators import transaction + ``` + + #### Example: Transactional Operation + + ```python + @transaction + def transactional_operation(queue: PGMQueue, conn=None): + # Perform multiple queue operations within a transaction + queue.create_queue("transactional_queue", conn=conn) + queue.send("transactional_queue", {"message": "Hello, World!"}, conn=conn) + # If an exception occurs here, all previous operations will be rolled back + # Uncomment the following line to simulate an error + # raise Exception("Simulated failure") + + # Execute the transactional function + try: + transactional_operation(queue) + except Exception as e: + print(f"Transaction failed: {e}") + ``` + + In this example: + + - The `transactional_operation` function is decorated with `@transaction`, ensuring all operations within it are part of the same transaction. + - The `conn` parameter is passed to each method to use the same database connection within the transaction. + - If an exception occurs within the function (e.g., by raising an exception), the transaction is rolled back. + + #### Example: Transaction Rollback on Failure + + ```python + @transaction + def transactional_send_and_fail(queue: PGMQueue, conn=None): + queue.send("my_queue", {"data": "test"}, conn=conn) + # Simulate an error to trigger rollback + raise Exception("Intentional failure") + + try: + transactional_send_and_fail(queue) + except Exception as e: + print(f"Transaction failed: {e}") + # Verify that the message was not sent due to rollback + message = queue.read("my_queue") + assert message is None, "Message should not exist after rollback" + ``` + + #### Using Transactions with Async Queue + + For the async version, you can use the `@transaction` decorator in a similar way. Make sure to import the decorator and define your transactional functions as async. 
+ + ```python + from tembo_pgmq_python.decorators import transaction + from tembo_pgmq_python.async_queue import PGMQueue + + async def main(): + queue = PGMQueue() + await queue.init() + + @transaction + async def transactional_operation_async(queue: PGMQueue, conn=None): + await queue.create_queue("async_transactional_queue", conn=conn) + await queue.send("async_transactional_queue", {"message": "Hello, Async World!"}, conn=conn) + # Uncomment to simulate an error + # raise Exception("Simulated failure") + + try: + await transactional_operation_async(queue) + except Exception as e: + print(f"Transaction failed: {e}") + ``` + + In this async example: + + - Use `async def` to define asynchronous functions. + - Use `await` when calling async methods. + - The `@transaction` decorator manages the transaction context. + + ### Important Notes on Transactions + + - All methods used within a transaction must accept the `conn` parameter and pass it to the query execution methods. + - If an exception occurs within the transactional function, the transaction will be rolled back. + - Transactions help maintain data integrity by ensuring that a group of operations either all succeed or all fail together. + + ### Enabling Transactions by Default + + You can set the `perform_transaction` parameter to `True` when initializing the `PGMQueue` instance to enable transactions by default for all methods. + + ```python + queue = PGMQueue( + host="0.0.0.0", + port="5432", + username="postgres", + password="postgres", + database="postgres", + perform_transaction=True + ) + ``` + + However, be cautious with this approach, as it will wrap every method call in a transaction, which might not be necessary or optimal for all operations. + + ### Customizing Transaction Behavior + + You can control transaction behavior on a per-method basis by using the `perform_transaction` keyword argument. 
+ + ```python + # This method call will be executed within a transaction + queue.send("my_queue", {"data": "test"}, perform_transaction=True) + + # This method call will not use a transaction + queue.send("my_queue", {"data": "test"}, perform_transaction=False) + ``` + + ### Conclusion + + Using transactions allows you to group multiple database operations into a single atomic unit of work, ensuring consistency and integrity of your data when performing complex operations. diff --git a/tembo-pgmq-python/tembo_pgmq_python/__init__.py b/tembo-pgmq-python/tembo_pgmq_python/__init__.py index 58d8ab2a..e02685a7 100644 --- a/tembo-pgmq-python/tembo_pgmq_python/__init__.py +++ b/tembo-pgmq-python/tembo_pgmq_python/__init__.py @@ -1,3 +1,4 @@ from tembo_pgmq_python.queue import Message, PGMQueue # type: ignore +from tembo_pgmq_python.decorators import transaction, async_transaction -__all__ = ["Message", "PGMQueue"] +__all__ = ["Message", "PGMQueue", "transaction", "async_transaction"] diff --git a/tembo-pgmq-python/tembo_pgmq_python/async_queue.py b/tembo-pgmq-python/tembo_pgmq_python/async_queue.py index e5926079..f3137820 100644 --- a/tembo-pgmq-python/tembo_pgmq_python/async_queue.py +++ b/tembo-pgmq-python/tembo_pgmq_python/async_queue.py @@ -1,16 +1,21 @@ +# async_queue.py + from dataclasses import dataclass, field from typing import Optional, List import asyncpg import os +import logging +from datetime import datetime from orjson import dumps, loads from tembo_pgmq_python.messages import Message, QueueMetrics +from tembo_pgmq_python.decorators import async_transaction as transaction @dataclass class PGMQueue: - """Base class for interacting with a queue""" + """Asynchronous PGMQueue client for interacting with queues.""" host: str = field(default_factory=lambda: os.getenv("PG_HOST", "localhost")) port: str = field(default_factory=lambda: os.getenv("PG_PORT", "5432")) @@ -20,7 +25,11 @@ class PGMQueue: delay: int = 0 vt: int = 30 pool_size: int = 10 + 
perform_transaction: bool = False + verbose: bool = False + log_filename: Optional[str] = None pool: asyncpg.pool.Pool = field(init=False) + logger: logging.Logger = field(init=False) def __post_init__(self) -> None: self.host = self.host or "localhost" @@ -32,110 +41,229 @@ def __post_init__(self) -> None: if not all([self.host, self.port, self.database, self.username, self.password]): raise ValueError("Incomplete database connection information provided.") + self._initialize_logging() + self.logger.debug("PGMQueue initialized") + + def _initialize_logging(self) -> None: + if self.verbose: + log_filename = self.log_filename or datetime.now().strftime("pgmq_async_debug_%Y%m%d_%H%M%S.log") + logging.basicConfig( + filename=os.path.join(os.getcwd(), log_filename), + level=logging.DEBUG, + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + ) + else: + logging.basicConfig(level=logging.WARNING) + self.logger = logging.getLogger(__name__) + async def init(self): + self.logger.debug("Creating asyncpg connection pool") self.pool = await asyncpg.create_pool( user=self.username, database=self.database, password=self.password, host=self.host, port=self.port, + min_size=1, + max_size=self.pool_size, ) + self.logger.debug("Initializing pgmq extension") async with self.pool.acquire() as conn: - await conn.fetch("create extension if not exists pgmq cascade;") + await conn.execute("create extension if not exists pgmq cascade;") + @transaction async def create_partitioned_queue( self, queue: str, partition_interval: int = 10000, retention_interval: int = 100000, + conn=None, ) -> None: - """Create a new queue - - Note: Partitions are created pg_partman which must be configured in postgresql.conf - Set `pg_partman_bgw.interval` to set the interval for partition creation and deletion. - A value of 10 will create new/delete partitions every 10 seconds. This value should be tuned - according to the volume of messages being sent to the queue. 
- - Args: - queue: The name of the queue. - partition_interval: The number of messages per partition. Defaults to 10,000. - retention_interval: The number of messages to retain. Messages exceeding this number will be dropped. - Defaults to 100,000. - """ - - async with self.pool.acquire() as conn: - await conn.execute( - "SELECT pgmq.create($1, $2::text, $3::text);", - queue, - partition_interval, - retention_interval, - ) + """Create a new partitioned queue.""" + self.logger.debug( + f"create_partitioned_queue called with queue='{queue}', " + f"partition_interval={partition_interval}, " + f"retention_interval={retention_interval}, conn={conn}" + ) + if conn is None: + async with self.pool.acquire() as conn: + await self._create_partitioned_queue_internal(queue, partition_interval, retention_interval, conn) + else: + await self._create_partitioned_queue_internal(queue, partition_interval, retention_interval, conn) + + async def _create_partitioned_queue_internal(self, queue, partition_interval, retention_interval, conn): + self.logger.debug(f"Creating partitioned queue '{queue}'") + await conn.execute( + "SELECT pgmq.create($1, $2::text, $3::text);", + queue, + partition_interval, + retention_interval, + ) - async def create_queue(self, queue: str, unlogged: bool = False) -> None: + @transaction + async def create_queue(self, queue: str, unlogged: bool = False, conn=None) -> None: """Create a new queue.""" - async with self.pool.acquire() as conn: - if unlogged: - await conn.execute("SELECT pgmq.create_unlogged($1);", queue) - else: - await conn.execute("SELECT pgmq.create($1);", queue) + self.logger.debug(f"create_queue called with queue='{queue}', unlogged={unlogged}, conn={conn}") + if conn is None: + async with self.pool.acquire() as conn: + await self._create_queue_internal(queue, unlogged, conn) + else: + await self._create_queue_internal(queue, unlogged, conn) + + async def _create_queue_internal(self, queue, unlogged, conn): + self.logger.debug(f"Creating 
queue '{queue}' with unlogged={unlogged}") + if unlogged: + await conn.execute("SELECT pgmq.create_unlogged($1);", queue) + else: + await conn.execute("SELECT pgmq.create($1);", queue) async def validate_queue_name(self, queue_name: str) -> None: """Validate the length of a queue name.""" + self.logger.debug(f"validate_queue_name called with queue_name='{queue_name}'") async with self.pool.acquire() as conn: await conn.execute("SELECT pgmq.validate_queue_name($1);", queue_name) - async def drop_queue(self, queue: str, partitioned: bool = False) -> bool: + @transaction + async def drop_queue(self, queue: str, partitioned: bool = False, conn=None) -> bool: """Drop a queue.""" - async with self.pool.acquire() as conn: - result = await conn.fetchrow("SELECT pgmq.drop_queue($1, $2);", queue, partitioned) + self.logger.debug(f"drop_queue called with queue='{queue}', partitioned={partitioned}, conn={conn}") + if conn is None: + async with self.pool.acquire() as conn: + return await self._drop_queue_internal(queue, partitioned, conn) + else: + return await self._drop_queue_internal(queue, partitioned, conn) + + async def _drop_queue_internal(self, queue, partitioned, conn): + result = await conn.fetchrow("SELECT pgmq.drop_queue($1, $2);", queue, partitioned) + self.logger.debug(f"Queue '{queue}' dropped: {result[0]}") return result[0] - async def list_queues(self) -> List[str]: + @transaction + async def list_queues(self, conn=None) -> List[str]: """List all queues.""" - async with self.pool.acquire() as conn: - rows = await conn.fetch("SELECT queue_name FROM pgmq.list_queues();") - return [row["queue_name"] for row in rows] - - async def send(self, queue: str, message: dict, delay: int = 0) -> int: + self.logger.debug(f"list_queues called with conn={conn}") + if conn is None: + async with self.pool.acquire() as conn: + return await self._list_queues_internal(conn) + else: + return await self._list_queues_internal(conn) + + async def _list_queues_internal(self, conn): + 
rows = await conn.fetch("SELECT queue_name FROM pgmq.list_queues();") + queues = [row["queue_name"] for row in rows] + self.logger.debug(f"Queues listed: {queues}") + return queues + + @transaction + async def send(self, queue: str, message: dict, delay: int = 0, conn=None) -> int: """Send a message to a queue.""" - async with self.pool.acquire() as conn: - result = await conn.fetchrow( - "SELECT * FROM pgmq.send($1, $2::jsonb, $3);", queue, dumps(message).decode("utf-8"), delay - ) + self.logger.debug(f"send called with queue='{queue}', message={message}, delay={delay}, conn={conn}") + if conn is None: + async with self.pool.acquire() as conn: + return await self._send_internal(queue, message, delay, conn) + else: + return await self._send_internal(queue, message, delay, conn) + + async def _send_internal(self, queue, message, delay, conn): + self.logger.debug(f"Sending message to queue '{queue}' with delay={delay}") + result = await conn.fetchrow( + "SELECT * FROM pgmq.send($1, $2::jsonb, $3);", + queue, + dumps(message).decode("utf-8"), + delay, + ) + self.logger.debug(f"Message sent with msg_id={result[0]}") return result[0] - async def send_batch(self, queue: str, messages: List[dict], delay: int = 0) -> List[int]: + @transaction + async def send_batch(self, queue: str, messages: List[dict], delay: int = 0, conn=None) -> List[int]: """Send a batch of messages to a queue.""" + self.logger.debug(f"send_batch called with queue='{queue}', messages={messages}, delay={delay}, conn={conn}") + if conn is None: + async with self.pool.acquire() as conn: + return await self._send_batch_internal(queue, messages, delay, conn) + else: + return await self._send_batch_internal(queue, messages, delay, conn) + + async def _send_batch_internal(self, queue, messages, delay, conn): + self.logger.debug(f"Sending batch of messages to queue '{queue}' with delay={delay}") jsonb_array = [dumps(message).decode("utf-8") for message in messages] + result = await conn.fetch( + "SELECT * 
FROM pgmq.send_batch($1, $2::jsonb[], $3);", + queue, + jsonb_array, + delay, + ) + msg_ids = [message[0] for message in result] + self.logger.debug(f"Batch messages sent with msg_ids={msg_ids}") + return msg_ids - async with self.pool.acquire() as conn: - result = await conn.fetch( - "SELECT * FROM pgmq.send_batch($1, $2::jsonb[], $3);", - queue, - jsonb_array, - delay, - ) - return [message[0] for message in result] - - async def read(self, queue: str, vt: Optional[int] = None) -> Optional[Message]: + @transaction + async def read(self, queue: str, vt: Optional[int] = None, conn=None) -> Optional[Message]: """Read a message from a queue.""" + self.logger.debug(f"read called with queue='{queue}', vt={vt}, conn={conn}") batch_size = 1 - async with self.pool.acquire() as conn: - rows = await conn.fetch("SELECT * FROM pgmq.read($1, $2, $3);", queue, vt or self.vt, batch_size) + if conn is None: + async with self.pool.acquire() as conn: + return await self._read_internal(queue, vt, batch_size, conn) + else: + return await self._read_internal(queue, vt, batch_size, conn) + + async def _read_internal(self, queue, vt, batch_size, conn): + self.logger.debug(f"Reading message from queue '{queue}' with vt={vt}") + rows = await conn.fetch( + "SELECT * FROM pgmq.read($1, $2, $3);", + queue, + vt or self.vt, + batch_size, + ) messages = [ - Message(msg_id=row[0], read_ct=row[1], enqueued_at=row[2], vt=row[3], message=loads(row[4])) for row in rows + Message( + msg_id=row[0], + read_ct=row[1], + enqueued_at=row[2], + vt=row[3], + message=loads(row[4]), + ) + for row in rows ] - return messages[0] if len(messages) == 1 else None + self.logger.debug(f"Message read: {messages[0] if messages else None}") + return messages[0] if messages else None - async def read_batch(self, queue: str, vt: Optional[int] = None, batch_size=1) -> Optional[List[Message]]: + @transaction + async def read_batch( + self, queue: str, vt: Optional[int] = None, batch_size=1, conn=None + ) -> 
Optional[List[Message]]: """Read a batch of messages from a queue.""" - async with self.pool.acquire() as conn: - rows = await conn.fetch("SELECT * FROM pgmq.read($1, $2, $3);", queue, vt or self.vt, batch_size) - - return [ - Message(msg_id=row[0], read_ct=row[1], enqueued_at=row[2], vt=row[3], message=loads(row[4])) for row in rows + self.logger.debug(f"read_batch called with queue='{queue}', vt={vt}, batch_size={batch_size}, conn={conn}") + if conn is None: + async with self.pool.acquire() as conn: + return await self._read_batch_internal(queue, vt, batch_size, conn) + else: + return await self._read_batch_internal(queue, vt, batch_size, conn) + + async def _read_batch_internal(self, queue, vt, batch_size, conn): + self.logger.debug(f"Reading batch of messages from queue '{queue}' with vt={vt}") + rows = await conn.fetch( + "SELECT * FROM pgmq.read($1, $2, $3);", + queue, + vt or self.vt, + batch_size, + ) + messages = [ + Message( + msg_id=row[0], + read_ct=row[1], + enqueued_at=row[2], + vt=row[3], + message=loads(row[4]), + ) + for row in rows ] + self.logger.debug(f"Batch messages read: {messages}") + return messages + @transaction async def read_with_poll( self, queue: str, @@ -143,69 +271,164 @@ async def read_with_poll( qty: int = 1, max_poll_seconds: int = 5, poll_interval_ms: int = 100, + conn=None, ) -> Optional[List[Message]]: """Read messages from a queue with polling.""" - async with self.pool.acquire() as conn: - rows = await conn.fetch( - "SELECT * FROM pgmq.read_with_poll($1, $2, $3, $4, $5);", - queue, - vt or self.vt, - qty, - max_poll_seconds, - poll_interval_ms, + self.logger.debug( + f"read_with_poll called with queue='{queue}', vt={vt}, qty={qty}, " + f"max_poll_seconds={max_poll_seconds}, poll_interval_ms={poll_interval_ms}, conn={conn}" + ) + if conn is None: + async with self.pool.acquire() as conn: + return await self._read_with_poll_internal(queue, vt, qty, max_poll_seconds, poll_interval_ms, conn) + else: + return await 
self._read_with_poll_internal(queue, vt, qty, max_poll_seconds, poll_interval_ms, conn) + + async def _read_with_poll_internal(self, queue, vt, qty, max_poll_seconds, poll_interval_ms, conn): + self.logger.debug(f"Reading messages with polling from queue '{queue}'") + rows = await conn.fetch( + "SELECT * FROM pgmq.read_with_poll($1, $2, $3, $4, $5);", + queue, + vt or self.vt, + qty, + max_poll_seconds, + poll_interval_ms, + ) + messages = [ + Message( + msg_id=row[0], + read_ct=row[1], + enqueued_at=row[2], + vt=row[3], + message=loads(row[4]), ) - - return [ - Message(msg_id=row[0], read_ct=row[1], enqueued_at=row[2], vt=row[3], message=loads(row[4])) for row in rows + for row in rows ] + self.logger.debug(f"Messages read with polling: {messages}") + return messages - async def pop(self, queue: str) -> Message: + @transaction + async def pop(self, queue: str, conn=None) -> Message: """Pop a message from a queue.""" - async with self.pool.acquire() as conn: - rows = await conn.fetch("SELECT * FROM pgmq.pop($1);", queue) - + self.logger.debug(f"pop called with queue='{queue}', conn={conn}") + if conn is None: + async with self.pool.acquire() as conn: + return await self._pop_internal(queue, conn) + else: + return await self._pop_internal(queue, conn) + + async def _pop_internal(self, queue, conn): + self.logger.debug(f"Popping message from queue '{queue}'") + rows = await conn.fetch("SELECT * FROM pgmq.pop($1);", queue) messages = [ - Message(msg_id=row[0], read_ct=row[1], enqueued_at=row[2], vt=row[3], message=loads(row[4])) for row in rows + Message( + msg_id=row[0], + read_ct=row[1], + enqueued_at=row[2], + vt=row[3], + message=loads(row[4]), + ) + for row in rows ] - return messages[0] + self.logger.debug(f"Message popped: {messages[0] if messages else None}") + return messages[0] if messages else None - async def delete(self, queue: str, msg_id: int) -> bool: + @transaction + async def delete(self, queue: str, msg_id: int, conn=None) -> bool: """Delete a 
message from a queue.""" - async with self.pool.acquire() as conn: - row = await conn.fetchrow("SELECT pgmq.delete($1::text, $2::int);", queue, msg_id) - + self.logger.debug(f"delete called with queue='{queue}', msg_id={msg_id}, conn={conn}") + if conn is None: + async with self.pool.acquire() as conn: + return await self._delete_internal(queue, msg_id, conn) + else: + return await self._delete_internal(queue, msg_id, conn) + + async def _delete_internal(self, queue, msg_id, conn): + self.logger.debug(f"Deleting message with msg_id={msg_id} from queue '{queue}'") + row = await conn.fetchrow("SELECT pgmq.delete($1::text, $2::int);", queue, msg_id) + self.logger.debug(f"Message deleted: {row[0]}") return row[0] - async def delete_batch(self, queue: str, msg_ids: List[int]) -> List[int]: + @transaction + async def delete_batch(self, queue: str, msg_ids: List[int], conn=None) -> List[int]: """Delete multiple messages from a queue.""" - async with self.pool.acquire() as conn: - results = await conn.fetch("SELECT * FROM pgmq.delete($1::text, $2::int[]);", queue, msg_ids) - return [result[0] for result in results] - - async def archive(self, queue: str, msg_id: int) -> bool: + self.logger.debug(f"delete_batch called with queue='{queue}', msg_ids={msg_ids}, conn={conn}") + if conn is None: + async with self.pool.acquire() as conn: + return await self._delete_batch_internal(queue, msg_ids, conn) + else: + return await self._delete_batch_internal(queue, msg_ids, conn) + + async def _delete_batch_internal(self, queue, msg_ids, conn): + self.logger.debug(f"Deleting messages with msg_ids={msg_ids} from queue '{queue}'") + results = await conn.fetch("SELECT * FROM pgmq.delete($1::text, $2::int[]);", queue, msg_ids) + deleted_ids = [result[0] for result in results] + self.logger.debug(f"Messages deleted: {deleted_ids}") + return deleted_ids + + @transaction + async def archive(self, queue: str, msg_id: int, conn=None) -> bool: """Archive a message from a queue.""" - async with 
self.pool.acquire() as conn: - row = await conn.fetchrow("SELECT pgmq.archive($1::text, $2::int);", queue, msg_id) - + self.logger.debug(f"archive called with queue='{queue}', msg_id={msg_id}, conn={conn}") + if conn is None: + async with self.pool.acquire() as conn: + return await self._archive_internal(queue, msg_id, conn) + else: + return await self._archive_internal(queue, msg_id, conn) + + async def _archive_internal(self, queue, msg_id, conn): + self.logger.debug(f"Archiving message with msg_id={msg_id} from queue '{queue}'") + row = await conn.fetchrow("SELECT pgmq.archive($1::text, $2::int);", queue, msg_id) + self.logger.debug(f"Message archived: {row[0]}") return row[0] - async def archive_batch(self, queue: str, msg_ids: List[int]) -> List[int]: + @transaction + async def archive_batch(self, queue: str, msg_ids: List[int], conn=None) -> List[int]: """Archive multiple messages from a queue.""" - async with self.pool.acquire() as conn: - results = await conn.fetch("SELECT * FROM pgmq.archive($1::text, $2::int[]);", queue, msg_ids) - return [result[0] for result in results] - - async def purge(self, queue: str) -> int: + self.logger.debug(f"archive_batch called with queue='{queue}', msg_ids={msg_ids}, conn={conn}") + if conn is None: + async with self.pool.acquire() as conn: + return await self._archive_batch_internal(queue, msg_ids, conn) + else: + return await self._archive_batch_internal(queue, msg_ids, conn) + + async def _archive_batch_internal(self, queue, msg_ids, conn): + self.logger.debug(f"Archiving messages with msg_ids={msg_ids} from queue '{queue}'") + results = await conn.fetch("SELECT * FROM pgmq.archive($1::text, $2::int[]);", queue, msg_ids) + archived_ids = [result[0] for result in results] + self.logger.debug(f"Messages archived: {archived_ids}") + return archived_ids + + @transaction + async def purge(self, queue: str, conn=None) -> int: """Purge a queue.""" - async with self.pool.acquire() as conn: - row = await conn.fetchrow("SELECT 
pgmq.purge_queue($1);", queue) - + self.logger.debug(f"purge called with queue='{queue}', conn={conn}") + if conn is None: + async with self.pool.acquire() as conn: + return await self._purge_internal(queue, conn) + else: + return await self._purge_internal(queue, conn) + + async def _purge_internal(self, queue, conn): + self.logger.debug(f"Purging queue '{queue}'") + row = await conn.fetchrow("SELECT pgmq.purge_queue($1);", queue) + self.logger.debug(f"Messages purged: {row[0]}") return row[0] - async def metrics(self, queue: str) -> QueueMetrics: - async with self.pool.acquire() as conn: - result = await conn.fetchrow("SELECT * FROM pgmq.metrics($1);", queue) - return QueueMetrics( + @transaction + async def metrics(self, queue: str, conn=None) -> QueueMetrics: + """Get metrics for a specific queue.""" + self.logger.debug(f"metrics called with queue='{queue}', conn={conn}") + if conn is None: + async with self.pool.acquire() as conn: + return await self._metrics_internal(queue, conn) + else: + return await self._metrics_internal(queue, conn) + + async def _metrics_internal(self, queue, conn): + self.logger.debug(f"Fetching metrics for queue '{queue}'") + result = await conn.fetchrow("SELECT * FROM pgmq.metrics($1);", queue) + metrics = QueueMetrics( queue_name=result[0], queue_length=result[1], newest_msg_age_sec=result[2], @@ -213,11 +436,23 @@ async def metrics(self, queue: str) -> QueueMetrics: total_messages=result[4], scrape_time=result[5], ) - - async def metrics_all(self) -> List[QueueMetrics]: - async with self.pool.acquire() as conn: - results = await conn.fetch("SELECT * FROM pgmq.metrics_all();") - return [ + self.logger.debug(f"Metrics fetched: {metrics}") + return metrics + + @transaction + async def metrics_all(self, conn=None) -> List[QueueMetrics]: + """Get metrics for all queues.""" + self.logger.debug(f"metrics_all called with conn={conn}") + if conn is None: + async with self.pool.acquire() as conn: + return await 
self._metrics_all_internal(conn) + else: + return await self._metrics_all_internal(conn) + + async def _metrics_all_internal(self, conn): + self.logger.debug("Fetching metrics for all queues") + results = await conn.fetch("SELECT * FROM pgmq.metrics_all();") + metrics_list = [ QueueMetrics( queue_name=row[0], queue_length=row[1], @@ -228,14 +463,43 @@ async def metrics_all(self) -> List[QueueMetrics]: ) for row in results ] + self.logger.debug(f"All metrics fetched: {metrics_list}") + return metrics_list - async def set_vt(self, queue: str, msg_id: int, vt: int) -> Message: + @transaction + async def set_vt(self, queue: str, msg_id: int, vt: int, conn=None) -> Message: """Set the visibility timeout for a specific message.""" - async with self.pool.acquire() as conn: - row = await conn.fetchrow("SELECT * FROM pgmq.set_vt($1, $2, $3);", queue, msg_id, vt) - return Message(msg_id=row[0], read_ct=row[1], enqueued_at=row[2], vt=row[3], message=row[4]) + self.logger.debug(f"set_vt called with queue='{queue}', msg_id={msg_id}, vt={vt}, conn={conn}") + if conn is None: + async with self.pool.acquire() as conn: + return await self._set_vt_internal(queue, msg_id, vt, conn) + else: + return await self._set_vt_internal(queue, msg_id, vt, conn) + + async def _set_vt_internal(self, queue, msg_id, vt, conn): + self.logger.debug(f"Setting VT for msg_id={msg_id} in queue '{queue}' to vt={vt}") + row = await conn.fetchrow("SELECT * FROM pgmq.set_vt($1, $2, $3);", queue, msg_id, vt) + message = Message( + msg_id=row[0], + read_ct=row[1], + enqueued_at=row[2], + vt=row[3], + message=loads(row[4]), + ) + self.logger.debug(f"VT set for message: {message}") + return message - async def detach_archive(self, queue: str) -> None: + @transaction + async def detach_archive(self, queue: str, conn=None) -> None: """Detach an archive from a queue.""" - async with self.pool.acquire() as conn: - await conn.fetch("select pgmq.detach_archive($1);", queue) + self.logger.debug(f"detach_archive called 
with queue='{queue}', conn={conn}") + if conn is None: + async with self.pool.acquire() as conn: + await self._detach_archive_internal(queue, conn) + else: + await self._detach_archive_internal(queue, conn) + + async def _detach_archive_internal(self, queue, conn): + self.logger.debug(f"Detaching archive from queue '{queue}'") + await conn.execute("SELECT pgmq.detach_archive($1);", queue) + self.logger.debug(f"Archive detached from queue '{queue}'") diff --git a/tembo-pgmq-python/tembo_pgmq_python/decorators.py b/tembo-pgmq-python/tembo_pgmq_python/decorators.py new file mode 100644 index 00000000..25a1f44a --- /dev/null +++ b/tembo-pgmq-python/tembo_pgmq_python/decorators.py @@ -0,0 +1,84 @@ +# decorators.py +import functools + + +def transaction(func): + """Decorator to run a function within a database transaction.""" + + @functools.wraps(func) + def wrapper(*args, **kwargs): + # Determine if 'self' is passed (for methods) + if args and hasattr(args[0], "pool") and hasattr(args[0], "logger"): + self = args[0] + perform_transaction = kwargs.pop("perform_transaction", self.perform_transaction) + if perform_transaction: + with self.pool.connection() as conn: + txn = conn.transaction() + txn.begin() + self.logger.debug(f"Transaction started with conn: {conn}") + try: + result = func(*args, conn=conn, **kwargs) + txn.commit() + self.logger.debug(f"Transaction committed with conn: {conn}") + return result + except Exception as e: + txn.rollback() + self.logger.error(f"Transaction failed with exception: {e}, rolling back.") + self.logger.debug(f"Transaction rolled back with conn: {conn}") + raise + else: + with self.pool.connection() as conn: + self.logger.debug(f"Non-transactional execution with conn: {conn}") + return func(*args, conn=conn, **kwargs) + else: + # For functions without 'self', assume 'queue' is passed explicitly + queue = kwargs.get("queue") or args[0] + perform_transaction = kwargs.pop("perform_transaction", queue.perform_transaction) + if 
perform_transaction: + with queue.pool.connection() as conn: + txn = conn.transaction() + txn.begin() + queue.logger.debug(f"Transaction started with conn: {conn}") + try: + result = func(*args, conn=conn, **kwargs) + txn.commit() + queue.logger.debug(f"Transaction committed with conn: {conn}") + return result + except Exception as e: + txn.rollback() + queue.logger.error(f"Transaction failed with exception: {e}, rolling back.") + queue.logger.debug(f"Transaction rolled back with conn: {conn}") + raise + else: + with queue.pool.connection() as conn: + queue.logger.debug(f"Non-transactional execution with conn: {conn}") + return func(*args, conn=conn, **kwargs) + + return wrapper + + +def async_transaction(func): + """Asynchronous decorator to run a method within a database transaction.""" + import functools + + @functools.wraps(func) + async def wrapper(self, *args, **kwargs): + perform_transaction = kwargs.pop("perform_transaction", getattr(self, "perform_transaction", False)) + if perform_transaction: + async with self.pool.acquire() as conn: + txn = conn.transaction() + await txn.start() + try: + kwargs["conn"] = conn # Injecting 'conn' into kwargs + result = await func(self, *args, **kwargs) + await txn.commit() + return result + except Exception: + await txn.rollback() + raise + else: + async with self.pool.acquire() as conn: + kwargs["conn"] = conn # Injecting 'conn' into kwargs + return await func(self, *args, **kwargs) + + return wrapper diff --git a/tembo-pgmq-python/tembo_pgmq_python/queue.py b/tembo-pgmq-python/tembo_pgmq_python/queue.py index df2c17aa..df03c796 100644 --- a/tembo-pgmq-python/tembo_pgmq_python/queue.py +++ b/tembo-pgmq-python/tembo_pgmq_python/queue.py @@ -1,11 +1,11 @@ from dataclasses import dataclass, field -from typing import Optional, List, Callable, Union +from typing import Optional, List, Union from psycopg.types.json import Jsonb from psycopg_pool import ConnectionPool import os from tembo_pgmq_python.messages import Message, 
QueueMetrics +from tembo_pgmq_python.decorators import transaction import logging -import functools import datetime @@ -42,9 +42,7 @@ def __post_init__(self) -> None: def _initialize_logging(self) -> None: if self.verbose: - log_filename = self.log_filename or datetime.now().strftime( - "pgmq_debug_%Y%m%d_%H%M%S.log" - ) + log_filename = self.log_filename or datetime.now().strftime("pgmq_debug_%Y%m%d_%H%M%S.log") logging.basicConfig( filename=os.path.join(os.getcwd(), log_filename), level=logging.DEBUG, @@ -57,62 +55,22 @@ def _initialize_logging(self) -> None: def _initialize_extensions(self, conn=None) -> None: self._execute_query("create extension if not exists pgmq cascade;", conn=conn) - def _execute_query( - self, query: str, params: Optional[Union[List, tuple]] = None, conn=None - ) -> None: - self.logger.debug( - f"Executing query: {query} with params: {params} using conn: {conn}" - ) + def _execute_query(self, query: str, params: Optional[Union[List, tuple]] = None, conn=None) -> None: + self.logger.debug(f"Executing query: {query} with params: {params} using conn: {conn}") if conn: conn.execute(query, params) else: with self.pool.connection() as conn: conn.execute(query, params) - def _execute_query_with_result( - self, query: str, params: Optional[Union[List, tuple]] = None, conn=None - ): - self.logger.debug( - f"Executing query with result: {query} with params: {params} using conn: {conn}" - ) + def _execute_query_with_result(self, query: str, params: Optional[Union[List, tuple]] = None, conn=None): + self.logger.debug(f"Executing query with result: {query} with params: {params} using conn: {conn}") if conn: return conn.execute(query, params).fetchall() else: with self.pool.connection() as conn: return conn.execute(query, params).fetchall() - def transaction(func: Callable) -> Callable: - """Decorator to run a method within a database transaction.""" - - @functools.wraps(func) - def wrapper(self, *args, **kwargs): - perform_transaction = kwargs.pop( - 
"perform_transaction", self.perform_transaction - ) - if perform_transaction: - with self.pool.connection() as conn: - txn = conn.transaction() - txn.begin() - self.logger.debug(f"Transaction started with conn: {conn}") - try: - result = func(self, *args, conn=conn, **kwargs) - txn.commit() - self.logger.debug(f"Transaction committed with conn: {conn}") - return result - except Exception as e: - txn.rollback() - self.logger.error( - f"Transaction failed with exception: {e}, rolling back." - ) - self.logger.debug(f"Transaction rolled back with conn: {conn}") - raise - else: - with self.pool.connection() as conn: - self.logger.debug(f"Non-transactional execution with conn: {conn}") - return func(self, *args, conn=conn, **kwargs) - - return wrapper - @transaction def create_partitioned_queue( self, @@ -130,11 +88,7 @@ def create_partitioned_queue( def create_queue(self, queue: str, unlogged: bool = False, conn=None) -> None: """Create a new queue.""" self.logger.debug(f"create_queue called with conn: {conn}") - query = ( - "select pgmq.create_unlogged(%s);" - if unlogged - else "select pgmq.create(%s);" - ) + query = "select pgmq.create_unlogged(%s);" if unlogged else "select pgmq.create(%s);" self._execute_query(query, [queue], conn=conn) def validate_queue_name(self, queue_name: str, conn=None) -> None: @@ -163,15 +117,11 @@ def send(self, queue: str, message: dict, delay: int = 0, conn=None) -> int: """Send a message to a queue.""" self.logger.debug(f"send called with conn: {conn}") query = "select * from pgmq.send(%s, %s, %s);" - result = self._execute_query_with_result( - query, [queue, Jsonb(message), delay], conn=conn - ) + result = self._execute_query_with_result(query, [queue, Jsonb(message), delay], conn=conn) return result[0][0] @transaction - def send_batch( - self, queue: str, messages: List[dict], delay: int = 0, conn=None - ) -> List[int]: + def send_batch(self, queue: str, messages: List[dict], delay: int = 0, conn=None) -> List[int]: """Send a batch 
of messages to a queue.""" self.logger.debug(f"send_batch called with conn: {conn}") query = "select * from pgmq.send_batch(%s, %s, %s);" @@ -180,35 +130,21 @@ def send_batch( return [message[0] for message in result] @transaction - def read( - self, queue: str, vt: Optional[int] = None, conn=None - ) -> Optional[Message]: + def read(self, queue: str, vt: Optional[int] = None, conn=None) -> Optional[Message]: """Read a message from a queue.""" self.logger.debug(f"read called with conn: {conn}") query = "select * from pgmq.read(%s, %s, %s);" - rows = self._execute_query_with_result( - query, [queue, vt or self.vt, 1], conn=conn - ) - messages = [ - Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) - for x in rows - ] + rows = self._execute_query_with_result(query, [queue, vt or self.vt, 1], conn=conn) + messages = [Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows] return messages[0] if messages else None @transaction - def read_batch( - self, queue: str, vt: Optional[int] = None, batch_size=1, conn=None - ) -> Optional[List[Message]]: + def read_batch(self, queue: str, vt: Optional[int] = None, batch_size=1, conn=None) -> Optional[List[Message]]: """Read a batch of messages from a queue.""" self.logger.debug(f"read_batch called with conn: {conn}") query = "select * from pgmq.read(%s, %s, %s);" - rows = self._execute_query_with_result( - query, [queue, vt or self.vt, batch_size], conn=conn - ) - return [ - Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) - for x in rows - ] + rows = self._execute_query_with_result(query, [queue, vt or self.vt, batch_size], conn=conn) + return [Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows] @transaction def read_with_poll( @@ -225,10 +161,7 @@ def read_with_poll( query = "select * from pgmq.read_with_poll(%s, %s, %s, %s, %s);" params = [queue, vt or self.vt, qty, max_poll_seconds, poll_interval_ms] 
rows = self._execute_query_with_result(query, params, conn=conn) - return [ - Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) - for x in rows - ] + return [Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows] @transaction def pop(self, queue: str, conn=None) -> Message: @@ -236,10 +169,7 @@ def pop(self, queue: str, conn=None) -> Message: self.logger.debug(f"pop called with conn: {conn}") query = "select * from pgmq.pop(%s);" rows = self._execute_query_with_result(query, [queue], conn=conn) - messages = [ - Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) - for x in rows - ] + messages = [Message(msg_id=x[0], read_ct=x[1], enqueued_at=x[2], vt=x[3], message=x[4]) for x in rows] return messages[0] @transaction @@ -320,9 +250,7 @@ def set_vt(self, queue: str, msg_id: int, vt: int, conn=None) -> Message: """Set the visibility timeout for a specific message.""" self.logger.debug(f"set_vt called with conn: {conn}") query = "select * from pgmq.set_vt(%s, %s, %s);" - result = self._execute_query_with_result(query, [queue, msg_id, vt], conn=conn)[ - 0 - ] + result = self._execute_query_with_result(query, [queue, msg_id, vt], conn=conn)[0] return Message( msg_id=result[0], read_ct=result[1], diff --git a/tembo-pgmq-python/tests/test_async_integration.py b/tembo-pgmq-python/tests/test_async_integration.py index 057829b7..e4e614ba 100644 --- a/tembo-pgmq-python/tests/test_async_integration.py +++ b/tembo-pgmq-python/tests/test_async_integration.py @@ -2,6 +2,7 @@ import time from tembo_pgmq_python.messages import Message from tembo_pgmq_python.async_queue import PGMQueue +from tembo_pgmq_python.decorators import async_transaction as transaction from datetime import datetime, timezone, timedelta # Function to load environment variables @@ -92,7 +93,9 @@ async def test_read_batch(self): """Test reading a batch of messages from the queue.""" messages = [self.test_message, 
self.test_message] await self.queue.send_batch(self.test_queue, messages) - read_messages = await self.queue.read_batch(self.test_queue, vt=20, batch_size=2) + read_messages = await self.queue.read_batch( + self.test_queue, vt=20, batch_size=2 + ) self.assertEqual(len(read_messages), 2) for message in read_messages: self.assertEqual(message.message, self.test_message) @@ -145,7 +148,9 @@ async def test_archive_batch(self): messages = [self.test_message, self.test_message] msg_ids = await self.queue.send_batch(self.test_queue, messages) await self.queue.archive_batch(self.test_queue, msg_ids) - read_messages = await self.queue.read_batch(self.test_queue, vt=20, batch_size=2) + read_messages = await self.queue.read_batch( + self.test_queue, vt=20, batch_size=2 + ) self.assertEqual(len(read_messages), 0) async def test_delete_batch(self): @@ -153,7 +158,9 @@ async def test_delete_batch(self): messages = [self.test_message, self.test_message] msg_ids = await self.queue.send_batch(self.test_queue, messages) await self.queue.delete_batch(self.test_queue, msg_ids) - read_messages = await self.queue.read_batch(self.test_queue, vt=20, batch_size=2) + read_messages = await self.queue.read_batch( + self.test_queue, vt=20, batch_size=2 + ) self.assertEqual(len(read_messages), 0) async def test_set_vt(self): @@ -195,9 +202,50 @@ async def test_validate_queue_name(self): await self.queue.validate_queue_name(invalid_queue_name) self.assertIn("queue name is too long", str(context.exception)) + async def test_transaction_create_queue(self): + @transaction + async def transactional_create_queue(queue): + await queue.create_queue("test_queue_txn") + raise Exception("Simulated failure") -class TestPGMQueueWithEnv(unittest.IsolatedAsyncioTestCase): + try: + await transactional_create_queue(self.queue) + except Exception: + pass + queues = await self.queue.list_queues() + self.assertNotIn("test_queue_txn", queues) + + async def test_transaction_rollback(self): + @transaction + async def 
transactional_operation(queue): + await queue.send( + self.test_queue, + self.test_message, + ) + raise Exception("Intentional failure") + + try: + await transactional_operation(self.queue) + except Exception: + pass + + message = await self.queue.read(self.test_queue) + self.assertIsNone(message, "No message expected in queue after rollback") + + async def test_transaction_send_and_read_message(self): + @transaction + async def transactional_send(queue, conn): + await queue.send(self.test_queue, self.test_message, conn=conn) + + await transactional_send(self.queue) + + message = await self.queue.read(self.test_queue) + self.assertIsNotNone(message, "Expected message in queue") + self.assertEqual(message.message, self.test_message) + + +class TestPGMQueueWithEnv(unittest.IsolatedAsyncioTestCase): async def asyncSetUp(self): """Set up a connection to the PGMQueue using environment variables and create a test queue.""" diff --git a/tembo-pgmq-python/tests/test_integration.py b/tembo-pgmq-python/tests/test_integration.py index f4b0a66d..f55a5bd6 100644 --- a/tembo-pgmq-python/tests/test_integration.py +++ b/tembo-pgmq-python/tests/test_integration.py @@ -1,6 +1,7 @@ import unittest import time -from tembo_pgmq_python import Message, PGMQueue +from tembo_pgmq_python import Message, PGMQueue, transaction + from datetime import datetime, timezone, timedelta @@ -195,51 +196,68 @@ def test_validate_queue_name(self): def test_transaction_create_queue(self): """Test creating a queue within a transaction.""" - try: - self.queue.create_queue("test_queue_txn", perform_transaction=True) + + @transaction + def transactional_create_queue(queue, conn=None): + queue.create_queue("test_queue_txn", conn=conn) raise Exception("Intentional failure") + + try: + transactional_create_queue(self.queue) except Exception: pass finally: - queues = self.queue.list_queues(perform_transaction=False) + queues = self.queue.list_queues() self.assertNotIn("test_queue_txn", queues) def 
test_transaction_send_and_read_message(self): """Test sending and reading a message within a transaction.""" - try: - self.queue.send( - self.test_queue, self.test_message, perform_transaction=True - ) + + @transaction + def transactional_send(queue, conn=None): + queue.send(self.test_queue, self.test_message, conn=conn) raise Exception("Intentional failure") + + try: + transactional_send(self.queue) except Exception: pass finally: - message = self.queue.read(self.test_queue, perform_transaction=False) + message = self.queue.read(self.test_queue) self.assertIsNone(message, "No message expected in queue") def test_transaction_purge_queue(self): """Test purging a queue within a transaction.""" - self.queue.send(self.test_queue, self.test_message, perform_transaction=False) - try: - self.queue.purge(self.test_queue, perform_transaction=True) + + self.queue.send(self.test_queue, self.test_message) + + @transaction + def transactional_purge(queue, conn=None): + queue.purge(self.test_queue, conn=conn) raise Exception("Intentional failure") + + try: + transactional_purge(self.queue) except Exception: pass finally: - message = self.queue.read(self.test_queue, perform_transaction=False) + message = self.queue.read(self.test_queue) self.assertIsNotNone(message, "Message expected in queue") def test_transaction_rollback(self): """Test rollback of a transaction.""" - try: - self.queue.send( - self.test_queue, self.test_message, perform_transaction=True - ) + + @transaction + def transactional_operation(queue, conn=None): + queue.send(self.test_queue, self.test_message, conn=conn) raise Exception("Intentional failure to trigger rollback") + + try: + transactional_operation(self.queue) except Exception: pass finally: - message = self.queue.read(self.test_queue, perform_transaction=False) + message = self.queue.read(self.test_queue) self.assertIsNone(message, "No message expected in queue after rollback") From e57141f9d44f7ddda84e786fd66bd94867c1d46d Mon Sep 17 00:00:00 2001 
From: Ali Tavallaie Date: Fri, 20 Sep 2024 04:05:33 +0330 Subject: [PATCH 10/18] feat:remove perform_transaction --- .../tembo_pgmq_python/decorators.py | 83 +++++++++---------- tembo-pgmq-python/tembo_pgmq_python/queue.py | 1 - tembo-pgmq-python/tests/test_integration.py | 1 - 3 files changed, 38 insertions(+), 47 deletions(-) diff --git a/tembo-pgmq-python/tembo_pgmq_python/decorators.py b/tembo-pgmq-python/tembo_pgmq_python/decorators.py index 25a1f44a..9e9c3633 100644 --- a/tembo-pgmq-python/tembo_pgmq_python/decorators.py +++ b/tembo-pgmq-python/tembo_pgmq_python/decorators.py @@ -7,69 +7,64 @@ def transaction(func): @functools.wraps(func) def wrapper(*args, **kwargs): - # Determine if 'self' is passed (for methods) if args and hasattr(args[0], "pool") and hasattr(args[0], "logger"): self = args[0] - perform_transaction = kwargs.pop("perform_transaction", self.perform_transaction) - if perform_transaction: + + if "conn" not in kwargs: with self.pool.connection() as conn: - txn = conn.transaction() - txn.begin() - self.logger.debug(f"Transaction started with conn: {conn}") - try: - result = func(*args, conn=conn, **kwargs) - txn.commit() - self.logger.debug(f"Transaction committed with conn: {conn}") - return result - except Exception as e: - txn.rollback() - self.logger.error(f"Transaction failed with exception: {e}, rolling back.") - self.logger.debug(f"Transaction rolled back with conn: {conn}") - raise + with conn.transaction() as txn: + self.logger.debug(f"Transaction started with conn: {conn}") + try: + kwargs["conn"] = conn # Inject 'conn' into kwargs + result = func(*args, **kwargs) + self.logger.debug( + f"Transaction completed with conn: {conn}" + ) + return result + except Exception as e: + self.logger.error( + f"Transaction failed with exception: {e}, rolling back." 
+ ) + raise else: - with self.pool.connection() as conn: - self.logger.debug(f"Non-transactional execution with conn: {conn}") - return func(*args, conn=conn, **kwargs) + return func(*args, **kwargs) + else: - # For functions without 'self', assume 'queue' is passed explicitly queue = kwargs.get("queue") or args[0] - perform_transaction = kwargs.pop("perform_transaction", queue.perform_transaction) - if perform_transaction: + + if "conn" not in kwargs: with queue.pool.connection() as conn: - txn = conn.transaction() - txn.begin() - queue.logger.debug(f"Transaction started with conn: {conn}") - try: - result = func(*args, conn=conn, **kwargs) - txn.commit() - queue.logger.debug(f"Transaction committed with conn: {conn}") - return result - except Exception as e: - txn.rollback() - queue.logger.error(f"Transaction failed with exception: {e}, rolling back.") - queue.logger.debug(f"Transaction rolled back with conn: {conn}") - raise + with conn.transaction() as txn: + queue.logger.debug(f"Transaction started with conn: {conn}") + try: + kwargs["conn"] = conn # Inject 'conn' into kwargs + result = func(*args, **kwargs) + queue.logger.debug( + f"Transaction completed with conn: {conn}" + ) + return result + except Exception as e: + queue.logger.error( + f"Transaction failed with exception: {e}, rolling back." 
+ ) + raise else: - with queue.pool.connection() as conn: - queue.logger.debug(f"Non-transactional execution with conn: {conn}") - return func(*args, conn=conn, **kwargs) + return func(*args, **kwargs) return wrapper def async_transaction(func): """Asynchronous decorator to run a method within a database transaction.""" - import functools @functools.wraps(func) async def wrapper(self, *args, **kwargs): - perform_transaction = kwargs.pop("perform_transaction", getattr(self, "perform_transaction", False)) - if perform_transaction: + if "conn" not in kwargs: async with self.pool.acquire() as conn: txn = conn.transaction() await txn.start() try: - kwargs["conn"] = conn # Injecting 'conn' into kwargs + kwargs["conn"] = conn result = await func(self, *args, **kwargs) await txn.commit() return result @@ -77,8 +72,6 @@ async def wrapper(self, *args, **kwargs): await txn.rollback() raise else: - async with self.pool.acquire() as conn: - kwargs["conn"] = conn # Injecting 'conn' into kwargs - return await func(self, *args, **kwargs) + return await func(self, *args, **kwargs) return wrapper diff --git a/tembo-pgmq-python/tembo_pgmq_python/queue.py b/tembo-pgmq-python/tembo_pgmq_python/queue.py index df03c796..f4a56e83 100644 --- a/tembo-pgmq-python/tembo_pgmq_python/queue.py +++ b/tembo-pgmq-python/tembo_pgmq_python/queue.py @@ -25,7 +25,6 @@ class PGMQueue: verbose: bool = False log_filename: Optional[str] = None pool: ConnectionPool = field(init=False) - perform_transaction: bool = False logger: logging.Logger = field(init=False) def __post_init__(self) -> None: diff --git a/tembo-pgmq-python/tests/test_integration.py b/tembo-pgmq-python/tests/test_integration.py index f55a5bd6..1f1190db 100644 --- a/tembo-pgmq-python/tests/test_integration.py +++ b/tembo-pgmq-python/tests/test_integration.py @@ -174,7 +174,6 @@ def test_detach_archive(self): self.queue.send(self.test_queue, self.test_message) self.queue.archive(self.test_queue, 1) self.queue.detach_archive(self.test_queue) 
- # This is just a basic call to ensure the method works without exceptions. def test_drop_queue(self): """Test dropping a queue.""" From 5d3b9c2c0700269b851f29948eb6eec88508b2a5 Mon Sep 17 00:00:00 2001 From: Ali Tavallaie Date: Fri, 20 Sep 2024 04:06:41 +0330 Subject: [PATCH 11/18] feat: adding example for transaction --- .../example/example_app_async.py | 87 ++++++++++++++ tembo-pgmq-python/example/example_app_sync.py | 108 ++++++++++++++++++ 2 files changed, 195 insertions(+) create mode 100644 tembo-pgmq-python/example/example_app_async.py create mode 100644 tembo-pgmq-python/example/example_app_sync.py diff --git a/tembo-pgmq-python/example/example_app_async.py b/tembo-pgmq-python/example/example_app_async.py new file mode 100644 index 00000000..16a6a5bc --- /dev/null +++ b/tembo-pgmq-python/example/example_app_async.py @@ -0,0 +1,87 @@ +import asyncio +from tembo_pgmq_python.async_queue import PGMQueue +from tembo_pgmq_python.decorators import async_transaction as transaction + + +async def main(): + # Initialize the queue + queue = PGMQueue( + host="0.0.0.0", + port="5432", + username="postgres", + password="postgres", + database="postgres", + verbose=True, + log_filename="pgmq_async.log", + ) + await queue.init() + + test_queue = "transactional_queue_async" + + # Clean up if the queue already exists + queues = await queue.list_queues() + if test_queue in queues: + await queue.drop_queue(test_queue) + await queue.create_queue(test_queue) + + # Example messages + message1 = {"id": 1, "content": "First message"} + message2 = {"id": 2, "content": "Second message"} + + # Transactional operation: send messages within a transaction + @transaction + async def transactional_operation(queue: PGMQueue, conn=None): + # Perform multiple queue operations within a transaction + await queue.send(test_queue, message1, conn=conn) + await queue.send(test_queue, message2, conn=conn) + # If an exception occurs here, all previous operations will be rolled back + + # Execute the 
transactional function (Success Case) + try: + await transactional_operation(queue) + print("Transaction committed successfully.") + except Exception as e: + print(f"Transaction failed: {e}") + + # Read messages outside of the transaction + read_message1 = await queue.read(test_queue) + read_message2 = await queue.read(test_queue) + print("Messages read after transaction commit:") + if read_message1: + print(f"Message 1: {read_message1.message}") + if read_message2: + print(f"Message 2: {read_message2.message}") + + # Purge the queue for the failure case + await queue.purge(test_queue) + + # Transactional operation: simulate failure + @transaction + async def transactional_operation_failure(queue: PGMQueue, conn=None): + await queue.send(test_queue, message1, conn=conn) + await queue.send(test_queue, message2, conn=conn) + # Simulate an error to trigger rollback + raise Exception("Simulated failure") + + # Execute the transactional function (Failure Case) + try: + await transactional_operation_failure(queue) + except Exception as e: + print(f"Transaction failed: {e}") + + # Attempt to read messages after failed transaction + read_message = await queue.read(test_queue) + if read_message: + print("Message read after failed transaction (should not exist):") + print(read_message.message) + else: + print("No messages found after transaction rollback.") + + # Clean up + await queue.drop_queue(test_queue) + await queue.pool.close() + + +# Run the main function +if __name__ == "__main__": + asyncio.run(main()) diff --git a/tembo-pgmq-python/example/example_app_sync.py b/tembo-pgmq-python/example/example_app_sync.py new file mode 100644 index 00000000..1630f3f2 --- /dev/null +++ b/tembo-pgmq-python/example/example_app_sync.py @@ -0,0 +1,108 @@ +from tembo_pgmq_python.queue import PGMQueue +from tembo_pgmq_python.decorators import transaction + +queue = PGMQueue( + host="localhost", + port="5432", + username="postgres", + password="postgres", + database="postgres", + 
verbose=True, + log_filename="pgmq_sync.log", +) + +test_queue = "transaction_queue_sync" + +# Clean up if the queue already exists +queues = queue.list_queues() +if test_queue in queues: + queue.drop_queue(test_queue) # Pass queue name as positional argument +queue.create_queue(test_queue) # Pass queue name as positional argument + +# Example messages +messages = [ + {"id": 1, "content": "First message"}, + {"id": 2, "content": "Second message"}, + {"id": 3, "content": "Third message"}, +] + + +# Transactional operation: send multiple messages and perform additional operations within a transaction +@transaction +def transactional_operations(queue, conn=None): + # Send multiple messages + msg_ids = queue.send_batch( + test_queue, # Positional argument + messages=messages, + conn=conn, + ) + print(f"Messages sent with IDs: {msg_ids}") + + # Read messages within the transaction + internal_messages = queue.read_batch( + test_queue, # Positional argument + batch_size=10, + conn=conn, + ) + print(f"Messages read within transaction: {internal_messages}") + + # Perform additional operations + if msg_ids: + queue.delete( + test_queue, # Positional argument + msg_id=msg_ids[0], + conn=conn, + ) + print(f"Deleted message ID: {msg_ids[0]} within transaction") + + +# Execute the transactional operations (Success Case) +print("=== Executing Transactional Operations (Success Case) ===") +try: + transactional_operations(queue) +except Exception as e: + print(f"Transaction failed: {e}") + +# Read messages after transaction commit +external_messages = queue.read_batch(test_queue, batch_size=10) +print("Messages read after transaction commit:") +for msg in external_messages: + print(f"ID: {msg.msg_id}, Content: {msg.message}") + +# Clean up for failure case +queue.purge(test_queue) + + +# Transactional operation: simulate failure +@transaction +def transactional_operations_failure(queue, conn=None): + # Send multiple messages + msg_ids = queue.send_batch( + test_queue, # Positional 
argument + messages=messages, + conn=conn, + ) + print(f"Messages sent with IDs: {msg_ids}") + + # Simulate an error to trigger a rollback + raise Exception("Simulated failure in transactional operations") + + +# Execute the transactional operations (Failure Case) +print("\n=== Executing Transactional Operations (Failure Case) ===") +try: + transactional_operations_failure(queue) +except Exception as e: + print(f"Transaction failed: {e}") + +# Read messages after transaction rollback +external_messages = queue.read_batch(test_queue, batch_size=10) +if external_messages: + print("Messages read after transaction rollback:") + for msg in external_messages: + print(f"ID: {msg.msg_id}, Content: {msg.message}") +else: + print("No messages found after transaction rollback.") + +# Clean up +queue.drop_queue(test_queue) From 34f11bb76a47f57099abdc4243f3fd9582d8ed82 Mon Sep 17 00:00:00 2001 From: Ali Tavallaie Date: Fri, 20 Sep 2024 04:15:03 +0330 Subject: [PATCH 12/18] feat: update readme for using transactions --- tembo-pgmq-python/README.md | 152 +++++++----------------------------- 1 file changed, 30 insertions(+), 122 deletions(-) diff --git a/tembo-pgmq-python/README.md b/tembo-pgmq-python/README.md index 102324aa..d925e188 100644 --- a/tembo-pgmq-python/README.md +++ b/tembo-pgmq-python/README.md @@ -258,125 +258,33 @@ ) ``` - ### Using Transactions - - To perform multiple operations within a single transaction, you can use the `@transaction` decorator from the `tembo_pgmq_python.decorators` module. This ensures that all operations within the transaction either complete successfully or are rolled back if an error occurs. 
- - First, import the `transaction` decorator: - - ```python - from tembo_pgmq_python.decorators import transaction - ``` - - #### Example: Transactional Operation - - ```python - @transaction - def transactional_operation(queue: PGMQueue, conn=None): - # Perform multiple queue operations within a transaction - queue.create_queue("transactional_queue", conn=conn) - queue.send("transactional_queue", {"message": "Hello, World!"}, conn=conn) - # If an exception occurs here, all previous operations will be rolled back - # Uncomment the following line to simulate an error - # raise Exception("Simulated failure") - - # Execute the transactional function - try: - transactional_operation(queue) - except Exception as e: - print(f"Transaction failed: {e}") - ``` - - In this example: - - - The `transactional_operation` function is decorated with `@transaction`, ensuring all operations within it are part of the same transaction. - - The `conn` parameter is passed to each method to use the same database connection within the transaction. - - If an exception occurs within the function (e.g., by raising an exception), the transaction is rolled back. - - #### Example: Transaction Rollback on Failure - - ```python - @transaction - def transactional_send_and_fail(queue: PGMQueue, conn=None): - queue.send("my_queue", {"data": "test"}, conn=conn) - # Simulate an error to trigger rollback - raise Exception("Intentional failure") - - try: - transactional_send_and_fail(queue) - except Exception as e: - print(f"Transaction failed: {e}") - # Verify that the message was not sent due to rollback - message = queue.read("my_queue") - assert message is None, "Message should not exist after rollback" - ``` - - #### Using Transactions with Async Queue - - For the async version, you can use the `@transaction` decorator in a similar way. Make sure to import the decorator and define your transactional functions as async. 
- - ```python - from tembo_pgmq_python.decorators import transaction - from tembo_pgmq_python.async_queue import PGMQueue - - async def main(): - queue = PGMQueue() - await queue.init() - - @transaction - async def transactional_operation_async(queue: PGMQueue, conn=None): - await queue.create_queue("async_transactional_queue", conn=conn) - await queue.send("async_transactional_queue", {"message": "Hello, Async World!"}, conn=conn) - # Uncomment to simulate an error - # raise Exception("Simulated failure") - - try: - await transactional_operation_async(queue) - except Exception as e: - print(f"Transaction failed: {e}") - ``` - - In this async example: - - - Use `async def` to define asynchronous functions. - - Use `await` when calling async methods. - - The `@transaction` decorator manages the transaction context. - - ### Important Notes on Transactions - - - All methods used within a transaction must accept the `conn` parameter and pass it to the query execution methods. - - If an exception occurs within the transactional function, the transaction will be rolled back. - - Transactions help maintain data integrity by ensuring that a group of operations either all succeed or all fail together. - - ### Enabling Transactions by Default - - You can set the `perform_transaction` parameter to `True` when initializing the `PGMQueue` instance to enable transactions by default for all methods. - - ```python - queue = PGMQueue( - host="0.0.0.0", - port="5432", - username="postgres", - password="postgres", - database="postgres", - perform_transaction=True - ) - ``` - - However, be cautious with this approach, as it will wrap every method call in a transaction, which might not be necessary or optimal for all operations. - - ### Customizing Transaction Behavior - - You can control transaction behavior on a per-method basis by using the `perform_transaction` keyword argument. 
- - ```python - # This method call will be executed within a transaction - queue.send("my_queue", {"data": "test"}, perform_transaction=True) - - # This method call will not use a transaction - queue.send("my_queue", {"data": "test"}, perform_transaction=False) - ``` - - ### Conclusion - - Using transactions allows you to group multiple database operations into a single atomic unit of work, ensuring consistency and integrity of your data when performing complex operations. +# Using Transactions + + To perform multiple operations within a single transaction, use the `@transaction` decorator from the `tembo_pgmq_python.decorators` module. + This ensures that all operations within the function are executed within the same transaction and are either committed together or rolled back if an error occurs. + + First, import the transaction decorator: + +```python +from tembo_pgmq_python.decorators import transaction +``` + +### Example: Transactional Operation + +```python +@transaction +def transactional_operation(queue: PGMQueue, conn=None): + # Perform multiple queue operations within a transaction + queue.create_queue("transactional_queue", conn=conn) + queue.send("transactional_queue", {"message": "Hello, World!"}, conn=conn) + +``` + To execute the transaction: + +```python +try: + transactional_operation(queue) +except Exception as e: + print(f"Transaction failed: {e}") +``` + In this example, the transactional_operation function is decorated with `@transaction`, ensuring all operations inside it are part of a single transaction. If an error occurs, the entire transaction is rolled back automatically. 
\ No newline at end of file From 5e922233ce5273d38b46efe30d1541b5d3490ea2 Mon Sep 17 00:00:00 2001 From: Ali Tavallaie Date: Fri, 20 Sep 2024 04:16:34 +0330 Subject: [PATCH 13/18] chore: linting --- .../tembo_pgmq_python/decorators.py | 16 ++++------------ 1 file changed, 4 insertions(+), 12 deletions(-) diff --git a/tembo-pgmq-python/tembo_pgmq_python/decorators.py b/tembo-pgmq-python/tembo_pgmq_python/decorators.py index 9e9c3633..9c3bb7b2 100644 --- a/tembo-pgmq-python/tembo_pgmq_python/decorators.py +++ b/tembo-pgmq-python/tembo_pgmq_python/decorators.py @@ -17,14 +17,10 @@ def wrapper(*args, **kwargs): try: kwargs["conn"] = conn # Inject 'conn' into kwargs result = func(*args, **kwargs) - self.logger.debug( - f"Transaction completed with conn: {conn}" - ) + self.logger.debug(f"Transaction completed with conn: {conn}") return result except Exception as e: - self.logger.error( - f"Transaction failed with exception: {e}, rolling back." - ) + self.logger.error(f"Transaction failed with exception: {e}, rolling back.") raise else: return func(*args, **kwargs) @@ -39,14 +35,10 @@ def wrapper(*args, **kwargs): try: kwargs["conn"] = conn # Inject 'conn' into kwargs result = func(*args, **kwargs) - queue.logger.debug( - f"Transaction completed with conn: {conn}" - ) + queue.logger.debug(f"Transaction completed with conn: {conn}") return result except Exception as e: - queue.logger.error( - f"Transaction failed with exception: {e}, rolling back." 
- ) + queue.logger.error(f"Transaction failed with exception: {e}, rolling back.") raise else: return func(*args, **kwargs) From bba66f4f37ba0c29c5d1c6ef263472e043c028ee Mon Sep 17 00:00:00 2001 From: Ali Tavallaie Date: Fri, 20 Sep 2024 04:19:05 +0330 Subject: [PATCH 14/18] chore: remove unused tnx variable --- tembo-pgmq-python/tembo_pgmq_python/decorators.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tembo-pgmq-python/tembo_pgmq_python/decorators.py b/tembo-pgmq-python/tembo_pgmq_python/decorators.py index 9c3bb7b2..537f3843 100644 --- a/tembo-pgmq-python/tembo_pgmq_python/decorators.py +++ b/tembo-pgmq-python/tembo_pgmq_python/decorators.py @@ -12,7 +12,7 @@ def wrapper(*args, **kwargs): if "conn" not in kwargs: with self.pool.connection() as conn: - with conn.transaction() as txn: + with conn.transaction(): self.logger.debug(f"Transaction started with conn: {conn}") try: kwargs["conn"] = conn # Inject 'conn' into kwargs @@ -30,7 +30,7 @@ def wrapper(*args, **kwargs): if "conn" not in kwargs: with queue.pool.connection() as conn: - with conn.transaction() as txn: + with conn.transaction(): queue.logger.debug(f"Transaction started with conn: {conn}") try: kwargs["conn"] = conn # Inject 'conn' into kwargs From 895a4684cddf612b33cdfbddeb54dec026417116 Mon Sep 17 00:00:00 2001 From: Ali Tavallaie Date: Fri, 20 Sep 2024 23:22:08 +0330 Subject: [PATCH 15/18] feat: update examples for non-db and non-pgmq --- .../example/example_app_async.py | 79 +++++- tembo-pgmq-python/example/example_app_sync.py | 241 ++++++++++++++---- 2 files changed, 272 insertions(+), 48 deletions(-) diff --git a/tembo-pgmq-python/example/example_app_async.py b/tembo-pgmq-python/example/example_app_async.py index 16a6a5bc..df505ebe 100644 --- a/tembo-pgmq-python/example/example_app_async.py +++ b/tembo-pgmq-python/example/example_app_async.py @@ -34,7 +34,7 @@ async def transactional_operation(queue: PGMQueue, conn=None): # Perform multiple queue operations within a 
transaction await queue.send(test_queue, message1, conn=conn) await queue.send(test_queue, message2, conn=conn) - # If an exception occurs here, all previous operations will be rolled back + # Transaction commits if no exception occurs # Execute the transactional function (Success Case) try: @@ -77,6 +77,83 @@ async def transactional_operation_failure(queue: PGMQueue, conn=None): else: print("No messages found after transaction rollback.") + # Simulate conditional rollback + await queue.purge(test_queue) # Clear the queue before the next test + + @transaction + async def conditional_failure(queue: PGMQueue, conn=None): + # Send messages + msg_ids = await queue.send_batch(test_queue, [message1, message2], conn=conn) + print(f"Messages sent with IDs: {msg_ids}") + + # Read messages in queue + messages_in_queue = await queue.read_batch(test_queue, batch_size=10, conn=conn) + print( + f"Messages currently in queue before conditional failure: {messages_in_queue}" + ) + + # Conditional rollback based on number of messages + if len(messages_in_queue) > 3: + await queue.delete( + test_queue, msg_id=messages_in_queue[0].msg_id, conn=conn + ) + print( + f"Message ID {messages_in_queue[0].msg_id} deleted within transaction." + ) + else: + # Simulate failure if queue size is not greater than 3 + print( + "Transaction failed: Not enough messages in queue to proceed with deletion." 
+ ) + raise Exception("Queue size too small to proceed.") + + print("\n=== Executing Conditional Failure Scenario ===") + try: + await conditional_failure(queue) + except Exception as e: + print(f"Conditional Failure Transaction failed: {e}") + + # Simulate success for conditional scenario + @transaction + async def conditional_success(queue: PGMQueue, conn=None): + # Send additional messages to ensure queue has more than 3 messages + additional_messages = [ + {"id": 3, "content": "Third message"}, + {"id": 4, "content": "Fourth message"}, + ] + msg_ids = await queue.send_batch(test_queue, additional_messages, conn=conn) + print(f"Additional messages sent with IDs: {msg_ids}") + + # Read messages in queue + messages_in_queue = await queue.read_batch(test_queue, batch_size=10, conn=conn) + print( + f"Messages currently in queue before successful conditional deletion: {messages_in_queue}" + ) + + # Proceed with deletion if more than 3 messages are in the queue + if len(messages_in_queue) > 3: + await queue.delete( + test_queue, msg_id=messages_in_queue[0].msg_id, conn=conn + ) + print( + f"Message ID {messages_in_queue[0].msg_id} deleted within transaction." 
+ ) + + print("\n=== Executing Conditional Success Scenario ===") + try: + await conditional_success(queue) + except Exception as e: + print(f"Conditional Success Transaction failed: {e}") + + # Read messages after the conditional scenarios + read_messages = await queue.read_batch(test_queue, batch_size=10) + if read_messages: + print("Messages read after conditional scenarios:") + for msg in read_messages: + print(f"ID: {msg.msg_id}, Content: {msg.message}") + else: + print("No messages found after transactions.") + # Clean up await queue.drop_queue(test_queue) await queue.pool.close() diff --git a/tembo-pgmq-python/example/example_app_sync.py b/tembo-pgmq-python/example/example_app_sync.py index 1630f3f2..29ec47fb 100644 --- a/tembo-pgmq-python/example/example_app_sync.py +++ b/tembo-pgmq-python/example/example_app_sync.py @@ -16,8 +16,8 @@ # Clean up if the queue already exists queues = queue.list_queues() if test_queue in queues: - queue.drop_queue(test_queue) # Pass queue name as positional argument -queue.create_queue(test_queue) # Pass queue name as positional argument + queue.drop_queue(test_queue) +queue.create_queue(test_queue) # Example messages messages = [ @@ -27,82 +27,229 @@ ] -# Transactional operation: send multiple messages and perform additional operations within a transaction +# Create table function for non-PGMQ DB operation +def create_mytable(conn): + try: + with conn.cursor() as cur: + cur.execute(""" + CREATE TABLE IF NOT EXISTS mytable ( + id SERIAL PRIMARY KEY, + name VARCHAR(100) NOT NULL + ) + """) + print("Table 'mytable' created or already exists.") + except Exception as e: + print(f"Failed to create table 'mytable': {e}") + raise + + +# Transaction with only PGMQ operations @transaction -def transactional_operations(queue, conn=None): +def pgmq_operations(queue, conn=None): # Send multiple messages msg_ids = queue.send_batch( - test_queue, # Positional argument + test_queue, messages=messages, conn=conn, ) - print(f"Messages sent 
with IDs: {msg_ids}") + print(f"PGMQ: Messages sent with IDs: {msg_ids}") # Read messages within the transaction internal_messages = queue.read_batch( - test_queue, # Positional argument + test_queue, batch_size=10, conn=conn, ) - print(f"Messages read within transaction: {internal_messages}") + print(f"PGMQ: Messages read within transaction: {internal_messages}") - # Perform additional operations - if msg_ids: - queue.delete( - test_queue, # Positional argument - msg_id=msg_ids[0], - conn=conn, - ) - print(f"Deleted message ID: {msg_ids[0]} within transaction") +# Transaction with non-PGMQ DB operation and PGMQ operation - Success case +@transaction +def non_pgmq_db_operations_success(queue, conn=None): + create_mytable(conn) -# Execute the transactional operations (Success Case) -print("=== Executing Transactional Operations (Success Case) ===") -try: - transactional_operations(queue) -except Exception as e: - print(f"Transaction failed: {e}") + # Non-PGMQ database operation (simulating a custom DB operation) + with conn.cursor() as cur: + cur.execute("INSERT INTO mytable (name) VALUES ('Alice')") + print("Non-PGMQ DB: Inserted into 'mytable'.") + + # Send multiple PGMQ messages + msg_ids = queue.send_batch( + test_queue, + messages=messages, + conn=conn, + ) + print(f"PGMQ: Messages sent with IDs: {msg_ids}") -# Read messages after transaction commit -external_messages = queue.read_batch(test_queue, batch_size=10) -print("Messages read after transaction commit:") -for msg in external_messages: - print(f"ID: {msg.msg_id}, Content: {msg.message}") -# Clean up for failure case -queue.purge(test_queue) +# Transaction with non-PGMQ DB operation and PGMQ operation - Failure case +@transaction +def non_pgmq_db_operations_failure(queue, conn=None): + create_mytable(conn) + + # Non-PGMQ database operation (simulating a custom DB operation) + with conn.cursor() as cur: + cur.execute("INSERT INTO mytable (name) VALUES ('Bob')") + print("Non-PGMQ DB: Inserted into 
'mytable'.") + # Simulating a failure after a PGMQ operation + raise Exception( + "Simulated failure after inserting into mytable and sending messages" + ) -# Transactional operation: simulate failure + +# Transaction with PGMQ operations and non-database operation (simple print statement) @transaction -def transactional_operations_failure(queue, conn=None): +def non_db_operations(queue, conn=None): # Send multiple messages msg_ids = queue.send_batch( - test_queue, # Positional argument + test_queue, + messages=messages, + conn=conn, + ) + print(f"PGMQ: Messages sent with IDs: {msg_ids}") + + # Non-database operation: Print statement + print("Non-DB: Simulating a non-database operation (printing).") + + +# Transaction failure: only delete if queue size is larger than threshold +@transaction +def conditional_failure(queue, conn=None): + # Send multiple messages within the transaction + msg_ids = queue.send_batch( + test_queue, messages=messages, conn=conn, ) print(f"Messages sent with IDs: {msg_ids}") - # Simulate an error to trigger a rollback - raise Exception("Simulated failure in transactional operations") + # Read messages currently in the queue within the transaction + messages_in_queue = queue.read_batch( + test_queue, + batch_size=10, + conn=conn, + ) + print( + f"Messages currently in queue before conditional failure: {messages_in_queue}" + ) + # Simulate a condition: only delete if the queue has more than 3 messages + if len(messages_in_queue) > 3: + queue.delete( + test_queue, + msg_id=messages_in_queue[0].msg_id, + conn=conn, + ) + print(f"Message ID {messages_in_queue[0].msg_id} deleted within transaction.") + else: + # Simulate a failure if the queue size is not greater than 3 + print( + "Transaction failed: Not enough messages in queue to proceed with deletion." 
+ ) + raise Exception("Queue size too small to proceed.") -# Execute the transactional operations (Failure Case) -print("\n=== Executing Transactional Operations (Failure Case) ===") + print("Transaction completed successfully.") + + +# Transaction success for conditional scenario +@transaction +def conditional_success(queue, conn=None): + # Send additional messages to ensure the queue has more than 3 messages + additional_messages = [ + {"id": 4, "content": "Fourth message"}, + {"id": 5, "content": "Fifth message"}, + ] + msg_ids = queue.send_batch( + test_queue, + messages=additional_messages, + conn=conn, + ) + print(f"Messages sent with IDs: {msg_ids}") + + # Read messages currently in the queue within the transaction + messages_in_queue = queue.read_batch( + test_queue, + batch_size=10, + conn=conn, + ) + print( + f"Messages currently in queue before successful conditional deletion: {messages_in_queue}" + ) + + # Proceed with deletion if more than 3 messages are in the queue + if len(messages_in_queue) > 3: + queue.delete( + test_queue, + msg_id=messages_in_queue[0].msg_id, + conn=conn, + ) + print(f"Message ID {messages_in_queue[0].msg_id} deleted within transaction.") + + print("Conditional success transaction completed.") + + +# Read messages after transaction to see if changes were committed +def read_queue_after_transaction(): + external_messages = queue.read_batch(test_queue, batch_size=10) + if external_messages: + print("Messages read after transaction:") + for msg in external_messages: + print(f"ID: {msg.msg_id}, Content: {msg.message}") + else: + print("No messages found after transaction rollback.") + + +# Execute transactions and handle exceptions +print("=== Executing PGMQ Operations ===") +try: + pgmq_operations(queue) +except Exception as e: + print(f"PGMQ Transaction failed: {e}") + +print("\n=== Executing Non-PGMQ DB and PGMQ Operations (Success Case) ===") +try: + non_pgmq_db_operations_success(queue) +except Exception as e: + 
print(f"Non-PGMQ DB Transaction failed: {e}") + +print("\n=== Executing Non-PGMQ DB and PGMQ Operations (Failure Case) ===") +try: + non_pgmq_db_operations_failure(queue) +except Exception as e: + print(f"Non-PGMQ DB Transaction failed: {e}") + +print("\n=== Executing Non-DB and PGMQ Operations ===") try: - transactional_operations_failure(queue) + non_db_operations(queue) except Exception as e: - print(f"Transaction failed: {e}") - -# Read messages after transaction rollback -external_messages = queue.read_batch(test_queue, batch_size=10) -if external_messages: - print("Messages read after transaction rollback:") - for msg in external_messages: - print(f"ID: {msg.msg_id}, Content: {msg.message}") -else: - print("No messages found after transaction rollback.") + print(f"Non-DB Transaction failed: {e}") + +print("\n=== Reading Queue After Transactions ===") +read_queue_after_transaction() + +# Purge the queue for failure case +queue.purge(test_queue) + +print("\n=== Executing Conditional Failure Scenario ===") +try: + conditional_failure(queue) +except Exception as e: + print(f"Conditional Failure Transaction failed: {e}") +read_queue_after_transaction() + +print("\n=== Executing Conditional Success Scenario ===") +try: + conditional_success(queue) + + +except Exception as e: + print(f"Conditional Success Transaction failed: {e}") + read_queue_after_transaction() + +# Read the queue after the conditional failure and success +print("\n=== Reading Queue After Conditional Scenarios ===") +read_queue_after_transaction() # Clean up queue.drop_queue(test_queue) From e3900f19f31366a972fb84b4a2a34787cf27db47 Mon Sep 17 00:00:00 2001 From: Ali Tavallaie Date: Sat, 28 Sep 2024 18:49:24 +0330 Subject: [PATCH 16/18] chore: remove extra space in README --- tembo-pgmq-python/README.md | 542 ++++++++++++++++++------------------ 1 file changed, 271 insertions(+), 271 deletions(-) diff --git a/tembo-pgmq-python/README.md b/tembo-pgmq-python/README.md index d925e188..d6279840 100644 
--- a/tembo-pgmq-python/README.md +++ b/tembo-pgmq-python/README.md @@ -1,269 +1,269 @@ - # Tembo's Python Client for PGMQ - - ## Installation - - Install with `pip` from pypi.org: - - ```bash - pip install tembo-pgmq-python - ``` - - To use the async version, install with the optional dependencies: - - ```bash - pip install tembo-pgmq-python[async] - ``` - - Dependencies: - - - Postgres running the [Tembo PGMQ extension](https://github.com/tembo-io/tembo/tree/main/pgmq). - - ## Usage - - ### Start a Postgres Instance with the Tembo extension installed - - ```bash - docker run -d --name postgres -e POSTGRES_PASSWORD=postgres -p 5432:5432 quay.io/tembo/pg16-pgmq:latest - ``` - - ### Using Environment Variables - - Set environment variables: - - ```bash - export PG_HOST=127.0.0.1 - export PG_PORT=5432 - export PG_USERNAME=postgres - export PG_PASSWORD=postgres - export PG_DATABASE=test_db - ``` - - Initialize a connection to Postgres using environment variables: - - ```python - from tembo_pgmq_python import PGMQueue, Message - - queue = PGMQueue() - ``` - - ### Note on the async version - - Initialization for the async version requires an explicit call of the initializer: - - ```python - from tembo_pgmq_python.async_queue import PGMQueue - - async def main(): - queue = PGMQueue() - await queue.init() - ``` - - Then, the interface is exactly the same as the sync version. 
- - ### Initialize a connection to Postgres without environment variables - - ```python - from tembo_pgmq_python import PGMQueue, Message - - queue = PGMQueue( - host="0.0.0.0", - port="5432", - username="postgres", - password="postgres", - database="postgres" - ) - ``` - - ### Create a queue - - ```python - queue.create_queue("my_queue") - ``` - - ### Or create a partitioned queue - - ```python - queue.create_partitioned_queue("my_partitioned_queue", partition_interval=10000) - ``` - - ### List all queues - - ```python - queues = queue.list_queues() - for q in queues: - print(f"Queue name: {q}") - ``` - - ### Send a message - - ```python - msg_id: int = queue.send("my_queue", {"hello": "world"}) - ``` - - ### Send a batch of messages - - ```python - msg_ids: list[int] = queue.send_batch("my_queue", [{"hello": "world"}, {"foo": "bar"}]) - ``` - - ### Read a message, set it invisible for 30 seconds - - ```python - read_message: Message = queue.read("my_queue", vt=30) - print(read_message) - ``` - - ### Read a batch of messages - - ```python - read_messages: list[Message] = queue.read_batch("my_queue", vt=30, batch_size=5) - for message in read_messages: - print(message) - ``` - - ### Read messages with polling - - The `read_with_poll` method allows you to repeatedly check for messages in the queue until either a message is found or the specified polling duration is exceeded. This can be useful in scenarios where you want to wait for new messages to arrive without continuously querying the queue in a tight loop. - - In the following example, the method will check for up to 5 messages in the queue `my_queue`, making the messages invisible for 30 seconds (`vt`), and will poll for a maximum of 5 seconds (`max_poll_seconds`) with intervals of 100 milliseconds (`poll_interval_ms`) between checks. 
- - ```python - read_messages: list[Message] = queue.read_with_poll( - "my_queue", vt=30, qty=5, max_poll_seconds=5, poll_interval_ms=100 - ) - for message in read_messages: - print(message) - ``` - - This method will continue polling until it either finds the specified number of messages (`qty`) or the `max_poll_seconds` duration is reached. The `poll_interval_ms` parameter controls the interval between successive polls, allowing you to avoid hammering the database with continuous queries. - - ### Archive the message after we're done with it - - Archived messages are moved to an archive table. - - ```python - archived: bool = queue.archive("my_queue", read_message.msg_id) - ``` - - ### Archive a batch of messages - - ```python - archived_ids: list[int] = queue.archive_batch("my_queue", [msg_id1, msg_id2]) - ``` - - ### Delete a message completely - - ```python - read_message: Message = queue.read("my_queue") - deleted: bool = queue.delete("my_queue", read_message.msg_id) - ``` - - ### Delete a batch of messages - - ```python - deleted_ids: list[int] = queue.delete_batch("my_queue", [msg_id1, msg_id2]) - ``` - - ### Set the visibility timeout (VT) for a specific message - - ```python - updated_message: Message = queue.set_vt("my_queue", msg_id, 60) - print(updated_message) - ``` - - ### Pop a message, deleting it and reading it in one transaction - - ```python - popped_message: Message = queue.pop("my_queue") - print(popped_message) - ``` - - ### Purge all messages from a queue - - ```python - purged_count: int = queue.purge("my_queue") - print(f"Purged {purged_count} messages from the queue.") - ``` - - ### Detach an archive from a queue - - ```python - queue.detach_archive("my_queue") - ``` - - ### Drop a queue - - ```python - dropped: bool = queue.drop_queue("my_queue") - print(f"Queue dropped: {dropped}") - ``` - - ### Validate the length of a queue name - - ```python - queue.validate_queue_name("my_queue") - ``` - - ### Get queue metrics - - The `metrics` 
method retrieves various statistics for a specific queue, such as the queue length, the age of the newest and oldest messages, the total number of messages, and the time of the metrics scrape. - - ```python - metrics = queue.metrics("my_queue") - print(f"Metrics: {metrics}") - ``` - - ### Access individual metrics - - You can access individual metrics directly from the `metrics` method's return value: - - ```python - metrics = queue.metrics("my_queue") - print(f"Queue name: {metrics.queue_name}") - print(f"Queue length: {metrics.queue_length}") - print(f"Newest message age (seconds): {metrics.newest_msg_age_sec}") - print(f"Oldest message age (seconds): {metrics.oldest_msg_age_sec}") - print(f"Total messages: {metrics.total_messages}") - print(f"Scrape time: {metrics.scrape_time}") - ``` - - ### Get metrics for all queues - - The `metrics_all` method retrieves metrics for all queues, allowing you to iterate through each queue's metrics. - - ```python - all_metrics = queue.metrics_all() - for metrics in all_metrics: - print(f"Queue name: {metrics.queue_name}") - print(f"Queue length: {metrics.queue_length}") - print(f"Newest message age (seconds): {metrics.newest_msg_age_sec}") - print(f"Oldest message age (seconds): {metrics.oldest_msg_age_sec}") - print(f"Total messages: {metrics.total_messages}") - print(f"Scrape time: {metrics.scrape_time}") - ``` - - ### Optional Logging Configuration - - You can enable verbose logging and specify a custom log filename. 
- - ```python - queue = PGMQueue( - host="0.0.0.0", - port="5432", - username="postgres", - password="postgres", - database="postgres", - verbose=True, - log_filename="my_custom_log.log" - ) - ``` - +# Tembo's Python Client for PGMQ + +## Installation + +Install with `pip` from pypi.org: + +```bash +pip install tembo-pgmq-python +``` + +To use the async version, install with the optional dependencies: + +```bash +pip install tembo-pgmq-python[async] +``` + +Dependencies: + +- Postgres running the [Tembo PGMQ extension](https://github.com/tembo-io/tembo/tree/main/pgmq). + +## Usage + +### Start a Postgres Instance with the Tembo extension installed + +```bash +docker run -d --name postgres -e POSTGRES_PASSWORD=postgres -p 5432:5432 quay.io/tembo/pg16-pgmq:latest +``` + +### Using Environment Variables + +Set environment variables: + +```bash +export PG_HOST=127.0.0.1 +export PG_PORT=5432 +export PG_USERNAME=postgres +export PG_PASSWORD=postgres +export PG_DATABASE=test_db +``` + +Initialize a connection to Postgres using environment variables: + +```python +from tembo_pgmq_python import PGMQueue, Message + +queue = PGMQueue() +``` + +### Note on the async version + +Initialization for the async version requires an explicit call of the initializer: + +```python +from tembo_pgmq_python.async_queue import PGMQueue + +async def main(): + queue = PGMQueue() + await queue.init() +``` + +Then, the interface is exactly the same as the sync version. 
+ +### Initialize a connection to Postgres without environment variables + +```python +from tembo_pgmq_python import PGMQueue, Message + +queue = PGMQueue( + host="0.0.0.0", + port="5432", + username="postgres", + password="postgres", + database="postgres" +) +``` + +### Create a queue + +```python +queue.create_queue("my_queue") +``` + +### Or create a partitioned queue + +```python +queue.create_partitioned_queue("my_partitioned_queue", partition_interval=10000) +``` + +### List all queues + +```python +queues = queue.list_queues() +for q in queues: + print(f"Queue name: {q}") +``` + +### Send a message + +```python +msg_id: int = queue.send("my_queue", {"hello": "world"}) +``` + +### Send a batch of messages + +```python +msg_ids: list[int] = queue.send_batch("my_queue", [{"hello": "world"}, {"foo": "bar"}]) +``` + +### Read a message, set it invisible for 30 seconds + +```python +read_message: Message = queue.read("my_queue", vt=30) +print(read_message) +``` + +### Read a batch of messages + +```python +read_messages: list[Message] = queue.read_batch("my_queue", vt=30, batch_size=5) +for message in read_messages: + print(message) +``` + +### Read messages with polling + +The `read_with_poll` method allows you to repeatedly check for messages in the queue until either a message is found or the specified polling duration is exceeded. This can be useful in scenarios where you want to wait for new messages to arrive without continuously querying the queue in a tight loop. + +In the following example, the method will check for up to 5 messages in the queue `my_queue`, making the messages invisible for 30 seconds (`vt`), and will poll for a maximum of 5 seconds (`max_poll_seconds`) with intervals of 100 milliseconds (`poll_interval_ms`) between checks. 
+ +```python +read_messages: list[Message] = queue.read_with_poll( + "my_queue", vt=30, qty=5, max_poll_seconds=5, poll_interval_ms=100 +) +for message in read_messages: + print(message) +``` + +This method will continue polling until it either finds the specified number of messages (`qty`) or the `max_poll_seconds` duration is reached. The `poll_interval_ms` parameter controls the interval between successive polls, allowing you to avoid hammering the database with continuous queries. + +### Archive the message after we're done with it + +Archived messages are moved to an archive table. + +```python +archived: bool = queue.archive("my_queue", read_message.msg_id) +``` + +### Archive a batch of messages + +```python +archived_ids: list[int] = queue.archive_batch("my_queue", [msg_id1, msg_id2]) +``` + +### Delete a message completely + +```python +read_message: Message = queue.read("my_queue") +deleted: bool = queue.delete("my_queue", read_message.msg_id) +``` + +### Delete a batch of messages + +```python +deleted_ids: list[int] = queue.delete_batch("my_queue", [msg_id1, msg_id2]) +``` + +### Set the visibility timeout (VT) for a specific message + +```python +updated_message: Message = queue.set_vt("my_queue", msg_id, 60) +print(updated_message) +``` + +### Pop a message, deleting it and reading it in one transaction + +```python +popped_message: Message = queue.pop("my_queue") +print(popped_message) +``` + +### Purge all messages from a queue + +```python +purged_count: int = queue.purge("my_queue") +print(f"Purged {purged_count} messages from the queue.") +``` + +### Detach an archive from a queue + +```python +queue.detach_archive("my_queue") +``` + +### Drop a queue + +```python +dropped: bool = queue.drop_queue("my_queue") +print(f"Queue dropped: {dropped}") +``` + +### Validate the length of a queue name + +```python +queue.validate_queue_name("my_queue") +``` + +### Get queue metrics + +The `metrics` method retrieves various statistics for a specific queue, 
such as the queue length, the age of the newest and oldest messages, the total number of messages, and the time of the metrics scrape. + +```python +metrics = queue.metrics("my_queue") +print(f"Metrics: {metrics}") +``` + +### Access individual metrics + +You can access individual metrics directly from the `metrics` method's return value: + +```python +metrics = queue.metrics("my_queue") +print(f"Queue name: {metrics.queue_name}") +print(f"Queue length: {metrics.queue_length}") +print(f"Newest message age (seconds): {metrics.newest_msg_age_sec}") +print(f"Oldest message age (seconds): {metrics.oldest_msg_age_sec}") +print(f"Total messages: {metrics.total_messages}") +print(f"Scrape time: {metrics.scrape_time}") +``` + +### Get metrics for all queues + +The `metrics_all` method retrieves metrics for all queues, allowing you to iterate through each queue's metrics. + +```python +all_metrics = queue.metrics_all() +for metrics in all_metrics: + print(f"Queue name: {metrics.queue_name}") + print(f"Queue length: {metrics.queue_length}") + print(f"Newest message age (seconds): {metrics.newest_msg_age_sec}") + print(f"Oldest message age (seconds): {metrics.oldest_msg_age_sec}") + print(f"Total messages: {metrics.total_messages}") + print(f"Scrape time: {metrics.scrape_time}") +``` + +### Optional Logging Configuration + +You can enable verbose logging and specify a custom log filename. + +```python +queue = PGMQueue( + host="0.0.0.0", + port="5432", + username="postgres", + password="postgres", + database="postgres", + verbose=True, + log_filename="my_custom_log.log" +) +``` + # Using Transactions - To perform multiple operations within a single transaction, use the `@transaction` decorator from the `tembo_pgmq_python.decorators` module. - This ensures that all operations within the function are executed within the same transaction and are either committed together or rolled back if an error occurs. 
+To perform multiple operations within a single transaction, use the `@transaction` decorator from the `tembo_pgmq_python.decorators` module. +This ensures that all operations within the function are executed within the same transaction and are either committed together or rolled back if an error occurs. - First, import the transaction decorator: +First, import the transaction decorator: ```python from tembo_pgmq_python.decorators import transaction @@ -274,17 +274,17 @@ from tembo_pgmq_python.decorators import transaction ```python @transaction def transactional_operation(queue: PGMQueue, conn=None): - # Perform multiple queue operations within a transaction - queue.create_queue("transactional_queue", conn=conn) - queue.send("transactional_queue", {"message": "Hello, World!"}, conn=conn) - +# Perform multiple queue operations within a transaction +queue.create_queue("transactional_queue", conn=conn) +queue.send("transactional_queue", {"message": "Hello, World!"}, conn=conn) + ``` - To execute the transaction: +To execute the transaction: ```python try: - transactional_operation(queue) +transactional_operation(queue) except Exception as e: - print(f"Transaction failed: {e}") +print(f"Transaction failed: {e}") ``` - In this example, the transactional_operation function is decorated with `@transaction`, ensuring all operations inside it are part of a single transaction. If an error occurs, the entire transaction is rolled back automatically. \ No newline at end of file +In this example, the transactional_operation function is decorated with `@transaction`, ensuring all operations inside it are part of a single transaction. If an error occurs, the entire transaction is rolled back automatically. 
\ No newline at end of file From 2d6794931e04711112650e8fa4a36f5729dec3c2 Mon Sep 17 00:00:00 2001 From: Ali Tavallaie Date: Sat, 28 Sep 2024 18:49:51 +0330 Subject: [PATCH 17/18] feat: complete async example app --- tembo-pgmq-python/example/example_app_async.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/tembo-pgmq-python/example/example_app_async.py b/tembo-pgmq-python/example/example_app_async.py index df505ebe..296b23c4 100644 --- a/tembo-pgmq-python/example/example_app_async.py +++ b/tembo-pgmq-python/example/example_app_async.py @@ -85,8 +85,6 @@ async def conditional_failure(queue: PGMQueue, conn=None): # Send messages msg_ids = await queue.send_batch(test_queue, [message1, message2], conn=conn) print(f"Messages sent with IDs: {msg_ids}") - - # Read messages in queue messages_in_queue = await queue.read_batch(test_queue, batch_size=10, conn=conn) print( f"Messages currently in queue before conditional failure: {messages_in_queue}" ) @@ -130,7 +128,6 @@ async def conditional_success(queue: PGMQueue, conn=None): f"Messages currently in queue before successful conditional deletion: {messages_in_queue}" ) - # Proceed with deletion if more than 3 messages are in the queue if len(messages_in_queue) > 3: await queue.delete( test_queue, msg_id=messages_in_queue[0].msg_id, conn=conn ) @@ -154,7 +151,6 @@ async def conditional_success(queue: PGMQueue, conn=None): else: print("No messages found after transactions.") - # Clean up await queue.drop_queue(test_queue) await queue.pool.close() From c4e5b5177a129850ef1846fa23d7cdece609c17c Mon Sep 17 00:00:00 2001 From: Ali Tavallaie Date: Sat, 28 Sep 2024 18:56:10 +0330 Subject: [PATCH 18/18] chore: fixing some python code indentation within README --- tembo-pgmq-python/README.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tembo-pgmq-python/README.md b/tembo-pgmq-python/README.md index d6279840..74ffd902 100644 --- a/tembo-pgmq-python/README.md +++ b/tembo-pgmq-python/README.md @@ -274,17 
+274,17 @@ from tembo_pgmq_python.decorators import transaction ```python @transaction def transactional_operation(queue: PGMQueue, conn=None): -# Perform multiple queue operations within a transaction -queue.create_queue("transactional_queue", conn=conn) -queue.send("transactional_queue", {"message": "Hello, World!"}, conn=conn) + # Perform multiple queue operations within a transaction + queue.create_queue("transactional_queue", conn=conn) + queue.send("transactional_queue", {"message": "Hello, World!"}, conn=conn) ``` To execute the transaction: ```python try: -transactional_operation(queue) + transactional_operation(queue) except Exception as e: -print(f"Transaction failed: {e}") + print(f"Transaction failed: {e}") ``` In this example, the transactional_operation function is decorated with `@transaction`, ensuring all operations inside it are part of a single transaction. If an error occurs, the entire transaction is rolled back automatically. \ No newline at end of file