From d78c8a9c4afd07971d777a50406d8b48ef60fbdb Mon Sep 17 00:00:00 2001
From: Luke Hinds
Date: Tue, 31 Dec 2024 18:31:06 +0000
Subject: [PATCH] Logs are too verbose and make it a challenge to follow

I expect a lot of these were left over from development. Let's turn down the
volume; devs can uncomment them if needed.
---
 src/codegate/codegate_logging.py       |  1 +
 src/codegate/db/connection.py          |  7 ++++---
 src/codegate/pipeline/base.py          |  6 ++++--
 src/codegate/storage/storage_engine.py | 17 +++++++++--------
 4 files changed, 18 insertions(+), 13 deletions(-)

diff --git a/src/codegate/codegate_logging.py b/src/codegate/codegate_logging.py
index 7bf77fc..14c5c49 100644
--- a/src/codegate/codegate_logging.py
+++ b/src/codegate/codegate_logging.py
@@ -148,6 +148,7 @@ def setup_logging(
     # Set explicitly the log level for other modules
     logging.getLogger("sqlalchemy").disabled = True
     logging.getLogger("uvicorn.error").disabled = True
+    logging.getLogger("aiosqlite").disabled = True
 
     # Create a logger for our package
     logger = structlog.get_logger("codegate")
diff --git a/src/codegate/db/connection.py b/src/codegate/db/connection.py
index bdf7525..c8fb60d 100644
--- a/src/codegate/db/connection.py
+++ b/src/codegate/db/connection.py
@@ -144,7 +144,8 @@ async def record_outputs(self, outputs: List[Output]) -> Optional[Output]:
             """
         )
         recorded_output = await self._insert_pydantic_model(output_db, sql)
-        logger.debug(f"Recorded output: {recorded_output}")
+        # Uncomment to debug
+        # logger.debug(f"Recorded output: {recorded_output}")
         return recorded_output
 
     async def record_alerts(self, alerts: List[Alert]) -> List[Alert]:
@@ -177,8 +178,8 @@ async def record_alerts(self, alerts: List[Alert]) -> List[Alert]:
                 recorded_alerts.append(alert_result)
                 if alert_result and alert_result.trigger_category == "critical":
                     await alert_queue.put(f"New alert detected: {alert.timestamp}")
-
-        logger.debug(f"Recorded alerts: {recorded_alerts}")
+        # Uncomment to debug the recorded alerts
+        # logger.debug(f"Recorded alerts: {recorded_alerts}")
         return recorded_alerts
 
     def _should_record_context(self, context: Optional[PipelineContext]) -> bool:
diff --git a/src/codegate/pipeline/base.py b/src/codegate/pipeline/base.py
index ba2d24b..9ac86d9 100644
--- a/src/codegate/pipeline/base.py
+++ b/src/codegate/pipeline/base.py
@@ -117,7 +117,8 @@ def add_alert(
                 timestamp=datetime.datetime.now(datetime.timezone.utc),
             )
         )
-        logger.debug(f"Added alert to context: {self.alerts_raised[-1]}")
+        # Uncomment the below to debug
+        # logger.debug(f"Added alert to context: {self.alerts_raised[-1]}")
 
     def add_input_request(
         self, normalized_request: ChatCompletionRequest, is_fim_request: bool, provider: str
@@ -159,7 +160,8 @@ def add_output(self, model_response: ModelResponse) -> None:
                     output=output_str,
                 )
             )
-            logger.debug(f"Added output to context: {self.output_responses[-1]}")
+            # Uncomment the below to debug the responses
+            # logger.debug(f"Added output to context: {self.output_responses[-1]}")
         except Exception as e:
             logger.error(f"Failed to serialize output: {model_response}", error=str(e))
             return
diff --git a/src/codegate/storage/storage_engine.py b/src/codegate/storage/storage_engine.py
index ffb697b..7f4f22b 100644
--- a/src/codegate/storage/storage_engine.py
+++ b/src/codegate/storage/storage_engine.py
@@ -200,14 +200,15 @@ async def search(
 
             # Log the raw SQL results
             rows = cursor.fetchall()
-            logger.debug(
-                "Raw SQL results",
-                row_count=len(rows),
-                rows=[
-                    {"name": row[0], "type": row[1], "status": row[2], "description": row[3]}
-                    for row in rows
-                ],
-            )
+            # Uncomment the following lines to log
+            # logger.debug(
+            #     "Raw SQL results",
+            #     row_count=len(rows),
+            #     rows=[
+            #         {"name": row[0], "type": row[1], "status": row[2], "description": row[3]}
+            #         for row in rows
+            #     ],
+            # )
 
             results = []
             query_words = None
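
A side note on the approach: the same noise reduction can also come from log levels rather than commenting debug calls out. The sketch below is illustrative only and not part of this patch; it assumes the standard library logging module, reuses the third-party logger names from the diff, and the helper name quiet_noisy_loggers is hypothetical.

import logging

def quiet_noisy_loggers(verbose: bool = False) -> None:
    # Raise the threshold for chatty third-party loggers. Unlike setting
    # logger.disabled = True, they can still report warnings and errors.
    for name in ("sqlalchemy", "uvicorn.error", "aiosqlite"):
        logging.getLogger(name).setLevel(logging.DEBUG if verbose else logging.WARNING)

With levels in place, calls such as logger.debug(f"Recorded output: {recorded_output}") could stay in the code and would simply be filtered out unless debug logging is enabled.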