From e9dad4b1691d18d4df47e99c9feecfa2a180eba4 Mon Sep 17 00:00:00 2001
From: Brianna Smart
Date: Thu, 15 Feb 2024 16:04:50 -0800
Subject: [PATCH] Change logging to self.log

---
 python/lsst/ap/association/diaPipe.py       |  4 ----
 python/lsst/ap/association/packageAlerts.py | 24 ++++++++----------------
 2 files changed, 8 insertions(+), 20 deletions(-)

diff --git a/python/lsst/ap/association/diaPipe.py b/python/lsst/ap/association/diaPipe.py
index 318b4512..5e62779d 100644
--- a/python/lsst/ap/association/diaPipe.py
+++ b/python/lsst/ap/association/diaPipe.py
@@ -34,7 +34,6 @@
 
 import numpy as np
 import pandas as pd
-import logging
 
 from lsst.daf.base import DateTime
 import lsst.dax.apdb as daxApdb
@@ -51,9 +50,6 @@
                               PackageAlertsTask)
 from lsst.ap.association.ssoAssociation import SolarSystemAssociationTask
 
-_log = logging.getLogger("lsst." + __name__)
-_log.setLevel(logging.DEBUG)
-
 
 class DiaPipelineConnections(
         pipeBase.PipelineTaskConnections,
diff --git a/python/lsst/ap/association/packageAlerts.py b/python/lsst/ap/association/packageAlerts.py
index 08a2d968..110c1e43 100644
--- a/python/lsst/ap/association/packageAlerts.py
+++ b/python/lsst/ap/association/packageAlerts.py
@@ -41,12 +41,6 @@
 from lsst.utils.timer import timeMethod
 import fastavro
 
-"""Methods for packaging Apdb and Pipelines data into Avro alerts.
-"""
-
-_log = logging.getLogger("lsst." + __name__)
-_log.setLevel(logging.DEBUG)
-
 
 class PackageAlertsConfig(pexConfig.Config):
     """Config class for AssociationTask.
@@ -71,13 +65,13 @@
 
     doProduceAlerts = pexConfig.Field(
         dtype=bool,
-        doc="Turn on alert production to kafka if true. Set to false by default",
+        doc="Turn on alert production to kafka if true and if confluent_kafka is in the environment.",
         default=False,
     )
 
     doWriteAlerts = pexConfig.Field(
         dtype=bool,
-        doc="Write alerts to disk if true. Set to true by default",
+        doc="Write alerts to disk if true.",
         default=True,
     )
 
@@ -94,7 +88,6 @@ def __init__(self, **kwargs):
         super().__init__(**kwargs)
         self.alertSchema = alertPack.Schema.from_uri(self.config.schemaFile)
         os.makedirs(self.config.alertWriteLocation, exist_ok=True)
-
         if self.config.doProduceAlerts:
             self.password = os.getenv("AP_KAFKA_PRODUCER_PASSWORD")
 
@@ -127,10 +120,9 @@
                 from confluent_kafka import KafkaException
                 self.kafka_exception = KafkaException
                 import confluent_kafka
-            except ImportError as error:
-                error.add_note("Could not import confluent_kafka. Alerts will not be sent "
-                               "to the alert stream")
-                _log.error(error)
+            except ImportError as e:
+                e.add_note("Alerts will not be sent to the alert stream.")
+                self.log.error(e)
 
             if not self.password:
                 raise ValueError("Kafka password environment variable was not set.")
@@ -278,7 +270,7 @@ def produceAlerts(self, alerts, ccdVisitId):
                 self.producer.flush()
 
             except self.kafka_exception as e:
-                _log.error('Kafka error: {}, message was {} bytes'.format(e, sys.getsizeof(alert_bytes)))
+                self.log.error('Kafka error: {}, message was {} bytes'.format(e, sys.getsizeof(alert_bytes)))
 
                 with open(os.path.join(self.config.alertWriteLocation,
                                        f"{ccdVisitId}_{alert['alertId']}.avro"), "wb") as f:
@@ -567,7 +559,7 @@ def _deserialize_confluent_wire_header(raw):
 
     def _delivery_callback(self, err, msg):
         if err:
-            _log.debug('%% Message failed delivery: %s\n' % err)
+            self.log.debug('%% Message failed delivery: %s\n' % err)
         else:
-            _log.debug('%% Message delivered to %s [%d] @ %d\n' %
+            self.log.debug('%% Message delivered to %s [%d] @ %d\n' %
                        (msg.topic(), msg.partition(), msg.offset()))