diff --git a/logprep/abc/processor.py b/logprep/abc/processor.py
index 99bce20e2..2e0dc2153 100644
--- a/logprep/abc/processor.py
+++ b/logprep/abc/processor.py
@@ -69,25 +69,9 @@ class Config(Component.Config):
         """Set if the processor should be applied multiple times.
         This enables further processing of an output with the same processor."""
 
-    @define(kw_only=True)
-    class Metrics(Component.Metrics):
-        """Tracks statistics about this processor"""
-
-        number_of_processed_events = field(default=None)
-        number_of_failed_events = field(default=None)
-
-        processing_time_per_event: HistogramMetric = field(
-            factory=lambda: HistogramMetric(
-                description="Time in seconds that it took to process an event",
-                name="processing_time_per_event",
-            )
-        )
-        """Time in seconds that it took to process an event"""
-
     __slots__ = [
         "rule_class",
         "has_custom_tests",
-        "metrics",
         "_event",
         "_specific_tree",
         "_generic_tree",
@@ -95,7 +79,6 @@ class Metrics(Component.Metrics):
 
     rule_class: "Rule"
     has_custom_tests: bool
-    metrics: "Processor.Metrics"
     _event: dict
     _specific_tree: RuleTree
     _generic_tree: RuleTree
diff --git a/tests/unit/component/base.py b/tests/unit/component/base.py
index a60960c9d..2a5687418 100644
--- a/tests/unit/component/base.py
+++ b/tests/unit/component/base.py
@@ -22,16 +22,11 @@ class BaseComponentTestCase(ABC):
     CONFIG: dict = {}
     object: Connector = None
     logger = getLogger()
-    expected_metrics: list
+    expected_metrics: list = []
 
     block_list = [
         "_labels",
         "_prefix",
-        "processing_time_per_event",
-        "number_of_processed_events",
-        "number_of_failed_events",
-        "number_of_warnings",
-        "number_of_errors",
     ]
 
     def setup_method(self) -> None:
@@ -89,7 +84,7 @@ def test_no_metrics_with_same_name(self):
     def test_custom_metrics_adds_custom_prefix_to_metrics_name(self):
         for attribute in self.metric_attributes.values():
             assert attribute.fullname.startswith(
-                f"logprep_{camel_to_snake(self.object.__class__.__name__)}"
+                f"logprep_"
            ), f"{attribute.fullname}, logprep_{camel_to_snake(self.object.__class__.__name__)}"
 
     def test_expected_metrics_attributes(self):
@@ -106,5 +101,10 @@ def test_expected_metrics_attributes_are_initialized(self):
             assert isinstance(metric_attribute.tracker, possibile_tracker_types)
 
     def test_all_metric_attributes_are_tested(self):
-        difference = set(self.metric_attributes).difference(set(self.expected_metrics))
+        if self.object.__class__.Metrics is Component.Metrics:
+            return
+        assert self.expected_metrics, "expected_metrics is empty"
+        fullnames = {metric.fullname for metric in self.metric_attributes.values()}
+        difference = fullnames.difference(set(self.expected_metrics))
+        assert fullnames == set(self.expected_metrics)
         assert not difference, f"{difference} are not defined in `expected_metrics`"
diff --git a/tests/unit/connector/base.py b/tests/unit/connector/base.py
index e3ab1a7c2..8b6ecd129 100644
--- a/tests/unit/connector/base.py
+++ b/tests/unit/connector/base.py
@@ -22,6 +22,14 @@ class BaseConnectorTestCase(BaseComponentTestCase):
     object: Connector = None
     logger = getLogger()
 
+    expected_metrics = [
+        "logprep_processing_time_per_event",
+        "logprep_number_of_processed_events",
+        "logprep_number_of_failed_events",
+        "logprep_number_of_warnings",
+        "logprep_number_of_errors",
+    ]
+
     def test_is_a_connector_implementation(self):
         assert isinstance(self.object, Connector)
 
diff --git a/tests/unit/connector/test_confluent_kafka_input.py b/tests/unit/connector/test_confluent_kafka_input.py
index 9f9ae1735..9cdc86465 100644
--- a/tests/unit/connector/test_confluent_kafka_input.py
+++ b/tests/unit/connector/test_confluent_kafka_input.py
@@ -34,22 +34,27 @@ class TestConfluentKafkaInput(BaseInputTestCase, CommonConfluentKafkaTestCase):
     }
 
     expected_metrics = {
-        "commit_failures",
-        "commit_success",
-        "current_offsets",
-        "committed_offsets",
-        "librdkafka_age",
-        "librdkafka_rx",
-        "librdkafka_rx_bytes",
-        "librdkafka_rxmsgs",
-        "librdkafka_rxmsg_bytes",
-        "librdkafka_cgrp_stateage",
-        "librdkafka_cgrp_rebalance_age",
-        "librdkafka_cgrp_rebalance_cnt",
-        "librdkafka_cgrp_assignment_size",
-        "librdkafka_replyq",
-        "librdkafka_tx",
-        "librdkafka_tx_bytes",
+        "logprep_confluent_kafka_input_commit_failures",
+        "logprep_confluent_kafka_input_commit_success",
+        "logprep_confluent_kafka_input_current_offsets",
+        "logprep_confluent_kafka_input_committed_offsets",
+        "logprep_confluent_kafka_input_librdkafka_age",
+        "logprep_confluent_kafka_input_librdkafka_rx",
+        "logprep_confluent_kafka_input_librdkafka_rx_bytes",
+        "logprep_confluent_kafka_input_librdkafka_rxmsgs",
+        "logprep_confluent_kafka_input_librdkafka_rxmsg_bytes",
+        "logprep_confluent_kafka_input_librdkafka_cgrp_stateage",
+        "logprep_confluent_kafka_input_librdkafka_cgrp_rebalance_age",
+        "logprep_confluent_kafka_input_librdkafka_cgrp_rebalance_cnt",
+        "logprep_confluent_kafka_input_librdkafka_cgrp_assignment_size",
+        "logprep_confluent_kafka_input_librdkafka_replyq",
+        "logprep_confluent_kafka_input_librdkafka_tx",
+        "logprep_confluent_kafka_input_librdkafka_tx_bytes",
+        "logprep_processing_time_per_event",
+        "logprep_number_of_processed_events",
+        "logprep_number_of_failed_events",
+        "logprep_number_of_warnings",
+        "logprep_number_of_errors",
     }
 
     @mock.patch("logprep.connector.confluent_kafka.input.Consumer")
diff --git a/tests/unit/connector/test_confluent_kafka_output.py b/tests/unit/connector/test_confluent_kafka_output.py
index 744047c4e..7f9472cc6 100644
--- a/tests/unit/connector/test_confluent_kafka_output.py
+++ b/tests/unit/connector/test_confluent_kafka_output.py
@@ -38,17 +38,22 @@ class TestConfluentKafkaOutput(BaseOutputTestCase, CommonConfluentKafkaTestCase)
     }
 
     expected_metrics = {
-        "librdkafka_age",
-        "librdkafka_msg_cnt",
-        "librdkafka_msg_size",
-        "librdkafka_msg_max",
-        "librdkafka_msg_size_max",
-        "librdkafka_tx",
-        "librdkafka_tx_bytes",
-        "librdkafka_rx",
-        "librdkafka_rx_bytes",
-        "librdkafka_txmsgs",
-        "librdkafka_txmsg_bytes",
+        "logprep_confluent_kafka_output_librdkafka_age",
+        "logprep_confluent_kafka_output_librdkafka_msg_cnt",
+        "logprep_confluent_kafka_output_librdkafka_msg_size",
+        "logprep_confluent_kafka_output_librdkafka_msg_max",
+        "logprep_confluent_kafka_output_librdkafka_msg_size_max",
+        "logprep_confluent_kafka_output_librdkafka_tx",
+        "logprep_confluent_kafka_output_librdkafka_tx_bytes",
+        "logprep_confluent_kafka_output_librdkafka_rx",
+        "logprep_confluent_kafka_output_librdkafka_rx_bytes",
+        "logprep_confluent_kafka_output_librdkafka_txmsgs",
+        "logprep_confluent_kafka_output_librdkafka_txmsg_bytes",
+        "logprep_processing_time_per_event",
+        "logprep_number_of_processed_events",
+        "logprep_number_of_failed_events",
+        "logprep_number_of_warnings",
+        "logprep_number_of_errors",
     }
 
     @mock.patch("logprep.connector.confluent_kafka.output.Producer", return_value="The Producer")
diff --git a/tests/unit/processor/base.py b/tests/unit/processor/base.py
index 0f7e92cde..84cdd96ec 100644
--- a/tests/unit/processor/base.py
+++ b/tests/unit/processor/base.py
@@ -85,6 +85,7 @@ def setup_method(self) -> None:
         """
         setUp class for the imported TestCase
         """
+        super().setup_method()
         self.patchers = []
         for name, kwargs in self.mocks.items():
             patcher = mock.patch(name, **kwargs)