Skip to content

Commit

Permalink
ensure no custom metrics are settable without tests
Browse files Browse the repository at this point in the history
  • Loading branch information
ekneg54 committed Oct 23, 2023
1 parent 488e825 commit 54a9995
Show file tree
Hide file tree
Showing 6 changed files with 54 additions and 52 deletions.
17 changes: 0 additions & 17 deletions logprep/abc/processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,33 +69,16 @@ class Config(Component.Config):
"""Set if the processor should be applied multiple times. This enables further processing
of an output with the same processor."""

@define(kw_only=True)
class Metrics(Component.Metrics):
    """Tracks statistics about this processor"""

    # NOTE(review): these override inherited metric fields with a plain
    # ``None`` default — presumably to disable the generic counters on
    # processors; confirm against Component.Metrics before relying on it.
    number_of_processed_events = field(default=None)
    number_of_failed_events = field(default=None)

    # Histogram measuring the per-event processing duration in seconds;
    # built lazily via ``factory`` so each Metrics instance gets its own.
    processing_time_per_event: HistogramMetric = field(
        factory=lambda: HistogramMetric(
            description="Time in seconds that it took to process an event",
            name="processing_time_per_event",
        )
    )
    """Time in seconds that it took to process an event"""

# Restrict instances to exactly these attributes (no per-instance __dict__),
# which saves memory when many processor instances exist.
__slots__ = [
    "rule_class",
    "has_custom_tests",
    "metrics",
    "_event",
    "_specific_tree",
    "_generic_tree",
]

# Class-level type annotations for the slotted attributes above.
rule_class: "Rule"
has_custom_tests: bool
metrics: "Processor.Metrics"
_event: dict
_specific_tree: RuleTree
_generic_tree: RuleTree
Expand Down
16 changes: 8 additions & 8 deletions tests/unit/component/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,16 +22,11 @@ class BaseComponentTestCase(ABC):
# Shared fixtures for all component test cases.
CONFIG: dict = {}
object: Connector = None
logger = getLogger()
# Concrete test cases list the fullnames of every metric their component
# exposes; defaults to empty so the base class can assert it is populated
# whenever a component defines custom metrics.
expected_metrics: list = []

# Attribute names skipped when collecting metric attributes
# (internal labeling machinery, not real metrics).
block_list = [
    "_labels",
    "_prefix",
]

def setup_method(self) -> None:
Expand Down Expand Up @@ -89,7 +84,7 @@ def test_no_metrics_with_same_name(self):
def test_custom_metrics_adds_custom_prefix_to_metrics_name(self):
for attribute in self.metric_attributes.values():
assert attribute.fullname.startswith(
f"logprep_{camel_to_snake(self.object.__class__.__name__)}"
f"logprep_"
), f"{attribute.fullname}, logprep_{camel_to_snake(self.object.__class__.__name__)}"

def test_expected_metrics_attributes(self):
Expand All @@ -106,5 +101,10 @@ def test_expected_metrics_attributes_are_initialized(self):
assert isinstance(metric_attribute.tracker, possibile_tracker_types)

def test_all_metric_attributes_are_tested(self):
    """Ensure every exposed metric fullname is listed in ``expected_metrics``.

    Components that only carry the default ``Component.Metrics`` are exempt,
    since the base metrics are covered by the shared base-class test data.
    """
    if self.object.__class__.Metrics is Component.Metrics:
        return
    assert self.expected_metrics, "expected_metrics is empty"
    fullnames = {metric.fullname for metric in self.metric_attributes.values()}
    difference = fullnames.difference(set(self.expected_metrics))
    # Fail with the offending names first, then require exact equality,
    # which also catches expected metrics that no longer exist.
    assert not difference, f"{difference} are not defined in `expected_metrics`"
    assert fullnames == set(self.expected_metrics)
8 changes: 8 additions & 0 deletions tests/unit/connector/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,14 @@ class BaseConnectorTestCase(BaseComponentTestCase):
object: Connector = None
logger = getLogger()

# Metric fullnames every connector exposes via the default component
# metrics; connector-specific test cases override this with a larger set
# (see the confluent-kafka test cases).
expected_metrics = [
    "logprep_processing_time_per_event",
    "logprep_number_of_processed_events",
    "logprep_number_of_failed_events",
    "logprep_number_of_warnings",
    "logprep_number_of_errors",
]

def test_is_a_connector_implementation(self):
    """The object under test must be a ``Connector`` instance."""
    connector_under_test = self.object
    assert isinstance(connector_under_test, Connector)

Expand Down
37 changes: 21 additions & 16 deletions tests/unit/connector/test_confluent_kafka_input.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,22 +34,27 @@ class TestConfluentKafkaInput(BaseInputTestCase, CommonConfluentKafkaTestCase):
}

# Exact set of metric fullnames this connector must expose: the default
# component metrics plus the librdkafka consumer statistics, all carrying
# the full "logprep_confluent_kafka_input_" prefix as required by the
# base-class equality check.
expected_metrics = {
    "logprep_confluent_kafka_input_commit_failures",
    "logprep_confluent_kafka_input_commit_success",
    "logprep_confluent_kafka_input_current_offsets",
    "logprep_confluent_kafka_input_committed_offsets",
    "logprep_confluent_kafka_input_librdkafka_age",
    "logprep_confluent_kafka_input_librdkafka_rx",
    "logprep_confluent_kafka_input_librdkafka_rx_bytes",
    "logprep_confluent_kafka_input_librdkafka_rxmsgs",
    "logprep_confluent_kafka_input_librdkafka_rxmsg_bytes",
    "logprep_confluent_kafka_input_librdkafka_cgrp_stateage",
    "logprep_confluent_kafka_input_librdkafka_cgrp_rebalance_age",
    "logprep_confluent_kafka_input_librdkafka_cgrp_rebalance_cnt",
    "logprep_confluent_kafka_input_librdkafka_cgrp_assignment_size",
    "logprep_confluent_kafka_input_librdkafka_replyq",
    "logprep_confluent_kafka_input_librdkafka_tx",
    "logprep_confluent_kafka_input_librdkafka_tx_bytes",
    "logprep_processing_time_per_event",
    "logprep_number_of_processed_events",
    "logprep_number_of_failed_events",
    "logprep_number_of_warnings",
    "logprep_number_of_errors",
}

@mock.patch("logprep.connector.confluent_kafka.input.Consumer")
Expand Down
27 changes: 16 additions & 11 deletions tests/unit/connector/test_confluent_kafka_output.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,17 +38,22 @@ class TestConfluentKafkaOutput(BaseOutputTestCase, CommonConfluentKafkaTestCase)
}

# Exact set of metric fullnames this connector must expose: the default
# component metrics plus the librdkafka producer statistics, all carrying
# the full "logprep_confluent_kafka_output_" prefix as required by the
# base-class equality check.
expected_metrics = {
    "logprep_confluent_kafka_output_librdkafka_age",
    "logprep_confluent_kafka_output_librdkafka_msg_cnt",
    "logprep_confluent_kafka_output_librdkafka_msg_size",
    "logprep_confluent_kafka_output_librdkafka_msg_max",
    "logprep_confluent_kafka_output_librdkafka_msg_size_max",
    "logprep_confluent_kafka_output_librdkafka_tx",
    "logprep_confluent_kafka_output_librdkafka_tx_bytes",
    "logprep_confluent_kafka_output_librdkafka_rx",
    "logprep_confluent_kafka_output_librdkafka_rx_bytes",
    "logprep_confluent_kafka_output_librdkafka_txmsgs",
    "logprep_confluent_kafka_output_librdkafka_txmsg_bytes",
    "logprep_processing_time_per_event",
    "logprep_number_of_processed_events",
    "logprep_number_of_failed_events",
    "logprep_number_of_warnings",
    "logprep_number_of_errors",
}

@mock.patch("logprep.connector.confluent_kafka.output.Producer", return_value="The Producer")
Expand Down
1 change: 1 addition & 0 deletions tests/unit/processor/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,7 @@ def setup_method(self) -> None:
"""
setUp class for the imported TestCase
"""
super().setup_method()
self.patchers = []
for name, kwargs in self.mocks.items():
patcher = mock.patch(name, **kwargs)
Expand Down

0 comments on commit 54a9995

Please sign in to comment.