standardize exception inheritance to Exception (#695)
dtrai2 authored Nov 8, 2024
1 parent 8bbb38e commit 7e95536
Showing 24 changed files with 40 additions and 42 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -4,6 +4,7 @@
### Breaking
### Features
### Improvements
+* replace `BaseException` with `Exception` for custom errors
### Bugfix

- fix `confluent_kafka.store_offsets` if `last_valid_record` is `None`, can happen if a rebalancing happens
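Why this matters: `except Exception:` handlers do not catch errors that derive from `BaseException` but not from `Exception` (that branch of the hierarchy is reserved for exit signals such as `KeyboardInterrupt` and `SystemExit`), so custom errors based on `BaseException` could slip past generic error handling. A minimal sketch of the difference (class names are illustrative, not taken from the code base):

class LegacyError(BaseException):  # old style: bypasses generic handlers
    pass

class CustomError(Exception):  # new style: handled like any ordinary error
    pass

def run_safely(error):
    try:
        raise error
    except Exception:
        return "handled"

# run_safely(CustomError("boom")) returns "handled";
# run_safely(LegacyError("boom")) propagates out of run_safely and would crash the caller.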
2 changes: 1 addition & 1 deletion logprep/abc/processor.py
@@ -375,7 +375,7 @@ def _has_missing_values(self, event, rule, source_field_dict):
if missing_fields:
if rule.ignore_missing_fields:
return True
-error = BaseException(f"{self.name}: no value for fields: {missing_fields}")
+error = Exception(f"{self.name}: no value for fields: {missing_fields}")
self._handle_warning_error(event, rule, error)
return True
return False
6 changes: 3 additions & 3 deletions logprep/connector/dummy/input.py
@@ -4,7 +4,7 @@
A dummy input that returns the documents it was initialized with.
-If a "document" is derived from BaseException, that exception will be thrown instead of
+If a "document" is derived from Exception, that exception will be thrown instead of
returning a document. The exception will be removed and subsequent calls may return documents or
throw other exceptions in the given order.
@@ -36,7 +36,7 @@ class DummyInput(Input):
class Config(Input.Config):
"""DummyInput specific configuration"""

-documents: List[Union[dict, type, BaseException]]
+documents: List[Union[dict, type, Exception]]
"""A list of documents that should be returned."""
repeat_documents: Optional[str] = field(
validator=validators.instance_of(bool), default=False
@@ -57,6 +57,6 @@ def _get_event(self, timeout: float) -> tuple:

document = self._documents.pop(0)

-if (document.__class__ == type) and issubclass(document, BaseException):
+if (document.__class__ == type) and issubclass(document, Exception):
raise document
return document, None
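For context, the connector's `documents` list may mix plain event dicts with exception classes; when `_get_event` pops an exception class, it is raised instead of returned. A rough standalone sketch of the updated pop-and-raise logic (not the actual DummyInput class):

documents = [{"message": "first event"}, ValueError, {"message": "second event"}]

def next_document(docs):
    document = docs.pop(0)
    # Only Exception subclasses are raised now; BaseException-only types
    # (e.g. KeyboardInterrupt) would simply be returned like a document.
    if (document.__class__ == type) and issubclass(document, Exception):
        raise document
    return document, None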
2 changes: 1 addition & 1 deletion logprep/connector/file/input.py
@@ -2,7 +2,7 @@
FileInput
==========
A generic line input that returns the documents it was initialized with.
-If a "document" is derived from BaseException, that exception will be thrown instead of
+If a "document" is derived from Exception, that exception will be thrown instead of
returning a document. The exception will be removed and subsequent calls may return documents or
throw other exceptions in the given order.
2 changes: 1 addition & 1 deletion logprep/connector/json/input.py
@@ -4,7 +4,7 @@
A json input that returns the documents it was initialized with.
-If a "document" is derived from BaseException, that exception will be thrown instead of
+If a "document" is derived from Exception, that exception will be thrown instead of
returning a document. The exception will be removed and subsequent calls may return documents or
throw other exceptions in the given order.
2 changes: 1 addition & 1 deletion logprep/connector/jsonl/input.py
@@ -4,7 +4,7 @@
A json line input that returns the documents it was initialized with.
-If a "document" is derived from BaseException, that exception will be thrown instead of
+If a "document" is derived from Exception, that exception will be thrown instead of
returning a document. The exception will be removed and subsequent calls may return documents or
throw other exceptions in the given order.
2 changes: 1 addition & 1 deletion logprep/filter/expression/filter_expression.py
@@ -6,7 +6,7 @@
from typing import Any, List


-class FilterExpressionError(BaseException):
+class FilterExpressionError(Exception):
"""Base class for FilterExpression related exceptions."""


2 changes: 1 addition & 1 deletion logprep/filter/lucene_filter.py
@@ -130,7 +130,7 @@
logger = logging.getLogger("LuceneFilter")


-class LuceneFilterError(BaseException):
+class LuceneFilterError(Exception):
"""Base class for LuceneFilter related exceptions."""


4 changes: 2 additions & 2 deletions logprep/processor/amides/detection.py
@@ -10,7 +10,7 @@
from logprep.processor.amides.features import CommaSeparation


-class DetectionModelError(BaseException):
+class DetectionModelError(Exception):
"""Base exception class for all RuleModel-related errors."""


@@ -98,7 +98,7 @@ def detect(self, sample: str) -> Tuple[bool, float]:
return False, round(confidence_value, 3)


-class RuleAttributorError(BaseException):
+class RuleAttributorError(Exception):
"""Base class for all RuleAttributor-related Errors."""


4 changes: 1 addition & 3 deletions logprep/processor/dissector/processor.py
@@ -70,9 +70,7 @@ def _get_mappings(self, event, rule) -> List[Tuple[Callable, dict, str, str, str
if loop_content is None:
if rule.ignore_missing_fields:
continue
-error = BaseException(
-f"dissector: mapping field '{source_field}' does not exist"
-)
+error = Exception(f"dissector: mapping field '{source_field}' does not exist")
self._handle_warning_error(event, rule, error)
if delimiter is not None and loop_content is not None:
content, _, loop_content = loop_content.partition(delimiter)
3 changes: 1 addition & 2 deletions logprep/processor/field_manager/processor.py
@@ -197,8 +197,7 @@ def _get_field_values(event, source):

def _get_missing_fields_error(self, source_fields, field_values):
missing_fields = [key for key, value in zip(source_fields, field_values) if value is None]
-error = BaseException(f"{self.name}: missing source_fields: {missing_fields}")
-return error
+return Exception(f"{self.name}: missing source_fields: {missing_fields}")

@staticmethod
def _get_flatten_source_fields(source_fields_values):
2 changes: 1 addition & 1 deletion logprep/processor/labeler/labeling_schema.py
@@ -12,7 +12,7 @@
from logprep.util.getter import GetterFactory


-class LabelingSchemaError(BaseException):
+class LabelingSchemaError(Exception):
"""Base class for LabelingSchema related exceptions."""


2 changes: 1 addition & 1 deletion logprep/processor/list_comparison/processor.py
@@ -36,7 +36,7 @@
from logprep.util.helper import add_field_to, get_dotted_field_value


-class ListComparisonError(BaseException):
+class ListComparisonError(Exception):
"""Base class for ListComparison related exceptions."""

def __init__(self, name: str, message: str):
2 changes: 1 addition & 1 deletion logprep/processor/template_replacer/processor.py
@@ -45,7 +45,7 @@
from logprep.util.helper import add_field_to, get_dotted_field_value


-class TemplateReplacerError(BaseException):
+class TemplateReplacerError(Exception):
"""Base class for TemplateReplacer related exceptions."""

def __init__(self, name: str, message: str):
2 changes: 1 addition & 1 deletion logprep/util/auto_rule_tester/auto_rule_tester.py
@@ -77,7 +77,7 @@


# pylint: disable=protected-access
-class AutoRuleTesterException(BaseException):
+class AutoRuleTesterException(Exception):
"""Base class for AutoRuleTester related exceptions."""

def __init__(self, message: str):
2 changes: 1 addition & 1 deletion logprep/util/grok_pattern_loader.py
@@ -6,7 +6,7 @@
PATTERN_CONVERSION = [("[[:alnum:]]", r"\w")]


-class GrokPatternLoaderError(BaseException):
+class GrokPatternLoaderError(Exception):
"""Base class for GrokPatternLoader related exceptions."""

def __init__(self, message: str):
2 changes: 1 addition & 1 deletion logprep/util/pre_detector_rule_matching_tester.py
@@ -23,7 +23,7 @@


# pylint: disable=protected-access
-class MatchingRuleTesterException(BaseException):
+class MatchingRuleTesterException(Exception):
"""Base class for MatchingRuleTester related exceptions."""

def __init__(self, message: str):
2 changes: 1 addition & 1 deletion tests/unit/connector/test_confluent_kafka_common.py
@@ -31,7 +31,7 @@ def test_create_fails_for_unknown_option(self):
def test_error_callback_logs_error(self):
self.object.metrics.number_of_errors = 0
with mock.patch("logging.Logger.error") as mock_error:
-test_error = BaseException("test error")
+test_error = Exception("test error")
self.object._error_callback(test_error)
mock_error.assert_called()
mock_error.assert_called_with(f"{self.object.describe()}: {test_error}")
2 changes: 1 addition & 1 deletion tests/unit/connector/test_confluent_kafka_input.py
@@ -234,7 +234,7 @@ def test_get_next_raises_critical_input_parsing_error(self):

def test_commit_callback_raises_warning_error_and_counts_failures(self):
with pytest.raises(InputWarning, match="Could not commit offsets"):
-self.object._commit_callback(BaseException, ["topic_partition"])
+self.object._commit_callback(Exception, ["topic_partition"])
assert self.object._commit_failures == 1

def test_commit_callback_counts_commit_success(self):
6 changes: 3 additions & 3 deletions tests/unit/connector/test_dummy_input.py
@@ -10,7 +10,7 @@
from tests.unit.connector.base import BaseInputTestCase


-class DummyError(BaseException):
+class DummyError(Exception):
pass


@@ -44,9 +44,9 @@ def test_raises_exceptions_instead_of_returning_them_in_document(self):

def test_raises_exceptions_instead_of_returning_them(self):
config = copy.deepcopy(self.CONFIG)
-config["documents"] = [BaseException]
+config["documents"] = [Exception]
self.object = Factory.create({"Test Instance Name": config})
-with raises(BaseException):
+with raises(Exception):
self.object.get_next(self.timeout)

def test_repeat_documents_repeats_documents(self):
2 changes: 1 addition & 1 deletion tests/unit/connector/test_dummy_output.py
@@ -45,7 +45,7 @@ def test_raises_exception_on_call_to_store(self):
config.update({"exceptions": ["FatalOutputError"]})
dummy_output = Factory.create({"test connector": config})

-with raises(BaseException, match="FatalOutputError"):
+with raises(Exception, match="FatalOutputError"):
dummy_output.store({"order": 0})

def test_raises_exception_on_call_to_store_custom(self):
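A note on why the updated assertions keep working: `pytest.raises(Exception, ...)` matches any subclass of `Exception`, and the `match` argument is applied with `re.search` against the string form of the raised error. A small self-contained sketch (the error class here is an illustrative stand-in, not logprep's own output error):

import pytest

class DummyFatalOutputError(Exception):
    """Stand-in for an output connector error."""

def test_match_against_exception_subclass():
    # raises(Exception) accepts the subclass; match= is re.search'd on str(error)
    with pytest.raises(Exception, match="FatalOutputError"):
        raise DummyFatalOutputError("FatalOutputError: store failed")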
2 changes: 1 addition & 1 deletion tests/unit/connector/test_json_input.py
@@ -11,7 +11,7 @@
from tests.unit.connector.base import BaseInputTestCase


-class DummyError(BaseException):
+class DummyError(Exception):
pass


@@ -48,14 +48,14 @@ def test_apply_signature_engine(self):
def test_exception_if_raw_text_with_start_tag():
log_record = LogRecord(raw_text="Test log with start tag <+> must raise an exception")
sign_engine = SignatureEngine()
-with pytest.raises(BaseException, match=r"Start-tag <\+> in raw log message"):
+with pytest.raises(Exception, match=r"Start-tag <\+> in raw log message"):
sign_engine.run(log_record, LogSaltModeTestComposition.rules[0])

@staticmethod
def test_exception_if_raw_text_with_end_tag():
log_record = LogRecord(raw_text="Test log with end tag </+> must raise an exception")
sign_engine = SignatureEngine()
-with pytest.raises(BaseException, match=r"End-tag </\+> in raw log message"):
+with pytest.raises(Exception, match=r"End-tag </\+> in raw log message"):
sign_engine.run(log_record, LogSaltModeTestComposition.rules[0])

@staticmethod
@@ -64,7 +64,7 @@ def test_missing_end_tag_in_sig_text():
"Test log with a start tag <+>, but a missing end tag, " "must raise an exception"
)
stp = SignatureTagParser()
-with pytest.raises(BaseException):
+with pytest.raises(Exception):
stp.calculate_signature(sig_text)


20 changes: 10 additions & 10 deletions tests/unit/processor/generic_adder/test_generic_adder.py
@@ -489,11 +489,11 @@ def test_check_if_file_not_stale_after_initialization_of_the_generic_adder(self)
assert not self.object._check_if_file_not_exists_or_stale(time.time())

def test_check_if_file_stale_after_enough_time_has_passed(self):
-time.sleep(0.2) # nosemgrep
+time.sleep(0.2)
assert self.object._check_if_file_not_exists_or_stale(time.time())

def test_check_if_file_not_stale_after_enough_time_has_passed_but_file_has_been_changed(self):
-time.sleep(0.2) # nosemgrep
+time.sleep(0.2)
with open(self.object._db_file_path, "r", encoding="utf-8") as db_file:
file_temp = db_file.read()
now = time.time()
@@ -561,7 +561,7 @@ def test_sql_database_reloads_table_on_change_after_wait(self):
document_2 = {"add_from_sql_db_table": "Test", "source": "TEST_0.test.123"}

self.object.process(document_1)
-time.sleep(0.2) # nosemgrep
+time.sleep(0.2)
mock_simulate_table_change()
self.object.process(document_2)

@@ -579,7 +579,7 @@ def test_sql_database_with_empty_table_load_after_change(self):
self.object._db_table = {}
self.object._initialize_sql(self.CONFIG["sql_config"])
mock_simulate_table_change()
-time.sleep(0.2) # nosemgrep
+time.sleep(0.2)
self.object.process(document)

assert document == expected
@@ -621,14 +621,14 @@ def test_time_to_check_for_change_not_read_for_change(self):
assert self.object._db_connector.time_to_check_for_change() is False

def test_time_to_check_for_change_read_for_change(self):
-time.sleep(self.object._file_check_interval) # nosemgrep
+time.sleep(self.object._file_check_interval)
assert self.object._db_connector.time_to_check_for_change() is True

def test_update_from_db_and_write_to_file_change_and_stale(self):
assert os.path.isfile(self.object._db_file_path)
last_file_change = os.path.getmtime(self.object._db_file_path)
mock_simulate_table_change()
-time.sleep(self.object._file_check_interval) # nosemgrep
+time.sleep(self.object._file_check_interval)
self.object._update_from_db_and_write_to_file()
assert self.object._db_table == {
"TEST_0": (["b", "fi"], ["c", "fo"]),
Expand All @@ -640,7 +640,7 @@ def test_update_from_db_and_write_to_file_change_and_stale(self):
def test_update_from_db_and_write_to_file_no_change_and_stale(self):
assert os.path.isfile(self.object._db_file_path)
last_file_change = os.path.getmtime(self.object._db_file_path)
-time.sleep(self.object._file_check_interval) # nosemgrep
+time.sleep(self.object._file_check_interval)
self.object._update_from_db_and_write_to_file()
assert self.object._db_table == {
"TEST_0": (["b", "foo"], ["c", "bar"]),
Expand All @@ -654,7 +654,7 @@ def test_update_from_db_and_write_to_file_change_and_not_stale(self):
last_file_change = os.path.getmtime(self.object._db_file_path)
self.object._file_check_interval = 9999999
mock_simulate_table_change()
-time.sleep(0.01) # nosemgrep
+time.sleep(0.01)
self.object._update_from_db_and_write_to_file()
assert self.object._db_table == {
"TEST_0": (["b", "fi"], ["c", "fo"]),
Expand All @@ -667,7 +667,7 @@ def test_update_from_db_and_write_to_file_no_change_and_not_stale(self):
assert os.path.isfile(self.object._db_file_path)
last_file_change = os.path.getmtime(self.object._db_file_path)
self.object._file_check_interval = 9999999
-time.sleep(0.01) # nosemgrep
+time.sleep(0.01)
self.object._update_from_db_and_write_to_file()
assert self.object._db_table == {
"TEST_0": (["b", "foo"], ["c", "bar"]),
Expand All @@ -679,7 +679,7 @@ def test_update_from_db_and_write_to_file_no_change_and_not_stale(self):
def test_update_from_db_and_write_to_file_no_existing_file_stale(self):
assert os.path.isfile(self.object._db_file_path)
os.remove(self.object._db_file_path)
-time.sleep(self.object._file_check_interval) # nosemgrep
+time.sleep(self.object._file_check_interval)
self.object._db_connector._last_table_checksum = None
self.object._update_from_db_and_write_to_file()
assert self.object._db_table == {
