Skip to content

Commit

Permalink
Merge branch 'main' into find-peaktimes
Browse files Browse the repository at this point in the history
  • Loading branch information
benrady authored Aug 5, 2024
2 parents 094d08f + c612762 commit d538fdc
Show file tree
Hide file tree
Showing 10 changed files with 84 additions and 39 deletions.
7 changes: 3 additions & 4 deletions src/alogamous/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,16 +41,15 @@
[
# echo_analyzer.EchoAnalyzer(),
daily_count_analyzer.DailyCountAnalyzer(line_parser),
error_counter_analyzer.ErrorCounterAnalyzer(),
flag_duplicate_log_messages.FlagDuplicateLogMessages(),
error_counter_analyzer.ErrorCounterAnalyzer(line_parser),
flag_duplicate_log_messages.FlagDuplicateLogMessages(line_parser),
line_count_analyzer.LineCountAnalyzer(),
format_analyzer.FormatAnalyzer(line_parser),
warning_analyzer.WarningAnalyzer(),
loginfo_analyzer.InfoAnalyzer(line_parser),
stack_trace_analyzer.StackTraceAnalyzer(line_parser),
# startup_header_analyzer.StartupHeaderAnalyzer(line_parser),
warning_analyzer.WarningAnalyzer(),
peak_time_analyzer.PeakTimeAnalyzer(),
warning_analyzer.WarningAnalyzer(line_parser),
],
reader.read(),
output_file,
Expand Down
2 changes: 2 additions & 0 deletions src/alogamous/daily_warning_analyzer.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
from __future__ import annotations

import datetime

from alogamous import analyzer, log_line_parser
Expand Down
8 changes: 5 additions & 3 deletions src/alogamous/error_counter_analyzer.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,14 @@
from alogamous import analyzer
from alogamous import analyzer, log_line_parser


class ErrorCounterAnalyzer(analyzer.Analyzer):
def __init__(self, line_parser):
    """Count ERROR-level log lines.

    line_parser: shared log_line_parser used to split raw lines into fields.
    """
    self.parser = line_parser
    self.error_number = 0  # running count of ERROR log lines seen so far

def read_log_line(self, line):
    """Increment the error count when *line* parses as an ERROR log line."""
    parsed_line = self.parser.parse(line)
    # Only structured LOG_LINE rows carry a "level" field; header rows do not.
    if parsed_line["type"] == log_line_parser.LineType.LOG_LINE and parsed_line["level"].lower() == "error":
        self.error_number += 1

def report(self, out_stream):
Expand Down
21 changes: 12 additions & 9 deletions src/alogamous/flag_duplicate_log_messages.py
Original file line number Diff line number Diff line change
@@ -1,21 +1,24 @@
from alogamous import analyzer
from alogamous import analyzer, log_line_parser


class FlagDuplicateLogMessages(analyzer.Analyzer):
    """Analyzer that flags log messages appearing more than once in a stream."""

    def __init__(self, line_parser):
        # Parser used to split raw lines into structured fields.
        self.parser = line_parser
        # All messages seen so far, and the subset seen at least twice.
        self.logMessages = set()
        self.duplicateMessages = set()

    def read_log_line(self, line):
        """Record one raw log line; non-LOG_LINE rows (headers etc.) are ignored."""
        parsed_line = self.parser.parse(line)
        if parsed_line["type"] == log_line_parser.LineType.LOG_LINE:
            message = parsed_line["message"]
            if message in self.logMessages:
                self.duplicateMessages.add(message)
            else:
                self.logMessages.add(message)

    def report(self, out_stream):
        """Write a bulleted list of duplicate messages, or a no-duplicates note."""
        if len(self.duplicateMessages) > 0:
            out_stream.write("Duplicate Log Messages:\n- ")
            out_stream.write("\n- ".join(self.duplicateMessages))
        else:
            out_stream.write("No duplicate log messages")
2 changes: 1 addition & 1 deletion src/alogamous/loginfo_analyzer.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ def __init__(self, parser):
def read_log_line(self, line):
    """Count *line* when it parses as a LOG_LINE with level "info" (case-insensitive)."""
    parsed_line = self.parser.parse(line)
    line_type = parsed_line["type"]
    # Field is named "level" in the shared parser schema (renamed from "log_level").
    if line_type == LineType.LOG_LINE and parsed_line["level"].lower() == "info":
        self.infomessage_counter += 1

def report(self, out_stream):
Expand Down
10 changes: 5 additions & 5 deletions src/alogamous/warning_analyzer.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
from alogamous import analyzer
from alogamous import analyzer, log_line_parser


class WarningAnalyzer(analyzer.Analyzer):
def __init__(self, line_parser):
    """Count warning-level log lines.

    line_parser: shared log_line_parser used to split raw lines into fields.
    """
    self.parser = line_parser
    self.count = 0  # running count of warning log lines seen so far

def read_log_line(self, line):
    """Increment the count when *line* parses as a warning-level log line."""
    parsed_line = self.parser.parse(line)
    # startswith("warn") accepts both "WARN" and "WARNING" spellings.
    if parsed_line["type"] == log_line_parser.LineType.LOG_LINE and parsed_line["level"].lower().startswith("warn"):
        self.count += 1

def report(self, out_stream):
Expand Down
39 changes: 33 additions & 6 deletions tests/flag_duplicate_log_messages_test.py
Original file line number Diff line number Diff line change
@@ -1,27 +1,54 @@
import io

from alogamous import flag_duplicate_log_messages
from alogamous import flag_duplicate_log_messages, log_line_parser


def test_flag_duplicate_log_messages():
    """A message logged twice (even at different levels) is reported as a duplicate."""
    line_parser = log_line_parser.LogLineParser(
        ["datetime", "source", "level", "message"], " - ", "===================================================="
    )
    flagger = flag_duplicate_log_messages.FlagDuplicateLogMessages(line_parser)
    in_stream = io.StringIO("""Date - root - INFO - log message 1
Date - root - WARNING - log message 2
Date - root - WARNING - log message 1""")
    out_stream = io.StringIO()
    for line in in_stream:
        # rstrip: a trailing newline would otherwise end up in the message field
        flagger.read_log_line(line.rstrip())
    flagger.report(out_stream)
    assert out_stream.getvalue() == """Duplicate Log Messages:\n- log message 1"""


def test_flag_duplicate_log_messages_no_duplicates():
    """Three distinct messages produce the no-duplicates report."""
    line_parser = log_line_parser.LogLineParser(
        ["datetime", "source", "level", "message"], " - ", "===================================================="
    )
    flagger = flag_duplicate_log_messages.FlagDuplicateLogMessages(line_parser)
    in_stream = io.StringIO("""Date - root - INFO - log message 1
Date - root - WARNING - log message 2
Date - root - WARNING - log message 3""")
    out_stream = io.StringIO()
    for line in in_stream:
        # rstrip: a trailing newline would otherwise end up in the message field
        flagger.read_log_line(line.rstrip())
    flagger.report(out_stream)
    assert out_stream.getvalue() == """No duplicate log messages"""


def test_flag_duplicate_messages_with_header_and_dashes():
    """Startup-header lines and messages containing dashes are not false duplicates."""
    line_parser = log_line_parser.LogLineParser(
        ["datetime", "source", "level", "message"], " - ", "===================================================="
    )
    flagger = flag_duplicate_log_messages.FlagDuplicateLogMessages(line_parser)
    in_stream = io.StringIO("""====================================================
STARTING Tracking service
Start time: 2024-06-20 09:00:00.001550+00:00
Version: 2729a
Command line: ['.venv/bin/python3', '-m', 'app.tracking_service', '--market', 'US', '--version', '2729a']
====================================================
2024-06-20 11:00:17,983 - root - INFO - Adding subscription for pid None
2024-06-20 11:00:18,115 - root - INFO - Initialized Influx DB Client to host
2024-06-20 11:00:18,185 - root - INFO - Kafka reading from start of day 2024-06-20 05:00:00+00:00 on topic internal""")
    out_stream = io.StringIO()
    for line in in_stream:
        flagger.read_log_line(line.rstrip())
    flagger.report(out_stream)
    assert out_stream.getvalue() == """No duplicate log messages"""
10 changes: 5 additions & 5 deletions tests/info_counter_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@


def test_report():
infomessage_counter = InfoAnalyzer(LogLineParser(["log_level", "message"], ":", "HEADER"))
infomessage_counter = InfoAnalyzer(LogLineParser(["level", "message"], ":", "HEADER"))
out_stream = io.StringIO()

infomessage_counter.read_log_line("HEADER")
Expand All @@ -20,7 +20,7 @@ def test_report():


def test_capital_info():
infomessage_counter = InfoAnalyzer(LogLineParser(["log_level", "message"], ":", "HEADER"))
infomessage_counter = InfoAnalyzer(LogLineParser(["level", "message"], ":", "HEADER"))
out_stream = io.StringIO()

infomessage_counter.read_log_line("Info: line 1")
Expand All @@ -33,7 +33,7 @@ def test_capital_info():


def test_report_mixedlines():
infomessage_counter = InfoAnalyzer(LogLineParser(["log_level", "message"], ":", "HEADER"))
infomessage_counter = InfoAnalyzer(LogLineParser(["level", "message"], ":", "HEADER"))
out_stream = io.StringIO()

infomessage_counter.read_log_line("info: line 1")
Expand All @@ -46,7 +46,7 @@ def test_report_mixedlines():


def test_report_without_info_messages():
infomessage_counter = InfoAnalyzer(LogLineParser(["log_level", "message"], ":", "HEADER"))
infomessage_counter = InfoAnalyzer(LogLineParser(["level", "message"], ":", "HEADER"))
out_stream = io.StringIO()

infomessage_counter.read_log_line("Warning: line 1")
Expand All @@ -59,7 +59,7 @@ def test_report_without_info_messages():


def test_no_imput_lines():
infomessage_counter = InfoAnalyzer(LogLineParser(["log_level", "message"], ":", "HEADER"))
infomessage_counter = InfoAnalyzer(LogLineParser(["level", "message"], ":", "HEADER"))
out_stream = io.StringIO()

infomessage_counter.report(out_stream)
Expand Down
17 changes: 13 additions & 4 deletions tests/test_error_counter.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,13 @@
import io

from alogamous import error_counter_analyzer
from alogamous import error_counter_analyzer, log_line_parser


def test_error_counter():
counter = error_counter_analyzer.ErrorCounterAnalyzer()
parser = log_line_parser.LogLineParser(
["datetime", "source", "level", "message"], " - ", "===================================================="
)
counter = error_counter_analyzer.ErrorCounterAnalyzer(parser)
in_stream = io.StringIO(
"""2024-06-20 17:16:03,660 - root - ERROR - Caught exception N/A.
2024-06-20 17:16:03,660 - root - ERROR - Caught exception N/A. Message: Unclosed connector NoneType: None"""
Expand All @@ -17,7 +20,10 @@ def test_error_counter():


def test_no_errors():
counter = error_counter_analyzer.ErrorCounterAnalyzer()
parser = log_line_parser.LogLineParser(
["datetime", "source", "level", "message"], " - ", "===================================================="
)
counter = error_counter_analyzer.ErrorCounterAnalyzer(parser)
in_stream = io.StringIO("""2024-06-20 17:17:04,278 - root - INFO - Updating prices
2024-06-20 17:24:34,091 - root - INFO - Closing client connection.""")
out_stream = io.StringIO()
Expand All @@ -28,7 +34,10 @@ def test_no_errors():


def test_no_input_lines():
    """With no lines read at all, the report shows a zero error count."""
    parser = log_line_parser.LogLineParser(
        ["datetime", "source", "level", "message"], " - ", "===================================================="
    )
    counter = error_counter_analyzer.ErrorCounterAnalyzer(parser)
    out_stream = io.StringIO()
    counter.report(out_stream)
    assert out_stream.getvalue().strip() == "Number of error lines: 0"
7 changes: 5 additions & 2 deletions tests/warning_analyzer_test.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,13 @@
import io

from alogamous import warning_analyzer
from alogamous import log_line_parser, warning_analyzer


def test_warning_count():
counter = warning_analyzer.WarningAnalyzer()
parser = log_line_parser.LogLineParser(
["datetime", "source", "level", "message"], " - ", "===================================================="
)
counter = warning_analyzer.WarningAnalyzer(parser)
in_stream = io.StringIO("""2024-06-20 11:00:18,185 - root - INFO - Kafka reading from start of day 2024-06-20 05:00:00+00:00 on topic internal from kafka.servers:9092
2024-06-20 11:00:19,328 - root - INFO - Kafka source starting for topic internal at current offset 7924032 end offset 7928950 on servers kafka.servers:9092
2024-06-20 11:00:22,329 - root - INFO - Kafka topic internal is caught up at offset 7928949
Expand Down

0 comments on commit d538fdc

Please sign in to comment.