Skip to content

Commit

Permalink
Working for telemetry
Browse files Browse the repository at this point in the history
  • Loading branch information
boazhaim committed Nov 11, 2024
1 parent 2bd0a85 commit deecc02
Show file tree
Hide file tree
Showing 3 changed files with 40 additions and 31 deletions.
20 changes: 1 addition & 19 deletions plugins/ufm_log_analyzer_plugin/src/loganalyze/log_analyzer.py
Original file line number Diff line number Diff line change
Expand Up @@ -389,25 +389,7 @@ def create_analyzer(parsed_args, full_extracted_logs_list,

# Adding telemetry stats to the PDF
for cur_telemetry in [ibdianget_2_ports_primary_analyzer, ibdianget_2_ports_secondary_analyzer]:
txt = f"{cur_telemetry.telemetry_type} info: {os.linesep}"
txt += f"Found the following collectx version(s):{os.linesep}"
for collectx_version in cur_telemetry.get_collectx_versions():
txt += f"{collectx_version}, "
txt += os.linesep
txt += f"Found {cur_telemetry.get_number_of_core_dumps()} core dumps{os.linesep}"
txt += str(cur_telemetry.get_number_of_switches_and_ports())
iteration_stats = cur_telemetry.get_last_iterations_time_stats()
if iteration_stats is None:
cur_telemetry.analyze_iteration_time()
iteration_stats = cur_telemetry.get_last_iterations_time_stats()
txt += f"Iteration time stats:{os.linesep}"
txt += str(iteration_stats)
text_to_show_in_pdf += os.linesep + os.linesep + txt

#Also print the stats to the screen
print(f"stats for {cur_telemetry.telemetry_type}:")
print(cur_telemetry.get_last_iterations_time_stats())
print(cur_telemetry.get_number_of_switches_and_ports())
text_to_show_in_pdf += cur_telemetry.text_to_show_in_pdf
# PDF creator gets all the images and to add to the report
pdf = PDFCreator(pdf_path, pdf_header, png_images, text_to_show_in_pdf)
pdf.created_pdf()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring

import logging
import os
import csv
import shutil
Expand All @@ -21,15 +22,17 @@
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
import matplotlib.dates as mdates

from loganalyze.log_analyzers.constants import DataConstants
import loganalyze.logger as log
logging.getLogger('matplotlib').setLevel(logging.ERROR) # This makes sure the user does not see the warning from plotting
matplotlib.use('Agg') # This allows to run the tool on servers without graphic card/headless

pd.set_option("display.max_colwidth", None)
warnings.filterwarnings("ignore")


class BaseImageCreator:
# Setting the graph time interval to 1 hour
# This is out side of the constructor since
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,18 +10,19 @@
# provided with the software product.
#

import os
from typing import List
import warnings
import pandas as pd
from loganalyze.log_analyzers.base_analyzer import BaseAnalyzer
from datetime import timedelta


class Ibdiagnet2PortCountersAnalyzer(BaseAnalyzer):
def __init__(self, logs_csvs: List[str], hours: int, dest_image_path: str, sort_timestamp=False):
super().__init__(logs_csvs, hours, dest_image_path, sort_timestamp)
self._iteration_time_data = None
self._iteration_time_stats = None
self.text_to_show_in_pdf = ""
# This will make sure all the extra columns are int
extra_columns = ['extra1', 'extra2', 'extra3', 'extra4', 'extra5']
for col in extra_columns:
Expand Down Expand Up @@ -132,21 +133,44 @@ def get_last_iterations_time_stats(self):
return self._iteration_time_stats

def plot_iteration_time_over_time(self):
    """Plot telemetry iteration time against its timestamp and save the image.

    Lazily triggers ``analyze_iteration_time`` when the iteration-time data
    has not been computed yet, then delegates the actual drawing/saving to
    ``_save_data_based_on_timestamp``.
    """
    # NOTE(review): the source span contained diff residue that duplicated the
    # statements below (set_index twice, the plotting block twice); a second
    # set_index('timestamp') call would raise KeyError, so only one pass is kept.
    if self._iteration_time_data is None:
        # `is None` (not truthiness) — truth-testing a DataFrame raises ValueError.
        self.analyze_iteration_time()

    self._iteration_time_data.set_index('timestamp', inplace=True)

    # Plot the data using the existing method; silence matplotlib's tick-locator
    # warning that fires on very dense time axes.
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", ".*Locator attempting to generate.*")
        self._save_data_based_on_timestamp(
            data_to_plot=self._iteration_time_data['data'],
            x_label='Timestamp',
            y_label='Iteration Time (s)',
            title=f'{self.telemetry_type} Iteration Time',
            large_sample=True)

def get_number_of_core_dumps(self):
core_dumps = self._log_data_sorted[self._log_data_sorted['type'] == 'timeout_dump_core']
return len(core_dumps)

def full_analysis(self):
    """Build the telemetry summary text shown in the PDF, echo the same
    stats to stdout, and delegate to the base-class analysis."""
    summary_parts = [os.linesep, os.linesep]
    summary_parts.append(f"{self.telemetry_type} info: {os.linesep}")
    summary_parts.append(f"Found the following collectx version(s):{os.linesep}")
    # Versions are rendered as a comma-separated run, trailing separator kept.
    summary_parts.extend(f"{version}, " for version in self.get_collectx_versions())
    summary_parts.append(os.linesep)
    summary_parts.append(f"Found {self.get_number_of_core_dumps()} core dumps{os.linesep}")
    summary_parts.append(str(self.get_number_of_switches_and_ports()))
    # Compute iteration-time stats lazily if they were not produced yet.
    iteration_stats = self.get_last_iterations_time_stats()
    if iteration_stats is None:
        self.analyze_iteration_time()
        iteration_stats = self.get_last_iterations_time_stats()
    summary_parts.append(f"Iteration time stats:{os.linesep}")
    summary_parts.append(str(iteration_stats))
    self.text_to_show_in_pdf = "".join(summary_parts)
    # Mirror the stats on the console for interactive runs.
    print(f"stats for {self.telemetry_type}:")
    print(self.get_last_iterations_time_stats())
    print(self.get_number_of_switches_and_ports())
    print(f"Collectx versions {self.get_collectx_versions()}")

    return super().full_analysis()

0 comments on commit deecc02

Please sign in to comment.