From 2c965a97d2e77421d1f59a772e5eaba406299ef7 Mon Sep 17 00:00:00 2001 From: Marco Mambelli Date: Sat, 12 Aug 2023 09:44:01 -0500 Subject: [PATCH] Added script to parse structlogs and get traditional messages or select fields/keys and improved log initialization and consolidated handler's setup Using booleans True/False instead of 1/0 in rollover control in logSupport Reverted manual_glidein_submit.py to use std Python logging (it is and standalone script not using the framework log files) Added documentation and adapted unit tests Consolidated also is_true() to lib/util.py Cleaned up some docstrings and comments --- CHANGELOG.md | 1 + build/packaging/rpm/glideinwms.spec | 2 + creation/lib/cWParamDict.py | 13 +- creation/lib/cWParams.py | 16 +- creation/lib/cgWParamDict.py | 5 - creation/lib/cgWParams.py | 15 +- creation/lib/cvWParams.py | 1 + creation/lib/factory_defaults.xml | 2 +- creation/reconfig_glidein | 26 +- doc/factory/configuration.html | 12 +- doc/frontend/configuration.html | 25 +- factory/glideFactory.py | 28 +- factory/glideFactoryEntry.py | 15 +- factory/glideFactoryEntryGroup.py | 101 ++---- factory/tools/OSG_autoconf.py | 14 +- factory/tools/manual_glidein_submit.py | 10 +- frontend/glideinFrontend.py | 25 +- frontend/glideinFrontendElement.py | 21 +- lib/logSupport.py | 209 ++++++++----- lib/util.py | 16 + lib/xmlParse.py | 10 - requirements.txt | 1 + tools/gwms-logparser.py | 287 ++++++++++++++++++ tox.ini | 1 + ...t_logSupport.py => test_lib_logSupport.py} | 22 +- unittests/worker_scripts/log_writer.py | 22 +- 26 files changed, 539 insertions(+), 361 deletions(-) create mode 100755 tools/gwms-logparser.py rename unittests/{test_logSupport.py => test_lib_logSupport.py} (93%) diff --git a/CHANGELOG.md b/CHANGELOG.md index f40f9f879..c6ec788bc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ Changes since the last release ### New features / functionalities - Added support for Debian 11 and Ubuntu 22 worker nodes (PR #320) +- Added structured logging. It is a hybrid format with some fields followed by a JSON dictionary. The exact format of the messages may change in the future, and we plan for it to become the default. Now it is disabled by default. Add `structured="True"` to all `` elements (PR #327) ### Changed defaults / behaviours diff --git a/build/packaging/rpm/glideinwms.spec b/build/packaging/rpm/glideinwms.spec index 57b8c768a..bc5728944 100644 --- a/build/packaging/rpm/glideinwms.spec +++ b/build/packaging/rpm/glideinwms.spec @@ -185,11 +185,13 @@ Requires: python3-pyyaml Requires: python3-jwt Requires: python3-cryptography Requires: python3-m2crypto +#Requires: python3-structlog %else Requires: PyYAML Requires: python36-jwt Requires: python36-cryptography Requires: python36-m2crypto +Requires: python36-structlog %endif Requires: python3-rrdtool %description libs diff --git a/creation/lib/cWParamDict.py b/creation/lib/cWParamDict.py index feec09c4c..8288e2612 100644 --- a/creation/lib/cWParamDict.py +++ b/creation/lib/cWParamDict.py @@ -1,12 +1,6 @@ # SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC # SPDX-License-Identifier: Apache-2.0 -# -# Project: -# glideinWMS -# -# File Version: -# # Description: # Frontend creation module # Classes and functions needed to handle dictionary files @@ -17,12 +11,9 @@ import os.path -from . import cWConsts, cWDictFile - +from glideinwms.lib.util import is_true -def is_true(s): - """Case insensitive string parsing helper. 
Return True for true (case insensitive matching), False otherwise.""" - return type(s) == str and s.lower() == "true" +from . import cWConsts, cWDictFile def has_file_wrapper(dicts): diff --git a/creation/lib/cWParams.py b/creation/lib/cWParams.py index ab5197348..ffce2776c 100644 --- a/creation/lib/cWParams.py +++ b/creation/lib/cWParams.py @@ -1,21 +1,8 @@ # SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC # SPDX-License-Identifier: Apache-2.0 -# -# Project: -# glideinWMS -# -# File Version: -# # Description: # This module contains the generic params classes -# -# Extracted from: -# cgWParams.py -# -# Author: -# Igor Sfiligoi -# import copy import os @@ -95,6 +82,7 @@ def validate(self, base, path_text): """ for k in self.data: + # TODO: MMBFIX is the next line doing anything? should it be removed? check history? self.data if k not in base: # element not in base, report @@ -663,7 +651,7 @@ def shorten_text(text, width): def defdict2string(defaults, indent, width=80): - """Convert defualts to a string + """Convert defaults to a string Args: defaults: diff --git a/creation/lib/cgWParamDict.py b/creation/lib/cgWParamDict.py index 8a7113b49..e7d260d69 100644 --- a/creation/lib/cgWParamDict.py +++ b/creation/lib/cgWParamDict.py @@ -1,11 +1,6 @@ # SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC # SPDX-License-Identifier: Apache-2.0 -# Project: -# glideinWMS -# -# File Version: -# # Description: # Glidein creation module # Classes and functions needed to handle dictionary files diff --git a/creation/lib/cgWParams.py b/creation/lib/cgWParams.py index c45c98c98..879fe01c4 100644 --- a/creation/lib/cgWParams.py +++ b/creation/lib/cgWParams.py @@ -1,18 +1,8 @@ # SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC # SPDX-License-Identifier: Apache-2.0 -# -# Project: -# glideinWMS -# -# File Version: -# # Desscription: # This module contains the create_glidein params class -# -# Author: -# Igor Sfiligoi -# import copy import os @@ -29,10 +19,6 @@ from . 
import cWParams -# import types -# import traceback -# from collections import OrderedDict - ###################################################### class GlideinParams(cWParams.CommonParams): @@ -297,6 +283,7 @@ def init_defaults(self): self.defaults["monitor_footer"] = monitor_footer_defaults process_log_defaults = copy.deepcopy(one_log_retention_defaults) + process_log_defaults["structured"] = ["False", "Bool", "True to use structured logs", None] process_log_defaults["extension"] = ["all", "string", "name of the log extention", None] process_log_defaults["msg_types"] = ["INFO, WARN, ERR", "string", "types of log messages", None] process_log_defaults["backup_count"] = ["5", "string", "Number of backup logs to keep", None] diff --git a/creation/lib/cvWParams.py b/creation/lib/cvWParams.py index 009dcc30f..c3bf66951 100644 --- a/creation/lib/cvWParams.py +++ b/creation/lib/cvWParams.py @@ -406,6 +406,7 @@ def init_defaults(self): self.defaults["work"] = work_defaults process_log_defaults = cWParams.CommentedOrderedDict() + process_log_defaults["structured"] = ["False", "Bool", "True to use structured logs", None] process_log_defaults["min_days"] = [ "3.0", "days", diff --git a/creation/lib/factory_defaults.xml b/creation/lib/factory_defaults.xml index 7a6fa3b2d..9013640a2 100644 --- a/creation/lib/factory_defaults.xml +++ b/creation/lib/factory_defaults.xml @@ -10,7 +10,7 @@ SPDX-License-Identifier: Apache-2.0 - + diff --git a/creation/reconfig_glidein b/creation/reconfig_glidein index 4d64f7aff..88d4cb3e8 100755 --- a/creation/reconfig_glidein +++ b/creation/reconfig_glidein @@ -61,31 +61,7 @@ def logReconfig(msg): # Set the Log directory logSupport.log_dir = os.path.join(glideinDescript.data["LogDir"], "factory") # Configure factory process logging - process_logs = eval(glideinDescript.data["ProcessLogs"]) - for plog in process_logs: - if "ADMIN" in plog["msg_types"]: - logSupport.add_processlog_handler( - "factoryadmin", - logSupport.log_dir, - "DEBUG,INFO,WARN,ERR", - plog["extension"], - int(float(plog["max_days"])), - int(float(plog["min_days"])), - int(float(plog["max_mbytes"])), - plog["compression"], - ) - else: - logSupport.add_processlog_handler( - "factoryadmin", - logSupport.log_dir, - plog["msg_types"], - plog["extension"], - int(float(plog["max_days"])), - int(float(plog["min_days"])), - int(float(plog["max_mbytes"])), - plog["compression"], - ) - logSupport.log = logSupport.getLogger("factoryadmin") + logSupport.log = logSupport.get_logger_with_handlers("factoryadmin", logSupport.log_dir, glideinDescript.data) logSupport.log.info("Reconfiguring factory: %s" % msg) diff --git a/doc/factory/configuration.html b/doc/factory/configuration.html index 633db0e1d..48275dcf7 100644 --- a/doc/factory/configuration.html +++ b/doc/factory/configuration.html @@ -170,9 +170,9 @@

Example Configuration

<process_logs >
<process_log extension="info" max_days="7.0" - max_mbytes="100.0" min_days="3.0" msg_types="INFO" - backup_count="5" compression="gz" /><process_log structured="True" extension="info" + max_days="7.0" max_mbytes="100.0" min_days="3.0" + msg_types="INFO" backup_count="5" compression="gz" />
<process_log extension="debug" max_days="7.0" @@ -611,6 +611,12 @@

Global arguments

with other message types.
+ If any of the logs has structured=True, then the logs are
+ written in structured format. It is actually a hybrid format, with
+ some initial fields and a JSON dictionary. Since all logs share the
+ same logger, it is not possible to have some in structured and some
+ in classic format. In the future we plan for the structured format
+ to become the default.
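
For illustration, a hybrid record could look like the hypothetical line in the sketch below: a few fields separated by " - " followed by a JSON dictionary. The exact fields depend on the structlog processors configured in lib/logSupport.py and may change; the sketch only mirrors the way tools/gwms-logparser.py splits such a record, and the sample line is an assumption, not the guaranteed format.

    import json

    # Hypothetical hybrid record: leading fields separated by " - ", then a JSON dictionary
    line = '2023-08-12T09:44:01 - glideFactory - factory - INFO - {"event": "Logging initialized"}'
    fields = line.split(" - ")  # " - " is the default input separator of gwms-logparser.py
    payload = json.loads(fields[-1]) if fields[-1].startswith("{") else {}
    print(fields[3], payload.get("event"))  # log level (assumed 4th field) and structured message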

The extension is added to the log name to create separate logs.

Log Retention and Rotation Policy:
diff --git a/doc/frontend/configuration.html b/doc/frontend/configuration.html
index 802b15fa6..d0d2194eb 100644
--- a/doc/frontend/configuration.html
+++ b/doc/frontend/configuration.html
@@ -159,14 +159,14 @@

Example Configuration

<process_logs >
<process_log extension="info" max_days="7.0" - max_mbytes="100.0" min_days="3.0" msg_types="INFO" - backup_count="5" compression="gz" /><process_log structured="False" extension="info" + max_days="7.0" max_mbytes="100.0" min_days="3.0" + msg_types="INFO" backup_count="5" compression="gz" />
<process_log extension="debug" max_days="7.0" - max_mbytes="100.0" min_days="3.0" msg_types="DEBUG,ERR,WARN" - backup_count="5" /><process_log structured="False" extension="debug" + max_days="7.0" max_mbytes="100.0" min_days="3.0" + msg_types="DEBUG,ERR,WARN" backup_count="5" />
</process_logs >
@@ -518,9 +518,10 @@

Frontend Configuration

<frontend><log_retention><process_logs><process_log
- max_days="max days" min_days="min days" max_bytes="max bytes"
- backup_count="backup count"
+ structured="True" max_days="max days"
+ min_days="min days" max_bytes="max bytes" backup_count="backup count"
type="ALL" compression="gz"/>

@@ -541,6 +542,12 @@

Frontend Configuration

that don't necessarily cause abnormal execution.
+ If any of the logs has structured=True, then the logs are
+ written in structured format. It is actually a hybrid format, with
+ some initial fields and a JSON dictionary. Since all logs share the
+ same logger, it is not possible to have some in structured and some
+ in classic format. In the future we plan for the structured format
+ to become the default.
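
Records in the hybrid format can then be filtered with the new tools/gwms-logparser.py script; the invocations below follow the examples in the script's own help text. The file name is only a sample based on the <name>.<extension>.log convention; bare file names are also searched in the default /var/log/gwms log directory.

    gwms-logparser.py -f 0,1,2 -k msg frontend.info.log
    gwms-logparser.py -f 3 frontend.info.log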

The extension is added to the log name to create separate logs.

Log Retention and Rotation Policy: diff --git a/factory/glideFactory.py b/factory/glideFactory.py index c1ede6471..d7ae7f9b6 100755 --- a/factory/glideFactory.py +++ b/factory/glideFactory.py @@ -847,33 +847,7 @@ def main(startup_dir): logSupport.log_dir = os.path.join(glideinDescript.data["LogDir"], "factory") # Configure factory process logging - process_logs = eval(glideinDescript.data["ProcessLogs"]) - for plog in process_logs: - if "ADMIN" in plog["msg_types"].upper(): - logSupport.add_processlog_handler( - "factoryadmin", - logSupport.log_dir, - "DEBUG,INFO,WARN,ERR", - plog["extension"], - int(float(plog["max_days"])), - int(float(plog["min_days"])), - int(float(plog["max_mbytes"])), - int(float(plog["backup_count"])), - plog["compression"], - ) - else: - logSupport.add_processlog_handler( - "factory", - logSupport.log_dir, - plog["msg_types"], - plog["extension"], - int(float(plog["max_days"])), - int(float(plog["min_days"])), - int(float(plog["max_mbytes"])), - int(float(plog["backup_count"])), - plog["compression"], - ) - logSupport.log = logSupport.getLogger("factory") + logSupport.log = logSupport.get_logger_with_handlers("factory", logSupport.log_dir, glideinDescript.data) logSupport.log.info("Logging initialized") if glideinDescript.data["Entries"].strip() in ("", ","): diff --git a/factory/glideFactoryEntry.py b/factory/glideFactoryEntry.py index 1d23820da..1261d544e 100644 --- a/factory/glideFactoryEntry.py +++ b/factory/glideFactoryEntry.py @@ -72,20 +72,7 @@ def __init__(self, name, startup_dir, glidein_descript, frontend_descript): self.scheddName = self.jobDescript.data["Schedd"] # glideFactoryLib.log_files - process_logs = eval(self.glideinDescript.data["ProcessLogs"]) - for plog in process_logs: - logSupport.add_processlog_handler( - self.name, - self.logDir, - plog["msg_types"], - plog["extension"], - int(float(plog["max_days"])), - int(float(plog["min_days"])), - int(float(plog["max_mbytes"])), - int(float(plog["backup_count"])), - plog["compression"], - ) - self.log = logSupport.getLogger(self.name) + self.log = logSupport.get_logger_with_handlers(self.name, self.logDir, self.glideinDescript.data) cleaner = cleanupSupport.DirCleanupWSpace( self.logDir, diff --git a/factory/glideFactoryEntryGroup.py b/factory/glideFactoryEntryGroup.py index 07a2d3b86..0b4533d25 100755 --- a/factory/glideFactoryEntryGroup.py +++ b/factory/glideFactoryEntryGroup.py @@ -3,25 +3,18 @@ # SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC # SPDX-License-Identifier: Apache-2.0 -# -# Project: -# glideinWMS -# -# File Version: -# # Description: # This is the glideinFactoryEntryGroup. Common Tasks like querying collector # and advertizing the work done by group are done here # # Arguments: -# $1 = poll period (in seconds) -# $2 = advertize rate (every $2 loops) -# $3 = glidein submit_dir -# $4 = entry name -# -# Author: -# Parag Mhashilkar (October 2012) -# +# $1 = parent_pid (int): The pid for the Factory daemon +# $2 = sleep_time (int): The number of seconds to sleep between iterations +# $3 = advertize_rate (int): The rate at which advertising should occur (every $3 loops) +# $4 = startup_dir (str|Path): The "home" directory for the entry. 
+# $5 = entry_names (str): Colon separated list with the names of the entries this process should work on +# $6 = group_id (str): Group id, normally a number (with the "group_" prefix formes the group name), +# It can change between Factory reconfigurations import logging import os @@ -578,17 +571,18 @@ def iterate(parent_pid, sleep_time, advertize_rate, glideinDescript, frontendDes entry.writeStats() return_dict[entry.name] = entry.getState() except: - entry.log.warning("Error writing stats for entry '%s'" % (entry.name)) - entry.log.exception("Error writing stats for entry '%s': " % (entry.name)) + entry.log.warning(f"Error writing stats for entry '{entry.name}'") + entry.log.exception(f"Error writing stats for entry '{entry.name}': ") try: os.write(w, pickle.dumps(return_dict)) except: # Catch and log exceptions if any to avoid # runaway processes. - entry.log.exception("Error writing pickled state for entry '%s': " % (entry.name)) + logSupport.log.exception(f"Error writing pickled state for entries '{entrylists[cpu]}': ") os.close(w) # Exit without triggering SystemExit exception + # Note that this is skippihg also all the cleanup (files closing, finally clauses) os._exit(0) try: @@ -629,66 +623,35 @@ def iterate(parent_pid, sleep_time, advertize_rate, glideinDescript, frontendDes is_first = False # Entering following iterations -############################################################ -# Initialize log_files for entries and groups - - -def init_logs(name, log_dir, process_logs): - for plog in process_logs: - logSupport.add_processlog_handler( - name, - log_dir, - plog["msg_types"], - plog["extension"], - int(float(plog["max_days"])), - int(float(plog["min_days"])), - int(float(plog["max_mbytes"])), - int(float(plog["backup_count"])), - plog["compression"], - ) - logSupport.log = logSupport.getLogger(name) - logSupport.log.info("Logging initialized for %s" % name) - - ############################################################ def main(parent_pid, sleep_time, advertize_rate, startup_dir, entry_names, group_id): - """ - GlideinFactoryEntryGroup main function + """GlideinFactoryEntryGroup main function Setup logging, monitoring, and configuration information. Starts the Entry group main loop and handles cleanup at shutdown. - @type parent_pid: int - @param parent_pid: The pid for the Factory daemon + Args: + parent_pid (int): The pid for the Factory daemon + sleep_time (int): The number of seconds to sleep between iterations + advertize_rate (int): The rate at which advertising should occur + startup_dir (str|Path): The "home" directory for the entry. + entry_names (str): Colon separated list with the names of the entries this process should work on + group_id (str): Group id, normally a number (with the "group_" prefix formes the group name), + It can change between Factory reconfigurations - @type sleep_time: int - @param sleep_time: The number of seconds to sleep between iterations - - @type advertize_rate: int - @param advertize_rate: The rate at which advertising should occur - - @type startup_dir: string - @param startup_dir: The "home" directory for the entry. - - @type entry_names: string - @param entry_names: The CVS name of the entries this process should work on - - @type group_id: string - @param group_id: Group id """ # Assume name to be group_[0,1,2] etc. Only required to create log_dir # where tasks common to the group will be stored. 
There is no other # significance to the group_name and number of entries supported by a group # can change between factory reconfigs - group_name = "group_%s" % group_id os.chdir(startup_dir) - # Setup the lock_dir + # Set up the lock_dir gfi.factoryConfig.lock_dir = os.path.join(startup_dir, "lock") # Read information about the glidein and frontends @@ -706,10 +669,10 @@ def main(parent_pid, sleep_time, advertize_rate, startup_dir, entry_names, group my_entries = {} glidein_entries = glideinDescript.data["Entries"] - # Initiate the logs + # Initialize log files for entry groups logSupport.log_dir = os.path.join(glideinDescript.data["LogDir"], "factory") - process_logs = eval(glideinDescript.data["ProcessLogs"]) - init_logs(group_name, logSupport.log_dir, process_logs) + logSupport.log = logSupport.get_logger_with_handlers(group_name, logSupport.log_dir, glideinDescript.data) + logSupport.log.info(f"Logging initialized for {group_name}") logSupport.log.info("Starting up") logSupport.log.info(f"Entries processed by {group_name}: {entry_names} ") @@ -752,19 +715,17 @@ def main(parent_pid, sleep_time, advertize_rate, startup_dir, entry_names, group def compile_pickle_data(entry, work_done): - """ - Extract the state of the entry after doing work + """Extract the state of the entry after doing work - @type entry: Entry - @param entry: Entry object + Args: + entry (Entry): Entry object + work_done (int): Work done info - @type work_done: int - @param work_done: Work done info + Returns: + dict: pickle-friendly version of the Entry (state of the Entry) """ - return_dict = entry.getState() return_dict["work_done"] = work_done - return return_dict @@ -780,4 +741,4 @@ def compile_pickle_data(entry, work_done): # Force integrity checks on all condor operations gfl.set_condor_integrity_checks() - main(sys.argv[1], int(sys.argv[2]), int(sys.argv[3]), sys.argv[4], sys.argv[5], sys.argv[6]) + main(int(sys.argv[1]), int(sys.argv[2]), int(sys.argv[3]), sys.argv[4], sys.argv[5], sys.argv[6]) diff --git a/factory/tools/OSG_autoconf.py b/factory/tools/OSG_autoconf.py index eb20c69c5..e1d653cff 100644 --- a/factory/tools/OSG_autoconf.py +++ b/factory/tools/OSG_autoconf.py @@ -30,6 +30,7 @@ write_to_xml_file, write_to_yaml_file, ) +from glideinwms.lib.util import is_true def parse_opts(): @@ -293,19 +294,6 @@ def get_entries_configuration(data): # -def is_true(param): - """Determine if the parameter passed as argument is true or false - - Args: - param: the parameter we need to determine if it is True or False. Can be any type. - - Returns: - bool: True if the the string representation of param is "true" - """ - - return str(param).lower() == "true" - - def sanitize(whitelist_info): """Sanitize the yaml file edited by factory operators. 
diff --git a/factory/tools/manual_glidein_submit.py b/factory/tools/manual_glidein_submit.py index 5aa4f8ddf..87e133db0 100644 --- a/factory/tools/manual_glidein_submit.py +++ b/factory/tools/manual_glidein_submit.py @@ -4,7 +4,7 @@ # SPDX-License-Identifier: Apache-2.0 import argparse -import logging +import logging # This script is using straight logging instead of logSupport or structlog import os import pprint import socket @@ -77,9 +77,9 @@ def parse_opts(): # Initialize logging if options.debug: logging.basicConfig(format="%(levelname)s: %(message)s") - logSupport.getLogger().setLevel(logging.DEBUG) + logging.getLogger().setLevel(logging.DEBUG) else: - logSupport.getLogger().setLevel(logging.INFO) + logging.getLogger().setLevel(logging.INFO) return options @@ -221,7 +221,7 @@ def main(): params["Report_Failed"] = "NEVER" # Now that we have everything submit the pilot! - logSupport.getLogger().setLevel(logging.DEBUG) + logging.getLogger().setLevel(logging.DEBUG) submitGlideins( entry_name, "test.test", @@ -232,7 +232,7 @@ def main(): client_web, params, status_sf, - log=logSupport.getLogger(), + log=logging.getLogger(), factoryConfig=factory_config, ) diff --git a/frontend/glideinFrontend.py b/frontend/glideinFrontend.py index dbbd49469..b790a7cb7 100755 --- a/frontend/glideinFrontend.py +++ b/frontend/glideinFrontend.py @@ -3,21 +3,12 @@ # SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC # SPDX-License-Identifier: Apache-2.0 -# -# Project: -# glideinWMS -# -# File Version: -# # Description: # This is the main of the glideinFrontend # # Arguments: # $1 = work_dir # -# Author: -# Igor Sfiligoi -# import fcntl @@ -529,20 +520,8 @@ def main(work_dir, action): logSupport.log_dir = os.path.join(frontendDescript.data["LogDir"], "frontend") # Configure frontend process logging - process_logs = eval(frontendDescript.data["ProcessLogs"]) - for plog in process_logs: - logSupport.add_processlog_handler( - "frontend", - logSupport.log_dir, - plog["msg_types"], - plog["extension"], - int(float(plog["max_days"])), - int(float(plog["min_days"])), - int(float(plog["max_mbytes"])), - int(float(plog["backup_count"])), - plog["compression"], - ) - logSupport.log = logSupport.getLogger("frontend") + logSupport.log = logSupport.get_logger_with_handlers("frontend", logSupport.log_dir, frontendDescript.data) + logSupport.log.info("Logging initialized") logSupport.log.debug("Frontend startup time: %s" % str(startup_time)) diff --git a/frontend/glideinFrontendElement.py b/frontend/glideinFrontendElement.py index 31bddb89a..df279a1d7 100755 --- a/frontend/glideinFrontendElement.py +++ b/frontend/glideinFrontendElement.py @@ -210,21 +210,9 @@ def configure(self): ) # Configure frontend group process logging - process_logs = eval(self.elementDescript.frontend_data["ProcessLogs"]) - for plog in process_logs: - logSupport.add_processlog_handler( - self.group_name, - logSupport.log_dir, - plog["msg_types"], - plog["extension"], - int(float(plog["max_days"])), - int(float(plog["min_days"])), - int(float(plog["max_mbytes"])), - int(float(plog["backup_count"])), - plog["compression"], - ) - - logSupport.log = logSupport.getLogger(self.group_name) + logSupport.log = logSupport.get_logger_with_handlers( + self.group_name, logSupport.log_dir, self.elementDescript.frontend_data + ) # We will be starting often, so reduce the clutter # logSupport.log.info("Logging initialized") @@ -244,8 +232,7 @@ def configure(self): if not proxy_plugins.get(self.elementDescript.merged_data["ProxySelectionPlugin"]): 
logSupport.log.warning( "Invalid ProxySelectionPlugin '%s', supported plugins are %s" - % (self.elementDescript.merged_data["ProxySelectionPlugin"]), - list(proxy_plugins.keys()), + % (self.elementDescript.merged_data["ProxySelectionPlugin"], list(proxy_plugins.keys())) ) return 1 self.x509_proxy_plugin = proxy_plugins[self.elementDescript.merged_data["ProxySelectionPlugin"]]( diff --git a/lib/logSupport.py b/lib/logSupport.py index 2e837a30d..127d576a0 100644 --- a/lib/logSupport.py +++ b/lib/logSupport.py @@ -1,17 +1,10 @@ # SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC # SPDX-License-Identifier: Apache-2.0 -# -# Project: -# glideinWMS -# -# File Version: -# # Description: log support module -# +# Uses the Python built-in logging to log anything anywhere +# and structlog to improve machine parsing -import codecs -import structlog import logging import os import re @@ -21,6 +14,10 @@ from logging.handlers import BaseRotatingHandler +import structlog + +from . import util + # Compressions depend on the available module COMPRESSION_SUPPORTED = {} try: @@ -37,7 +34,7 @@ pass -# create a place holder for a global logger (logging.Logger), +# Create a placeholder for a global logger (logging.Logger), # individual modules can create their own loggers if necessary log = None @@ -48,9 +45,14 @@ DEFAULT_FORMATTER = logging.Formatter("[%(asctime)s] %(levelname)s: %(message)s") DEBUG_FORMATTER = logging.Formatter("[%(asctime)s] %(levelname)s: %(module)s:%(lineno)d: %(message)s") -# Adding in the capability to use the built in Python logging Module -# This will allow us to log anything, anywhere -# +# A reminder of the logging levels: +# 0 NOTSET +# 10 DEBUG +# 20 INFO +# 30 WARN WARNING +# 40 ERROR +# 50 FATAL CRITICAL +# A message will be printed if it's level >= max(handler.level, logger.level) def alternate_log(msg): @@ -83,23 +85,20 @@ class GlideinHandler(BaseRotatingHandler): """ def __init__(self, filename, maxDays=1, minDays=0, maxMBytes=10, backupCount=5, compression=None): - """ - Initialize the Handler. We assume the following: + """Initialize the Handler. We assume the following: 1. Interval is in days 2. No special encoding 3. No delays are set 4. Timestamps are not in UTC - @type filename: string - @param filename: The full path of the log file - @type interval: int - @param interval: Number of days before file rotation - @type maxMBytes: int - @param maxMBytes: Maximum size of the logfile in MB before file rotation - @type backupCount: int - @param backupCount: Number of backups to keep - + Args: + filename (str|Path): The full path of the log file + maxDays (int): Max number of days before file rotation + minDays (int): Minimum number of days before file rotation (used with max bytes) + maxMBytes (int): Maximum size of the logfile in MB before file rotation (used with min days) + backupCount (int): Number of backups to keep + compression (str): Compression to use (gz, zip, depending on available compression modules) """ # Make dirs if logging directory does not exist if not os.path.exists(os.path.dirname(filename)): @@ -132,42 +131,45 @@ def __init__(self, filename, maxDays=1, minDays=0, maxMBytes=10, backupCount=5, self.rolloverAt = begin_interval_time + self.interval def shouldRollover(self, record, empty_record=False): - """ - Determine if rollover should occur. + """Determine if rollover should occur. Basically, we are combining the checks for size and time interval - @type record: string - @param record: The message that will be logged. 
+ Args: + record (str): The message that will be logged. + empty_record (bool): If False (default) count also `record` length to evaluate if a rollover is needed + + Returns: + bool: True if rollover should be performed, False otherwise @attention: Due to the architecture decision to fork "workers" we run into an issue where the child that was forked could cause a log - rotation. However the parent will never know and the parent's file + rotation. However, the parent will never know and the parent's file descriptor will still be pointing at the old log file (now renamed by the child). This will in turn cause the parent to immediately request a log rotate, which results in what appears to be truncated logs. To - handle this we add a flag to disable log rotation. By default this is + handle this we add a flag to disable log rotation. By default, this is set to False, but anywhere we want to fork a child (or in any object that will be forked) we set the flag to True. Then in the parent, we initiate a log function that will log and rotate if necessary. """ if disable_rotate: - return 0 + return False - do_timed_rollover = 0 + do_timed_rollover = False t = int(time.time()) if t >= self.rolloverAt: - do_timed_rollover = 1 + do_timed_rollover = True - do_size_rollover = 0 + do_size_rollover = False if self.maxBytes > 0: # are we rolling over? if empty_record: msg = "" else: - msg = "%s\n" % self.format(record) + msg = f"{self.format(record)}\n" self.stream.seek(0, 2) # due to non-posix-compliant Windows feature if self.stream.tell() + len(msg) >= self.maxBytes: - do_size_rollover = 1 + do_size_rollover = True return do_timed_rollover or do_size_rollover @@ -194,8 +196,9 @@ def getFilesToDelete(self): return result def doRollover(self): - """ - do a rollover; in this case, a date/time stamp is appended to the filename + """Do a rollover + + In this case, a date/time stamp is appended to the filename when the rollover happens. If there is a backup count, then we have to get a list of matching filenames, sort them and remove the one with the oldest suffix. @@ -259,25 +262,6 @@ def doRollover(self): except OSError as e: alternate_log("Log file gzip compression failed: %s" % e) - # TODO: remove if all OK - # in python 3 _open() and ancoding are safe to use all the time - # def _open_new_log(self): - # """ - # This function is here to bridge the gap between the old (python 2.4) way - # of opening new log files and the new (python 2.7) way. - # """ - # new_stream = None - # try: - # # pylint: disable=E1101 - # new_stream = self._open() - # # pylint: enable=E1101 - # except: - # if self.encoding: - # new_stream = codecs.open(self.baseFilename, self.mode, self.encoding) - # else: - # new_stream = open(self.baseFilename, self.mode) - # return new_stream - def check_and_perform_rollover(self): if self.shouldRollover(None, empty_record=True): self.doRollover() @@ -288,35 +272,60 @@ def roll_all_logs(): handler.check_and_perform_rollover() -def add_processlog_handler( - logger_name, log_dir, msg_types, extension, maxDays, minDays, maxMBytes, backupCount=5, compression=None +def get_processlog_handler( + log_file_name, log_dir, msg_types, extension, maxDays, minDays, maxMBytes, backupCount=5, compression=None ): - """ - Adds a handler to the GlideinLogger logger referenced by logger_name. 
- """ + """Return a configured handler for the GlideinLogger logger - logfile = os.path.expandvars(f"{log_dir}/{logger_name}.{extension.lower()}.log") + The file name is `"{log_dir}/{log_file_name}.{extension.lower()}.log"` and can include env variables - mylog = structlog.getLogger(logger_name) - mylog.setLevel(logging.DEBUG) + Args: + log_file_name (str): log file name (same as the logger name) + log_dir (str|Path): log directory + msg_types (str): log levels to include (comma separated list). Keywords are: + DEBUG,INFO,WARN,ERR, ADMIN or ALL (ADMIN and ALL both mean all the previous) + ADMIN adds also the "admin" prefix to the `log_file_name` + extension (str): file name extension + maxDays (int): Max number of days before file rotation + minDays (int): Minimum number of days before file rotation (used with max bytes) + maxMBytes (int): Maximum size of the logfile in MB before file rotation (used with min days) + backupCount (int): Number of backups to keep + compression (str): Compression to use (gz, zip, depending on available compression modules) + + Returns: + GlideinHandler: configured handler + """ + # Parameter adjustments + msg_types = msg_types.upper() + if "ADMIN" in msg_types: + msg_types = "DEBUG,INFO,WARN,ERR" + if not log_file_name.endswith("admin"): + log_file_name = log_file_name + "admin" + if "ALL" in msg_types: + msg_types = "DEBUG,INFO,WARN,ERR" + # File name + logfile = os.path.expandvars(f"{log_dir}/{log_file_name}.{extension.lower()}.log") handler = GlideinHandler(logfile, maxDays, minDays, maxMBytes, backupCount, compression) handler.setFormatter(DEFAULT_FORMATTER) + # Setting the handler logging level to DEBUG to control all from the logger level and the + # filter. This allows to pick any level combination, but may be less performant than a + # min level selection. + # TODO: Check if min level should be used instead and if the handler level should be logging.NOTSET (0) ? handler.setLevel(logging.DEBUG) - has_debug = False msg_type_list = [] for msg_type in msg_types.split(","): msg_type = msg_type.upper().strip() if msg_type == "INFO": msg_type_list.append(logging.INFO) - if msg_type == "WARN": + elif msg_type == "WARN": msg_type_list.append(logging.WARN) msg_type_list.append(logging.WARNING) - if msg_type == "ERR": + elif msg_type == "ERR": msg_type_list.append(logging.ERROR) msg_type_list.append(logging.CRITICAL) - if msg_type == "DEBUG": + elif msg_type == "DEBUG": msg_type_list.append(logging.DEBUG) has_debug = True @@ -327,13 +336,14 @@ def add_processlog_handler( handler.addFilter(MsgFilter(msg_type_list)) - mylog.addHandler(handler) handlers.append(handler) + return handler class MsgFilter(logging.Filter): - """ - Filter used in handling records for the info logs. + """Filter used in handling records for the info logs. 
+ + Default to logging.INFO """ msg_type_list = [logging.INFO] @@ -363,9 +373,6 @@ def format_dict(unformated_dict, log_format=" %-25s : %s\n"): return formatted_string -############################### -# structlog - # From structlog's suggested configurations - separate rendering, using same output structlog.configure( processors=[ @@ -413,6 +420,54 @@ def format_dict(unformated_dict, log_format=" %-25s : %s\n"): ) -def getLogger(name): - return structlog.getLogger(name) +def get_logging_logger(name): + return logging.getLogger(name) + + +def get_structlog_logger(name): + return structlog.get_logger(name) + +def get_logger_with_handlers(name, directory, config_data, level=logging.DEBUG): + """Create/retrieve a logger, set the handlers, set the starting logging level, and return the logger + + The file name is {name}.{plog["extension"].lower()}.log + + Args: + name (str): logger name (and file base name) + directory (str|Path): log directory + config_data (dict): logging configuration + (the "ProcessLogs" value evaluates to list of dictionary with process_logs section values) + level: logger's logging level (default: logging.DEBUG) + + Returns: + logging.Logger: configured logger + """ + # Contains a dictionary in a string + process_logs = eval(config_data["ProcessLogs"]) + is_structured = False + handlers_list = [] + for plog in process_logs: + # If at least one handler is structured, it will use structured logging + # All handlers should be consistent and use the same + is_structured = is_structured or util.is_true(plog["structured"]) + handler = get_processlog_handler( + name, + directory, + plog["msg_types"], + plog["extension"], + int(float(plog["max_days"])), + int(float(plog["min_days"])), + int(float(plog["max_mbytes"])), + int(float(plog["backup_count"])), + plog["compression"], + ) + handlers_list.append(handler) + if is_structured: + mylog = structlog.get_logger(name) + else: + mylog = logging.getLogger(name) + for handler in handlers_list: + mylog.addHandler(handler) + mylog.setLevel(level) + return mylog diff --git a/lib/util.py b/lib/util.py index 9d2814ded..c83822954 100644 --- a/lib/util.py +++ b/lib/util.py @@ -402,6 +402,22 @@ def safe_boolcomp(value, expected): return str(value).lower() == str(expected).lower() +# DEV NOTE: merging of creation.lib.CWParamDict.is_true() and factory.tools.OSG_autoconf.is_true() +# the first one required the argument to be a string. OK to drop that +def is_true(value): + """Case-insensitive "True" string parsing helper. + Return True for true (case-insensitive string representation matching), False otherwise. + + Args: + value: argument to evaluate as True or False. Can be any type. 
+ + Returns: + bool: True if the string representation of value is "true" + """ + + return str(value).lower() == "true" + + def str2bool(val): """Convert u"True" or u"False" to boolean or raise ValueError""" if val not in ["True", "False"]: diff --git a/lib/xmlParse.py b/lib/xmlParse.py index 2da431995..de3116708 100644 --- a/lib/xmlParse.py +++ b/lib/xmlParse.py @@ -1,17 +1,7 @@ # SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC # SPDX-License-Identifier: Apache-2.0 -# -# Project: -# glideinWMS -# -# File Version: -# # Description: general purpose XML decoder -# -# Author: -# Igor Sfiligoi (Mar 27th, 2007) -# import xml.dom.minidom diff --git a/requirements.txt b/requirements.txt index 08eba4cc4..f41a24f37 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,6 +4,7 @@ htcondor # classad # pure python 3rd party classad implementation m2crypto requests +structlog pyyaml pyjwt diff --git a/tools/gwms-logparser.py b/tools/gwms-logparser.py new file mode 100755 index 000000000..2586df4b6 --- /dev/null +++ b/tools/gwms-logparser.py @@ -0,0 +1,287 @@ +#!/usr/bin/env python3 + +# SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC +# SPDX-License-Identifier: Apache-2.0 + +import argparse +import fileinput +import json +import os + + +def arg_parser(): + epilog_text = """examples: + %(prog)s -f 0,1,2 -k msg mylog.txt + %(prog)s -f 3 mylog.txt""" + parser = argparse.ArgumentParser( + description="Simple log file parser to filter the records and print only part of them.", + epilog=epilog_text, + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + # need to use the default group, otherwise --help is in a separate group by itself + parser.add_argument( + "-f", + "--fields", + metavar="", + default="", + help="comma separated list of field numbers to select in the structured log message. Start from 0", + ) + parser.add_argument( + "-k", + "--keys", + metavar="", + default="", + help="comma separated list of keys to select in the structured log message", + ) + parser.add_argument( + "-i", + "--input_separator", + metavar="", + default=" - ", + help="input separator (string). Defaults to ' - '.", + ) + parser.add_argument( + "-s", + "--separator", + metavar="", + default=",", + help="output separator (string). Defaults to comma, ','", + ) + parser.add_argument( + "-c", + "--constraint", + metavar="", + action="append", + help="line selection constraint. Repeat the option for multiple constraints. Format: '(field # | key) value'." + " Integers are always considered field numbers.", + ) + parser.add_argument( + "-l", + "--loglevel", + metavar="", + action="store", + help="add constraint for log level. Same as '-c 3 '", + ) + parser.add_argument( + "-e", + "--logdirectory", + metavar="", + action="store", + default="/var/log/gwms", + help="log files directory. Default is '/var/log/gwms'", + ) + parser.add_argument("-v", "--verbose", action="store_true", help="Include exception messages") + parser.add_argument("-d", "--debug", action="store_true", help="Enable debug, e.g. to use for unit test") + positional = parser.add_argument_group("positional arguments") + positional.add_argument( + "logfile", + metavar="", + nargs="?", + help="log file to parse. File names without directory are searched also in the default log files directory." 
+ " Using stdin if '-' or if none is provided", + ) + return parser + + +def parse_constraints(constraints, loglevel=None): + """Parse and combine the constraints + + Args: + constraints (list|None): List of constraints + loglevel (str): Logging level, e.g. DEBUG, INFO, ... + + Returns: + dict: combined constraint dictionary + + """ + if not constraints and not loglevel: + return None + constraint = {"fields": [], "keys": []} + if loglevel: + # The log level is the 4th field in each log line + constraint["fields"].append((3, loglevel)) + if constraints: + for c in constraints: + i, v = c.split(maxsplit=1) + try: + constraint["fields"].append((int(i), v)) + except ValueError: + # TODO: add also time constraints: before, after, ... + constraint["keys"].append((i, v)) + return constraint + + +def matches_constraint(constraint, linelist, linedict): + """Return True if all constraints are marched + + Args: + constraint (dict|None): combined constraints + linelist (list): List of line fields + linedict (dict): Dictionary with structured elements + + Returns: + bool: True is all constraints are matched, False otherwise + + """ + if constraint is None: + return True + # if constraint is not None, it will always have "fields" and "keys" + if constraint["fields"]: + try: + if not all(linelist[i] == v for i, v in constraint["fields"]): + return False + except IndexError: + # In case records have variable number of fields (change in the log format) + return False + if constraint["keys"]: + try: + if not all(linedict[k] == v for k, v in constraint["keys"]): + return False + except KeyError: + # Some records may have keys not present in others. Only positive matches are True + return False + return True + + +def execute_command_from_args(argsparsed, logfile=None, constraint=None): + """Parse the log file as requested. + + Args: + argsparsed (Namespace): Parsed arguments from arg_parser in this file. + logfile (path): Log file path. + constraint (dict): Combined constraints dictionary + + Returns: + str: Output of the command. + """ + + outlines = [] + fields = [] + keys = [] + if argsparsed.fields: + fields = [int(i) for i in argsparsed.fields.split(",")] + if argsparsed.keys: + keys = argsparsed.keys.split(",") + if not fields and not keys: + raise ValueError("No field or key specified") + possible_dict_split = False + if argsparsed.input_separator == ": " or argsparsed.input_separator == ", ": + possible_dict_split = True + # If you would call fileinput.input() without files it would try to process all arguments. 
+ # We pass '-' as only file when argparse got no files which will cause fileinput to read from stdin + with fileinput.input(files=(logfile,) if logfile else ("-",)) as f: + for full_line in f: + line = full_line.strip() + if not line: + # Skip empty lines + continue + linelist = line.split(argsparsed.input_separator) + if possible_dict_split and linelist[-1][-1] == "}": # The line is stripped, no rstrip needed in last + i = 0 + while i < len(linelist): + if linelist[i].lstrip()[0] == "{": + break + i += 1 + if i < len(linelist): + # found split dictionary, trying to remediate + linelist[i] = argsparsed.input_separator.join(linelist[i:]) + linelist = linelist[: i + 1] + if linelist[-1][0] == "{": + linedict = json.loads(linelist[-1]) + else: + linedict = {} + if not matches_constraint(constraint, linelist, linedict): + continue + outline = "" + # Guaranteed to have at least one field or key + # IndexError and KeyError needed to cover irregular lines + for i in fields: + try: + outline += f"{argsparsed.separator}{linelist[i]}" + except IndexError: + outline += f"{argsparsed.separator}NOT_AVAILABLE" + for k in keys: + try: + outline += f"{argsparsed.separator}{linedict[k]}" + except KeyError: + outline += f"{argsparsed.separator}NOT_AVAILABLE" + print(outline[len(argsparsed.separator) :]) + if argsparsed.debug: + outlines.append(outline[len(argsparsed.separator) :]) + return "\n".join(outlines) + + +def main(args_to_parse=None): + """Main function for logparser + + Args: + args_to_parse (list, optional): If you pass a list of args, they will be used instead of sys.argv. + Defaults to None. + + Returns: + str: Parsing result + """ + parser = arg_parser() + args = parser.parse_args(args_to_parse) + if args.verbose: + if args.input_separator == ": " or args.input_separator == ", ": + print(f"Input separator '{args.input_separator}' could split the JSON dictionary failing the parsing.") + logfile = args.logfile + if ( + logfile + and logfile != "-" + and not os.path.exists(logfile) + and logfile == os.path.basename(logfile) + and os.path.exists(os.path.join(args.logdirectory, logfile)) + ): + logfile = os.path.join(args.logdirectory, logfile) + if args.verbose: + print(f"Found log file in the default log directory: '{logfile}'") + constraint = parse_constraints(args.constraint, args.loglevel) + try: + return execute_command_from_args(args, logfile, constraint) + except KeyboardInterrupt: # pragma: no cover + # When working in a pipe from stdin must be interrupted w/ a signal + return "" + except ValueError as e: + msg = ( + f"An error occurred while trying to parse '{logfile}'\n" + + "Please ensure that you requested some fields or keys." + ) + if args.verbose: + msg = f" {msg}\n{e}" + return msg + except FileNotFoundError as e: + msg = ( + f"An error occurred while trying to parse '{logfile}'\n" + + "Please ensure that the log file name is correct." + ) + if args.verbose: + msg = f" {msg}\n{e}" + return msg + except Exception as e: # pragma: no cover + msg = f"An error occurred while trying to parse '{logfile}'." + if args.verbose: + msg = f" {msg}\n{e}" + return msg + + +def console_scripts_main(args_to_parse=None): + """ + This is the entry point for the setuptools auto generated scripts. + Setuptools thinks a return from this function is an error message. + """ + msg = main(args_to_parse) + if "An error occurred while trying to parse" in msg: + return msg + # Regular output already printed line by line. 
+ # Returned here only for test purposes when bebug is enabled + + +if __name__ == "__main__": + mmsg = console_scripts_main() + # Checking the return value to emulate the behavior of the setuptools invoker + if mmsg: + if mmsg[0] == " ": + print(mmsg[1:]) + exit(1) diff --git a/tox.ini b/tox.ini index 9f5aa41b1..4576480ea 100644 --- a/tox.ini +++ b/tox.ini @@ -26,6 +26,7 @@ changedir = doc/api # A better practice is to specify a specific version of sphinx. deps = sphinx + structlog m2crypto # Everything must be in the dependency or whitelist (* is allowed here) whitelist_externals = diff --git a/unittests/test_logSupport.py b/unittests/test_lib_logSupport.py similarity index 93% rename from unittests/test_logSupport.py rename to unittests/test_lib_logSupport.py index c6a0a2045..c42910990 100755 --- a/unittests/test_logSupport.py +++ b/unittests/test_lib_logSupport.py @@ -4,16 +4,11 @@ # SPDX-License-Identifier: Apache-2.0 """ -Project: - glideinwms Purpose: test glideinwms/lib/logSupport.py -Author: - Anthony Tiradani, tiradani@fnal.gov """ -import logging import os import shutil import sys @@ -63,9 +58,9 @@ def load_log(self, section): log_name = str(self.config[section]["log_name"]) extension = str(self.config[section]["extension"]) msg_types = str(self.config[section]["msg_types"]) - max_days = float(self.config[section]["max_days"]) - min_days = float(self.config[section]["min_days"]) - max_mbytes = float(self.config[section]["max_mbytes"]) + max_days = int(float(self.config[section]["max_days"])) + min_days = int(float(self.config[section]["min_days"])) + max_mbytes = int(float(self.config[section]["max_mbytes"])) backupCount = 5 try: @@ -82,7 +77,7 @@ def load_log(self, section): log_dir = f"{self.log_base_dir}/{log_name}" os.makedirs(log_dir) - logSupport.add_processlog_handler( + handler = logSupport.get_processlog_handler( log_name, log_dir, msg_types, @@ -93,13 +88,12 @@ def load_log(self, section): backupCount=backupCount, compression=compression, ) - - return logSupport.getLogger(log_name), log_dir + log = logSupport.get_logging_logger(log_name) + log.addHandler(handler) + return log, log_dir def rotated_log_tests(self, section, log_dir): - log_file_name = "{}.{}.log".format( - str(self.config[section]["log_name"]), str(self.config[section]["extension"]) - ) + log_file_name = f'{str(self.config[section]["log_name"])}.{str(self.config[section]["extension"])}.log' # ls self.log_dir file_list = os.listdir(log_dir) # are there at least two files? 
diff --git a/unittests/worker_scripts/log_writer.py b/unittests/worker_scripts/log_writer.py index 9a523820d..faa1dea6c 100755 --- a/unittests/worker_scripts/log_writer.py +++ b/unittests/worker_scripts/log_writer.py @@ -3,13 +3,12 @@ # SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC # SPDX-License-Identifier: Apache-2.0 -import logging import os import sys import yaml -# this shoud be not needed #was-pylint: disable=import-error +# this should be not needed #was-pylint: disable=import-error import glideinwms.lib.logSupport as logSupport from glideinwms.unittests.unittest_utils import create_random_string @@ -31,15 +30,16 @@ def main(): log_name = str(config[section]["log_name"]) extension = str(config[section]["extension"]) msg_types = str(config[section]["msg_types"]) - max_days = float(config[section]["max_days"]) - min_days = float(config[section]["min_days"]) - max_mbytes = float(config[section]["max_mbytes"]) + max_days = int(float(config[section]["max_days"])) + min_days = int(float(config[section]["min_days"])) + max_mbytes = int(float(config[section]["max_mbytes"])) backupCount = 5 compression = "" + structured = False - log_dir = "/tmp/%s" % log_name + log_dir = os.path.join("/tmp", log_name) - logSupport.add_processlog_handler( + handler = logSupport.get_processlog_handler( log_name, log_dir, msg_types, @@ -50,9 +50,13 @@ def main(): backupCount=backupCount, compression=compression, ) + if structured: + log = logSupport.get_structlog_logger(log_name) + else: + log = logSupport.get_logging_logger(log_name) + log.addHandler(handler) - log = logSupport.getLogger(log_name) - log.info("%s\n" % create_random_string(length=2048)) + log.info(f"{create_random_string(length=2048)}\n") return 0 except: