diff --git a/creation/lib/cWDictFile.py b/creation/lib/cWDictFile.py index 80159e923..85ba23175 100644 --- a/creation/lib/cWDictFile.py +++ b/creation/lib/cWDictFile.py @@ -1,16 +1,12 @@ # SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC # SPDX-License-Identifier: Apache-2.0 -# -# Project: -# glideinWMS -# -# File Version: -# # Description: # Classes needed to handle dictionary files # And other support functions -# + +# TODO: in this file there are several directory creation functions. Use what's available in +# Python 3 and reduce custom code import copy import io @@ -209,7 +205,7 @@ def save( with open(filepath, "wb") as fd: self.save_into_fd(fd, sort_keys, set_readonly, reset_changed, want_comments) except OSError as e: - raise DictFileError(f"Error creating or writing to {filepath}: {e}") + raise DictFileError(f"Error creating or writing to {filepath}: {e}") from e # ensure that the file permissions are 644 # This is to minimize a security risk where we load python code from @@ -257,7 +253,7 @@ def save_into_fd(self, fd, sort_keys=None, set_readonly=True, reset_changed=True fd.write(b"%s\n" % footer.encode(BINARY_ENCODING)) except AttributeError as e: # .encode() attribute may be missing because bytes are passed - raise DictFileError(f"str received while writing {self.fname} ({type(self).__name__}): {e}") + raise DictFileError(f"str received while writing {self.fname} ({type(self).__name__}): {e}") from e if set_readonly: self.set_readonly(True) @@ -336,7 +332,7 @@ def load(self, dir=None, fname=None, change_self=True, erase_first=True, set_not with fd: self.load_from_fd(fd, erase_first, set_not_changed) except RuntimeError as e: - raise DictFileError(f"File {filepath}: {str(e)}") + raise DictFileError(f"File {filepath}: {str(e)}") from e if change_self: self.dir = dir @@ -372,7 +368,7 @@ def load_from_fd(self, fd, erase_first=True, set_not_changed=True): try: self.parse_val(line.decode(BINARY_ENCODING)) except RuntimeError as e: - raise 
DictFileError("Line %i: %s" % (idx, str(e))) + raise DictFileError("Line %i: %s" % (idx, str(e))) from e if set_not_changed: self.changed = False # the memory copy is now same as the one on disk @@ -394,7 +390,7 @@ def load_from_str(self, data, erase_first=True, set_not_changed=True): try: self.load_from_fd(fd, erase_first, set_not_changed) except RuntimeError as e: - raise DictFileError("Memory buffer: %s" % (str(e))) + raise DictFileError("Memory buffer: %s" % (str(e))) from e return def is_equal(self, other, compare_dir=False, compare_fname=False, compare_keys=None): @@ -913,7 +909,7 @@ def add_from_file(self, key, val, filepath, allow_overwrite=False): with open(filepath, "rb") as fd: self.add_from_fd(key, val, fd, allow_overwrite) except OSError as e: - raise DictFileError("Could not open file or read from it: %s" % filepath) + raise DictFileError("Could not open file or read from it: %s" % filepath) from e def format_val(self, key, want_comments): """Print lines: only the file name (key) the first item of the value tuple if not None @@ -988,12 +984,12 @@ def save_files(self, allow_overwrite=False): try: fd = open(filepath, "wb") except OSError as e: - raise DictFileError("Could not create file %s" % filepath) + raise DictFileError("Could not create file %s" % filepath) from None try: with fd: fd.write(fdata) except OSError as e: - raise DictFileError("Error writing into file %s" % filepath) + raise DictFileError("Error writing into file %s" % filepath) from None class FileDictFile(SimpleFileDictFile): @@ -1125,8 +1121,10 @@ def add( # TODO: check parameters!! try: int(val[2]) # to check if is integer. 
Period must be int or convertible to int - except (ValueError, IndexError): - raise DictFileError("Values '%s' not (real_fname,cache/exec,period,prefix,cond_download,config_out)" % val) + except (ValueError, IndexError) as e: + raise DictFileError( + "Values '%s' not (real_fname,cache/exec,period,prefix,cond_download,config_out)" % val + ) from e if len(val) == self.DATA_LENGTH: # Alt: return self.add_from_str(key, val[:self.DATA_LENGTH-1], val[self.DATA_LENGTH-1], allow_overwrite) @@ -1411,6 +1409,7 @@ def add_extended( elif user_name == True: user_name = "+" + # TODO: check .add end set allow_overwrite=False above instead allow_overwrite=0 self.add(key, (type_str, val_default, condor_name, req_str, export_condor_str, user_name), allow_overwrite) def format_val(self, key, want_comments): @@ -1551,7 +1550,7 @@ def save( with open(filepath, "wb") as fd: self.save_into_fd(fd, sort_keys, set_readonly, reset_changed, want_comments) except OSError as e: - raise RuntimeError(f"Error creating or writing to {filepath}: {e}") + raise RuntimeError(f"Error creating or writing to {filepath}: {e}") from e chmod(filepath, 0o755) return @@ -1597,17 +1596,17 @@ def __init__(self, dir, dir_name): self.dir = dir self.dir_name = dir_name + # TODO: there is os.mkdirs with fail_if_exists def create_dir(self, fail_if_exists=True): if os.path.isdir(self.dir): if fail_if_exists: raise RuntimeError(f"Cannot create {self.dir_name} dir {self.dir}, already exists.") else: return False # already exists, nothing to do - try: os.mkdir(self.dir) except OSError as e: - raise RuntimeError(f"Failed to create {self.dir_name} dir: {e}") + raise RuntimeError(f"Failed to create {self.dir_name} dir: {e}") from None return True def delete_dir(self): @@ -1619,6 +1618,7 @@ def __init__(self, dir, chmod, dir_name): simpleDirSupport.__init__(self, dir, dir_name) self.chmod = chmod + # TODO: there is os.mkdirs with fail_if_exists def create_dir(self, fail_if_exists=True): if os.path.isdir(self.dir): if 
fail_if_exists: @@ -1629,7 +1629,7 @@ def create_dir(self, fail_if_exists=True): try: os.mkdir(self.dir, self.chmod) except OSError as e: - raise RuntimeError(f"Failed to create {self.dir_name} dir: {e}") + raise RuntimeError(f"Failed to create {self.dir_name} dir: {e}") from None return True @@ -1641,6 +1641,7 @@ def __init__(self, target_dir, symlink, dir_name): self.symlink = symlink self.dir_name = dir_name + # TODO: there is os.mkdirs with fail_if_exists, check if something similar for symlink def create_dir(self, fail_if_exists=True): if os.path.islink(self.symlink): if fail_if_exists: @@ -1651,7 +1652,7 @@ def create_dir(self, fail_if_exists=True): try: os.symlink(self.target_dir, self.symlink) except OSError as e: - raise RuntimeError(f"Failed to create {self.dir_name} symlink: {e}") + raise RuntimeError(f"Failed to create {self.dir_name} symlink: {e}") from None return True def delete_dir(self): @@ -1667,6 +1668,7 @@ def __init__(self): def add_dir_obj(self, dir_obj): self.dir_list.append(dir_obj) + # TODO: there is os.mkdirs with fail_if_exists def create_dirs(self, fail_if_exists=True): created_dirs = [] try: diff --git a/creation/lib/cWParams.py b/creation/lib/cWParams.py index ab5197348..a7a61b28d 100644 --- a/creation/lib/cWParams.py +++ b/creation/lib/cWParams.py @@ -267,7 +267,7 @@ def __init__(self, usage_prefix, src_dir, argv): # create derived values self.derive() except RuntimeError as e: - raise RuntimeError("Unexpected error occurred loading the configuration file.\n\n%s" % e) + raise RuntimeError("Unexpected error occurred loading the configuration file.\n\n%s" % e) from e def derive(self): return # by default nothing... 
children should overwrite this @@ -308,9 +308,9 @@ def load_file(self, fname): try: self.data = xmlParse.xmlfile2dict(fname, use_ord_dict=True) except xml.parsers.expat.ExpatError as e: - raise RuntimeError("XML error parsing config file: %s" % e) + raise RuntimeError("XML error parsing config file: %s" % e) from e except OSError as e: - raise RuntimeError("Config file error: %s" % e) + raise RuntimeError("Config file error: %s" % e) from e self.subparams = self.get_subparams_class()(self.data) return diff --git a/creation/lib/cvWParamDict.py b/creation/lib/cvWParamDict.py index 0dedd3f0e..326712a21 100644 --- a/creation/lib/cvWParamDict.py +++ b/creation/lib/cvWParamDict.py @@ -56,7 +56,7 @@ def translate_match_attrs(loc_str, match_attrs_name, match_attrs): try: translated_attrs[attr_name] = translations[attr_type] except KeyError as e: - raise RuntimeError(f"Invalid {loc_str} {match_attrs_name} attr type '{attr_type}'") + raise RuntimeError(f"Invalid {loc_str} {match_attrs_name} attr type '{attr_type}'") from e return translated_attrs @@ -106,9 +106,9 @@ def validate_match(loc_str, match_str, factory_attrs, job_attrs, attr_dict, poli match_obj = compile(match_str, "", "exec") eval(match_obj, env) except KeyError as e: - raise RuntimeError(f"Invalid {loc_str} match_expr '{match_str}': Missing attribute {e}") + raise RuntimeError(f"Invalid {loc_str} match_expr '{match_str}': Missing attribute {e}") from e except Exception as e: - raise RuntimeError(f"Invalid {loc_str} match_expr '{match_str}': {e}") + raise RuntimeError(f"Invalid {loc_str} match_expr '{match_str}': {e}") from e # Validate the match(job, glidein) from the policy modules for pmodule in policy_modules: @@ -118,9 +118,9 @@ def validate_match(loc_str, match_str, factory_attrs, job_attrs, attr_dict, poli except KeyError as e: raise RuntimeError( f"Error in {loc_str} policy module's {pmodule.name}.match(job, glidein): Missing attribute {e}" - ) + ) from e except Exception as e: - raise 
RuntimeError(f"Error in {loc_str} policy module's {pmodule.name}.match(job, glidein): {e}") + raise RuntimeError(f"Error in {loc_str} policy module's {pmodule.name}.match(job, glidein): {e}") from e return @@ -770,7 +770,7 @@ def add_attr_unparsed(attr_name, params, dicts, description): try: add_attr_unparsed_real(attr_name, params, dicts) except RuntimeError as e: - raise RuntimeError(f"Error parsing attr {description}[{attr_name}]: {str(e)}") + raise RuntimeError(f"Error parsing attr {description}[{attr_name}]: {str(e)}") from e def validate_attribute(attr_name, attr_val): diff --git a/creation/lib/xmlConfig.py b/creation/lib/xmlConfig.py index 95d4641ba..12c528199 100644 --- a/creation/lib/xmlConfig.py +++ b/creation/lib/xmlConfig.py @@ -217,7 +217,7 @@ def check_sort_key(self): try: LIST_TAGS[self.tag](child) except KeyError as e: - raise RuntimeError(child.err_str('missing "%s" attribute' % e)) + raise RuntimeError(child.err_str('missing "%s" attribute' % e)) from None # this creates references into other rather than deep copies for efficiency def merge(self, other): @@ -293,7 +293,7 @@ def validate(self): try: period = int(self["period"]) except ValueError: - raise RuntimeError(self.err_str("period must be an int")) + raise RuntimeError(self.err_str("period must be an int")) from None if is_exec + is_wrapper + is_tar > 1: raise RuntimeError(self.err_str('must be exactly one of type "executable", "wrapper", or "untar"')) diff --git a/creation/reconfig_glidein b/creation/reconfig_glidein index 413297b32..36ba7a743 100755 --- a/creation/reconfig_glidein +++ b/creation/reconfig_glidein @@ -292,7 +292,7 @@ if __name__ == "__main__": main(conf, update_scripts, update_def_cfg, comment=comment) except RuntimeError as e: - raise ReconfigError(str(e)) + raise ReconfigError(str(e)) from e except ReconfigError as re: print2(re) diff --git a/factory/glideFactory.py b/factory/glideFactory.py index 6260ddcc1..79404a9b3 100755 --- a/factory/glideFactory.py +++ 
b/factory/glideFactory.py @@ -3,21 +3,11 @@ # SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC # SPDX-License-Identifier: Apache-2.0 -# -# Project: -# glideinWMS -# -# File Version: -# # Description: # This is the main of the glideinFactory # # Arguments: # $1 = glidein submit_dir -# -# Author: -# Igor Sfiligoi (Apr 9th 2007 - moved old glideFactory to glideFactoryEntry) -# import copy import fcntl @@ -168,12 +158,11 @@ def write_descript(glideinDescript, frontendDescript, monitor_dir): def generate_log_tokens(startup_dir, glideinDescript): - """ - Generate the JSON Web Tokens used to authenticate with the remote HTTP log server. + """Generate the JSON Web Tokens used to authenticate with the remote HTTP log server. Note: tokens are generated for disabled entries too Args: - startup_dir: Path to the glideinsubmit directory + startup_dir (str|Path): Path to the glideinsubmit directory glideinDescript: Factory config's glidein description object Returns: @@ -805,7 +794,6 @@ def increase_process_limit(new_limit=10000): logSupport.log.info("Raised RLIMIT_NPROC from %d to %d" % (soft, new_limit)) except ValueError: logSupport.log.info("Warning: could not raise RLIMIT_NPROC " "from %d to %d" % (soft, new_limit)) - else: logSupport.log.info("RLIMIT_NPROC already %d, not changing to %d" % (soft, new_limit)) @@ -969,8 +957,10 @@ def main(startup_dir): % (pid_obj.mypid, err) ) raise + # TODO: use a single try.. except.. finally when moving to Python 3.8 or above (dropping 3.6) try: try: + # Spawn the EntryGroup processes handling the work spawn( sleep_time, advertize_rate, @@ -981,11 +971,13 @@ def main(startup_dir): restart_attempts, restart_interval, ) - except KeyboardInterrupt as e: - raise e + # No need for special handling of KeyboardInterrupt + # It is not in Exception so it will remain un-handled + # except KeyboardInterrupt as e: + # raise e # raise e is re-raising a different exceptoin from here? Use raise instead? 
except HUPException as e: # inside spawn(), outermost try will catch HUPException, - # then the code within the finally will run + # then the code within the finally clouse of spawn() will run # which will terminate glideFactoryEntryGroup children processes # and then the following 3 lines will be executed. logSupport.log.info("Received SIGHUP, reload config uid = %d" % os.getuid()) @@ -1004,8 +996,12 @@ def main(startup_dir): "/etc/gwms-factory/glideinWMS.xml", ], ) - except: - logSupport.log.exception("Exception occurred spawning the factory: ") + # TODO: verify. This is invoking reconfig but how is the Factory/EntryGroups re-started? + # Should there be an infinite loop around spawn? + except Exception as e: + # Exception excludes SystemExit, KeyboardInterrupt, GeneratorExit + # Log the exception and exit + logSupport.log.exception("Exception occurred spawning the factory: ", e) finally: pid_obj.relinquish() @@ -1016,14 +1012,18 @@ def main(startup_dir): # ############################################################ class HUPException(Exception): + """Used to catch SIGHUP and trigger a reconfig""" + pass def termsignal(signr, frame): + """Signal handler. Raise KeyboardInterrupt when receiving SIGTERN or SIGQUIT""" raise KeyboardInterrupt("Received signal %s" % signr) def hupsignal(signr, frame): + """Signal handler. Raise HUPException when receiving SIGHUP. 
Used to trigger a reconfig and restart.""" signal.signal(signal.SIGHUP, signal.SIG_IGN) raise HUPException("Received signal %s" % signr) diff --git a/factory/glideFactoryCredentials.py b/factory/glideFactoryCredentials.py index 94ebd438f..5a71ca01c 100644 --- a/factory/glideFactoryCredentials.py +++ b/factory/glideFactoryCredentials.py @@ -1,14 +1,6 @@ # SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC # SPDX-License-Identifier: Apache-2.0 -# -# Project: -# glideinWMS -# -# File Version: -# - - import base64 import gzip import io @@ -191,11 +183,11 @@ def process_global(classad, glidein_descript, frontend_descript): msg = "updating credential for %s" % username logSupport.log.debug(msg) update_credential_file(username, cred_id, cred_data, request_clientname) - except: + except Exception as e: logSupport.log.debug(f"\nclassad {classad}\nfrontend_descript {frontend_descript}\npub_key_obj {pub_key_obj})") error_str = "Error occurred processing the globals classads." logSupport.log.exception(error_str) - raise CredentialError(error_str) + raise CredentialError(error_str) from e def get_key_obj(pub_key_obj, classad): @@ -211,11 +203,11 @@ def get_key_obj(pub_key_obj, classad): try: sym_key_obj = pub_key_obj.extract_sym_key(classad["ReqEncKeyCode"]) return sym_key_obj - except: + except Exception as e: logSupport.log.debug(f"\nclassad {classad}\npub_key_obj {pub_key_obj}\n") error_str = "Symmetric key extraction failed." logSupport.log.exception(error_str) - raise CredentialError(error_str) + raise CredentialError(error_str) from e else: error_str = "Classad does not contain a key. We cannot decrypt." 
raise CredentialError(error_str) diff --git a/factory/glideFactoryDowntimeLib.py b/factory/glideFactoryDowntimeLib.py index eba605d17..3a2906947 100644 --- a/factory/glideFactoryDowntimeLib.py +++ b/factory/glideFactoryDowntimeLib.py @@ -1,19 +1,9 @@ # SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC # SPDX-License-Identifier: Apache-2.0 -# -# Project: -# glideinWMS -# -# File Version: -# # Description: # This module implements the functions needed to # handle the downtimes -# -# Author: -# Igor Sfiligoi (July 7th 2008) -# import fcntl import os.path @@ -22,22 +12,38 @@ from glideinwms.lib import timeConversion -# -# Handle a downtime file -# -# Each line in the file has two entries -# start_time end_time -# expressed in utime -# if end_time is None, the downtime does not have a set expiration -# (i.e. it runs forever) class DowntimeFile: + """Handle a downtime file + + space separated file with downtime information + Each line has space-separated values + The first line is a comment (starts with #) and header line : + "#%-29s %-30s %-20s %-30s %-20s # %s\n" % ("Start", "End", "Entry", "Frontend", "Sec_Class", "Comment") + Each non-comment line in the file has at least two entries + start_time end_time + expressed in utime + if end_time is None, the downtime does not have a set expiration + (i.e. it runs forever) + Additional entries are used to limit the scope (Entry, Frontend, Sec_Class) and to add a comment + """ + def __init__(self, fname): self.fname = fname - # return a list of downtime periods (utimes) - # a value of None idicates "forever" - # for example: [(1215339200,1215439170),(1215439271,None)] def read(self, raise_on_error=False): + """Return a list of downtime periods (utimes) + a value of None idicates "forever" + for example: `[(1215339200,1215439170),(1215439271,None)]` + + Args: + raise_on_error (bool): if not True mask all the exceptions + + Returns: + list: list of downtime periods [(start, end), ...] 
+ a value of None idicates "forever", no start time, or no end time + timestamps are in seconds from epoch (utime) + `[]` returned when `raise_on_error` is False (default) and there is no downtime file + """ return read(self.fname, raise_on_error) def printDowntime(self, entry="Any", check_time=None): @@ -49,14 +55,27 @@ def checkDowntime(self, entry="Any", frontend="Any", security_class="Any", check self.downtime_comment = msg return rtn - # add a scheduled downtime def addPeriod( self, start_time, end_time, entry="All", frontend="All", security_class="All", comment="", create_if_empty=True ): + """Add a scheduled downtime + Maintin a lock (fcntl.LOCK_EX) on the downtime file while writing + entry, frontend, and security_class default to "All" + + Args: + start_time (int): start time in seconds from Epoch + end_time (int): end time in seconds from Epoch + entry (str): entry name or "All" + frontend (str): frontend name os "All" + security_class (str): security class name or "All" + comment (str): comment to add + create_if_empty (bool): if False, raise FileNotFoundError if there is not already a downtime file + + Returns: + int: 0 + """ return addPeriod(self.fname, start_time, end_time, entry, frontend, security_class, comment, create_if_empty) - # start a downtime that we don't know when it will end - # if start_time==None, use current time def startDowntime( self, start_time=None, @@ -67,17 +86,55 @@ def startDowntime( comment="", create_if_empty=True, ): + """start a downtime that we don't know when it will end + if start_time==None, use current time + entry, frontend, and security_class default to "All" + + Args: + start_time (int|None): start time in seconds from Epoch + end_time (int|None): end time in seconds from Epoch + entry (str): entry name or "All" + frontend (str): frontend name os "All" + security_class (str): security class name or "All" + comment (str): comment to add + create_if_empty (bool): if False, raise FileNotFoundError if there is not 
already a downtime file + + Returns: + + """ if start_time is None: start_time = int(time.time()) return self.addPeriod(start_time, end_time, entry, frontend, security_class, comment, create_if_empty) - # end a downtime (not a scheduled one) - # if end_time==None, use current time def endDowntime(self, end_time=None, entry="All", frontend="All", security_class="All", comment=""): + """End a downtime (not a scheduled one) + if end_time==None, use current time + entry, frontend, and security_class default to "All" + + Args: + end_time (int|None): end time in seconds from Epoch. If end_time==None, default, use current time + entry (str): entry name or "All" + frontend (str): frontend name os "All" + security_class (str): security class name or "All" + comment (str): comment to add + + Returns: + int: number of records closed + """ return endDowntime(self.fname, end_time, entry, frontend, security_class, comment) - # if cut time<0, use current_time-abs(cut_time) def purgeOldPeriods(self, cut_time=None, raise_on_error=False): + """Purge old downtime periods + if cut time<0, use current_time-abs(cut_time) + + Args: + cut_time (int): cut time in seconds from epoch, if cut_time==None or 0, use current time, + if cut time<0, use current_time-abs(cut_time) + raise_on_error (bool): if not True, mask all exceptions + + Returns: + int: number of records purged + """ return purgeOldPeriods(self.fname, cut_time, raise_on_error) @@ -85,17 +142,29 @@ def purgeOldPeriods(self, cut_time=None, raise_on_error=False): # INTERNAL - Do not use ############################# -# return a list of downtime periods (utimes) -# a value of None idicates "forever" -# for example: [(1215339200,1215439170),(1215439271,None)] + def read(fname, raise_on_error=False): + """Return a list of downtime periods (utimes) + a value of None idicates "forever" + for example: `[(1215339200,1215439170),(1215439271,None)]` + + Args: + fname (str|Path): downtimes file + raise_on_error (bool): if not True mask all 
the exceptions + + Returns: + list: list of downtime periods [(start, end), ...] + a value of None idicates "forever", no start time, or no end time + timestamps are in seconds from epoch (utime) + `[]` returned when `raise_on_error` is False (default) and there is no file + """ try: with open(fname) as fd: fcntl.flock(fd, fcntl.LOCK_SH) lines = fd.readlines() except OSError as e: if raise_on_error: - raise + raise # re-rise the exact same exception like no except else: return [] # no file -> no downtimes @@ -120,7 +189,7 @@ def read(fname, raise_on_error=False): start_time = timeConversion.extractISO8601_Local(arr[0]) except ValueError as e: if raise_on_error: - raise ValueError("%s:%i: 1st element: %s" % (fname, lnr, e)) + raise ValueError("%s:%i: 1st element: %s" % (fname, lnr, e)) from e else: continue # ignore errors @@ -131,7 +200,7 @@ def read(fname, raise_on_error=False): end_time = timeConversion.extractISO8601_Local(arr[1]) except ValueError as e: if raise_on_error: - raise ValueError("%s:%i: 2nd element: %s" % (fname, lnr, e)) + raise ValueError("%s:%i: 2nd element: %s" % (fname, lnr, e)) from e else: continue # ignore errors @@ -191,8 +260,22 @@ def printDowntime(fname, entry="Any", check_time=None): print("%-30s Up \tAll:All" % (entry)) -# if check_time==None, use current time def checkDowntime(fname, entry="Any", frontend="Any", security_class="Any", check_time=None): + """Check if there is a downtime at `check_time` + if check_time==None, use current time + "All" (default) is a wildcard for entry, frontend and security_class + + Args: + fname (str|Path): Downtime file + entry (str): entry name or "All" + frontend (str): frontend name os "All" + security_class (str): security class name or "All" + check_time: time to check in seconds from epoch, if check_time==None, use current time + + Returns: + (str, bool): tuple with the comment string and True is in downtime + or ("", False) is not in downtime + """ if check_time is None: check_time = 
int(time.time()) time_list = read(fname) @@ -224,9 +307,25 @@ def checkDowntime(fname, entry="Any", frontend="Any", security_class="Any", chec def addPeriod( fname, start_time, end_time, entry="All", frontend="All", security_class="All", comment="", create_if_empty=True ): + """Add a downtime period + Maintin a lock (fcntl.LOCK_EX) on the downtime file while writing + + Args: + fname (str|Path): downtime file + start_time (int): start time in seconds from Epoch + end_time (int): end time in seconds from Epoch + entry (str): entry name or "All" + frontend (str): frontend name os "All" + security_class (str): security class name or "All" + comment (str): comment to add + create_if_empty (bool): if False, raise FileNotFoundError if there is not already a downtime file + + Returns: + int: 0 + """ exists = os.path.isfile(fname) if (not exists) and (not create_if_empty): - raise OSError("[Errno 2] No such file or directory: '%s'" % fname) + raise FileNotFoundError("[Errno 2] No such file or directory: '%s'" % fname) comment = comment.replace("\n", " ") comment = comment.replace("\r", " ") @@ -256,9 +355,20 @@ def addPeriod( return 0 -# if cut_time==None or 0, use current time -# if cut time<0, use current_time-abs(cut_time) def purgeOldPeriods(fname, cut_time=None, raise_on_error=False): + """Purge old rules using cut_time + if cut_time==None or 0, use current time + if cut time<0, use current_time-abs(cut_time) + + Args: + fname (str|Path): downtime file + cut_time (int): cut time in seconds from epoch, if cut_time==None or 0, use current time, + if cut time<0, use current_time-abs(cut_time) + raise_on_error (bool): if not True, mask all exceptions + + Returns: + int: number of records purged + """ if cut_time is None: cut_time = int(time.time()) elif cut_time <= 0: @@ -268,7 +378,7 @@ def purgeOldPeriods(fname, cut_time=None, raise_on_error=False): fd = open(fname, "r+") except OSError as e: if raise_on_error: - raise + raise # re-rise the exact same exception like no 
except else: return 0 # no file -> nothing to purge with fd: @@ -303,7 +413,7 @@ def purgeOldPeriods(fname, cut_time=None, raise_on_error=False): end_time = timeConversion.extractISO8601_Local(arr[1]) except ValueError as e: if raise_on_error: - raise ValueError("%s:%i: 2nd element: %s" % (fname, lnr, e)) + raise ValueError("%s:%i: 2nd element: %s" % (fname, lnr, e)) from e else: outlines.append(long_line) continue # unknown, pass on @@ -328,9 +438,22 @@ def purgeOldPeriods(fname, cut_time=None, raise_on_error=False): return cut_nr -# end a downtime (not a scheduled one) -# if end_time==None, use current time def endDowntime(fname, end_time=None, entry="All", frontend="All", security_class="All", comment=""): + """End a downtime (not a scheduled one) + if end_time==None, use current time + "All" (default) is a wildcard for entry, frontend and security_class + + Args: + fname (str|Path): Downtime file + end_time (int): end time in seconds from epoch, if end_time==None, use current time + entry (str): entry name or "All" + frontend (str): frontend name os "All" + security_class (str): security class name or "All" + comment (str): comment to add + + Returns: + int: Number of downtime records closed + """ comment = comment.replace("\r", " ") comment = comment.replace("\n", " ") if end_time is None: diff --git a/factory/glideFactoryEntry.py b/factory/glideFactoryEntry.py index 6973537d4..dce9d187a 100644 --- a/factory/glideFactoryEntry.py +++ b/factory/glideFactoryEntry.py @@ -3,15 +3,8 @@ # SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC # SPDX-License-Identifier: Apache-2.0 -# -# Project: -# glideinWMS -# -# File Version: -# # Description: # Entry class -# import copy @@ -375,7 +368,7 @@ def queryQueuedGlideins(self): self.log.info("Schedd %s not responding, skipping" % self.scheddName) tb = traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2]) self.log.warning("getCondorQData failed, traceback: %s" % "".join(tb)) - raise e + 
raise def glideinsWithinLimits(self, condorQ): """ diff --git a/factory/glideFactoryInterface.py b/factory/glideFactoryInterface.py index fc2013f26..b25008a23 100644 --- a/factory/glideFactoryInterface.py +++ b/factory/glideFactoryInterface.py @@ -1,19 +1,9 @@ # SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC # SPDX-License-Identifier: Apache-2.0 -# -# Project: -# glideinWMS -# -# File Version: -# # Description: # This module implements the functions needed to advertize # and get commands from the Collector -# -# Author: -# Igor Sfiligoi (Sept 7th 2006) -# import fcntl import os diff --git a/factory/glideFactoryLib.py b/factory/glideFactoryLib.py index c20c20fe1..f5111f748 100644 --- a/factory/glideFactoryLib.py +++ b/factory/glideFactoryLib.py @@ -1571,21 +1571,32 @@ def escapeParam(param_str): def executeSubmit(log, factoryConfig, username, schedd, exe_env, submitFile): + """Submit Glideins using the `condor_submit` command in a custom environment + + Args: + log: logger to use + factoryConfig: + username: + schedd (str): HTCSS schedd name + exe_env (list): environment list + submitFile (str): path os the submit file + + Returns: + list: list of submitted Glideins + """ # check to see if the username for the proxy is # same as the factory username + submit_out = [] try: submit_out = condorExe.iexe_cmd(f"condor_submit -name {schedd} {submitFile}", child_env=env_list2dict(exe_env)) - except condorExe.ExeError as e: - submit_out = [] msg = "condor_submit failed: %s" % str(e) log.error(msg) - raise RuntimeError(msg) + raise RuntimeError(msg) from e except Exception as e: - submit_out = [] msg = "condor_submit failed: Unknown error: %s" % str(e) log.error(msg) - raise RuntimeError(msg) + raise RuntimeError(msg) from e return submit_out @@ -1602,17 +1613,17 @@ def submitGlideins( log=logSupport.log, factoryConfig=None, ): - - """Submit the glidein + """Submit the Glideins + Calls `executeSubmit` to run the HTCSS command Args: entry_name (str): - client_name 
(str): + client_name (str): client (e.g. Frontend group) name nr_glideins (int): idle_lifetime (int): frontend_name (str): submit_credentials (dict): - client_web client_web (str): None means client did not pass one, backwards compatibility + client_web (str): None means client did not pass one, backwards compatibility params: status_sf (dict): keys are GlideinEntrySubmitFile(s) and values is a jobStatus/numJobs dict log: @@ -1646,11 +1657,11 @@ def submitGlideins( log=log, factoryConfig=factoryConfig, ) - except: + except Exception as e: msg = "Failed to setup execution environment." log.error(msg) log.exception(msg) - raise RuntimeError(msg) + raise RuntimeError(msg) from e if username != MY_USERNAME: # need to push all the relevant env variables through @@ -1702,14 +1713,22 @@ def submitGlideins( log.info("Submitted %i glideins to %s: %s" % (len(submitted_jids), schedd, submitted_jids)) -# remove the glideins in the list def removeGlideins(schedd_name, jid_list, force=False, log=logSupport.log, factoryConfig=None): - #### - # We are assuming the gfactory to be - # a condor superuser and thus does not need - # identity switching to remove jobs - #### + """Remove the Glideins in the list + We are assuming the gfactory to be a condor superuser or the only user owning jobs (Glideins) + and thus does not need identity switching to remove jobs + + Args: + schedd_name (str): HTCSS schedd name + jid_list: + force: + log: + factoryConfig: + + Returns: + None + """ if factoryConfig is None: factoryConfig = globals()["factoryConfig"] @@ -1746,14 +1765,21 @@ def removeGlideins(schedd_name, jid_list, force=False, log=logSupport.log, facto log.info("Removed %i glideins on %s: %s" % (len(removed_jids), schedd_name, removed_jids)) -# release the glideins in the list def releaseGlideins(schedd_name, jid_list, log=logSupport.log, factoryConfig=None): - #### - # We are assuming the gfactory to be - # a condor superuser and thus does not need - # identity switching to release jobs 
- #### + """Release the glideins in the list + + We are assuming the gfactory to be a condor superuser or the only user owning jobs (Glideins) + and thus does not need identity switching to release jobs + + Args: + schedd_name: + jid_list: + log: + factoryConfig: + Returns: + + """ if factoryConfig is None: factoryConfig = globals()["factoryConfig"] @@ -2017,10 +2043,13 @@ def get_submit_environment( msg = "Error setting up submission environment (bad key)" log.debug(msg) log.exception(msg) + # Known error, can continue without the missing elements + # (from the one missing to the end of the section) except Exception: msg = "Error setting up submission environment (in %s section)" % grid_type log.debug(msg) log.exception(msg) + # Unknown error, re-raise to stop the environment build raise else: proxy = submit_credentials.security_credentials.get("SubmitProxy", "") @@ -2054,6 +2083,7 @@ def get_submit_environment( msg = "Error setting up submission environment: %s" % str(e) log.debug(msg) log.exception(msg) + # Exception logged, continuing. No valid environment, returning None def isGlideinWithinHeldLimits(jobInfo, factoryConfig=None): diff --git a/factory/tools/lib/gWftArgsHelper.py b/factory/tools/lib/gWftArgsHelper.py index d01869287..07645f68f 100644 --- a/factory/tools/lib/gWftArgsHelper.py +++ b/factory/tools/lib/gWftArgsHelper.py @@ -1,22 +1,15 @@ # SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC # SPDX-License-Identifier: Apache-2.0 -# -# Project: -# glideinWMS -# -# File Version: -# # Description: # Argument parser helper -# def str2int_range(str, min, max): try: val = int(str) except ValueError as e: - raise ValueError("Must be a number.") + raise ValueError("Must be a number.") from None if (val < min) or (val > max): raise ValueError("Must be in the range %i-%i." % (min, max)) return val @@ -36,12 +29,12 @@ def parse_date(str): try: mon = str2int_range(arr[1], 1, 12) except ValueError as e: - raise ValueError(f"Invalid month '{arr[1]}'. 
{e}") + raise ValueError(f"Invalid month '{arr[1]}'. {e}") from None try: day = str2int_range(arr[2], 1, 31) except ValueError as e: - raise ValueError(f"Invalid day '{arr[2]}'. {e}") + raise ValueError(f"Invalid day '{arr[2]}'. {e}") from None return (year, mon, day) @@ -53,16 +46,16 @@ def parse_time(str): try: hour = str2int_range(arr[0], 0, 23) except ValueError as e: - raise ValueError(f"Invalid hour '{arr[0]}'. {e}") + raise ValueError(f"Invalid hour '{arr[0]}'. {e}") from None try: min = str2int_range(arr[1], 0, 59) except ValueError as e: - raise ValueError(f"Invalid minute '{arr[1]}'. {e}") + raise ValueError(f"Invalid minute '{arr[1]}'. {e}") from None try: sec = str2int_range(arr[2], 0, 59) except ValueError as e: - raise ValueError(f"Invalid second '{arr[2]}'. {e}") + raise ValueError(f"Invalid second '{arr[2]}'. {e}") from None return (hour, min, sec) diff --git a/lib/classadSupport.py b/lib/classadSupport.py index e45612b79..a9ebc84eb 100644 --- a/lib/classadSupport.py +++ b/lib/classadSupport.py @@ -1,15 +1,8 @@ # SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC # SPDX-License-Identifier: Apache-2.0 -# -# Project: -# glideinWMS -# # Description: This module describes base classes for classads and advertisers -# -# Author: -# Parag Mhashilkar -# + import os import string @@ -79,7 +72,7 @@ def writeToFile(self, filename, append=True): try: f = open(filename, o_flag) - except: + except Exception: raise with f: diff --git a/lib/condorExe.py b/lib/condorExe.py index 1d1b63fe2..a64d2a71a 100644 --- a/lib/condorExe.py +++ b/lib/condorExe.py @@ -1,18 +1,8 @@ # SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC # SPDX-License-Identifier: Apache-2.0 -# -# Project: -# glideinWMS -# -# File Version: -# # Description: # This module implements the functions to execute condor commands -# -# Author: -# Igor Sfiligoi (Sept 7th 2006) -# import os @@ -162,9 +152,9 @@ def iexe_cmd(cmd, stdin_data=None, child_env=None): try: logSupport.log.error(msg) 
logSupport.log.debug(generate_bash_script(cmd, os.environ)) - except: + except Exception: pass - raise ExeError(msg) + raise ExeError(msg) from ex return stdout_data.splitlines() diff --git a/lib/condorLogParser.py b/lib/condorLogParser.py index ced47ff73..3b1d04810 100644 --- a/lib/condorLogParser.py +++ b/lib/condorLogParser.py @@ -1,19 +1,9 @@ # SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC # SPDX-License-Identifier: Apache-2.0 -# -# Project: -# glideinWMS -# -# File Version: -# # Description: # This module implements classes and functions to parse # the condor log files. -# -# Author: -# Igor Sfiligoi (Feb 1st 2007) -# # NOTE: # Inactive files are log files that have only completed or removed entries @@ -1384,8 +1374,8 @@ def loadCache(fname): """ try: data = util.file_pickle_load(fname) - except Exception: - raise RuntimeError("Could not read %s" % fname) + except Exception as e: + raise RuntimeError("Could not read %s" % fname) from e return data diff --git a/lib/condorMonitor.py b/lib/condorMonitor.py index 45cdba314..b05e2f927 100644 --- a/lib/condorMonitor.py +++ b/lib/condorMonitor.py @@ -390,7 +390,7 @@ def executeAll(self, joblist=None, attributes=None, values=None): "Error querying schedd %s in pool %s using python bindings (qedit of job/attr/val %s/%s/%s): %s" % (s, p, j1, j2, j3, ex) ) - raise QueryError(err_str) + raise QueryError(err_str) from ex # @@ -502,7 +502,7 @@ def fetch(self, constraint=None, format_list=None): "Error executing htcondor query to pool %s with constraint %s and format_list %s: %s. 
Env is %s" % (self.pool_name, constraint, format_list, ex, os.environ) ) - raise QueryError(err_str).with_traceback(sys.exc_info()[2]) + raise QueryError(err_str) from ex def fetch_using_exe(self, constraint=None, format_list=None): """Return the results obtained from executing the HTCondor query command @@ -648,7 +648,7 @@ def fetch_using_bindings(self, constraint=None, format_list=None): if self.pool_name is not None: p = self.pool_name err_str = f"Error querying schedd {s} in pool {p} using python bindings: {ex}" - raise PBError(err_str).with_traceback(sys.exc_info()[2]) + raise PBError(err_str) from ex finally: self.security_obj.restore_state() @@ -698,7 +698,7 @@ def fetch_using_bindings(self, constraint=None, format_list=None): if self.pool_name is not None: p = self.pool_name err_str = f"Error querying pool {p} using python bindings: {ex}" - raise PBError(err_str).with_traceback(sys.exc_info()[2]) + raise PBError(err_str) from ex finally: self.security_obj.restore_state() @@ -988,9 +988,9 @@ def xml2list(xml_data): try: p.Parse(" ".join(xml_data[found_xml:]), 1) except TypeError as e: - raise RuntimeError("Failed to parse XML data, TypeError: %s" % e) - except: - raise RuntimeError("Failed to parse XML data, generic error") + raise RuntimeError("Failed to parse XML data, TypeError: %s" % e) from e + except Exception as e: + raise RuntimeError("Failed to parse XML data, generic error") from e # else no xml, so return an empty list return xml2list_data diff --git a/lib/fork.py b/lib/fork.py index e49c93c4a..25f8a3841 100644 --- a/lib/fork.py +++ b/lib/fork.py @@ -96,26 +96,23 @@ def add(i, j): return i+j ############################### def fetch_fork_result(r, pid): - """ - Used with fork clients + """Used with fork clients to retrieve results Can raise: OSError if Bad file descriptor or file already closed or if waitpid syscall returns -1 FetchError if a os.read error was encountered - Possible errors from os.read (catched here): + Possible errors from 
os.read and pickle.load (catched here): - EOFError if the forked process failed an nothing was written to the pipe, if cPickle finds an empty string - - IOError failure for an I/O-related reason, e.g., "pipe file not found" or "disk full". - - OSError other system-related error - - @type r: pipe - @param r: Input pipe + - IOError failure for an I/O-related reason, e.g., "pipe file not found" or "disk full" + - OSError other system-related error (includes both former OSError and IOError since Py3.4) + - pickle.UnpicklingError incomplete pickled data - @type pid: int - @param pid: pid of the child + Args: + r (pipe): Input pipe + pid (int): pid of the child - @rtype: Object - @return: Unpickled object + Returns: + Object: Unpickled object """ - rin = b"" out = None try: @@ -129,10 +126,11 @@ def fetch_fork_result(r, pid): etype, evalue, etraceback = sys.exc_info() # Adding message in case close/waitpid fail and preempt raise logSupport.log.exception("Re-raising exception during read: %s" % err) + # Removed .with_traceback(etraceback) since already in the chaining raise FetchError( "Exception during read probably due to worker failure, original exception and trace %s: %s" % (etype, evalue) - ).with_traceback(etraceback) + ) from err finally: os.close(r) os.waitpid(pid, 0) @@ -140,17 +138,15 @@ def fetch_fork_result(r, pid): def fetch_fork_result_list(pipe_ids): - """ - Read the output pipe of the children, used after forking to perform work + """Read the output pipe of the children, used after forking to perform work and after forking to entry.writeStats() - @type pipe_ids: dict - @param pipe_ids: Dictinary of pipe and pid + Args: + pipe_ids (dict): Dictionary of pipe and pid - @rtype: dict - @return: Dictionary of fork_results + Returns: + dict: Dictionary of fork_results """ - out = {} failures = 0 failed = [] @@ -179,11 +175,11 @@ def fetch_ready_fork_result_list(pipe_ids): on the pipes to consume, read the data and close the pipe. 
and after forking to entry.writeStats() - @type pipe_ids: dict - @param pipe_ids: Dictinary of pipe and pid + Args: + pipe_ids (dict): Dictionary of pipe and pid - @rtype: dict - @return: Dictionary of work_done + Returns: + dict: Dictionary of work_done """ # Timeout for epoll/poll in milliseconds: -1 is blocking, 0 non blocking, >0 timeout diff --git a/lib/pidSupport.py b/lib/pidSupport.py index c35feb04b..cc2b1fcf8 100644 --- a/lib/pidSupport.py +++ b/lib/pidSupport.py @@ -1,18 +1,8 @@ # SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC # SPDX-License-Identifier: Apache-2.0 -# -# Project: -# glideinWMS -# -# File Version: -# # Description: # Handle pid lock files -# -# Author: -# Igor Sfiligoi -# import fcntl import os @@ -73,9 +63,9 @@ def register(self, pid=None, started_time=None): # if none, will default to os. try: fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB) self.lock_in_place = True - except OSError: + except OSError as e: fd.close() - raise AlreadyRunning("Another process already running. Unable to acquire lock %s" % self.pid_fname) + raise AlreadyRunning("Another process already running. 
Unable to acquire lock %s" % self.pid_fname) from e fd.seek(0) fd.truncate() fd.write(self.format_pid_file_content()) @@ -152,8 +142,8 @@ def parse_pid_file_content(self, lines): try: pid = int(pidarr[1]) - except: - raise RuntimeError("Corrupted lock file: invalid PID") + except Exception: + raise RuntimeError("Corrupted lock file: invalid PID") from None self.mypid = pid return @@ -204,7 +194,7 @@ def parse_pid_file_content(self, lines): try: pid = int(pidarr[1]) - except: + except Exception: raise RuntimeError("Corrupted lock file: invalid PID") pidarr = lines[1].split(":") @@ -213,8 +203,8 @@ def parse_pid_file_content(self, lines): try: parent_pid = int(pidarr[1]) - except: - raise RuntimeError("Corrupted lock file: invalid Parent PID") + except Exception: + raise RuntimeError("Corrupted lock file: invalid Parent PID") from None self.mypid = pid self.parent_pid = parent_pid diff --git a/lib/pubCrypto.py b/lib/pubCrypto.py index afec97ada..f9211f839 100644 --- a/lib/pubCrypto.py +++ b/lib/pubCrypto.py @@ -141,7 +141,7 @@ def __init__( # This helps operator understand which file might be corrupted so that they can try to delete it e.key_fname = key_fname e.cwd = os.getcwd() - raise + raise e from e # Need to raise a new exception to have the modified values (only raise keeps the original) return ########################################### @@ -169,7 +169,7 @@ def load(self, key_str=None, key_fname=None): bio = M2Crypto.BIO.MemoryBuffer(key_str) self._load_from_bio(bio) except M2Crypto.RSA.RSAError as e: - raise PubCryptoError("M2Crypto.RSA.RSAError: %s" % e) + raise PubCryptoError("M2Crypto.RSA.RSAError: %s" % e) from e elif key_fname is not None: bio = M2Crypto.BIO.openfile(key_fname) if bio is None: diff --git a/lib/subprocessSupport.py b/lib/subprocessSupport.py index 26c5ff019..4f0fd7362 100644 --- a/lib/subprocessSupport.py +++ b/lib/subprocessSupport.py @@ -88,7 +88,7 @@ def iexe_cmd(cmd, useShell=False, stdin_data=None, child_env=None, text=True): except 
OSError as e:
         err_str = "Error running '%s'\nStdout:%s\nStderr:%s\nException OSError:%s"
-        raise RuntimeError(err_str % (cmd, stdoutdata, stderrdata, e))
-    if exitStatus:
+        raise RuntimeError(err_str % (cmd, stdoutdata, stderrdata, e)) from e
+    if exitStatus:  # True if exitStatus<>0
         raise CalledProcessError(exitStatus, cmd, output="".join(stderrdata))
     return stdoutdata
diff --git a/lib/symCrypto.py b/lib/symCrypto.py
index a12fbf80f..f8c21e2f2 100644
--- a/lib/symCrypto.py
+++ b/lib/symCrypto.py
@@ -1,13 +1,6 @@
 # SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC
 # SPDX-License-Identifier: Apache-2.0
-#
-# Project:
-#   glideinWMS
-#
-# File Version:
-#
-
 """symCrypto - This module defines classes to perform symmetric key cryptography (shared or hidden key)
 It uses M2Crypto: https://github.com/mcepl/M2Crypto
diff --git a/lib/util.py b/lib/util.py
index 1ab1360bb..406451f7f 100644
--- a/lib/util.py
+++ b/lib/util.py
@@ -201,11 +201,13 @@ def print_funct(*args, **kwargs):
 def conditional_raise(mask_exceptions):
     """Auxiliary function to handle conditional raising
-    :param mask_exceptions: callback function and arguments to use if an exception happens (Default: None)
-        The callback function can access the exception via sys.exc_info()
-        If a function is not provided, the exception is re-risen
-        if provided it is called using mask_exceptions[0](*mask_exceptions[1:])
-    :return: None
+    Args:
+        mask_exceptions: callback function and arguments to use if an exception happens (Default: None)
+            The callback function can access the exception via sys.exc_info()
+            If a function is not provided, the exception is re-raised
+            if provided it is called using mask_exceptions[0](*mask_exceptions[1:])
+    Returns:
+        None
     """
     if mask_exceptions and hasattr(mask_exceptions[0], "__call__"):
         # protect and report
@@ -221,21 +223,24 @@ def file_pickle_dump(fname, content, tmp_type="PID", mask_exceptions=None, proto
     """Serialize and save content
     To avoid inconsistent content
-    @param fname:
file storing the serialized content
-    @param content: content to serialize
-    @param tmp_type: tmp file type as defined in file_get_tmp (Default: PID, .$PID.tmp suffix)
-    @param mask_exceptions: callback function and arguments to use if an exception happens (Default: None)
-        The callback function can access the exception via sys.exc_info()
-        If a function is not provided, the exception is re-risen
-        if provided it is called using mask_exceptions[0](*mask_exceptions[1:])
-    @param protocol: Pickle protocol to be used (Default: pickle.HIGHEST_PROTOCOL, 5 as of py3.8)
-    @return: True if the saving was successful, False or an exception otherwise
+    Args:
+        fname: file storing the serialized content
+        content: content to serialize
+        tmp_type: tmp file type as defined in file_get_tmp (Default: PID, .$PID.tmp suffix)
+        mask_exceptions: callback function and arguments to use if an exception happens (Default: None)
+            The callback function can access the exception via sys.exc_info()
+            If a function is not provided, the exception is re-raised
+            if provided it is called using mask_exceptions[0](*mask_exceptions[1:])
+        protocol: Pickle protocol to be used (Default: pickle.HIGHEST_PROTOCOL, 5 as of py3.8)
+
+    Returns:
+        bool: True if the saving was successful, False or an exception otherwise
     """
     tmp_fname = file_get_tmp(fname, tmp_type)
     try:
         with open(tmp_fname, "wb") as pfile:
             pickle.dump(content, pfile, protocol)
-    except:
+    except Exception:
         conditional_raise(mask_exceptions)
         return False
     else:
@@ -247,23 +252,28 @@ def file_pickle_load(fname, mask_exceptions=None, default=None, expiration=-1, r
     """Load a serialized dictionary
     This implementation does not use file locking, it relies on the atomicity of file movement/replacement and deletion
-    @param fname: name of the file with the serialized data
-    @param mask_exceptions: callback function and arguments to use if an exception happens (Default: None)
-        The callback function can access the exception via sys.exc_info()
-        If a
function is not provided, the exception is re-risen
-        if provided it is called using mask_exceptions[0](*mask_exceptions[1:])
-    @param default: value returned if the unpickling fails (Default: None)
-    @param expiration: input file expiration in seconds (Default: -1)
-        -1 file never expires
-        0 file always expires after reading
-    @param remove_expired: remove expired file (Default: False)
-        NOTE: if you remove the obsolete file from the reader you may run into a race condition with undesired effects:
-        1. the reader detects the obsolete file, 2. the writer writes a new version, 3. the reader deletes the new version
-        This can happen only in cycles where there is an obsolete data file to start with, so the number of data files
-        lost because of this is smaller than the occurrences of obsoleted files. When the expiration time is much bigger
-        than the loop time of the writer this is generally acceptable.
-    @param last_time: last time a file has been used, persistent to keep history (Default: {}, first time called)
-    @return: python objects (e.g. data dictionary)
+
+    Args:
+        fname: name of the file with the serialized data
+        mask_exceptions: callback function and arguments to use if an exception happens (Default: None)
+            The callback function can access the exception via sys.exc_info()
+            If a function is not provided, the exception is re-raised
+            if provided it is called using mask_exceptions[0](*mask_exceptions[1:])
+        default: value returned if the unpickling fails (Default: None)
+        expiration (int): input file expiration in seconds (Default: -1)
+            -1 file never expires
+            0 file always expires after reading
+        remove_expired (bool): remove expired file (Default: False)
+            NOTE: if you remove the obsolete file from the reader you may run into a race condition with undesired effects:
+            1. the reader detects the obsolete file, 2. the writer writes a new version, 3.
the reader deletes the new version + This can happen only in cycles where there is an obsolete data file to start with, so the number of data files + lost because of this is smaller than the occurrences of obsoleted files. When the expiration time is much bigger + than the loop time of the writer this is generally acceptable. + last_time (dict): last time a file has been used, persistent to keep history (Default: {}, first time called) + Dictionary file_name->time + + Returns: + Object: python objects (e.g. data dictionary) """ data = default try: @@ -293,18 +303,15 @@ def file_pickle_load(fname, mask_exceptions=None, default=None, expiration=-1, r # the file produced at the next iteration will be used try: os.remove(fname) - except: + except OSError: pass conditional_raise(mask_exceptions) - except: + except Exception: conditional_raise(mask_exceptions) return data # One writer, avoid partial write due to code, OS or file system problems -# from factory/glideFactoryMonitoring -# KEL this exact method is also in glideinFrontendMonitoring.py -# TODO: replace all definitions with this one def file_tmp2final( @@ -343,7 +350,7 @@ def file_tmp2final( pass try: os.replace(tmp_fname, fname) - except: + except Exception: # print "Failed renaming %s into %s" % (tmp_fname, fname) conditional_raise(mask_exceptions) return False @@ -399,7 +406,6 @@ def safe_boolcomp(value, expected): Returns: bool: True if str(value).lower() is True """ - return str(value).lower() == str(expected).lower() @@ -416,7 +422,6 @@ def str2bool(val): def handle_hooks(basedir, script_dir): """The function itaretes over the script_dir directory and executes any script found there""" - dirname = os.path.join(basedir, script_dir) if not os.path.isdir(dirname): return @@ -437,12 +442,10 @@ def hash_nc(data, len=None): Returns: str: Hash """ - # TODO set md5 usedforsecurity to False when updating to Python 3.9 out = b32encode(md5(force_bytes(data)).digest()).decode(BINARY_ENCODING_ASCII) if len: out = 
out[:len] - return out diff --git a/lib/xmlFormat.py b/lib/xmlFormat.py index 53d28fb49..dd9408554 100644 --- a/lib/xmlFormat.py +++ b/lib/xmlFormat.py @@ -1,17 +1,7 @@ # SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC # SPDX-License-Identifier: Apache-2.0 -# -# Project: -# glideinWMS -# -# File Version: -# # Description: general purpose XML formatter -# -# Author: -# Igor Sfiligoi (part of the CDF CAF) -# import string @@ -46,7 +36,7 @@ ########################################################## # -# The following Global varables are used to set defaults +# The following Global variables are used to set defaults # When the user does not specify anything # ########################################################## diff --git a/lib/xmlParse.py b/lib/xmlParse.py index 2da431995..7f60c518d 100644 --- a/lib/xmlParse.py +++ b/lib/xmlParse.py @@ -1,17 +1,7 @@ # SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC # SPDX-License-Identifier: Apache-2.0 -# -# Project: -# glideinWMS -# -# File Version: -# # Description: general purpose XML decoder -# -# Author: -# Igor Sfiligoi (Mar 27th, 2007) -# import xml.dom.minidom @@ -101,7 +91,7 @@ def xmlfile2dict( try: doc = xml.dom.minidom.parse(fname) except xml.parsers.expat.ExpatError as e: - raise CorruptXML(f"XML corrupt in file {fname}: {e}") + raise CorruptXML(f"XML corrupt in file {fname}: {e}") from e data = domel2dict(doc.documentElement, use_ord_dict, always_singular_list)