From 62b4cec0e93855e1ad953389606d2d147672263c Mon Sep 17 00:00:00 2001
From: Jake Hunsaker
Date: Fri, 22 Sep 2023 11:14:28 -0400
Subject: [PATCH] [plugins,distros] Resolve code alerts for file not always
 closed

Fix a number of outstanding "File is not always closed" errors highlighted
by GitHub's code scanning. This involves (almost exclusively) changing
constructs such as:

    for line in open('/some/file', 'r').read().splitlines():

to using a context manager to ensure the file is always closed:

    with open('/some/file', 'r') as sfile:
        for line in sfile.read().splitlines():

...or something similar. Overall, the logic of what a given block of code
does (mostly in plugins) is unchanged.

Signed-off-by: Jake Hunsaker
---
 sos/archive.py                        | 20 +++++-----
 sos/cleaner/__init__.py               | 17 ++++----
 sos/policies/distros/redhat.py        | 10 +++--
 sos/report/plugins/__init__.py        | 15 ++++---
 sos/report/plugins/candlepin.py       |  4 +-
 sos/report/plugins/firewall_tables.py |  6 ++-
 sos/report/plugins/foreman.py         |  4 +-
 sos/report/plugins/haproxy.py         | 11 +++---
 sos/report/plugins/libvirt.py         |  7 ++--
 sos/report/plugins/mssql.py           | 27 +++++++------
 sos/report/plugins/pcp.py             |  5 +--
 sos/report/plugins/pulp.py            |  4 +-
 sos/report/plugins/pulpcore.py        | 57 ++++++++++++++-------------
 sos/report/plugins/sos_extras.py      | 45 ++++++++++-----------
 14 files changed, 122 insertions(+), 110 deletions(-)

diff --git a/sos/archive.py b/sos/archive.py
index d76b7aa04e..2a9706a27f 100644
--- a/sos/archive.py
+++ b/sos/archive.py
@@ -390,14 +390,14 @@ def add_string(self, content, dest, mode='w'):
             # on file content.
             dest = self.check_path(dest, P_FILE, force=True)

-            f = codecs.open(dest, mode, encoding='utf-8')
-            if isinstance(content, bytes):
-                content = content.decode('utf8', 'ignore')
-            f.write(content)
-            if os.path.exists(src):
-                self._copy_attributes(src, dest)
-            self.log_debug("added string at '%s' to FileCacheArchive '%s'"
-                           % (src, self._archive_root))
+            with codecs.open(dest, mode, encoding='utf-8') as f:
+                if isinstance(content, bytes):
+                    content = content.decode('utf8', 'ignore')
+                f.write(content)
+                if os.path.exists(src):
+                    self._copy_attributes(src, dest)
+                self.log_debug("added string at '%s' to FileCacheArchive '%s'"
+                               % (src, self._archive_root))

     def add_binary(self, content, dest):
         with self._path_lock:
@@ -405,8 +405,8 @@ def add_binary(self, content, dest):
             if not dest:
                 return

-            f = codecs.open(dest, 'wb', encoding=None)
-            f.write(content)
+            with codecs.open(dest, 'wb', encoding=None) as f:
+                f.write(content)
             self.log_debug("added binary content at '%s' to archive '%s'"
                            % (dest, self._archive_root))

diff --git a/sos/cleaner/__init__.py b/sos/cleaner/__init__.py
index fbcaa9c3c7..9aabd9c4f3 100644
--- a/sos/cleaner/__init__.py
+++ b/sos/cleaner/__init__.py
@@ -525,15 +525,14 @@ def get_new_checksum(self, archive_path):
         """
         try:
             hash_size = 1024**2  # Hash 1MiB of content at a time.
-            archive_fp = open(archive_path, 'rb')
-            digest = hashlib.new(self.hash_name)
-            while True:
-                hashdata = archive_fp.read(hash_size)
-                if not hashdata:
-                    break
-                digest.update(hashdata)
-            archive_fp.close()
-            return digest.hexdigest() + '\n'
+            with open(archive_path, 'rb') as archive_fp:
+                digest = hashlib.new(self.hash_name)
+                while True:
+                    hashdata = archive_fp.read(hash_size)
+                    if not hashdata:
+                        break
+                    digest.update(hashdata)
+                return digest.hexdigest() + '\n'
         except Exception as err:
             self.log_debug("Could not generate new checksum: %s" % err)
         return None
diff --git a/sos/policies/distros/redhat.py b/sos/policies/distros/redhat.py
index 8085aae811..15241e2782 100644
--- a/sos/policies/distros/redhat.py
+++ b/sos/policies/distros/redhat.py
@@ -466,8 +466,9 @@ def check(cls, remote=''):
         if not os.path.exists(host_release):
             return False
         try:
-            for line in open(host_release, "r").read().splitlines():
-                atomic |= ATOMIC_RELEASE_STR in line
+            with open(host_release, 'r') as afile:
+                for line in afile.read().splitlines():
+                    atomic |= ATOMIC_RELEASE_STR in line
         except IOError:
             pass
         return atomic
@@ -551,8 +552,9 @@ def check(cls, remote=''):
             return coreos
         host_release = os.environ[ENV_HOST_SYSROOT] + OS_RELEASE
         try:
-            for line in open(host_release, 'r').read().splitlines():
-                coreos |= 'Red Hat Enterprise Linux CoreOS' in line
+            with open(host_release, 'r') as hfile:
+                for line in hfile.read().splitlines():
+                    coreos |= 'Red Hat Enterprise Linux CoreOS' in line
         except IOError:
             pass
         return coreos
diff --git a/sos/report/plugins/__init__.py b/sos/report/plugins/__init__.py
index 60910c39fa..b43bc4ea4a 100644
--- a/sos/report/plugins/__init__.py
+++ b/sos/report/plugins/__init__.py
@@ -3175,10 +3175,9 @@ def collection_file(self, fname, subdir=None, tags=[]):
             _pfname = self._make_command_filename(fname, subdir=subdir)
             self.archive.check_path(_pfname, P_FILE)
             _name = self.archive.dest_path(_pfname)
-            _file = open(_name, 'w')
-            self._log_debug(f"manual collection file opened: {_name}")
-            yield _file
-            _file.close()
+            with open(_name, 'w') as _file:
+                self._log_debug(f"manual collection file opened: {_name}")
+                yield _file
             end = time()
             run = end - start
             self._log_info(f"manual collection '{fname}' finished in {run}")
@@ -3447,10 +3446,10 @@ def check_process_by_name(self, process):
         try:
             cmd_line_paths = glob.glob(cmd_line_glob)
             for path in cmd_line_paths:
-                f = open(self.path_join(path), 'r')
-                cmd_line = f.read().strip()
-                if process in cmd_line:
-                    status = True
+                with open(self.path_join(path), 'r') as pfile:
+                    cmd_line = pfile.read().strip()
+                    if process in cmd_line:
+                        status = True
         except IOError:
             return False
         return status
diff --git a/sos/report/plugins/candlepin.py b/sos/report/plugins/candlepin.py
index 67dc8d2ee0..9c14ea2d63 100644
--- a/sos/report/plugins/candlepin.py
+++ b/sos/report/plugins/candlepin.py
@@ -30,7 +30,9 @@ def setup(self):
         self.dbpasswd = ""
         cfg_file = "/etc/candlepin/candlepin.conf"
         try:
-            for line in open(cfg_file).read().splitlines():
+            with open(cfg_file, 'r') as cfile:
+                candle_lines = cfile.read().splitlines()
+            for line in candle_lines:
                 # skip empty lines and lines with comments
                 if not line or line[0] == '#':
                     continue
diff --git a/sos/report/plugins/firewall_tables.py b/sos/report/plugins/firewall_tables.py
index 590e984988..372e6e47a4 100644
--- a/sos/report/plugins/firewall_tables.py
+++ b/sos/report/plugins/firewall_tables.py
@@ -77,7 +77,8 @@ def setup(self):
         # do collect them only when relevant nft list ruleset exists
         default_ip_tables = "mangle\nfilter\nnat\n"
         try:
-            ip_tables_names = open("/proc/net/ip_tables_names").read()
+            with open('/proc/net/ip_tables_names', 'r') as ifile:
+                ip_tables_names = ifile.read()
         except IOError:
             ip_tables_names = default_ip_tables
         for table in ip_tables_names.splitlines():
@@ -85,7 +86,8 @@ def setup(self):
                 self.collect_iptable(table)
         # collect the same for ip6tables
         try:
-            ip_tables_names = open("/proc/net/ip6_tables_names").read()
+            with open('/proc/net/ip6_tables_names', 'r') as ipfile:
+                ip_tables_names = ipfile.read()
         except IOError:
             ip_tables_names = default_ip_tables
         for table in ip_tables_names.splitlines():
diff --git a/sos/report/plugins/foreman.py b/sos/report/plugins/foreman.py
index cea68ae28e..a3ee81b7df 100644
--- a/sos/report/plugins/foreman.py
+++ b/sos/report/plugins/foreman.py
@@ -43,7 +43,9 @@ def setup(self):
         self.dbhost = "localhost"
         self.dbpasswd = ""
         try:
-            for line in open("/etc/foreman/database.yml").read().splitlines():
+            with open('/etc/foreman/database.yml', 'r') as dfile:
+                foreman_lines = dfile.read().splitlines()
+            for line in foreman_lines:
                 # skip empty lines and lines with comments
                 if not line or line[0] == '#':
                     continue
diff --git a/sos/report/plugins/haproxy.py b/sos/report/plugins/haproxy.py
index 1b3a26d254..5d71e22338 100644
--- a/sos/report/plugins/haproxy.py
+++ b/sos/report/plugins/haproxy.py
@@ -48,11 +48,12 @@ def setup(self):
         matched = None
         provision_ip = None
         try:
-            for line in open("/etc/haproxy/haproxy.cfg").read().splitlines():
-                if matched:
-                    provision_ip = line.split()[1]
-                    break
-                matched = match(r".*haproxy\.stats.*", line)
+            with open("/etc/haproxy/haproxy.cfg", 'r') as hfile:
+                for line in hfile.read().splitlines():
+                    if matched:
+                        provision_ip = line.split()[1]
+                        break
+                    matched = match(r".*haproxy\.stats.*", line)
         except IOError:
             # fallback when the cfg file is not accessible
             pass
diff --git a/sos/report/plugins/libvirt.py b/sos/report/plugins/libvirt.py
index c36bd022df..60c439ef05 100644
--- a/sos/report/plugins/libvirt.py
+++ b/sos/report/plugins/libvirt.py
@@ -69,9 +69,10 @@ def setup(self):

         # get details of processes of KVM hosts
         for pidfile in glob.glob("/run/libvirt/*/*.pid"):
-            pid = open(pidfile).read().splitlines()[0]
-            for pf in ["environ", "cgroup", "maps", "numa_maps", "limits"]:
-                self.add_copy_spec("/proc/%s/%s" % (pid, pf))
+            with open(pidfile, 'r') as pfile:
+                pid = pfile.read().splitlines()[0]
+                for pf in ["environ", "cgroup", "maps", "numa_maps", "limits"]:
+                    self.add_copy_spec("/proc/%s/%s" % (pid, pf))

         self.add_file_tags({
             "/run/libvirt/qemu/*.xml": "var_qemu_xml",
diff --git a/sos/report/plugins/mssql.py b/sos/report/plugins/mssql.py
index d9dce0f947..9a4d643cdc 100644
--- a/sos/report/plugins/mssql.py
+++ b/sos/report/plugins/mssql.py
@@ -43,19 +43,20 @@ def setup(self):
         sqlagent_errorlogfile = '/var/opt/mssql/log/sqlagentstartup.log'
         kerberoskeytabfile = None
         try:
-            for line in open(mssql_conf).read().splitlines():
-                if line.startswith('['):
-                    section = line
-                    continue
-                words = line.split('=')
-                if words[0].strip() == 'errorlogfile':
-                    if section == '[filelocation]':
-                        errorlogfile = words[1].strip()
-                    elif section == '[sqlagent]':
-                        sqlagent_errorlogfile = words[1].strip()
-                elif words[0].strip() == 'kerberoskeytabfile':
-                    if section == '[network]':
-                        kerberoskeytabfile = words[1].strip()
+            with open(mssql_conf, 'r') as mfile:
+                for line in mfile.read().splitlines():
+                    if line.startswith('['):
+                        section = line
+                        continue
+                    words = line.split('=')
+                    if words[0].strip() == 'errorlogfile':
+                        if section == '[filelocation]':
+                            errorlogfile = words[1].strip()
+                        elif section == '[sqlagent]':
+                            sqlagent_errorlogfile = words[1].strip()
+                    elif words[0].strip() == 'kerberoskeytabfile':
+                        if section == '[network]':
+                            kerberoskeytabfile = words[1].strip()
         except IOError as ex:
             self._log_error('Could not open conf file %s: %s' %
                             (mssql_conf, ex))
diff --git a/sos/report/plugins/pcp.py b/sos/report/plugins/pcp.py
index a979deab08..2c6772e297 100644
--- a/sos/report/plugins/pcp.py
+++ b/sos/report/plugins/pcp.py
@@ -47,9 +47,8 @@ def get_size(self, path):

     def pcp_parse_conffile(self):
         try:
-            pcpconf = open(self.pcp_conffile, "r")
-            lines = pcpconf.readlines()
-            pcpconf.close()
+            with open(self.pcp_conffile, "r") as pcpconf:
+                lines = pcpconf.readlines()
         except IOError:
             return False
         env_vars = {}
diff --git a/sos/report/plugins/pulp.py b/sos/report/plugins/pulp.py
index 19484b074d..df007168ac 100644
--- a/sos/report/plugins/pulp.py
+++ b/sos/report/plugins/pulp.py
@@ -45,7 +45,9 @@ def setup(self):
         self.messaging_cert_file = ""
         in_messaging_section = False
         try:
-            for line in open("/etc/pulp/server.conf").read().splitlines():
+            with open("/etc/pulp/server.conf", 'r') as pfile:
+                pulp_lines = pfile.read().splitlines()
+            for line in pulp_lines:
                 if match(r"^\s*seeds:\s+\S+:\S+", line):
                     uri = line.split()[1].split(',')[0].split(':')
                     self.dbhost = uri[0]
diff --git a/sos/report/plugins/pulpcore.py b/sos/report/plugins/pulpcore.py
index ad4591110e..04efae9f8a 100644
--- a/sos/report/plugins/pulpcore.py
+++ b/sos/report/plugins/pulpcore.py
@@ -45,34 +45,35 @@ def separate_value(line, sep=':'):
             return val

         try:
-            # split the lines to "one option per line" format
-            for line in open("/etc/pulp/settings.py").read() \
-                    .replace(',', ',\n').replace('{', '{\n') \
-                    .replace('}', '\n}').splitlines():
-                # skip empty lines and lines with comments
-                if not line or line[0] == '#':
-                    continue
-                if line.startswith("DATABASES"):
-                    databases_scope = True
-                    continue
-                # example HOST line to parse:
-                #     'HOST': 'localhost',
-                pattern = r"\s*['|\"]%s['|\"]\s*:\s*\S+"
-                if databases_scope and match(pattern % 'HOST', line):
-                    self.dbhost = separate_value(line)
-                if databases_scope and match(pattern % 'PORT', line):
-                    self.dbport = separate_value(line)
-                if databases_scope and match(pattern % 'NAME', line):
-                    self.dbname = separate_value(line)
-                if databases_scope and match(pattern % 'PASSWORD', line):
-                    self.dbpasswd = separate_value(line)
-                # if line contains closing '}' database_scope end
-                if databases_scope and '}' in line:
-                    databases_scope = False
-                if line.startswith("STATIC_ROOT = "):
-                    self.staticroot = separate_value(line, sep='=')
-                if line.startswith("CHUNKED_UPLOAD_DIR = "):
-                    self.uploaddir = separate_value(line, sep='=')
+            with open("/etc/pulp/settings.py", 'r') as pfile:
+                # split the lines to "one option per line" format
+                for line in pfile.read() \
+                        .replace(',', ',\n').replace('{', '{\n') \
+                        .replace('}', '\n}').splitlines():
+                    # skip empty lines and lines with comments
+                    if not line or line[0] == '#':
+                        continue
+                    if line.startswith("DATABASES"):
+                        databases_scope = True
+                        continue
+                    # example HOST line to parse:
+                    #     'HOST': 'localhost',
+                    pattern = r"\s*['|\"]%s['|\"]\s*:\s*\S+"
+                    if databases_scope and match(pattern % 'HOST', line):
+                        self.dbhost = separate_value(line)
+                    if databases_scope and match(pattern % 'PORT', line):
+                        self.dbport = separate_value(line)
+                    if databases_scope and match(pattern % 'NAME', line):
+                        self.dbname = separate_value(line)
+                    if databases_scope and match(pattern % 'PASSWORD', line):
+                        self.dbpasswd = separate_value(line)
+                    # if line contains closing '}' database_scope end
+                    if databases_scope and '}' in line:
+                        databases_scope = False
+                    if line.startswith("STATIC_ROOT = "):
+                        self.staticroot = separate_value(line, sep='=')
+                    if line.startswith("CHUNKED_UPLOAD_DIR = "):
+                        self.uploaddir = separate_value(line, sep='=')
         except IOError:
             # fallback when the cfg file is not accessible
             pass
diff --git a/sos/report/plugins/sos_extras.py b/sos/report/plugins/sos_extras.py
index 55bc4dc00a..71cc878711 100644
--- a/sos/report/plugins/sos_extras.py
+++ b/sos/report/plugins/sos_extras.py
@@ -61,28 +61,29 @@ def setup(self):
                 _file = self.path_join(path, f)
                 self._log_warn("Collecting data from extras file %s" % _file)
                 try:
-                    for line in open(_file).read().splitlines():
-                        # ignore empty lines or comments
-                        if len(line.split()) == 0 or line.startswith('#'):
-                            continue
-                        # lines starting by ':' specify file pattern to collect
-                        # optionally followed by sizelimit
-                        if line.startswith(':'):
-                            words = line.split()
-                            limit = None
-                            if len(words) > 1:
-                                try:
-                                    limit = int(words[1])
-                                except ValueError:
-                                    self._log_warn("Can't decode integer"
-                                                   " sizelimit on line '%s'"
-                                                   " in file %s, using"
-                                                   " default."
-                                                   % (line, _file))
-                            self.add_copy_spec(words[0][1:], sizelimit=limit)
-                        else:
-                            # command to execute
-                            self.add_cmd_output(line, subdir=f)
+                    with open(_file, 'r') as sfile:
+                        for line in sfile.read().splitlines():
+                            # ignore empty lines or comments
+                            if len(line.split()) == 0 or line.startswith('#'):
+                                continue
+                            # lines starting by ':' specify file pattern to
+                            # collect optionally followed by sizelimit
+                            if line.startswith(':'):
+                                words = line.split()
+                                limit = None
+                                if len(words) > 1:
+                                    try:
+                                        limit = int(words[1])
+                                    except ValueError:
+                                        self._log_warn(
+                                            f"Can't decode size limit on line "
+                                            f"{line} in {_file}, using default"
+                                        )
+                                self.add_copy_spec(words[0][1:],
+                                                   sizelimit=limit)
+                            else:
+                                # command to execute
+                                self.add_cmd_output(line, subdir=f)
                 except IOError:
                     self._log_warn("unable to read extras file %s" % _file)
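
For reference, the pattern applied throughout this patch looks like the
standalone sketch below. The parse_extras_file() helper, the sample path,
and the empty-list fallback are illustrative assumptions only and are not
taken from the sos code base:

    # Minimal sketch of the "with open(...)" pattern used in this patch.
    # The function name and path are hypothetical and do not exist in sos.
    def parse_extras_file(path='/tmp/example-extras.conf'):
        entries = []
        try:
            # The context manager guarantees the file is closed even if
            # read() or the loop below raises an exception.
            with open(path, 'r') as sfile:
                for line in sfile.read().splitlines():
                    # skip empty lines and comments, as the plugins do
                    if not line or line.startswith('#'):
                        continue
                    entries.append(line)
        except IOError:
            # mirror the plugins' behaviour: a missing or unreadable file
            # is not fatal
            return []
        return entries

    if __name__ == '__main__':
        print(parse_extras_file())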