[plugins,distros] Resolve code alerts for file not always closed
Fix a number of outstanding "File is not always closed" errors highlighted by
GitHub's code scanning.

This involves (almost exclusively) changing code such as:

   for line in open('/some/file', 'r').read().splitlines():

To using a context manager to ensure the file is always closed:

   with open('/some/file', 'r') as sfile:
       for line in sfile.read().splitlines():

...or something similar. Overall, the logic of each affected block of code
(mostly in plugins) is unchanged.
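
For illustration (not part of the diff below), the 'with' form is shorthand
for a try/finally that closes the file even when an exception is raised
mid-read. A minimal sketch of that equivalence, reusing the placeholder path
from above:

   # What 'with open(...) as sfile:' guarantees, written out by hand.
   sfile = open('/some/file', 'r')
   try:
       for line in sfile.read().splitlines():
           print(line)  # stand-in for whatever the block does with 'line'
   finally:
       sfile.close()  # runs whether or not read() raised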

Signed-off-by: Jake Hunsaker <[email protected]>
TurboTurtle committed Sep 23, 2023
1 parent 9e12fba commit 62b4cec
Showing 14 changed files with 122 additions and 110 deletions.
20 changes: 10 additions & 10 deletions sos/archive.py
@@ -390,23 +390,23 @@ def add_string(self, content, dest, mode='w'):
             # on file content.
             dest = self.check_path(dest, P_FILE, force=True)
 
-            f = codecs.open(dest, mode, encoding='utf-8')
-            if isinstance(content, bytes):
-                content = content.decode('utf8', 'ignore')
-            f.write(content)
-            if os.path.exists(src):
-                self._copy_attributes(src, dest)
-            self.log_debug("added string at '%s' to FileCacheArchive '%s'"
-                           % (src, self._archive_root))
+            with codecs.open(dest, mode, encoding='utf-8') as f:
+                if isinstance(content, bytes):
+                    content = content.decode('utf8', 'ignore')
+                f.write(content)
+            if os.path.exists(src):
+                self._copy_attributes(src, dest)
+            self.log_debug("added string at '%s' to FileCacheArchive '%s'"
+                           % (src, self._archive_root))
 
     def add_binary(self, content, dest):
         with self._path_lock:
             dest = self.check_path(dest, P_FILE)
             if not dest:
                 return
 
-            f = codecs.open(dest, 'wb', encoding=None)
-            f.write(content)
+            with codecs.open(dest, 'wb', encoding=None) as f:
+                f.write(content)
             self.log_debug("added binary content at '%s' to archive '%s'"
                            % (dest, self._archive_root))
17 changes: 8 additions & 9 deletions sos/cleaner/__init__.py
@@ -525,15 +525,14 @@ def get_new_checksum(self, archive_path):
         """
         try:
             hash_size = 1024**2  # Hash 1MiB of content at a time.
-            archive_fp = open(archive_path, 'rb')
-            digest = hashlib.new(self.hash_name)
-            while True:
-                hashdata = archive_fp.read(hash_size)
-                if not hashdata:
-                    break
-                digest.update(hashdata)
-            archive_fp.close()
-            return digest.hexdigest() + '\n'
+            with open(archive_path, 'rb') as archive_fp:
+                digest = hashlib.new(self.hash_name)
+                while True:
+                    hashdata = archive_fp.read(hash_size)
+                    if not hashdata:
+                        break
+                    digest.update(hashdata)
+            return digest.hexdigest() + '\n'
         except Exception as err:
             self.log_debug("Could not generate new checksum: %s" % err)
         return None
10 changes: 6 additions & 4 deletions sos/policies/distros/redhat.py
@@ -466,8 +466,9 @@ def check(cls, remote=''):
         if not os.path.exists(host_release):
             return False
         try:
-            for line in open(host_release, "r").read().splitlines():
-                atomic |= ATOMIC_RELEASE_STR in line
+            with open(host_release, 'r') as afile:
+                for line in afile.read().splitlines():
+                    atomic |= ATOMIC_RELEASE_STR in line
         except IOError:
             pass
         return atomic
@@ -551,8 +552,9 @@ def check(cls, remote=''):
             return coreos
         host_release = os.environ[ENV_HOST_SYSROOT] + OS_RELEASE
         try:
-            for line in open(host_release, 'r').read().splitlines():
-                coreos |= 'Red Hat Enterprise Linux CoreOS' in line
+            with open(host_release, 'r') as hfile:
+                for line in hfile.read().splitlines():
+                    coreos |= 'Red Hat Enterprise Linux CoreOS' in line
         except IOError:
             pass
         return coreos
15 changes: 7 additions & 8 deletions sos/report/plugins/__init__.py
@@ -3175,10 +3175,9 @@ def collection_file(self, fname, subdir=None, tags=[]):
             _pfname = self._make_command_filename(fname, subdir=subdir)
             self.archive.check_path(_pfname, P_FILE)
             _name = self.archive.dest_path(_pfname)
-            _file = open(_name, 'w')
-            self._log_debug(f"manual collection file opened: {_name}")
-            yield _file
-            _file.close()
+            with open(_name, 'w') as _file:
+                self._log_debug(f"manual collection file opened: {_name}")
+                yield _file
             end = time()
             run = end - start
             self._log_info(f"manual collection '{fname}' finished in {run}")
@@ -3447,10 +3446,10 @@ def check_process_by_name(self, process):
         try:
             cmd_line_paths = glob.glob(cmd_line_glob)
             for path in cmd_line_paths:
-                f = open(self.path_join(path), 'r')
-                cmd_line = f.read().strip()
-                if process in cmd_line:
-                    status = True
+                with open(self.path_join(path), 'r') as pfile:
+                    cmd_line = pfile.read().strip()
+                    if process in cmd_line:
+                        status = True
         except IOError:
             return False
         return status
4 changes: 3 additions & 1 deletion sos/report/plugins/candlepin.py
@@ -30,7 +30,9 @@ def setup(self):
         self.dbpasswd = ""
         cfg_file = "/etc/candlepin/candlepin.conf"
         try:
-            for line in open(cfg_file).read().splitlines():
+            with open(cfg_file, 'r') as cfile:
+                candle_lines = cfile.read().splitlines()
+            for line in candle_lines:
                 # skip empty lines and lines with comments
                 if not line or line[0] == '#':
                     continue
6 changes: 4 additions & 2 deletions sos/report/plugins/firewall_tables.py
@@ -77,15 +77,17 @@ def setup(self):
         # do collect them only when relevant nft list ruleset exists
         default_ip_tables = "mangle\nfilter\nnat\n"
         try:
-            ip_tables_names = open("/proc/net/ip_tables_names").read()
+            with open('/proc/net/ip_tables_names', 'r') as ifile:
+                ip_tables_names = ifile.read()
         except IOError:
             ip_tables_names = default_ip_tables
         for table in ip_tables_names.splitlines():
             if nft_list['status'] == 0 and table in nft_ip_tables['ip']:
                 self.collect_iptable(table)
         # collect the same for ip6tables
         try:
-            ip_tables_names = open("/proc/net/ip6_tables_names").read()
+            with open('/proc/net/ip6_tables_names', 'r') as ipfile:
+                ip_tables_names = ipfile.read()
         except IOError:
             ip_tables_names = default_ip_tables
         for table in ip_tables_names.splitlines():
4 changes: 3 additions & 1 deletion sos/report/plugins/foreman.py
@@ -43,7 +43,9 @@ def setup(self):
         self.dbhost = "localhost"
         self.dbpasswd = ""
         try:
-            for line in open("/etc/foreman/database.yml").read().splitlines():
+            with open('/etc/foreman/database.yml', 'r') as dfile:
+                foreman_lines = dfile.read().splitlines()
+            for line in foreman_lines:
                 # skip empty lines and lines with comments
                 if not line or line[0] == '#':
                     continue
11 changes: 6 additions & 5 deletions sos/report/plugins/haproxy.py
@@ -48,11 +48,12 @@ def setup(self):
         matched = None
         provision_ip = None
         try:
-            for line in open("/etc/haproxy/haproxy.cfg").read().splitlines():
-                if matched:
-                    provision_ip = line.split()[1]
-                    break
-                matched = match(r".*haproxy\.stats.*", line)
+            with open("/etc/haproxy/haproxy.cfg", 'r') as hfile:
+                for line in hfile.read().splitlines():
+                    if matched:
+                        provision_ip = line.split()[1]
+                        break
+                    matched = match(r".*haproxy\.stats.*", line)
         except IOError:
             # fallback when the cfg file is not accessible
             pass
7 changes: 4 additions & 3 deletions sos/report/plugins/libvirt.py
@@ -69,9 +69,10 @@ def setup(self):
 
         # get details of processes of KVM hosts
         for pidfile in glob.glob("/run/libvirt/*/*.pid"):
-            pid = open(pidfile).read().splitlines()[0]
-            for pf in ["environ", "cgroup", "maps", "numa_maps", "limits"]:
-                self.add_copy_spec("/proc/%s/%s" % (pid, pf))
+            with open(pidfile, 'r') as pfile:
+                pid = pfile.read().splitlines()[0]
+            for pf in ["environ", "cgroup", "maps", "numa_maps", "limits"]:
+                self.add_copy_spec("/proc/%s/%s" % (pid, pf))
 
         self.add_file_tags({
             "/run/libvirt/qemu/*.xml": "var_qemu_xml",
27 changes: 14 additions & 13 deletions sos/report/plugins/mssql.py
@@ -43,19 +43,20 @@ def setup(self):
         sqlagent_errorlogfile = '/var/opt/mssql/log/sqlagentstartup.log'
         kerberoskeytabfile = None
         try:
-            for line in open(mssql_conf).read().splitlines():
-                if line.startswith('['):
-                    section = line
-                    continue
-                words = line.split('=')
-                if words[0].strip() == 'errorlogfile':
-                    if section == '[filelocation]':
-                        errorlogfile = words[1].strip()
-                    elif section == '[sqlagent]':
-                        sqlagent_errorlogfile = words[1].strip()
-                elif words[0].strip() == 'kerberoskeytabfile':
-                    if section == '[network]':
-                        kerberoskeytabfile = words[1].strip()
+            with open(mssql_conf, 'r') as mfile:
+                for line in mfile.read().splitlines():
+                    if line.startswith('['):
+                        section = line
+                        continue
+                    words = line.split('=')
+                    if words[0].strip() == 'errorlogfile':
+                        if section == '[filelocation]':
+                            errorlogfile = words[1].strip()
+                        elif section == '[sqlagent]':
+                            sqlagent_errorlogfile = words[1].strip()
+                    elif words[0].strip() == 'kerberoskeytabfile':
+                        if section == '[network]':
+                            kerberoskeytabfile = words[1].strip()
         except IOError as ex:
             self._log_error('Could not open conf file %s: %s' %
                             (mssql_conf, ex))
5 changes: 2 additions & 3 deletions sos/report/plugins/pcp.py
@@ -47,9 +47,8 @@ def get_size(self, path):
 
     def pcp_parse_conffile(self):
        try:
-            pcpconf = open(self.pcp_conffile, "r")
-            lines = pcpconf.readlines()
-            pcpconf.close()
+            with open(self.pcp_conffile, "r") as pcpconf:
+                lines = pcpconf.readlines()
         except IOError:
             return False
         env_vars = {}
4 changes: 3 additions & 1 deletion sos/report/plugins/pulp.py
@@ -45,7 +45,9 @@ def setup(self):
         self.messaging_cert_file = ""
         in_messaging_section = False
         try:
-            for line in open("/etc/pulp/server.conf").read().splitlines():
+            with open("/etc/pulp/server.conf", 'r') as pfile:
+                pulp_lines = pfile.read().splitlines()
+            for line in pulp_lines:
                 if match(r"^\s*seeds:\s+\S+:\S+", line):
                     uri = line.split()[1].split(',')[0].split(':')
                     self.dbhost = uri[0]
57 changes: 29 additions & 28 deletions sos/report/plugins/pulpcore.py
@@ -45,34 +45,35 @@ def separate_value(line, sep=':'):
             return val
 
         try:
-            # split the lines to "one option per line" format
-            for line in open("/etc/pulp/settings.py").read() \
-                    .replace(',', ',\n').replace('{', '{\n') \
-                    .replace('}', '\n}').splitlines():
-                # skip empty lines and lines with comments
-                if not line or line[0] == '#':
-                    continue
-                if line.startswith("DATABASES"):
-                    databases_scope = True
-                    continue
-                # example HOST line to parse:
-                #         'HOST': 'localhost',
-                pattern = r"\s*['|\"]%s['|\"]\s*:\s*\S+"
-                if databases_scope and match(pattern % 'HOST', line):
-                    self.dbhost = separate_value(line)
-                if databases_scope and match(pattern % 'PORT', line):
-                    self.dbport = separate_value(line)
-                if databases_scope and match(pattern % 'NAME', line):
-                    self.dbname = separate_value(line)
-                if databases_scope and match(pattern % 'PASSWORD', line):
-                    self.dbpasswd = separate_value(line)
-                # if line contains closing '}' database_scope end
-                if databases_scope and '}' in line:
-                    databases_scope = False
-                if line.startswith("STATIC_ROOT = "):
-                    self.staticroot = separate_value(line, sep='=')
-                if line.startswith("CHUNKED_UPLOAD_DIR = "):
-                    self.uploaddir = separate_value(line, sep='=')
+            with open("/etc/pulp/settings.py", 'r') as pfile:
+                # split the lines to "one option per line" format
+                for line in pfile.read() \
+                        .replace(',', ',\n').replace('{', '{\n') \
+                        .replace('}', '\n}').splitlines():
+                    # skip empty lines and lines with comments
+                    if not line or line[0] == '#':
+                        continue
+                    if line.startswith("DATABASES"):
+                        databases_scope = True
+                        continue
+                    # example HOST line to parse:
+                    #         'HOST': 'localhost',
+                    pattern = r"\s*['|\"]%s['|\"]\s*:\s*\S+"
+                    if databases_scope and match(pattern % 'HOST', line):
+                        self.dbhost = separate_value(line)
+                    if databases_scope and match(pattern % 'PORT', line):
+                        self.dbport = separate_value(line)
+                    if databases_scope and match(pattern % 'NAME', line):
+                        self.dbname = separate_value(line)
+                    if databases_scope and match(pattern % 'PASSWORD', line):
+                        self.dbpasswd = separate_value(line)
+                    # if line contains closing '}' database_scope end
+                    if databases_scope and '}' in line:
+                        databases_scope = False
+                    if line.startswith("STATIC_ROOT = "):
+                        self.staticroot = separate_value(line, sep='=')
+                    if line.startswith("CHUNKED_UPLOAD_DIR = "):
+                        self.uploaddir = separate_value(line, sep='=')
         except IOError:
             # fallback when the cfg file is not accessible
             pass
45 changes: 23 additions & 22 deletions sos/report/plugins/sos_extras.py
@@ -61,28 +61,29 @@ def setup(self):
                 _file = self.path_join(path, f)
                 self._log_warn("Collecting data from extras file %s" % _file)
                 try:
-                    for line in open(_file).read().splitlines():
-                        # ignore empty lines or comments
-                        if len(line.split()) == 0 or line.startswith('#'):
-                            continue
-                        # lines starting by ':' specify file pattern to collect
-                        # optionally followed by sizelimit
-                        if line.startswith(':'):
-                            words = line.split()
-                            limit = None
-                            if len(words) > 1:
-                                try:
-                                    limit = int(words[1])
-                                except ValueError:
-                                    self._log_warn("Can't decode integer"
-                                                   " sizelimit on line '%s'"
-                                                   " in file %s, using"
-                                                   " default."
-                                                   % (line, _file))
-                            self.add_copy_spec(words[0][1:], sizelimit=limit)
-                        else:
-                            # command to execute
-                            self.add_cmd_output(line, subdir=f)
+                    with open(_file, 'r') as sfile:
+                        for line in sfile.read().splitlines():
+                            # ignore empty lines or comments
+                            if len(line.split()) == 0 or line.startswith('#'):
+                                continue
+                            # lines starting by ':' specify file pattern to
+                            # collect, optionally followed by sizelimit
+                            if line.startswith(':'):
+                                words = line.split()
+                                limit = None
+                                if len(words) > 1:
+                                    try:
+                                        limit = int(words[1])
+                                    except ValueError:
+                                        self._log_warn(
+                                            f"Can't decode size limit on line "
+                                            f"'{line}' in {_file}, using default"
+                                        )
+                                self.add_copy_spec(words[0][1:],
+                                                   sizelimit=limit)
+                            else:
+                                # command to execute
+                                self.add_cmd_output(line, subdir=f)
 
                 except IOError:
                     self._log_warn("unable to read extras file %s" % _file)
