Skip to content

Commit

Permalink
chore: add timer to io and string manipulation code
Browse files Browse the repository at this point in the history
refactors existing code to utilize timer codepaths:

- replace manual timer implementations in subp.py and sockets.py
- replace open() / read() calls with util.load_text_file() where appropriate
  • Loading branch information
holmanb committed Sep 6, 2024
1 parent 306878c commit 3169a13
Show file tree
Hide file tree
Showing 22 changed files with 241 additions and 172 deletions.
58 changes: 37 additions & 21 deletions cloudinit/atomic_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,35 +7,50 @@
import tempfile
from base64 import b64decode, b64encode

from cloudinit import util
from cloudinit import log, util

_DEF_PERMS = 0o644
LOG = logging.getLogger(__name__)


def b64d(source):
    """base64 decode data.

    :param source: a bytes or str to decode
    :return: base64 as a decoded str if utf-8 encoded, otherwise bytes
    """
    with log.log_time("Base64 decoding"):
        decoded = b64decode(source)
        try:
            # Prefer str when the payload is valid utf-8.
            return decoded.decode("utf-8")
        except UnicodeDecodeError:
            # Binary payload: hand back the raw bytes unchanged.
            return decoded


def b64e(source):
    """base64 encode data.

    :param source: a bytes or str to encode
    :return: base64 encoded str
    """
    # Fix: log message typo ("encodeing") and docstring said "to decode".
    with log.log_time("Base64 encoding"):
        if not isinstance(source, bytes):
            source = source.encode("utf-8")
        return b64encode(source).decode("utf-8")


def write_file(
    filename, content, mode=_DEF_PERMS, omode="wb", preserve_mode=False
):
    """open filename in mode omode, write content, set permissions to mode"""

    # Fix: the f-string had no placeholder and logged the literal
    # "Writing (unknown)"; interpolate the actual target filename.
    with log.log_time(f"Writing {filename}"):
        return _write_file(filename, content, mode, omode, preserve_mode)


def _write_file(
filename, content, mode=_DEF_PERMS, omode="wb", preserve_mode=False
):
if preserve_mode:
try:
file_stat = os.stat(filename)
Expand Down Expand Up @@ -77,13 +92,14 @@ def json_serialize_default(_obj):

def json_dumps(data):
    """Return data in nicely formatted json."""
    with log.log_time("Dumping json"):
        return json.dumps(
            data,
            indent=1,
            sort_keys=True,
            separators=(",", ": "),
            default=json_serialize_default,
        )


def write_json(filename, data, mode=_DEF_PERMS):
Expand Down
15 changes: 8 additions & 7 deletions cloudinit/cmd/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -134,12 +134,13 @@ def close_stdin(logger: Callable[[str], None] = LOG.debug):
logger: a function for logging messages
"""
if not os.isatty(sys.stdin.fileno()):
logger("Closing stdin")
with open(os.devnull) as fp:
os.dup2(fp.fileno(), sys.stdin.fileno())
else:
logger("Not closing stdin, stdin is a tty.")
with log.log_time("Closing stdin"):
if not os.isatty(sys.stdin.fileno()):
logger("Closing stdin")
with open(os.devnull) as fp:
os.dup2(fp.fileno(), sys.stdin.fileno())
else:
logger("Not closing stdin, stdin is a tty.")


def extract_fns(args):
Expand Down Expand Up @@ -310,7 +311,7 @@ def purge_cache_on_python_version_change(init):
init.paths.get_cpath("data"), "python-version"
)
if os.path.exists(python_version_path):
cached_python_version = open(python_version_path).read()
cached_python_version = util.load_text_file(python_version_path)
# The Python version has changed out from under us, anything that was
# pickled previously is likely useless due to API changes.
if cached_python_version != current_python_version:
Expand Down
4 changes: 2 additions & 2 deletions cloudinit/config/cc_growpart.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
from pathlib import Path
from typing import Optional, Tuple

from cloudinit import lifecycle, subp, temp_utils, util
from cloudinit import lifecycle, log, subp, temp_utils, util
from cloudinit.cloud import Cloud
from cloudinit.config import Config
from cloudinit.config.schema import MetaSchema
Expand Down Expand Up @@ -318,7 +318,7 @@ def resize_encrypted(blockdev, partition) -> Tuple[str, str]:
if not KEYDATA_PATH.exists():
return (RESIZE.SKIPPED, "No encryption keyfile found")
try:
with KEYDATA_PATH.open() as f:
with log.log_time(f"Reading {KEYDATA_PATH}"), KEYDATA_PATH.open() as f:
keydata = json.load(f)
key = keydata["key"]
decoded_key = base64.b64decode(key)
Expand Down
14 changes: 9 additions & 5 deletions cloudinit/config/modules.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from types import ModuleType
from typing import Dict, List, NamedTuple, Optional

from cloudinit import config, importer, lifecycle, type_utils, util
from cloudinit import config, importer, lifecycle, log, type_utils, util
from cloudinit.distros import ALL_DISTROS
from cloudinit.helpers import ConfigMerger
from cloudinit.reporting.events import ReportEventStack
Expand Down Expand Up @@ -283,11 +283,15 @@ def _run_modules(self, mostly_mods: List[ModuleDetails]):
deprecated_version="23.2",
)
func_args.update({"log": LOG})
ran, _r = cc.run(
run_name, mod.handle, func_args, freq=freq
)

timer = log.log_time("and", skip_log=True)
with timer:
ran, _r = cc.run(
run_name, mod.handle, func_args, freq=freq
)
if ran:
myrep.message = "%s ran successfully" % run_name
time = f" {timer.output}" if timer.output else ""
myrep.message = f"{run_name} ran successfully {time}"
else:
myrep.message = "%s previously ran" % run_name

Expand Down
23 changes: 22 additions & 1 deletion cloudinit/config/schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@

import yaml

from cloudinit import features, importer, lifecycle, safeyaml
from cloudinit import features, importer, lifecycle, log, safeyaml
from cloudinit.cmd.devel import read_cfg_paths
from cloudinit.handlers import INCLUSION_TYPES_MAP, type_from_starts_with
from cloudinit.helpers import Paths
Expand Down Expand Up @@ -732,6 +732,27 @@ def validate_cloudconfig_schema(
@raises: ValueError on invalid schema_type not in CLOUD_CONFIG or
NETWORK_CONFIG_V1 or NETWORK_CONFIG_V2
"""
with log.log_time(f"Validating {schema_type}"):
return _validate_cloudconfig_schema(
config,
schema,
schema_type,
strict,
strict_metaschema,
log_details,
log_deprecations,
)


def _validate_cloudconfig_schema(
config: dict,
schema: Optional[dict] = None,
schema_type: SchemaType = SchemaType.CLOUD_CONFIG,
strict: bool = False,
strict_metaschema: bool = False,
log_details: bool = True,
log_deprecations: bool = False,
) -> bool:
from cloudinit.net.netplan import available as netplan_available

if schema_type == SchemaType.NETWORK_CONFIG:
Expand Down
6 changes: 4 additions & 2 deletions cloudinit/dmi.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from collections import namedtuple
from typing import Optional

from cloudinit import subp
from cloudinit import log, subp
from cloudinit.util import (
is_container,
is_DragonFlyBSD,
Expand Down Expand Up @@ -91,7 +91,9 @@ def _read_dmi_syspath(key: str) -> Optional[str]:
return None

try:
with open(dmi_key_path, "rb") as fp:
with log.log_time(f"Reading {dmi_key_path}"), open(
dmi_key_path, "rb"
) as fp:
key_data = fp.read()
except PermissionError:
LOG.debug("Could not read %s", dmi_key_path)
Expand Down
4 changes: 3 additions & 1 deletion cloudinit/log/log_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
import sys
import time

LOG = logging.getLogger(__name__)


def logexc(
log, msg, *args, log_level: int = logging.WARNING, exc_info=True
Expand Down Expand Up @@ -57,7 +59,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):


def write_to_console(conpath, text):
    """Write text to the console device at conpath, timing the operation.

    :param conpath: path to the console device (e.g. /dev/console)
    :param text: str to write
    """
    with log_time(f"Writing to {conpath}"), open(conpath, "w") as wfh:
        wfh.write(text)
        # Flush so the message reaches the console even if the process dies.
        wfh.flush()

Expand Down
12 changes: 6 additions & 6 deletions cloudinit/net/eni.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
import re
from typing import Optional

from cloudinit import subp, util
from cloudinit import log, subp, util
from cloudinit.net import ParserError, renderer, subnet_is_ipv6
from cloudinit.net.network_state import NetworkState

Expand Down Expand Up @@ -208,8 +208,7 @@ def _parse_deb_config_data(ifaces, contents, src_dir, src_path):
)
]
for entry in dir_contents:
with open(entry, "r") as fp:
src_data = fp.read().strip()
src_data = util.load_text_file(entry).strip()
abs_entry = os.path.abspath(entry)
_parse_deb_config_data(
ifaces, src_data, os.path.dirname(abs_entry), abs_entry
Expand Down Expand Up @@ -310,9 +309,10 @@ def _parse_deb_config_data(ifaces, contents, src_dir, src_path):

def convert_eni_data(eni_data):
    """Return a network config representation of what is in eni_data."""
    with log.log_time("Converting eni data"):
        ifaces = {}
        _parse_deb_config_data(ifaces, eni_data, src_dir=None, src_path=None)
        return _ifaces_to_net_config_data(ifaces)


def _ifaces_to_net_config_data(ifaces):
Expand Down
5 changes: 4 additions & 1 deletion cloudinit/reporting/events.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,12 @@
They can be published to registered handlers with report_event.
"""
import base64
import logging
import os.path
import time
from typing import List

from cloudinit import log
from cloudinit.reporting import (
available_handlers,
instantiated_handler_registry,
Expand All @@ -23,6 +25,7 @@
START_EVENT_TYPE = "start"

DEFAULT_EVENT_ORIGIN = "cloudinit"
LOG = logging.getLogger(__name__)


class _nameset(set):
Expand Down Expand Up @@ -301,7 +304,7 @@ def _collect_file_info(files):
if not os.path.isfile(fname):
content = None
else:
with open(fname, "rb") as fp:
with log.log_time(f"Reading {fname}"), open(fname, "rb") as fp:
content = base64.b64encode(fp.read()).decode()
ret.append({"path": fname, "content": content, "encoding": "base64"})
return ret
6 changes: 4 additions & 2 deletions cloudinit/reporting/handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
from threading import Event
from typing import Union

from cloudinit import url_helper, util
from cloudinit import log, url_helper, util
from cloudinit.registry import DictRegistry

LOG = logging.getLogger(__name__)
Expand Down Expand Up @@ -310,7 +310,9 @@ def _decode_kvp_item(self, record_data):
return {"key": k, "value": v}

def _append_kvp_item(self, record_data):
with open(self._kvp_file_path, "ab") as f:
with log.log_time(f"Appending {self._kvp_file_path}"), open(
self._kvp_file_path, "ab"
) as f:
fcntl.flock(f, fcntl.LOCK_EX)
for data in record_data:
f.write(data)
Expand Down
37 changes: 22 additions & 15 deletions cloudinit/safeyaml.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,17 @@
#
# This file is part of cloud-init. See LICENSE file for license information.

import logging
from collections import defaultdict
from itertools import chain
from typing import Any, Dict, List, Tuple

import yaml

from cloudinit import log

LOG = logging.getLogger(__name__)


# SchemaPathMarks track the path to an element within a loaded YAML file.
# The start_mark and end_mark contain the row and column indicators
Expand Down Expand Up @@ -250,23 +255,25 @@ def load_with_marks(blob) -> Tuple[Any, Dict[str, int]]:
"""
result = yaml.load(blob, Loader=_CustomSafeLoaderWithMarks)
if not isinstance(result, dict):
schemamarks = {}
else:
schemamarks = result.pop("schemamarks")
return result, schemamarks
with log.log_time("Loading yaml"):
result = yaml.load(blob, Loader=_CustomSafeLoaderWithMarks)
if not isinstance(result, dict):
schemamarks = {}
else:
schemamarks = result.pop("schemamarks")
return result, schemamarks


def dumps(obj, explicit_start=True, explicit_end=True, noalias=False):
    """Return data in nicely formatted yaml.

    :param obj: data to serialize
    :param explicit_start: emit the yaml document start marker ("---")
    :param explicit_end: emit the yaml document end marker ("...")
    :param noalias: when True, disable anchors/aliases for repeated nodes
    :return: yaml-formatted str
    """
    with log.log_time("Dumping yaml"):
        return yaml.dump(
            obj,
            line_break="\n",
            indent=4,
            explicit_start=explicit_start,
            explicit_end=explicit_end,
            default_flow_style=False,
            Dumper=(NoAliasSafeDumper if noalias else yaml.dumper.SafeDumper),
        )
Loading

0 comments on commit 3169a13

Please sign in to comment.