Export warning logs to status.json (#4455)
holmanb authored Sep 30, 2023
1 parent 376af8e commit d0312fa
Showing 4 changed files with 75 additions and 8 deletions.
10 changes: 10 additions & 0 deletions cloudinit/cmd/main.py
@@ -36,6 +36,7 @@
from cloudinit.config.modules import Modules
from cloudinit.config.schema import validate_cloudconfig_schema
from cloudinit.log import (
LogExporter,
setup_basic_logging,
setup_logging,
reset_logging,
@@ -702,6 +703,7 @@ def status_wrapper(name, args, data_d=None, link_d=None):
status_link = os.path.join(link_d, "status.json")
result_path = os.path.join(data_d, "result.json")
result_link = os.path.join(link_d, "result.json")
root_logger = logging.getLogger()

util.ensure_dirs(
(
@@ -761,7 +763,11 @@ def status_wrapper(name, args, data_d=None, link_d=None):
v1 = status["v1"]
v1["stage"] = mode
v1[mode]["start"] = time.time()
v1[mode]["exported_errors"] = next(
filter(lambda h: isinstance(h, LogExporter), root_logger.handlers)
).export_logs()

# Write status.json prior to running init / module code
atomic_helper.write_json(status_path, status)
util.sym_link(
os.path.relpath(status_path, link_d), status_link, force=True
@@ -786,6 +792,10 @@ def status_wrapper(name, args, data_d=None, link_d=None):
v1[mode]["finished"] = time.time()
v1["stage"] = None

# Write status.json after running init / module code
v1[mode]["exported_errors"] = next(
filter(lambda h: isinstance(h, LogExporter), root_logger.handlers)
).export_logs()
atomic_helper.write_json(status_path, status)

if mode == "modules-final":
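
The status_wrapper changes above assume a LogExporter handler is already attached to the root logger and look it up with next(filter(...)). A minimal, self-contained sketch of that pattern (standalone illustration, not part of the commit) showing how the exported records end up under exported_errors:

    import copy
    import json
    import logging
    from collections import defaultdict


    class LogExporter(logging.StreamHandler):
        """Stand-in for cloudinit.log.LogExporter: collect messages by level name."""

        holder = defaultdict(list)

        def emit(self, record: logging.LogRecord) -> None:
            self.holder[record.levelname].append(record.getMessage())

        def export_logs(self):
            # Return a snapshot so later log records do not mutate the caller's copy.
            return copy.deepcopy(self.holder)

        def flush(self) -> None:
            pass


    root_logger = logging.getLogger()
    exporter = LogExporter()
    exporter.setLevel(logging.WARNING)
    root_logger.addHandler(exporter)

    logging.warning("something worth surfacing in status.json")

    # The lookup used by status_wrapper: find the LogExporter on the root logger.
    # next() raises StopIteration if no exporter is attached, so the handler must
    # already be installed (configure_root_logger / setup_logging do this here).
    found = next(
        filter(lambda h: isinstance(h, LogExporter), root_logger.handlers)
    )
    status = {"v1": {"init": {"exported_errors": found.export_logs()}}}
    print(json.dumps(status, indent=2))
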
31 changes: 31 additions & 0 deletions cloudinit/log.py
@@ -9,14 +9,17 @@
# This file is part of cloud-init. See LICENSE file for license information.

import collections.abc
import copy
import io
import logging
import logging.config
import logging.handlers
import os
import sys
import time
from collections import defaultdict
from contextlib import suppress
from typing import DefaultDict

DEFAULT_LOG_FORMAT = "%(asctime)s - %(filename)s[%(levelname)s]: %(message)s"

@@ -62,6 +65,10 @@ def setup_logging(cfg=None):
if not cfg:
cfg = {}

root_logger = logging.getLogger()
exporter = LogExporter()
exporter.setLevel(logging.WARN)

log_cfgs = []
log_cfg = cfg.get("logcfg")
if log_cfg and isinstance(log_cfg, str):
@@ -98,9 +105,15 @@
# Attempt to load its config.
logging.config.fileConfig(log_cfg)

# Configure warning exporter after loading logging configuration
root_logger.addHandler(exporter)

# Use the first valid configuration.
return

# Configure warning exporter for basic logging
root_logger.addHandler(exporter)

# If it didn't work, at least setup a basic logger (if desired)
basic_enabled = cfg.get("log_basic", True)

@@ -112,6 +125,19 @@
setup_basic_logging()


class LogExporter(logging.StreamHandler):
holder: DefaultDict[str, list] = defaultdict(list)

def emit(self, record: logging.LogRecord):
self.holder[record.levelname].append(record.getMessage())

def export_logs(self):
return copy.deepcopy(self.holder)

def flush(self):
pass


def reset_logging():
"""Remove all current handlers and unset log level."""
log = logging.getLogger()
@@ -153,3 +179,8 @@ def configure_root_logger():
define_deprecation_logger()
setup_backup_logging()
reset_logging()

# add handler only to the root logger
handler = LogExporter()
handler.setLevel(logging.WARN)
logging.getLogger().addHandler(handler)
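
One detail worth noting from the class added above (an observation on the code, not documented behavior): holder is declared as a class attribute, so every LogExporter instance appends to the same store, and export_logs() hands back a deep copy so the snapshot written to status.json is not mutated by later records. A small illustration, assuming a cloud-init tree containing this change is importable:

    import logging

    from cloudinit.log import LogExporter

    a = LogExporter()
    b = LogExporter()

    record = logging.LogRecord(
        name="example",
        level=logging.WARNING,
        pathname="example.py",
        lineno=1,
        msg="shared store",
        args=(),
        exc_info=None,
    )
    a.emit(record)

    # Both instances see the record because `holder` lives on the class.
    assert b.export_logs()["WARNING"] == ["shared store"]

    # export_logs() returns a snapshot; mutating it does not affect the handler.
    snapshot = a.export_logs()
    snapshot["WARNING"].append("local edit")
    assert a.export_logs()["WARNING"] == ["shared store"]
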
17 changes: 17 additions & 0 deletions tests/integration_tests/cmd/test_status.py
@@ -1,4 +1,6 @@
"""Tests for `cloud-init status`"""
import json

import pytest

from tests.integration_tests.clouds import IntegrationCloud
@@ -47,3 +49,18 @@ def test_wait_when_no_datasource(session_cloud: IntegrationCloud, setup_image):
status_out = wait_for_cloud_init(client).stdout.strip()
assert "status: disabled" in status_out
assert client.execute("cloud-init status --wait").ok


USER_DATA = """\
#cloud-config
ca-certs:
remove_defaults: false
"""


@pytest.mark.user_data(USER_DATA)
def test_status_json_errors(client):
"""Ensure that deprecated logs end up in the exported errors"""
assert json.loads(
client.execute("cat /run/cloud-init/status.json").stdout
)["v1"]["init"]["exported_errors"].get("DEPRECATED")
25 changes: 17 additions & 8 deletions tests/unittests/test_cli.py
@@ -7,9 +7,8 @@

import pytest

from cloudinit import helpers
from cloudinit import helpers, log
from cloudinit.cmd import main as cli
from cloudinit.util import load_file, load_json
from tests.unittests import helpers as test_helpers

mock = test_helpers.mock
@@ -30,6 +29,7 @@ def _call_main(self, sysv_args=None):
if not sysv_args:
sysv_args = ["cloud-init"]
try:
log.setup_logging()
return cli.main(sysv_args=sysv_args)
except SystemExit as e:
return e.code
@@ -62,14 +62,18 @@ def test_status_wrapper_errors(self, action, name, match, caplog, tmpdir):
cli.status_wrapper(name, myargs, data_d, link_d)
assert [] == my_action.call_args_list

def test_status_wrapper_init_local_writes_fresh_status_info(self, tmpdir):
@mock.patch("cloudinit.cmd.main.atomic_helper.write_json")
def test_status_wrapper_init_local_writes_fresh_status_info(
self,
m_json,
tmpdir,
):
"""When running in init-local mode, status_wrapper writes status.json.
Old status and results artifacts are also removed.
"""
data_d = tmpdir.join("data")
link_d = tmpdir.join("link")
status_link = link_d.join("status.json")
# Write old artifacts which will be removed or updated.
for _dir in data_d, link_d:
test_helpers.populate_dir(
@@ -83,9 +87,10 @@ def myaction(name, args):
return "SomeDatasource", ["an error"]

myargs = FakeArgs(("ignored_name", myaction), True, "bogusmode")
log.configure_root_logger()
cli.status_wrapper("init", myargs, data_d, link_d)
# No errors reported in status
status_v1 = load_json(load_file(status_link))["v1"]
status_v1 = m_json.call_args_list[1][0][1]["v1"]
assert ["an error"] == status_v1["init-local"]["errors"]
assert "SomeDatasource" == status_v1["datasource"]
assert False is os.path.exists(
@@ -95,7 +100,10 @@ def myaction(name, args):
link_d.join("result.json")
), "unexpected result.json link found"

def test_status_wrapper_init_local_honor_cloud_dir(self, mocker, tmpdir):
@mock.patch("cloudinit.cmd.main.atomic_helper.write_json")
def test_status_wrapper_init_local_honor_cloud_dir(
self, m_json, mocker, tmpdir
):
"""When running in init-local mode, status_wrapper honors cloud_dir."""
cloud_dir = tmpdir.join("cloud")
paths = helpers.Paths({"cloud_dir": str(cloud_dir)})
@@ -111,8 +119,9 @@ def myaction(name, args):

myargs = FakeArgs(("ignored_name", myaction), True, "bogusmode")
cli.status_wrapper("init", myargs, link_d=link_d) # No explicit data_d

# Access cloud_dir directly
status_v1 = load_json(load_file(data_d.join("status.json")))["v1"]
status_v1 = m_json.call_args_list[1][0][1]["v1"]
assert ["an_error"] == status_v1["init-local"]["errors"]
assert "SomeDatasource" == status_v1["datasource"]
assert False is os.path.exists(
@@ -235,7 +244,7 @@ def test_wb_schema_subcommand_parser(self, m_read_cfg, capsys):
assert (
"Error:\n"
"Expected one of --config-file, --system or --docs arguments\n"
== err
in err
)

@pytest.mark.parametrize(
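
The unit-test changes above stop reading status.json from disk and instead patch atomic_helper.write_json, then pull the written dict out of the mock. A small standalone sketch of that indexing (values invented for illustration): status_wrapper writes status.json once before and once after the stage, so the final status dict is the second positional argument of the second recorded call.

    from unittest import mock

    m_write_json = mock.Mock()

    # Simulate the two write_json(path, data) calls status_wrapper makes for a stage.
    m_write_json("/run/cloud-init/status.json", {"v1": {"stage": "init"}})
    m_write_json(
        "/run/cloud-init/status.json",
        {"v1": {"stage": None, "init": {"errors": ["an error"]}}},
    )

    # Second call -> positional args -> second argument: the final status dict,
    # which is what m_json.call_args_list[1][0][1] extracts in the tests above.
    final_status = m_write_json.call_args_list[1][0][1]
    assert final_status["v1"]["init"]["errors"] == ["an error"]
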
