From 90fd38f3d359ae3d8723a1c2b0a75aa16fd9bf6f Mon Sep 17 00:00:00 2001
From: Xincun Li <147451452+xincunli-sonic@users.noreply.github.com>
Date: Thu, 16 May 2024 23:16:53 -0700
Subject: [PATCH 1/3] Add full configuration validation. (#3316)

#### What I did
Before applying the JSON patch, we precheck the payload and simulate the patch against the entire box-level configuration.

#### How I did it
1. Add a duplication check.
2. Validate the JSON patch structure.
3. Simulate the patch against the full configuration.
4. Verify the simulated result passes YANG validation.

#### How to verify it
1. Single ASIC
```
admin@str2-msn2700-spy-2:~/gcu$ cat empty.json
[]
admin@str2-msn2700-spy-2:~/gcu$ sudo config apply-patch empty.json
Patch Applier: localhost: Patch application starting.
Patch Applier: localhost: Patch: []
Patch Applier: localhost getting current config db.
Patch Applier: localhost: simulating the target full config after applying the patch.
Patch Applier: localhost: validating all JsonPatch operations are permitted on the specified fields
Patch Applier: localhost: validating target config does not have empty tables, since they do not show up in ConfigDb.
Patch Applier: localhost: sorting patch updates.
Patch Applier: The localhost patch was converted into 0 changes.
Patch Applier: localhost: applying 0 changes in order.
Patch Applier: localhost: verifying patch updates are reflected on ConfigDB.
Patch Applier: localhost patch application completed.
Patch applied successfully.
```
2. Multi ASIC
```
stli@str2-7250-2-lc01:~/gcu$ cat empty.json
[]
stli@str2-7250-2-lc01:~/gcu$ sudo config apply-patch empty.json
sonic_yang(6):Note: Below table(s) have no YANG models: DHCP_SERVER, KUBERNETES_MASTER
sonic_yang(6):Note: Below table(s) have no YANG models: KUBERNETES_MASTER
sonic_yang(6):Note: Below table(s) have no YANG models: KUBERNETES_MASTER
Patch Applier: localhost: Patch application starting.
Patch Applier: localhost: Patch: []
Patch Applier: localhost getting current config db.
Patch Applier: localhost: simulating the target full config after applying the patch.
Patch Applier: localhost: validating all JsonPatch operations are permitted on the specified fields
Patch Applier: localhost: validating target config does not have empty tables, since they do not show up in ConfigDb.
Patch Applier: localhost: sorting patch updates.
Patch Applier: The localhost patch was converted into 0 changes.
Patch Applier: localhost: applying 0 changes in order.
Patch Applier: localhost: verifying patch updates are reflected on ConfigDB.
Patch Applier: localhost patch application completed.
Patch Applier: asic0: Patch application starting.
Patch Applier: asic0: Patch: []
Patch Applier: asic0 getting current config db.
Patch Applier: asic0: simulating the target full config after applying the patch.
Patch Applier: asic0: validating all JsonPatch operations are permitted on the specified fields
Patch Applier: asic0: validating target config does not have empty tables, since they do not show up in ConfigDb.
Patch Applier: asic0: sorting patch updates.
Patch Applier: The asic0 patch was converted into 0 changes.
Patch Applier: asic0: applying 0 changes in order.
Patch Applier: asic0: verifying patch updates are reflected on ConfigDB.
Patch Applier: asic0 patch application completed.
Patch Applier: asic1: Patch application starting.
Patch Applier: asic1: Patch: []
Patch Applier: asic1 getting current config db.
Patch Applier: asic1: simulating the target full config after applying the patch.
Patch Applier: asic1: validating all JsonPatch operations are permitted on the specified fields Patch Applier: asic1: validating target config does not have empty tables, since they do not show up in ConfigDb. Patch Applier: asic1: sorting patch updates. Patch Applier: The asic1 patch was converted into 0 changes. Patch Applier: asic1: applying 0 changes in order. Patch Applier: asic1: verifying patch updates are reflected on ConfigDB. Patch Applier: asic1 patch application completed. Patch applied successfully. ``` --- config/main.py | 61 +++++++-- generic_config_updater/generic_updater.py | 32 +++-- generic_config_updater/gu_common.py | 1 + tests/config_test.py | 145 +++++++++++++++++++++- 4 files changed, 208 insertions(+), 31 deletions(-) diff --git a/config/main.py b/config/main.py index 15b9e2d34..10e39bbb6 100644 --- a/config/main.py +++ b/config/main.py @@ -17,12 +17,14 @@ from collections import OrderedDict from generic_config_updater.generic_updater import GenericUpdater, ConfigFormat, extract_scope +from generic_config_updater.gu_common import HOST_NAMESPACE, GenericConfigUpdaterError from minigraph import parse_device_desc_xml, minigraph_encoder from natsort import natsorted from portconfig import get_child_ports from socket import AF_INET, AF_INET6 from sonic_py_common import device_info, multi_asic from sonic_py_common.interface import get_interface_table_name, get_port_table_name, get_intf_longname +from sonic_yang_cfg_generator import SonicYangCfgDbGenerator from utilities_common import util_base from swsscommon.swsscommon import SonicV2Connector, ConfigDBConnector from utilities_common.db import Db @@ -1085,19 +1087,54 @@ def validate_gre_type(ctx, _, value): def apply_patch_for_scope(scope_changes, results, config_format, verbose, dry_run, ignore_non_yang_tables, ignore_path): scope, changes = scope_changes # Replace localhost to DEFAULT_NAMESPACE which is db definition of Host - if scope.lower() == "localhost" or scope == "": + if scope.lower() == HOST_NAMESPACE or scope == "": scope = multi_asic.DEFAULT_NAMESPACE - - scope_for_log = scope if scope else "localhost" + + scope_for_log = scope if scope else HOST_NAMESPACE try: # Call apply_patch with the ASIC-specific changes and predefined parameters - GenericUpdater(namespace=scope).apply_patch(jsonpatch.JsonPatch(changes), config_format, verbose, dry_run, ignore_non_yang_tables, ignore_path) + GenericUpdater(namespace=scope).apply_patch(jsonpatch.JsonPatch(changes), + config_format, + verbose, + dry_run, + ignore_non_yang_tables, + ignore_path) results[scope_for_log] = {"success": True, "message": "Success"} log.log_notice(f"'apply-patch' executed successfully for {scope_for_log} by {changes}") except Exception as e: results[scope_for_log] = {"success": False, "message": str(e)} log.log_error(f"'apply-patch' executed failed for {scope_for_log} by {changes} due to {str(e)}") + +def validate_patch(patch): + try: + command = ["show", "runningconfiguration", "all"] + proc = subprocess.Popen(command, text=True, stdout=subprocess.PIPE) + all_running_config, returncode = proc.communicate() + if returncode: + log.log_notice(f"Fetch all runningconfiguration failed as output:{all_running_config}") + return False + + # Structure validation and simulate apply patch. 
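+        # jsonpatch checks each operation's structure and simulates it against the parsed
+        # full running config here; malformed operations or non-resolvable paths raise,
+        # and the surrounding try/except reports them as GenericConfigUpdaterError.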
+ all_target_config = patch.apply(json.loads(all_running_config)) + + # Verify target config by YANG models + target_config = all_target_config.pop(HOST_NAMESPACE) if multi_asic.is_multi_asic() else all_target_config + target_config.pop("bgpraw", None) + if not SonicYangCfgDbGenerator().validate_config_db_json(target_config): + return False + + if multi_asic.is_multi_asic(): + for asic in multi_asic.get_namespace_list(): + target_config = all_target_config.pop(asic) + target_config.pop("bgpraw", None) + if not SonicYangCfgDbGenerator().validate_config_db_json(target_config): + return False + + return True + except Exception as e: + raise GenericConfigUpdaterError(f"Validate json patch: {patch} failed due to:{e}") + # This is our main entrypoint - the main 'config' command @click.group(cls=clicommon.AbbreviationGroup, context_settings=CONTEXT_SETTINGS) @click.pass_context @@ -1296,6 +1333,9 @@ def apply_patch(ctx, patch_file_path, format, dry_run, ignore_non_yang_tables, i patch_as_json = json.loads(text) patch = jsonpatch.JsonPatch(patch_as_json) + if not validate_patch(patch): + raise GenericConfigUpdaterError(f"Failed validating patch:{patch}") + results = {} config_format = ConfigFormat[format.upper()] # Initialize a dictionary to hold changes categorized by scope @@ -1318,7 +1358,8 @@ def apply_patch(ctx, patch_file_path, format, dry_run, ignore_non_yang_tables, i # Empty case to force validate YANG model. if not changes_by_scope: asic_list = [multi_asic.DEFAULT_NAMESPACE] - asic_list.extend(multi_asic.get_namespace_list()) + if multi_asic.is_multi_asic(): + asic_list.extend(multi_asic.get_namespace_list()) for asic in asic_list: changes_by_scope[asic] = [] @@ -1331,7 +1372,7 @@ def apply_patch(ctx, patch_file_path, format, dry_run, ignore_non_yang_tables, i if failures: failure_messages = '\n'.join([f"- {failed_scope}: {results[failed_scope]['message']}" for failed_scope in failures]) - raise Exception(f"Failed to apply patch on the following scopes:\n{failure_messages}") + raise GenericConfigUpdaterError(f"Failed to apply patch on the following scopes:\n{failure_messages}") log.log_notice(f"Patch applied successfully for {patch}.") click.secho("Patch applied successfully.", fg="cyan", underline=True) @@ -1538,9 +1579,9 @@ def reload(db, filename, yes, load_sysinfo, no_service_restart, force, file_form file_input = read_json_file(file) platform = file_input.get("DEVICE_METADATA", {}).\ - get("localhost", {}).get("platform") + get(HOST_NAMESPACE, {}).get("platform") mac = file_input.get("DEVICE_METADATA", {}).\ - get("localhost", {}).get("mac") + get(HOST_NAMESPACE, {}).get("mac") if not platform or not mac: log.log_warning("Input file does't have platform or mac. platform: {}, mac: {}" @@ -1905,8 +1946,8 @@ def override_config_table(db, input_config_db, dry_run): if multi_asic.is_multi_asic() and len(config_input): # Golden Config will use "localhost" to represent host name if ns == DEFAULT_NAMESPACE: - if "localhost" in config_input.keys(): - ns_config_input = config_input["localhost"] + if HOST_NAMESPACE in config_input.keys(): + ns_config_input = config_input[HOST_NAMESPACE] else: click.secho("Wrong config format! 'localhost' not found in host config! cannot override.. 
abort") sys.exit(1) diff --git a/generic_config_updater/generic_updater.py b/generic_config_updater/generic_updater.py index b6fc2f1fc..ae26709d6 100644 --- a/generic_config_updater/generic_updater.py +++ b/generic_config_updater/generic_updater.py @@ -2,7 +2,7 @@ import jsonpointer import os from enum import Enum -from .gu_common import GenericConfigUpdaterError, ConfigWrapper, \ +from .gu_common import HOST_NAMESPACE, GenericConfigUpdaterError, EmptyTableError, ConfigWrapper, \ DryRunConfigWrapper, PatchWrapper, genericUpdaterLogging from .patch_sorter import StrictPatchSorter, NonStrictPatchSorter, ConfigSplitter, \ TablesWithoutYangConfigSplitter, IgnorePathsFromYangConfigSplitter @@ -16,21 +16,18 @@ def extract_scope(path): if not path: raise Exception("Wrong patch with empty path.") - try: - pointer = jsonpointer.JsonPointer(path) - parts = pointer.parts - except Exception as e: - raise Exception(f"Error resolving path: '{path}' due to {e}") + pointer = jsonpointer.JsonPointer(path) + parts = pointer.parts if not parts: - raise Exception("Wrong patch with empty path.") + raise GenericConfigUpdaterError("Wrong patch with empty path.") if parts[0].startswith("asic"): if not parts[0][len("asic"):].isnumeric(): - raise Exception(f"Error resolving path: '{path}' due to incorrect ASIC number.") + raise GenericConfigUpdaterError(f"Error resolving path: '{path}' due to incorrect ASIC number.") scope = parts[0] remainder = "/" + "/".join(parts[1:]) - elif parts[0] == "localhost": - scope = "localhost" + elif parts[0] == HOST_NAMESPACE: + scope = HOST_NAMESPACE remainder = "/" + "/".join(parts[1:]) else: scope = "" @@ -38,6 +35,7 @@ def extract_scope(path): return scope, remainder + class ConfigLock: def acquire_lock(self): # TODO: Implement ConfigLock @@ -67,7 +65,7 @@ def __init__(self, self.changeapplier = changeapplier if changeapplier is not None else ChangeApplier(namespace=self.namespace) def apply(self, patch, sort=True): - scope = self.namespace if self.namespace else 'localhost' + scope = self.namespace if self.namespace else HOST_NAMESPACE self.logger.log_notice(f"{scope}: Patch application starting.") self.logger.log_notice(f"{scope}: Patch: {patch}") @@ -84,10 +82,10 @@ def apply(self, patch, sort=True): self.config_wrapper.validate_field_operation(old_config, target_config) # Validate target config does not have empty tables since they do not show up in ConfigDb - self.logger.log_notice(f"{scope}: validating target config does not have empty tables, " \ - "since they do not show up in ConfigDb.") + self.logger.log_notice(f"""{scope}: validating target config does not have empty tables, + since they do not show up in ConfigDb.""") empty_tables = self.config_wrapper.get_empty_tables(target_config) - if empty_tables: # if there are empty tables + if empty_tables: # if there are empty tables empty_tables_txt = ", ".join(empty_tables) raise ValueError("Given patch is not valid because it will result in empty tables " \ "which is not allowed in ConfigDb. 
" \ @@ -97,10 +95,8 @@ def apply(self, patch, sort=True): self.logger.log_notice(f"{scope}: sorting patch updates.") changes = self.patchsorter.sort(patch) changes_len = len(changes) - self.logger.log_notice(f"The patch was sorted into {changes_len} " \ - f"change{'s' if changes_len != 1 else ''}{':' if changes_len > 0 else '.'}") - for change in changes: - self.logger.log_notice(f" * {change}") + self.logger.log_notice(f"The {scope} patch was converted into {changes_len} " \ + f"change{'s' if changes_len != 1 else ''}{':' if changes_len > 0 else '.'}") # Apply changes in order self.logger.log_notice(f"{scope}: applying {changes_len} change{'s' if changes_len != 1 else ''} " \ diff --git a/generic_config_updater/gu_common.py b/generic_config_updater/gu_common.py index 974c540c0..c15334222 100644 --- a/generic_config_updater/gu_common.py +++ b/generic_config_updater/gu_common.py @@ -16,6 +16,7 @@ SYSLOG_IDENTIFIER = "GenericConfigUpdater" SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__)) GCU_FIELD_OP_CONF_FILE = f"{SCRIPT_DIR}/gcu_field_operation_validators.conf.json" +HOST_NAMESPACE = "localhost" class GenericConfigUpdaterError(Exception): pass diff --git a/tests/config_test.py b/tests/config_test.py index 5a1b90596..1d12c6547 100644 --- a/tests/config_test.py +++ b/tests/config_test.py @@ -1,3 +1,5 @@ +import copy +import pytest import filecmp import importlib import os @@ -127,9 +129,38 @@ Reloading Monit configuration ... """ -reload_config_with_untriggered_timer_output="""\ -Relevant services are not up. Retry later or use -f to avoid system checks -""" +config_temp = { + "scope": { + "ACL_TABLE": { + "MY_ACL_TABLE": { + "policy_desc": "My ACL", + "ports": ["Ethernet1", "Ethernet2"], + "stage": "ingress", + "type": "L3" + } + }, + "PORT": { + "Ethernet1": { + "alias": "fortyGigE0/0", + "description": "fortyGigE0/0", + "index": "0", + "lanes": "29,30,31,32", + "mtu": "9100", + "pfc_asym": "off", + "speed": "40000" + }, + "Ethernet2": { + "alias": "fortyGigE0/100", + "description": "fortyGigE0/100", + "index": "25", + "lanes": "125,126,127,128", + "mtu": "9100", + "pfc_asym": "off", + "speed": "40000" + } + } + } + } def mock_run_command_side_effect(*args, **kwargs): command = args[0] @@ -941,6 +972,7 @@ def setUp(self): self.any_checkpoints_list = ["checkpoint1", "checkpoint2", "checkpoint3"] self.any_checkpoints_list_as_text = json.dumps(self.any_checkpoints_list, indent=4) + @patch('config.main.validate_patch', mock.Mock(return_value=True)) def test_apply_patch__no_params__get_required_params_error_msg(self): # Arrange unexpected_exit_code = 0 @@ -953,6 +985,7 @@ def test_apply_patch__no_params__get_required_params_error_msg(self): self.assertNotEqual(unexpected_exit_code, result.exit_code) self.assertTrue(expected_output in result.output) + @patch('config.main.validate_patch', mock.Mock(return_value=True)) def test_apply_patch__help__gets_help_msg(self): # Arrange expected_exit_code = 0 @@ -965,6 +998,7 @@ def test_apply_patch__help__gets_help_msg(self): self.assertEqual(expected_exit_code, result.exit_code) self.assertTrue(expected_output in result.output) + @patch('config.main.validate_patch', mock.Mock(return_value=True)) def test_apply_patch__only_required_params__default_values_used_for_optional_params(self): # Arrange expected_exit_code = 0 @@ -983,6 +1017,7 @@ def test_apply_patch__only_required_params__default_values_used_for_optional_par mock_generic_updater.apply_patch.assert_called_once() 
mock_generic_updater.apply_patch.assert_has_calls([expected_call_with_default_values]) + @patch('config.main.validate_patch', mock.Mock(return_value=True)) def test_apply_patch__all_optional_params_non_default__non_default_values_used(self): # Arrange expected_exit_code = 0 @@ -1012,6 +1047,7 @@ def test_apply_patch__all_optional_params_non_default__non_default_values_used(s mock_generic_updater.apply_patch.assert_called_once() mock_generic_updater.apply_patch.assert_has_calls([expected_call_with_non_default_values]) + @patch('config.main.validate_patch', mock.Mock(return_value=True)) def test_apply_patch__exception_thrown__error_displayed_error_code_returned(self): # Arrange unexpected_exit_code = 0 @@ -1047,6 +1083,7 @@ def test_apply_patch__optional_parameters_passed_correctly(self): ["--ignore-path", "/ANY_TABLE"], mock.call(self.any_patch, ConfigFormat.CONFIGDB, False, False, False, ("/ANY_TABLE",))) + @patch('config.main.validate_patch', mock.Mock(return_value=True)) def validate_apply_patch_optional_parameter(self, param_args, expected_call): # Arrange expected_exit_code = 0 @@ -1712,6 +1749,16 @@ def setUp(self): } ] + test_config = copy.deepcopy(config_temp) + data = test_config.pop("scope") + self.all_config = {} + self.all_config["localhost"] = data + self.all_config["asic0"] = data + self.all_config["asic0"]["bgpraw"] = "" + self.all_config["asic1"] = data + self.all_config["asic1"]["bgpraw"] = "" + + @patch('config.main.validate_patch', mock.Mock(return_value=True)) def test_apply_patch_multiasic(self): # Mock open to simulate file reading with mock.patch('builtins.open', mock.mock_open(read_data=json.dumps(self.patch_content)), create=True) as mocked_open: @@ -1731,6 +1778,7 @@ def test_apply_patch_multiasic(self): # Verify mocked_open was called as expected mocked_open.assert_called_with(self.patch_file_path, 'r') + @patch('config.main.validate_patch', mock.Mock(return_value=True)) def test_apply_patch_dryrun_multiasic(self): # Mock open to simulate file reading with mock.patch('builtins.open', mock.mock_open(read_data=json.dumps(self.patch_content)), create=True) as mocked_open: @@ -1765,6 +1813,97 @@ def test_apply_patch_dryrun_multiasic(self): # Ensure ConfigDBConnector was never instantiated or called mock_config_db_connector.assert_not_called() + @patch('config.main.subprocess.Popen') + @patch('config.main.SonicYangCfgDbGenerator.validate_config_db_json', mock.Mock(return_value=True)) + def test_apply_patch_validate_patch_multiasic(self, mock_subprocess_popen): + mock_instance = MagicMock() + mock_instance.communicate.return_value = (json.dumps(self.all_config), 0) + mock_subprocess_popen.return_value = mock_instance + + # Mock open to simulate file reading + with patch('builtins.open', mock_open(read_data=json.dumps(self.patch_content)), create=True) as mocked_open: + # Mock GenericUpdater to avoid actual patch application + with patch('config.main.GenericUpdater') as mock_generic_updater: + mock_generic_updater.return_value.apply_patch = MagicMock() + + print("Multi ASIC: {}".format(multi_asic.is_multi_asic())) + # Invocation of the command with the CliRunner + result = self.runner.invoke(config.config.commands["apply-patch"], + [self.patch_file_path], + catch_exceptions=True) + + print("Exit Code: {}, output: {}".format(result.exit_code, result.output)) + # Assertions and verifications + self.assertEqual(result.exit_code, 0, "Command should succeed.") + self.assertIn("Patch applied successfully.", result.output) + + # Verify mocked_open was called as expected + 
mocked_open.assert_called_with(self.patch_file_path, 'r') + + @patch('config.main.subprocess.Popen') + @patch('config.main.SonicYangCfgDbGenerator.validate_config_db_json', mock.Mock(return_value=True)) + def test_apply_patch_validate_patch_with_badpath_multiasic(self, mock_subprocess_popen): + mock_instance = MagicMock() + mock_instance.communicate.return_value = (json.dumps(self.all_config), 0) + mock_subprocess_popen.return_value = mock_instance + + bad_patch = copy.deepcopy(self.patch_content) + bad_patch.append({ + "value": { + "policy_desc": "New ACL Table", + "ports": ["Ethernet3", "Ethernet4"], + "stage": "ingress", + "type": "L3" + } + }) + + # Mock open to simulate file reading + with patch('builtins.open', mock_open(read_data=json.dumps(bad_patch)), create=True) as mocked_open: + # Mock GenericUpdater to avoid actual patch application + with patch('config.main.GenericUpdater') as mock_generic_updater: + mock_generic_updater.return_value.apply_patch = MagicMock() + + print("Multi ASIC: {}".format(multi_asic.is_multi_asic())) + # Invocation of the command with the CliRunner + result = self.runner.invoke(config.config.commands["apply-patch"], + [self.patch_file_path], + catch_exceptions=True) + + print("Exit Code: {}, output: {}".format(result.exit_code, result.output)) + # Assertions and verifications + self.assertNotEqual(result.exit_code, 0, "Command should failed.") + self.assertIn("Failed to apply patch", result.output) + + # Verify mocked_open was called as expected + mocked_open.assert_called_with(self.patch_file_path, 'r') + + @patch('config.main.subprocess.Popen') + @patch('config.main.SonicYangCfgDbGenerator.validate_config_db_json', mock.Mock(return_value=True)) + def test_apply_patch_validate_patch_with_wrong_fetch_config(self, mock_subprocess_popen): + mock_instance = MagicMock() + mock_instance.communicate.return_value = (json.dumps(self.all_config), 2) + mock_subprocess_popen.return_value = mock_instance + + # Mock open to simulate file reading + with patch('builtins.open', mock_open(read_data=json.dumps(self.patch_content)), create=True) as mocked_open: + # Mock GenericUpdater to avoid actual patch application + with patch('config.main.GenericUpdater') as mock_generic_updater: + mock_generic_updater.return_value.apply_patch = MagicMock() + + print("Multi ASIC: {}".format(multi_asic.is_multi_asic())) + # Invocation of the command with the CliRunner + result = self.runner.invoke(config.config.commands["apply-patch"], + [self.patch_file_path], + catch_exceptions=True) + + print("Exit Code: {}, output: {}".format(result.exit_code, result.output)) + # Assertions and verifications + self.assertNotEqual(result.exit_code, 0, "Command should failed.") + self.assertIn("Failed to apply patch", result.output) + + # Verify mocked_open was called as expected + mocked_open.assert_called_with(self.patch_file_path, 'r') + @classmethod def teardown_class(cls): print("TEARDOWN") From 621fc649ecc38ffaba7cc9c987ee2007d619b939 Mon Sep 17 00:00:00 2001 From: Xincun Li Date: Fri, 14 Jun 2024 09:42:29 -0700 Subject: [PATCH 2/3] Cherry pick 3299 --- config/main.py | 2 +- generic_config_updater/change_applier.py | 32 +- generic_config_updater/generic_updater.py | 301 +++++++++++++----- generic_config_updater/gu_common.py | 22 +- tests/config_test.py | 135 ++++++++ .../change_applier_test.py | 6 +- .../generic_updater_test.py | 4 +- .../generic_config_updater/gu_common_test.py | 22 ++ .../multiasic_change_applier_test.py | 22 +- .../multiasic_generic_updater_test.py | 6 +- 10 files changed, 
420 insertions(+), 132 deletions(-) diff --git a/config/main.py b/config/main.py index 10e39bbb6..539f5427d 100644 --- a/config/main.py +++ b/config/main.py @@ -1093,7 +1093,7 @@ def apply_patch_for_scope(scope_changes, results, config_format, verbose, dry_ru scope_for_log = scope if scope else HOST_NAMESPACE try: # Call apply_patch with the ASIC-specific changes and predefined parameters - GenericUpdater(namespace=scope).apply_patch(jsonpatch.JsonPatch(changes), + GenericUpdater(scope=scope).apply_patch(jsonpatch.JsonPatch(changes), config_format, verbose, dry_run, diff --git a/generic_config_updater/change_applier.py b/generic_config_updater/change_applier.py index 904e95ccb..29afc1a52 100644 --- a/generic_config_updater/change_applier.py +++ b/generic_config_updater/change_applier.py @@ -16,6 +16,7 @@ print_to_console = False + def set_verbose(verbose=False): global print_to_console, logger @@ -34,11 +35,12 @@ def log_error(m): logger.log(logger.LOG_PRIORITY_ERROR, m, print_to_console) -def get_config_db(namespace=multi_asic.DEFAULT_NAMESPACE): - config_db = ConfigDBConnector(use_unix_socket_path=True, namespace=namespace) +def get_config_db(scope=multi_asic.DEFAULT_NAMESPACE): + config_db = ConfigDBConnector(use_unix_socket_path=True, namespace=scope) config_db.connect() return config_db + def set_config(config_db, tbl, key, data): config_db.set_entry(tbl, key, data) @@ -61,11 +63,9 @@ class DryRunChangeApplier: def __init__(self, config_wrapper): self.config_wrapper = config_wrapper - def apply(self, change): self.config_wrapper.apply_change_to_config_db(change) - def remove_backend_tables_from_config(self, data): return data @@ -74,9 +74,9 @@ class ChangeApplier: updater_conf = None - def __init__(self, namespace=multi_asic.DEFAULT_NAMESPACE): - self.namespace = namespace - self.config_db = get_config_db(self.namespace) + def __init__(self, scope=multi_asic.DEFAULT_NAMESPACE): + self.scope = scope + self.config_db = get_config_db(self.scope) self.backend_tables = [ "BUFFER_PG", "BUFFER_PROFILE", @@ -86,7 +86,6 @@ def __init__(self, namespace=multi_asic.DEFAULT_NAMESPACE): with open(UPDATER_CONF_FILE, "r") as s: ChangeApplier.updater_conf = json.load(s) - def _invoke_cmd(self, cmd, old_cfg, upd_cfg, keys): # cmd is in the format as . 
# @@ -98,7 +97,6 @@ def _invoke_cmd(self, cmd, old_cfg, upd_cfg, keys): return method_to_call(old_cfg, upd_cfg, keys) - def _services_validate(self, old_cfg, upd_cfg, keys): lst_svcs = set() lst_cmds = set() @@ -124,7 +122,6 @@ def _services_validate(self, old_cfg, upd_cfg, keys): log_debug("service invoked: {}".format(cmd)) return 0 - def _upd_data(self, tbl, run_tbl, upd_tbl, upd_keys): for key in set(run_tbl.keys()).union(set(upd_tbl.keys())): run_data = run_tbl.get(key, None) @@ -135,20 +132,17 @@ def _upd_data(self, tbl, run_tbl, upd_tbl, upd_keys): upd_keys[tbl][key] = {} log_debug("Patch affected tbl={} key={}".format(tbl, key)) - def _report_mismatch(self, run_data, upd_data): log_error("run_data vs expected_data: {}".format( str(jsondiff.diff(run_data, upd_data))[0:40])) - def apply(self, change): run_data = self._get_running_config() upd_data = prune_empty_table(change.apply(copy.deepcopy(run_data))) upd_keys = defaultdict(dict) for tbl in sorted(set(run_data.keys()).union(set(upd_data.keys()))): - self._upd_data(tbl, run_data.get(tbl, {}), - upd_data.get(tbl, {}), upd_keys) + self._upd_data(tbl, run_data.get(tbl, {}), upd_data.get(tbl, {}), upd_keys) ret = self._services_validate(run_data, upd_data, upd_keys) if not ret: @@ -168,9 +162,9 @@ def remove_backend_tables_from_config(self, data): def _get_running_config(self): _, fname = tempfile.mkstemp(suffix="_changeApplier") - - if self.namespace: - cmd = ['sonic-cfggen', '-d', '--print-data', '-n', self.namespace] + + if self.scope: + cmd = ['sonic-cfggen', '-d', '--print-data', '-n', self.scope] else: cmd = ['sonic-cfggen', '-d', '--print-data'] @@ -181,7 +175,9 @@ def _get_running_config(self): return_code = result.returncode if return_code: os.remove(fname) - raise GenericConfigUpdaterError(f"Failed to get running config for namespace: {self.namespace}, Return code: {return_code}, Error: {err}") + raise GenericConfigUpdaterError( + f"Failed to get running config for scope: {self.scope}," + + f"Return code: {return_code}, Error: {err}") run_data = {} try: diff --git a/generic_config_updater/generic_updater.py b/generic_config_updater/generic_updater.py index ae26709d6..3401d05a4 100644 --- a/generic_config_updater/generic_updater.py +++ b/generic_config_updater/generic_updater.py @@ -1,24 +1,25 @@ import json import jsonpointer import os +import subprocess + from enum import Enum from .gu_common import HOST_NAMESPACE, GenericConfigUpdaterError, EmptyTableError, ConfigWrapper, \ - DryRunConfigWrapper, PatchWrapper, genericUpdaterLogging + DryRunConfigWrapper, PatchWrapper, genericUpdaterLogging from .patch_sorter import StrictPatchSorter, NonStrictPatchSorter, ConfigSplitter, \ - TablesWithoutYangConfigSplitter, IgnorePathsFromYangConfigSplitter + TablesWithoutYangConfigSplitter, IgnorePathsFromYangConfigSplitter from .change_applier import ChangeApplier, DryRunChangeApplier from sonic_py_common import multi_asic CHECKPOINTS_DIR = "/etc/sonic/checkpoints" CHECKPOINT_EXT = ".cp.json" + def extract_scope(path): if not path: raise Exception("Wrong patch with empty path.") - pointer = jsonpointer.JsonPointer(path) parts = pointer.parts - if not parts: raise GenericConfigUpdaterError("Wrong patch with empty path.") if parts[0].startswith("asic"): @@ -32,10 +33,39 @@ def extract_scope(path): else: scope = "" remainder = path - return scope, remainder +def get_cmd_output(cmd): + proc = subprocess.Popen(cmd, text=True, stdout=subprocess.PIPE) + return proc.communicate()[0], proc.returncode + + +def get_config_json(): + scope_list = 
[multi_asic.DEFAULT_NAMESPACE] + all_running_config = {} + if multi_asic.is_multi_asic(): + scope_list.extend(multi_asic.get_namespace_list()) + for scope in scope_list: + command = ["sonic-cfggen", "-d", "--print-data"] + if scope != multi_asic.DEFAULT_NAMESPACE: + command += ["-n", scope] + + running_config_text, returncode = get_cmd_output(command) + if returncode: + raise GenericConfigUpdaterError( + f"Fetch all runningconfiguration failed as output:{running_config_text}") + running_config = json.loads(running_config_text) + + if multi_asic.is_multi_asic(): + if scope == multi_asic.DEFAULT_NAMESPACE: + scope = HOST_NAMESPACE + all_running_config[scope] = running_config + else: + all_running_config = running_config + return all_running_config + + class ConfigLock: def acquire_lock(self): # TODO: Implement ConfigLock @@ -50,22 +80,23 @@ class ConfigFormat(Enum): CONFIGDB = 1 SONICYANG = 2 + class PatchApplier: def __init__(self, patchsorter=None, changeapplier=None, config_wrapper=None, patch_wrapper=None, - namespace=multi_asic.DEFAULT_NAMESPACE): - self.namespace = namespace + scope=multi_asic.DEFAULT_NAMESPACE): + self.scope = scope self.logger = genericUpdaterLogging.get_logger(title="Patch Applier", print_all_to_console=True) - self.config_wrapper = config_wrapper if config_wrapper is not None else ConfigWrapper(namespace=self.namespace) - self.patch_wrapper = patch_wrapper if patch_wrapper is not None else PatchWrapper(namespace=self.namespace) + self.config_wrapper = config_wrapper if config_wrapper is not None else ConfigWrapper(scope=self.scope) + self.patch_wrapper = patch_wrapper if patch_wrapper is not None else PatchWrapper(scope=self.scope) self.patchsorter = patchsorter if patchsorter is not None else StrictPatchSorter(self.config_wrapper, self.patch_wrapper) - self.changeapplier = changeapplier if changeapplier is not None else ChangeApplier(namespace=self.namespace) + self.changeapplier = changeapplier if changeapplier is not None else ChangeApplier(scope=self.scope) def apply(self, patch, sort=True): - scope = self.namespace if self.namespace else HOST_NAMESPACE + scope = self.scope if self.scope else HOST_NAMESPACE self.logger.log_notice(f"{scope}: Patch application starting.") self.logger.log_notice(f"{scope}: Patch: {patch}") @@ -83,14 +114,13 @@ def apply(self, patch, sort=True): # Validate target config does not have empty tables since they do not show up in ConfigDb self.logger.log_notice(f"""{scope}: validating target config does not have empty tables, - since they do not show up in ConfigDb.""") + since they do not show up in ConfigDb.""") empty_tables = self.config_wrapper.get_empty_tables(target_config) if empty_tables: # if there are empty tables empty_tables_txt = ", ".join(empty_tables) - raise ValueError("Given patch is not valid because it will result in empty tables " \ - "which is not allowed in ConfigDb. " \ - f"Table{'s' if len(empty_tables) != 1 else ''}: {empty_tables_txt}") - + raise EmptyTableError(f"{scope}: given patch is not valid because it will result in empty tables \ + which is not allowed in ConfigDb. 
\ + Table{'s' if len(empty_tables) != 1 else ''}: {empty_tables_txt}") # Generate list of changes to apply self.logger.log_notice(f"{scope}: sorting patch updates.") changes = self.patchsorter.sort(patch) @@ -110,19 +140,19 @@ def apply(self, patch, sort=True): new_config = self.config_wrapper.get_config_db_as_json() self.changeapplier.remove_backend_tables_from_config(target_config) self.changeapplier.remove_backend_tables_from_config(new_config) - if not(self.patch_wrapper.verify_same_json(target_config, new_config)): + if not (self.patch_wrapper.verify_same_json(target_config, new_config)): raise GenericConfigUpdaterError(f"{scope}: after applying patch to config, there are still some parts not updated") self.logger.log_notice(f"{scope} patch application completed.") class ConfigReplacer: - def __init__(self, patch_applier=None, config_wrapper=None, patch_wrapper=None, namespace=multi_asic.DEFAULT_NAMESPACE): - self.namespace = namespace + def __init__(self, patch_applier=None, config_wrapper=None, patch_wrapper=None, scope=multi_asic.DEFAULT_NAMESPACE): + self.scope = scope self.logger = genericUpdaterLogging.get_logger(title="Config Replacer", print_all_to_console=True) - self.patch_applier = patch_applier if patch_applier is not None else PatchApplier(namespace=self.namespace) - self.config_wrapper = config_wrapper if config_wrapper is not None else ConfigWrapper(namespace=self.namespace) - self.patch_wrapper = patch_wrapper if patch_wrapper is not None else PatchWrapper(namespace=self.namespace) + self.patch_applier = patch_applier if patch_applier is not None else PatchApplier(scope=self.scope) + self.config_wrapper = config_wrapper if config_wrapper is not None else ConfigWrapper(scope=self.scope) + self.patch_wrapper = patch_wrapper if patch_wrapper is not None else PatchWrapper(scope=self.scope) def replace(self, target_config): self.logger.log_notice("Config replacement starting.") @@ -140,7 +170,7 @@ def replace(self, target_config): self.logger.log_notice("Verifying config replacement is reflected on ConfigDB.") new_config = self.config_wrapper.get_config_db_as_json() - if not(self.patch_wrapper.verify_same_json(target_config, new_config)): + if not (self.patch_wrapper.verify_same_json(target_config, new_config)): raise GenericConfigUpdaterError(f"After replacing config, there is still some parts not updated") self.logger.log_notice("Config replacement completed.") @@ -151,23 +181,24 @@ def __init__(self, checkpoints_dir=CHECKPOINTS_DIR, config_replacer=None, config_wrapper=None, - namespace=multi_asic.DEFAULT_NAMESPACE): - self.namespace = namespace + scope=multi_asic.DEFAULT_NAMESPACE): + self.scope = scope self.logger = genericUpdaterLogging.get_logger(title="Config Rollbacker", print_all_to_console=True) + self.util = Util(checkpoints_dir=checkpoints_dir) self.checkpoints_dir = checkpoints_dir - self.config_replacer = config_replacer if config_replacer is not None else ConfigReplacer(namespace=self.namespace) - self.config_wrapper = config_wrapper if config_wrapper is not None else ConfigWrapper(namespace=self.namespace) + self.config_replacer = config_replacer if config_replacer is not None else ConfigReplacer(scope=self.scope) + self.config_wrapper = config_wrapper if config_wrapper is not None else ConfigWrapper(scope=self.scope) def rollback(self, checkpoint_name): self.logger.log_notice("Config rollbacking starting.") self.logger.log_notice(f"Checkpoint name: {checkpoint_name}.") self.logger.log_notice(f"Verifying '{checkpoint_name}' exists.") - if not 
self._check_checkpoint_exists(checkpoint_name): + if not self.util.check_checkpoint_exists(checkpoint_name): raise ValueError(f"Checkpoint '{checkpoint_name}' does not exist") self.logger.log_notice(f"Loading checkpoint into memory.") - target_config = self._get_checkpoint_content(checkpoint_name) + target_config = self.util.get_checkpoint_content(checkpoint_name) self.logger.log_notice(f"Replacing config using 'Config Replacer'.") self.config_replacer.replace(target_config) @@ -179,16 +210,16 @@ def checkpoint(self, checkpoint_name): self.logger.log_notice(f"Checkpoint name: {checkpoint_name}.") self.logger.log_notice("Getting current config db.") - json_content = self.config_wrapper.get_config_db_as_json() + json_content = get_config_json() self.logger.log_notice("Getting checkpoint full-path.") - path = self._get_checkpoint_full_path(checkpoint_name) + path = self.util.get_checkpoint_full_path(checkpoint_name) self.logger.log_notice("Ensuring checkpoint directory exist.") - self._ensure_checkpoints_dir_exists() + self.util.ensure_checkpoints_dir_exists() self.logger.log_notice(f"Saving config db content to {path}.") - self._save_json_file(path, json_content) + self.util.save_json_file(path, json_content) self.logger.log_notice("Config checkpoint completed.") @@ -196,12 +227,12 @@ def list_checkpoints(self): self.logger.log_info("Listing checkpoints starting.") self.logger.log_info(f"Verifying checkpoints directory '{self.checkpoints_dir}' exists.") - if not self._checkpoints_dir_exist(): + if not self.util.checkpoints_dir_exist(): self.logger.log_info("Checkpoints directory is empty, returning empty checkpoints list.") return [] self.logger.log_info("Getting checkpoints in checkpoints directory.") - checkpoint_names = self._get_checkpoint_names() + checkpoint_names = self.util.get_checkpoint_names() checkpoints_len = len(checkpoint_names) self.logger.log_info(f"Found {checkpoints_len} checkpoint{'s' if checkpoints_len != 1 else ''}{':' if checkpoints_len > 0 else '.'}") @@ -217,59 +248,139 @@ def delete_checkpoint(self, checkpoint_name): self.logger.log_notice(f"Checkpoint name: {checkpoint_name}.") self.logger.log_notice(f"Checking checkpoint exists.") - if not self._check_checkpoint_exists(checkpoint_name): + if not self.util.check_checkpoint_exists(checkpoint_name): raise ValueError(f"Checkpoint '{checkpoint_name}' does not exist") self.logger.log_notice(f"Deleting checkpoint.") - self._delete_checkpoint(checkpoint_name) + self.util.delete_checkpoint(checkpoint_name) self.logger.log_notice("Deleting checkpoint completed.") - def _ensure_checkpoints_dir_exists(self): + +class MultiASICConfigReplacer(ConfigReplacer): + def __init__(self, + patch_applier=None, + config_wrapper=None, + patch_wrapper=None, + scope=multi_asic.DEFAULT_NAMESPACE): + self.logger = genericUpdaterLogging.get_logger(title="MultiASICConfigReplacer", + print_all_to_console=True) + self.scopelist = [HOST_NAMESPACE, *multi_asic.get_namespace_list()] + super().__init__(patch_applier, config_wrapper, patch_wrapper, scope) + + def replace(self, target_config): + config_keys = set(target_config.keys()) + missing_scopes = set(self.scopelist) - config_keys + if missing_scopes: + raise GenericConfigUpdaterError(f"To be replace config is missing scope: {missing_scopes}") + + for scope in self.scopelist: + scope_config = target_config.pop(scope) + if scope.lower() == HOST_NAMESPACE: + scope = multi_asic.DEFAULT_NAMESPACE + ConfigReplacer(scope=scope).replace(scope_config) + + +class 
MultiASICConfigRollbacker(FileSystemConfigRollbacker): + def __init__(self, + checkpoints_dir=CHECKPOINTS_DIR, + config_replacer=None, + config_wrapper=None): + self.logger = genericUpdaterLogging.get_logger(title="MultiASICConfigRollbacker", + print_all_to_console=True) + self.scopelist = [HOST_NAMESPACE, *multi_asic.get_namespace_list()] + self.checkpoints_dir = checkpoints_dir + self.util = Util(checkpoints_dir=checkpoints_dir) + super().__init__(config_wrapper=config_wrapper, config_replacer=config_replacer) + + def rollback(self, checkpoint_name): + self.logger.log_notice("Config rollbacking starting.") + self.logger.log_notice(f"Checkpoint name: {checkpoint_name}.") + self.logger.log_notice(f"Verifying '{checkpoint_name}' exists.") + + if not self.util.check_checkpoint_exists(checkpoint_name): + raise ValueError(f"Checkpoint '{checkpoint_name}' does not exist") + + self.logger.log_notice(f"Loading checkpoint '{checkpoint_name}' into memory.") + target_config = self.util.get_checkpoint_content(checkpoint_name) + self.logger.log_notice(f"Replacing config '{checkpoint_name}' using 'Config Replacer'.") + + for scope in self.scopelist: + config = target_config.pop(scope) + if scope.lower() == HOST_NAMESPACE: + scope = multi_asic.DEFAULT_NAMESPACE + ConfigReplacer(scope=scope).replace(config) + + self.logger.log_notice("Config rollbacking completed.") + + def checkpoint(self, checkpoint_name): + all_configs = get_config_json() + self.logger.log_notice("Config checkpoint starting.") + self.logger.log_notice(f"Checkpoint name: {checkpoint_name}.") + + self.logger.log_notice("Getting checkpoint full-path.") + path = self.util.get_checkpoint_full_path(checkpoint_name) + + self.logger.log_notice("Ensuring checkpoint directory exist.") + self.util.ensure_checkpoints_dir_exists() + + self.logger.log_notice(f"Saving config db content to {path}.") + self.util.save_json_file(path, all_configs) + + self.logger.log_notice("Config checkpoint completed.") + + +class Util: + def __init__(self, checkpoints_dir=CHECKPOINTS_DIR): + self.checkpoints_dir = checkpoints_dir + + def ensure_checkpoints_dir_exists(self): os.makedirs(self.checkpoints_dir, exist_ok=True) - def _save_json_file(self, path, json_content): + def save_json_file(self, path, json_content): with open(path, "w") as fh: fh.write(json.dumps(json_content)) - def _get_checkpoint_content(self, checkpoint_name): - path = self._get_checkpoint_full_path(checkpoint_name) + def get_checkpoint_content(self, checkpoint_name): + path = self.get_checkpoint_full_path(checkpoint_name) with open(path) as fh: text = fh.read() return json.loads(text) - def _get_checkpoint_full_path(self, name): + def get_checkpoint_full_path(self, name): return os.path.join(self.checkpoints_dir, f"{name}{CHECKPOINT_EXT}") - def _get_checkpoint_names(self): + def get_checkpoint_names(self): file_names = [] for file_name in os.listdir(self.checkpoints_dir): if file_name.endswith(CHECKPOINT_EXT): # Remove extension from file name. 
# Example assuming ext is '.cp.json', then 'checkpoint1.cp.json' becomes 'checkpoint1' file_names.append(file_name[:-len(CHECKPOINT_EXT)]) - return file_names - def _checkpoints_dir_exist(self): + def checkpoints_dir_exist(self): return os.path.isdir(self.checkpoints_dir) - def _check_checkpoint_exists(self, name): - path = self._get_checkpoint_full_path(name) + def check_checkpoint_exists(self, name): + path = self.get_checkpoint_full_path(name) return os.path.isfile(path) - def _delete_checkpoint(self, name): - path = self._get_checkpoint_full_path(name) + def delete_checkpoint(self, name): + path = self.get_checkpoint_full_path(name) return os.remove(path) class Decorator(PatchApplier, ConfigReplacer, FileSystemConfigRollbacker): - def __init__(self, decorated_patch_applier=None, decorated_config_replacer=None, decorated_config_rollbacker=None, namespace=multi_asic.DEFAULT_NAMESPACE): + def __init__(self, + decorated_patch_applier=None, + decorated_config_replacer=None, + decorated_config_rollbacker=None, + scope=multi_asic.DEFAULT_NAMESPACE): # initing base classes to make LGTM happy - PatchApplier.__init__(self, namespace=namespace) - ConfigReplacer.__init__(self, namespace=namespace) - FileSystemConfigRollbacker.__init__(self, namespace=namespace) - + PatchApplier.__init__(self, scope=scope) + ConfigReplacer.__init__(self, scope=scope) + FileSystemConfigRollbacker.__init__(self, scope=scope) self.decorated_patch_applier = decorated_patch_applier self.decorated_config_replacer = decorated_config_replacer self.decorated_config_rollbacker = decorated_config_rollbacker @@ -294,10 +405,14 @@ def delete_checkpoint(self, checkpoint_name): class SonicYangDecorator(Decorator): - def __init__(self, patch_wrapper, config_wrapper, decorated_patch_applier=None, decorated_config_replacer=None, namespace=multi_asic.DEFAULT_NAMESPACE): - Decorator.__init__(self, decorated_patch_applier, decorated_config_replacer, namespace=namespace) - - self.namespace = namespace + def __init__(self, + patch_wrapper, + config_wrapper, + decorated_patch_applier=None, + decorated_config_replacer=None, + scope=multi_asic.DEFAULT_NAMESPACE): + Decorator.__init__(self, decorated_patch_applier, decorated_config_replacer, scope=scope) + self.scope = scope self.patch_wrapper = patch_wrapper self.config_wrapper = config_wrapper @@ -316,9 +431,12 @@ def __init__(self, decorated_config_replacer=None, decorated_config_rollbacker=None, config_lock=ConfigLock(), - namespace=multi_asic.DEFAULT_NAMESPACE): - Decorator.__init__(self, decorated_patch_applier, decorated_config_replacer, decorated_config_rollbacker, namespace=namespace) - + scope=multi_asic.DEFAULT_NAMESPACE): + Decorator.__init__(self, + decorated_patch_applier, + decorated_config_replacer, + decorated_config_rollbacker, + scope=scope) self.config_lock = config_lock def apply(self, patch, sort=True): @@ -340,20 +458,20 @@ def execute_write_action(self, action, *args): class GenericUpdateFactory: - def __init__(self, namespace=multi_asic.DEFAULT_NAMESPACE): - self.namespace = namespace + def __init__(self, scope=multi_asic.DEFAULT_NAMESPACE): + self.scope = scope def create_patch_applier(self, config_format, verbose, dry_run, ignore_non_yang_tables, ignore_paths): self.init_verbose_logging(verbose) config_wrapper = self.get_config_wrapper(dry_run) change_applier = self.get_change_applier(dry_run, config_wrapper) - patch_wrapper = PatchWrapper(config_wrapper, namespace=self.namespace) + patch_wrapper = PatchWrapper(config_wrapper, scope=self.scope) patch_sorter = 
self.get_patch_sorter(ignore_non_yang_tables, ignore_paths, config_wrapper, patch_wrapper) patch_applier = PatchApplier(config_wrapper=config_wrapper, patchsorter=patch_sorter, patch_wrapper=patch_wrapper, changeapplier=change_applier, - namespace=self.namespace) + scope=self.scope) if config_format == ConfigFormat.CONFIGDB: pass @@ -361,62 +479,75 @@ def create_patch_applier(self, config_format, verbose, dry_run, ignore_non_yang_ patch_applier = SonicYangDecorator(decorated_patch_applier=patch_applier, patch_wrapper=patch_wrapper, config_wrapper=config_wrapper, - namespace=self.namespace) + scope=self.scope) else: raise ValueError(f"config-format '{config_format}' is not supported") if not dry_run: - patch_applier = ConfigLockDecorator(decorated_patch_applier=patch_applier, namespace=self.namespace) + patch_applier = ConfigLockDecorator(decorated_patch_applier=patch_applier, scope=self.scope) return patch_applier def create_config_replacer(self, config_format, verbose, dry_run, ignore_non_yang_tables, ignore_paths): self.init_verbose_logging(verbose) - config_wrapper = self.get_config_wrapper(dry_run) change_applier = self.get_change_applier(dry_run, config_wrapper) - patch_wrapper = PatchWrapper(config_wrapper, namespace=self.namespace) + patch_wrapper = PatchWrapper(config_wrapper, scope=self.scope) patch_sorter = self.get_patch_sorter(ignore_non_yang_tables, ignore_paths, config_wrapper, patch_wrapper) patch_applier = PatchApplier(config_wrapper=config_wrapper, patchsorter=patch_sorter, patch_wrapper=patch_wrapper, changeapplier=change_applier, - namespace=self.namespace) + scope=self.scope) + if multi_asic.is_multi_asic(): + config_replacer = MultiASICConfigReplacer(patch_applier=patch_applier, + config_wrapper=config_wrapper) + else: + config_replacer = ConfigReplacer(patch_applier=patch_applier, + config_wrapper=config_wrapper, + scope=self.scope) - config_replacer = ConfigReplacer(patch_applier=patch_applier, config_wrapper=config_wrapper, namespace=self.namespace) if config_format == ConfigFormat.CONFIGDB: pass elif config_format == ConfigFormat.SONICYANG: config_replacer = SonicYangDecorator(decorated_config_replacer=config_replacer, patch_wrapper=patch_wrapper, config_wrapper=config_wrapper, - namespace=self.namespace) + scope=self.scope) else: raise ValueError(f"config-format '{config_format}' is not supported") if not dry_run: - config_replacer = ConfigLockDecorator(decorated_config_replacer=config_replacer, namespace=self.namespace) + config_replacer = ConfigLockDecorator(decorated_config_replacer=config_replacer, scope=self.scope) return config_replacer def create_config_rollbacker(self, verbose, dry_run=False, ignore_non_yang_tables=False, ignore_paths=[]): self.init_verbose_logging(verbose) - config_wrapper = self.get_config_wrapper(dry_run) change_applier = self.get_change_applier(dry_run, config_wrapper) - patch_wrapper = PatchWrapper(config_wrapper, namespace=self.namespace) + patch_wrapper = PatchWrapper(config_wrapper, scope=self.scope) patch_sorter = self.get_patch_sorter(ignore_non_yang_tables, ignore_paths, config_wrapper, patch_wrapper) patch_applier = PatchApplier(config_wrapper=config_wrapper, patchsorter=patch_sorter, patch_wrapper=patch_wrapper, changeapplier=change_applier, - namespace=self.namespace) - - config_replacer = ConfigReplacer(config_wrapper=config_wrapper, patch_applier=patch_applier, namespace=self.namespace) - config_rollbacker = FileSystemConfigRollbacker(config_wrapper=config_wrapper, config_replacer=config_replacer, namespace=self.namespace) 
+ scope=self.scope) + if multi_asic.is_multi_asic(): + config_replacer = MultiASICConfigReplacer(config_wrapper=config_wrapper, + patch_applier=patch_applier) + config_rollbacker = MultiASICConfigRollbacker(config_wrapper=config_wrapper, + config_replacer=config_replacer) + else: + config_replacer = ConfigReplacer(config_wrapper=config_wrapper, + patch_applier=patch_applier, + scope=self.scope) + config_rollbacker = FileSystemConfigRollbacker(config_wrapper=config_wrapper, + config_replacer=config_replacer, + scope=self.scope) if not dry_run: - config_rollbacker = ConfigLockDecorator(decorated_config_rollbacker=config_rollbacker, namespace=self.namespace) + config_rollbacker = ConfigLockDecorator(decorated_config_rollbacker=config_rollbacker, scope=self.scope) return config_rollbacker @@ -425,15 +556,15 @@ def init_verbose_logging(self, verbose): def get_config_wrapper(self, dry_run): if dry_run: - return DryRunConfigWrapper(namespace=self.namespace) + return DryRunConfigWrapper(scope=self.scope) else: - return ConfigWrapper(namespace=self.namespace) + return ConfigWrapper(scope=self.scope) def get_change_applier(self, dry_run, config_wrapper): if dry_run: return DryRunChangeApplier(config_wrapper) else: - return ChangeApplier(namespace=self.namespace) + return ChangeApplier(scope=self.scope) def get_patch_sorter(self, ignore_non_yang_tables, ignore_paths, config_wrapper, patch_wrapper): if not ignore_non_yang_tables and not ignore_paths: @@ -452,9 +583,9 @@ def get_patch_sorter(self, ignore_non_yang_tables, ignore_paths, config_wrapper, class GenericUpdater: - def __init__(self, generic_update_factory=None, namespace=multi_asic.DEFAULT_NAMESPACE): + def __init__(self, generic_update_factory=None, scope=multi_asic.DEFAULT_NAMESPACE): self.generic_update_factory = \ - generic_update_factory if generic_update_factory is not None else GenericUpdateFactory(namespace=namespace) + generic_update_factory if generic_update_factory is not None else GenericUpdateFactory(scope=scope) def apply_patch(self, patch, config_format, verbose, dry_run, ignore_non_yang_tables, ignore_paths): patch_applier = self.generic_update_factory.create_patch_applier(config_format, verbose, dry_run, ignore_non_yang_tables, ignore_paths) diff --git a/generic_config_updater/gu_common.py b/generic_config_updater/gu_common.py index c15334222..938aa1d03 100644 --- a/generic_config_updater/gu_common.py +++ b/generic_config_updater/gu_common.py @@ -18,6 +18,7 @@ GCU_FIELD_OP_CONF_FILE = f"{SCRIPT_DIR}/gcu_field_operation_validators.conf.json" HOST_NAMESPACE = "localhost" + class GenericConfigUpdaterError(Exception): pass @@ -53,8 +54,8 @@ def __eq__(self, other): return False class ConfigWrapper: - def __init__(self, yang_dir=YANG_DIR, namespace=multi_asic.DEFAULT_NAMESPACE): - self.namespace = namespace + def __init__(self, yang_dir=YANG_DIR, scope=multi_asic.DEFAULT_NAMESPACE): + self.scope = scope self.yang_dir = YANG_DIR self.sonic_yang_with_loaded_models = None @@ -65,8 +66,8 @@ def get_config_db_as_json(self): return config_db_json def _get_config_db_as_text(self): - if self.namespace is not None and self.namespace != multi_asic.DEFAULT_NAMESPACE: - cmd = ['sonic-cfggen', '-d', '--print-data', '-n', self.namespace] + if self.scope is not None and self.scope != multi_asic.DEFAULT_NAMESPACE: + cmd = ['sonic-cfggen', '-d', '--print-data', '-n', self.scope] else: cmd = ['sonic-cfggen', '-d', '--print-data'] @@ -74,7 +75,8 @@ def _get_config_db_as_text(self): text, err = result.communicate() return_code = result.returncode if 
return_code: # non-zero means failure
-            raise GenericConfigUpdaterError(f"Failed to get running config for namespace: {self.namespace}, Return code: {return_code}, Error: {err}")
+            raise GenericConfigUpdaterError(f"Failed to get running config for namespace: {self.scope},"
+                                            f" Return code: {return_code}, Error: {err}")
         return text
 
     def get_sonic_yang_as_json(self):
@@ -301,8 +303,8 @@ def create_sonic_yang_with_loaded_models(self):
 
 
 class DryRunConfigWrapper(ConfigWrapper):
     # This class will simulate all read/write operations to ConfigDB on a virtual storage unit.
-    def __init__(self, initial_imitated_config_db = None, namespace=multi_asic.DEFAULT_NAMESPACE):
-        super().__init__(namespace=namespace)
+    def __init__(self, initial_imitated_config_db=None, scope=multi_asic.DEFAULT_NAMESPACE):
+        super().__init__(scope=scope)
         self.logger = genericUpdaterLogging.get_logger(title="** DryRun", print_all_to_console=True)
         self.imitated_config_db = copy.deepcopy(initial_imitated_config_db)
 
@@ -322,9 +324,9 @@ def _init_imitated_config_db_if_none(self):
 
 
 class PatchWrapper:
-    def __init__(self, config_wrapper=None, namespace=multi_asic.DEFAULT_NAMESPACE):
-        self.namespace = namespace
-        self.config_wrapper = config_wrapper if config_wrapper is not None else ConfigWrapper(self.namespace)
+    def __init__(self, config_wrapper=None, scope=multi_asic.DEFAULT_NAMESPACE):
+        self.scope = scope
+        self.config_wrapper = config_wrapper if config_wrapper is not None else ConfigWrapper(self.scope)
         self.path_addressing = PathAddressing(self.config_wrapper)
 
     def validate_config_db_patch_has_yang_models(self, patch):
diff --git a/tests/config_test.py b/tests/config_test.py
index 1d12c6547..247b720f5 100644
--- a/tests/config_test.py
+++ b/tests/config_test.py
@@ -1721,6 +1721,7 @@ def setUp(self):
         self.runner = CliRunner()
 
         self.patch_file_path = 'path/to/patch.json'
+        self.replace_file_path = 'path/to/replace.json'
         self.patch_content = [
             {
                 "op": "add",
@@ -1904,6 +1905,140 @@ def test_apply_patch_validate_patch_with_wrong_fetch_config(self, mock_subproces
             # Verify mocked_open was called as expected
             mocked_open.assert_called_with(self.patch_file_path, 'r')
 
+    @patch('generic_config_updater.generic_updater.ConfigReplacer.replace', MagicMock())
+    def test_replace_multiasic(self):
+        # Mock open to simulate file reading
+        mock_replace_content = copy.deepcopy(self.all_config)
+        with patch('builtins.open', mock_open(read_data=json.dumps(mock_replace_content)), create=True) as mocked_open:
+            # Mock GenericUpdater to avoid actual patch application
+            with patch('config.main.GenericUpdater') as mock_generic_updater:
+                mock_generic_updater.return_value.replace_all = MagicMock()
+
+                print("Multi ASIC: {}".format(multi_asic.is_multi_asic()))
+                # Invocation of the command with the CliRunner
+                result = self.runner.invoke(config.config.commands["replace"],
+                                            [self.replace_file_path],
+                                            catch_exceptions=True)
+
+                print("Exit Code: {}, output: {}".format(result.exit_code, result.output))
+                # Assertions and verifications
+                self.assertEqual(result.exit_code, 0, "Command should succeed")
+                self.assertIn("Config replaced successfully.", result.output)
+
+                # Verify mocked_open was called as expected
+                mocked_open.assert_called_with(self.replace_file_path, 'r')
+
+    @patch('generic_config_updater.generic_updater.ConfigReplacer.replace', MagicMock())
+    def test_replace_multiasic_missing_scope(self):
+        # Mock open to simulate file reading
+        mock_replace_content = copy.deepcopy(self.all_config)
+        mock_replace_content.pop("asic0")
+        with patch('builtins.open', mock_open(read_data=json.dumps(mock_replace_content)), create=True):
+            print("Multi ASIC: {}".format(multi_asic.is_multi_asic()))
+            # Invocation of the command with the CliRunner
+            result = self.runner.invoke(config.config.commands["replace"],
+                                        [self.replace_file_path],
+                                        catch_exceptions=True)
+
+            print("Exit Code: {}, output: {}".format(result.exit_code, result.output))
+            # Assertions and verifications
+            self.assertNotEqual(result.exit_code, 0, "Command should fail")
+            self.assertIn("Failed to replace config", result.output)
+
+    @patch('generic_config_updater.generic_updater.subprocess.Popen')
+    @patch('generic_config_updater.generic_updater.Util.ensure_checkpoints_dir_exists', mock.Mock(return_value=True))
+    @patch('generic_config_updater.generic_updater.Util.save_json_file', MagicMock())
+    def test_checkpoint_multiasic(self, mock_subprocess_popen):
+        allconfigs = copy.deepcopy(self.all_config)
+
+        # Create mock instances for each subprocess call
+        mock_instance_localhost = MagicMock()
+        mock_instance_localhost.communicate.return_value = (json.dumps(allconfigs["localhost"]), 0)
+        mock_instance_localhost.returncode = 0
+
+        mock_instance_asic0 = MagicMock()
+        mock_instance_asic0.communicate.return_value = (json.dumps(allconfigs["asic0"]), 0)
+        mock_instance_asic0.returncode = 0
+
+        mock_instance_asic1 = MagicMock()
+        mock_instance_asic1.communicate.return_value = (json.dumps(allconfigs["asic1"]), 0)
+        mock_instance_asic1.returncode = 0
+
+        # Setup side effect to return different mock instances based on input arguments
+        def side_effect(*args, **kwargs):
+            if "asic" not in args[0]:
+                return mock_instance_localhost
+            elif "asic0" in args[0]:
+                return mock_instance_asic0
+            elif "asic1" in args[0]:
+                return mock_instance_asic1
+            else:
+                return MagicMock()  # Default case
+
+        mock_subprocess_popen.side_effect = side_effect
+
+        checkpointname = "checkpointname"
+        print("Multi ASIC: {}".format(multi_asic.is_multi_asic()))
+        # Invocation of the command with the CliRunner
+        result = self.runner.invoke(config.config.commands["checkpoint"],
+                                    [checkpointname],
+                                    catch_exceptions=True)
+
+        print("Exit Code: {}, output: {}".format(result.exit_code, result.output))
+        # Assertions and verifications
+        self.assertEqual(result.exit_code, 0, "Command should succeed")
+        self.assertIn("Checkpoint created successfully.", result.output)
+
+    @patch('generic_config_updater.generic_updater.Util.check_checkpoint_exists', mock.Mock(return_value=True))
+    @patch('generic_config_updater.generic_updater.ConfigReplacer.replace', MagicMock())
+    @patch('generic_config_updater.generic_updater.Util.get_checkpoint_content')
+    def test_rollback_multiasic(self, mock_get_checkpoint_content):
+        mock_get_checkpoint_content.return_value = copy.deepcopy(self.all_config)
+        checkpointname = "checkpointname"
+        print("Multi ASIC: {}".format(multi_asic.is_multi_asic()))
+        # Invocation of the command with the CliRunner
+        result = self.runner.invoke(config.config.commands["rollback"],
+                                    [checkpointname],
+                                    catch_exceptions=True)
+
+        print("Exit Code: {}, output: {}".format(result.exit_code, result.output))
+        # Assertions and verifications
+        self.assertEqual(result.exit_code, 0, "Command should succeed")
+        self.assertIn("Config rolled back successfully.", result.output)
+
+    @patch('generic_config_updater.generic_updater.Util.checkpoints_dir_exist', mock.Mock(return_value=True))
+    @patch('generic_config_updater.generic_updater.Util.get_checkpoint_names',
+           mock.Mock(return_value=["checkpointname"]))
+    def test_list_checkpoint_multiasic(self):
+        print("Multi ASIC: {}".format(multi_asic.is_multi_asic()))
+        # Invocation of the command with the CliRunner
+        result = self.runner.invoke(config.config.commands["list-checkpoints"],
+                                    catch_exceptions=True)
+
+        print("Exit Code: {}, output: {}".format(result.exit_code, result.output))
+        # Assertions and verifications
+        self.assertEqual(result.exit_code, 0, "Command should succeed")
+        self.assertIn("checkpointname", result.output)
+
+    @patch('generic_config_updater.generic_updater.Util.delete_checkpoint', MagicMock())
+    @patch('generic_config_updater.generic_updater.Util.check_checkpoint_exists', mock.Mock(return_value=True))
+    def test_delete_checkpoint_multiasic(self):
+        checkpointname = "checkpointname"
+        # Mock GenericUpdater to avoid actual patch application
+        with patch('config.main.GenericUpdater') as mock_generic_updater:
+            mock_generic_updater.return_value.delete_checkpoint = MagicMock()
+
+            print("Multi ASIC: {}".format(multi_asic.is_multi_asic()))
+            # Invocation of the command with the CliRunner
+            result = self.runner.invoke(config.config.commands["delete-checkpoint"],
+                                        [checkpointname],
+                                        catch_exceptions=True)
+
+            print("Exit Code: {}, output: {}".format(result.exit_code, result.output))
+            # Assertions and verifications
+            self.assertEqual(result.exit_code, 0, "Command should succeed")
+            self.assertIn("Checkpoint deleted successfully.", result.output)
+
     @classmethod
     def teardown_class(cls):
         print("TEARDOWN")
diff --git a/tests/generic_config_updater/change_applier_test.py b/tests/generic_config_updater/change_applier_test.py
index 7e56d8b7b..423b3fc32 100644
--- a/tests/generic_config_updater/change_applier_test.py
+++ b/tests/generic_config_updater/change_applier_test.py
@@ -241,10 +241,11 @@ def test_change_apply(self, mock_set, mock_db, mock_subprocess_Popen):
         running_config = copy.deepcopy(read_data["running_data"])
         json_changes = copy.deepcopy(read_data["json_changes"])
 
+        generic_config_updater.change_applier.ChangeApplier.updater_conf = None
         generic_config_updater.change_applier.UPDATER_CONF_FILE = CONF_FILE
         generic_config_updater.change_applier.set_verbose(True)
         generic_config_updater.services_validator.set_verbose(True)
-        
+
         applier = generic_config_updater.change_applier.ChangeApplier()
         debug_print("invoked applier")
 
@@ -253,7 +254,7 @@ def test_change_apply(self, mock_set, mock_db, mock_subprocess_Popen):
 
         # Take copy for comparison
         start_running_config = copy.deepcopy(running_config)
-        
+
         debug_print("main: json_change_index={}".format(json_change_index))
         applier.apply(mock_obj())
 
@@ -296,4 +297,3 @@ def test_apply__calls_apply_change_to_config_db(self):
 
         # Assert
         applier.config_wrapper.apply_change_to_config_db.assert_has_calls([call(change)])
-
diff --git a/tests/generic_config_updater/generic_updater_test.py b/tests/generic_config_updater/generic_updater_test.py
index aab2eae27..4007f726b 100644
--- a/tests/generic_config_updater/generic_updater_test.py
+++ b/tests/generic_config_updater/generic_updater_test.py
@@ -2,7 +2,7 @@
 import os
 import shutil
 import unittest
-from unittest.mock import MagicMock, Mock, call
+from unittest.mock import MagicMock, Mock, call, patch
 
 from .gutest_helpers import create_side_effect_dict, Files
 import generic_config_updater.generic_updater as gu
@@ -124,6 +124,8 @@ def __create_config_replacer(self, changes=None, verified_same_config=True):
 
         return gu.ConfigReplacer(patch_applier, config_wrapper, patch_wrapper)
 
+
+@patch('generic_config_updater.generic_updater.get_config_json', MagicMock(return_value={}))
 class TestFileSystemConfigRollbacker(unittest.TestCase):
     def setUp(self):
         self.checkpoints_dir = os.path.join(os.getcwd(),"checkpoints")
diff --git a/tests/generic_config_updater/gu_common_test.py b/tests/generic_config_updater/gu_common_test.py
index a2a776c0b..4a16a5ca4 100644
--- a/tests/generic_config_updater/gu_common_test.py
+++ b/tests/generic_config_updater/gu_common_test.py
@@ -76,6 +76,28 @@ def test_ctor__default_values_set(self):
 
         self.assertEqual("/usr/local/yang-models", gu_common.YANG_DIR)
 
+    @patch('generic_config_updater.gu_common.subprocess.Popen')
+    def test_get_config_db_as_text(self, mock_popen):
+        config_wrapper = gu_common.ConfigWrapper()
+        mock_proc = MagicMock()
+        mock_proc.communicate = MagicMock(
+            return_value=("[]", None))
+        mock_proc.returncode = 0
+        mock_popen.return_value = mock_proc
+        actual = config_wrapper._get_config_db_as_text()
+        expected = "[]"
+        self.assertEqual(actual, expected)
+
+        config_wrapper = gu_common.ConfigWrapper(scope="asic0")
+        mock_proc = MagicMock()
+        mock_proc.communicate = MagicMock(
+            return_value=("[]", None))
+        mock_proc.returncode = 0
+        mock_popen.return_value = mock_proc
+        actual = config_wrapper._get_config_db_as_text()
+        expected = "[]"
+        self.assertEqual(actual, expected)
+
     def test_get_sonic_yang_as_json__returns_sonic_yang_as_json(self):
         # Arrange
         config_wrapper = self.config_wrapper_mock
diff --git a/tests/generic_config_updater/multiasic_change_applier_test.py b/tests/generic_config_updater/multiasic_change_applier_test.py
index e8b277618..d7f734d2e 100644
--- a/tests/generic_config_updater/multiasic_change_applier_test.py
+++ b/tests/generic_config_updater/multiasic_change_applier_test.py
@@ -40,7 +40,7 @@ def test_extract_scope(self):
 
     @patch('generic_config_updater.change_applier.ChangeApplier._get_running_config', autospec=True)
     @patch('generic_config_updater.change_applier.ConfigDBConnector', autospec=True)
-    def test_apply_change_default_namespace(self, mock_ConfigDBConnector, mock_get_running_config):
+    def test_apply_change_default_scope(self, mock_ConfigDBConnector, mock_get_running_config):
         # Setup mock for ConfigDBConnector
         mock_db = MagicMock()
         mock_ConfigDBConnector.return_value = mock_db
@@ -67,7 +67,7 @@ def test_apply_change_default_namespace(self, mock_ConfigDBConnector, mock_get_r
             }
         }
 
-        # Instantiate ChangeApplier with the default namespace
+        # Instantiate ChangeApplier with the default scope
         applier = generic_config_updater.change_applier.ChangeApplier()
 
         # Prepare a change object or data that applier.apply would use
@@ -81,7 +81,7 @@ def test_apply_change_default_namespace(self, mock_ConfigDBConnector, mock_get_r
 
     @patch('generic_config_updater.change_applier.ChangeApplier._get_running_config', autospec=True)
     @patch('generic_config_updater.change_applier.ConfigDBConnector', autospec=True)
-    def test_apply_change_given_namespace(self, mock_ConfigDBConnector, mock_get_running_config):
+    def test_apply_change_given_scope(self, mock_ConfigDBConnector, mock_get_running_config):
         # Setup mock for ConfigDBConnector
         mock_db = MagicMock()
         mock_ConfigDBConnector.return_value = mock_db
@@ -108,8 +108,8 @@ def test_apply_change_given_namespace(self, mock_ConfigDBConnector, mock_get_run
             }
         }
 
-        # Instantiate ChangeApplier with the default namespace
-        applier = generic_config_updater.change_applier.ChangeApplier(namespace="asic0")
+        # Instantiate ChangeApplier with the given scope
+        applier = generic_config_updater.change_applier.ChangeApplier(scope="asic0")
 
         # Prepare a change object or data that applier.apply would use
         change = MagicMock()
@@ -117,7 +117,7 @@ def test_apply_change_given_namespace(self, mock_ConfigDBConnector, mock_get_run
         # Call the apply method with the change object
         applier.apply(change)
 
-        # Assert ConfigDBConnector called with the correct namespace
+        # Assert ConfigDBConnector called with the correct scope
         mock_ConfigDBConnector.assert_called_once_with(use_unix_socket_path=True, namespace="asic0")
 
     @patch('generic_config_updater.change_applier.ChangeApplier._get_running_config', autospec=True)
@@ -129,9 +129,9 @@ def test_apply_change_failure(self, mock_ConfigDBConnector, mock_get_running_con
         # Setup mock for json.load to return some running configuration
         mock_get_running_config.side_effect = Exception("Failed to get running config")
 
-        # Instantiate ChangeApplier with a specific namespace to simulate applying changes in a multi-asic environment
-        namespace = "asic0"
-        applier = generic_config_updater.change_applier.ChangeApplier(namespace=namespace)
+        # Instantiate ChangeApplier with a specific scope to simulate applying changes in a multi-asic environment
+        scope = "asic0"
+        applier = generic_config_updater.change_applier.ChangeApplier(scope=scope)
 
         # Prepare a change object or data that applier.apply would use
         change = MagicMock()
@@ -159,8 +159,8 @@ def test_apply_patch_with_empty_tables_failure(self, mock_ConfigDBConnector, moc
             }
         }
 
-        # Instantiate ChangeApplier with a specific namespace to simulate applying changes in a multi-asic environment
-        applier = generic_config_updater.change_applier.ChangeApplier(namespace="asic0")
+        # Instantiate ChangeApplier with a specific scope to simulate applying changes in a multi-asic environment
+        applier = generic_config_updater.change_applier.ChangeApplier(scope="asic0")
 
         # Prepare a change object or data that applier.apply would use, simulating a patch that requires non-empty tables
         change = MagicMock()
diff --git a/tests/generic_config_updater/multiasic_generic_updater_test.py b/tests/generic_config_updater/multiasic_generic_updater_test.py
index 4a55eb98b..5acdd391f 100644
--- a/tests/generic_config_updater/multiasic_generic_updater_test.py
+++ b/tests/generic_config_updater/multiasic_generic_updater_test.py
@@ -19,7 +19,7 @@ class TestMultiAsicPatchApplier(unittest.TestCase):
     @patch('generic_config_updater.gu_common.PatchWrapper.simulate_patch')
     @patch('generic_config_updater.generic_updater.ChangeApplier')
     def test_apply_patch_specific_namespace(self, mock_ChangeApplier, mock_simulate_patch, mock_get_config, mock_get_empty_tables):
-        namespace = "asic0"
+        scope = "asic0"
         patch_data = jsonpatch.JsonPatch([
             {
                 "op": "add",
@@ -158,10 +158,10 @@ def test_apply_patch_specific_namespace(self, mock_ChangeApplier, mock_simulate_
             }
         }
 
-        patch_applier = generic_config_updater.generic_updater.PatchApplier(namespace=namespace)
+        patch_applier = generic_config_updater.generic_updater.PatchApplier(scope=scope)
 
         # Apply the patch and verify
         patch_applier.apply(patch_data)
 
         # Assertions to ensure the namespace is correctly used in underlying calls
-        mock_ChangeApplier.assert_called_once_with(namespace=namespace)
+        mock_ChangeApplier.assert_called_once_with(scope=scope)

From c643f682a4b5da5658de5737c4ebe3136bc0e8fd Mon Sep 17 00:00:00 2001
From: Xincun Li
Date: Mon, 17 Jun 2024 11:22:03 -0700
Subject: [PATCH 3/3] Add missing import

---
 tests/config_test.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/tests/config_test.py b/tests/config_test.py
index 247b720f5..5ad6aa95c 100644
--- a/tests/config_test.py
+++ b/tests/config_test.py
@@ -17,6 +17,7 @@
 from sonic_py_common import device_info, multi_asic
 from utilities_common.db import Db
 from utilities_common.general import load_module_from_source
+from unittest.mock import MagicMock, patch, mock_open
 from generic_config_updater.generic_updater import ConfigFormat
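
For readers unfamiliar with the mocking pattern used by `test_checkpoint_multiasic` above: a single `subprocess.Popen` mock dispatches a per-scope fake process through `side_effect`. Below is a minimal, self-contained sketch of that pattern; it is not taken from the patch, and the command tokens and config payloads are illustrative only.

```
# Sketch of the per-scope Popen dispatch used in the multi-ASIC tests.
# fake_configs and the command line are made-up examples, not the real CLI.
import json
import subprocess
from unittest.mock import MagicMock, patch

fake_configs = {
    "localhost": {"DEVICE_METADATA": {"localhost": {}}},
    "asic0": {"DEVICE_METADATA": {"localhost": {}}},
    "asic1": {"DEVICE_METADATA": {"localhost": {}}},
}

def make_proc(config):
    # Fake Popen object: communicate() returns that scope's config as JSON.
    proc = MagicMock()
    proc.communicate.return_value = (json.dumps(config), 0)
    proc.returncode = 0
    return proc

def popen_side_effect(*args, **kwargs):
    # Pick the fake process based on which scope name appears in the command.
    command = args[0]
    for scope in ("asic0", "asic1"):
        if any(scope in str(token) for token in command):
            return make_proc(fake_configs[scope])
    return make_proc(fake_configs["localhost"])

with patch("subprocess.Popen", side_effect=popen_side_effect):
    out, _ = subprocess.Popen(["show", "runningconfiguration", "all", "-n", "asic1"]).communicate()
    assert json.loads(out) == fake_configs["asic1"]
```

The tests in the patch apply the same idea with slices of `self.all_config`, so the checkpoint path under test sees a distinct running config per scope without touching a real ConfigDB.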