diff --git a/examples/kubernetes/compiled/removal/copy_target b/examples/kubernetes/compiled/removal/copy_target new file mode 100644 index 000000000..9301ae519 --- /dev/null +++ b/examples/kubernetes/compiled/removal/copy_target @@ -0,0 +1 @@ +for_testing \ No newline at end of file diff --git a/examples/kubernetes/inventory/targets/removal.yml b/examples/kubernetes/inventory/targets/removal.yml index 6f4d41694..e78ed053d 100644 --- a/examples/kubernetes/inventory/targets/removal.yml +++ b/examples/kubernetes/inventory/targets/removal.yml @@ -9,7 +9,7 @@ parameters: - copy_target output_path: . # test removal of a file - - input_type: remove - input_paths: - - compiled/${kapitan:vars:target}/copy_target - output_path: . \ No newline at end of file +# - input_type: remove +# input_paths: +# - compiled/${kapitan:vars:target}/copy_target +# output_path: . \ No newline at end of file diff --git a/kapitan/cli.py b/kapitan/cli.py index d1d81ee8b..ce25b3a67 100644 --- a/kapitan/cli.py +++ b/kapitan/cli.py @@ -94,6 +94,7 @@ def trigger_compile(args): verbose=hasattr(args, "verbose") and args.verbose, use_go_jsonnet=args.use_go_jsonnet, compose_node_name=args.compose_node_name, + multiprocess_objects=args.multiprocess_objects, ) @@ -305,6 +306,12 @@ def build_parser(): action="store_true", help="dumps all none-type entries as empty, default is dumping as 'null'", ) + compile_parser.add_argument( + "--multiprocess-objects", + default=from_dot_kapitan("compile", "multiprocess-objects", False), + action="store_true", + help="compute compile objects in parallel, default is 'false'", + ) compile_selector_parser = compile_parser.add_mutually_exclusive_group() compile_selector_parser.add_argument( diff --git a/kapitan/targets.py b/kapitan/targets.py index c5e18e36f..cbb06465a 100644 --- a/kapitan/targets.py +++ b/kapitan/targets.py @@ -151,6 +151,24 @@ def compile_targets( logger.info("Rendered inventory (%.2fs)", time.time() - rendering_start) + if 
kwargs.get("multiprocess_objects"): + # split each target into one single-object pseudo-target per compile object, so the worker pool can parallelise across compile objects as well as across targets + new_target_objs = [] + for target_obj in target_objs: + compile_objs = target_obj["compile"] + objects_length = len(compile_objs) + + for id, compile_obj in enumerate(compile_objs): + generated_target_obj = target_obj.copy() + generated_target_obj["compile"] = [compile_obj] + generated_target_obj["id"] = id + generated_target_obj["max_id"] = objects_length - 1 + new_target_objs.append(generated_target_obj) + + target_objs = new_target_objs + + compile_start = time.time() + + worker = partial( compile_target, search_paths=search_paths, @@ -165,6 +183,9 @@ # so p is only not None when raising an exception [p.get() for p in pool.imap_unordered(worker, target_objs) if p] + if kwargs.get("multiprocess_objects"): + logger.info(f"\nCompiled {len(target_objs)} compile targets ({time.time() - compile_start:.2f}s)") + os.makedirs(compile_path, exist_ok=True) # if '-t' is set on compile or only a few changed, only override selected targets @@ -500,6 +521,18 @@ input_compiler.make_compile_dirs(target_name, output_path, **kwargs) input_compiler.compile_obj(comp_obj, ext_vars, **kwargs) + if kwargs.get("multiprocess_objects"): + current_id = target_obj["id"] + max_id = target_obj["max_id"] + # this split target's "compile" list contains only one element; log per-object progress at debug level, and emit the info-level "Compiled" line only when the highest-id object finishes + if current_id != max_id: + logger.debug( + f"Compiled {target_obj['target_full_path']} - {compile_objs[0]['input_type']} ({current_id} / {max_id})" + ) + else: + logger.info(f"Compiled {target_obj['target_full_path']}") + return + logger.info("Compiled %s (%.2fs)", target_obj["target_full_path"], time.time() - start)