From e15f9f905d1137b5bc8fb2bfac4c48e950acc27b Mon Sep 17 00:00:00 2001
From: dongy
Date: Mon, 10 Jul 2023 22:05:02 -0700
Subject: [PATCH 01/14] update

Signed-off-by: dongy
---
 .../dints/configs/hyper_parameters.yaml         | 17 ++++++++++++++++-
 .../algorithm_templates/dints/scripts/infer.py  |  1 +
 .../algorithm_templates/dints/scripts/train.py  |  7 ++++++-
 .../dints/scripts/validate.py                   |  1 +
 4 files changed, 24 insertions(+), 2 deletions(-)

diff --git a/auto3dseg/algorithm_templates/dints/configs/hyper_parameters.yaml b/auto3dseg/algorithm_templates/dints/configs/hyper_parameters.yaml
index 854dc195..008a77e6 100644
--- a/auto3dseg/algorithm_templates/dints/configs/hyper_parameters.yaml
+++ b/auto3dseg/algorithm_templates/dints/configs/hyper_parameters.yaml
@@ -73,9 +73,24 @@ training:
 
 # fine-tuning
 finetune:
-  activate: false
+  activate_finetune: false
   pretrained_ckpt_name: "$@bundle_root + '/model_fold' + str(@fold) + '/best_metric_model.pt'"
 
+  overwrite:
+    learning_rate: 0.0005
+    optimizer:
+      _target_: torch.optim.Adam
+      lr: "@training#learning_rate"
+      weight_decay: 4.0e-05
+    lr_scheduler:
+      _target_: torch.optim.lr_scheduler.ConstantLR
+      optimizer: "$@training#optimizer"
+      factor: 1.0
+      total_iters: '$@training#num_epochs // @training#num_epochs_per_validation + 1'
+    adapt_valid_mode: false
+    early_stop_mode: false
+    num_epochs_per_validation: 1
+
 # validation
 validate:
   ckpt_name: "$@bundle_root + '/model_fold' + str(@fold) + '/best_metric_model.pt'"
diff --git a/auto3dseg/algorithm_templates/dints/scripts/infer.py b/auto3dseg/algorithm_templates/dints/scripts/infer.py
index 9b4d6732..bac4b124 100644
--- a/auto3dseg/algorithm_templates/dints/scripts/infer.py
+++ b/auto3dseg/algorithm_templates/dints/scripts/infer.py
@@ -111,6 +111,7 @@ def pre_operation(config_file, **override):
                     _factor *= 96.0 / float(_patch_size[2])
 
                     _factor /= 6.0
+                    _factor /= 6.0 # further reduce training time
 
                     _factor = max(1.0, _factor)
                     _estimated_epochs = 400.0
diff --git a/auto3dseg/algorithm_templates/dints/scripts/train.py b/auto3dseg/algorithm_templates/dints/scripts/train.py
index 40debf76..9290b860 100644
--- a/auto3dseg/algorithm_templates/dints/scripts/train.py
+++ b/auto3dseg/algorithm_templates/dints/scripts/train.py
@@ -173,6 +173,7 @@ def pre_operation(config_file, **override):
                     _factor *= 96.0 / float(_patch_size[2])
 
                     _factor /= 6.0
+                    _factor /= 6.0 # further reduce training time
 
                     _factor = max(1.0, _factor)
                     _estimated_epochs = 400.0
@@ -215,6 +216,10 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override):
         parser.read_config(config_file_)
         parser.update(pairs=_args)
 
+    if parser["finetune"]["activate_finetune"] == True and "overwrite" in parser["finetune"]:
+        parser["training"].update(parser["finetune"]["overwrite"])
+        parser["finetune"].pop("overwrite")
+
     amp = parser.get_parsed_content("training#amp")
     bundle_root = parser.get_parsed_content("bundle_root")
     ckpt_path = parser.get_parsed_content("ckpt_path")
@@ -457,7 +462,7 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override):
     if torch.cuda.device_count() > 1:
         model = DistributedDataParallel(model, device_ids=[device], find_unused_parameters=True)
 
-    if finetune["activate"] and os.path.isfile(finetune["pretrained_ckpt_name"]):
+    if finetune["activate_finetune"] and os.path.isfile(finetune["pretrained_ckpt_name"]):
         logger.debug("fine-tuning pre-trained checkpoint {:s}".format(finetune["pretrained_ckpt_name"]))
         if torch.cuda.device_count() > 1:
             model.module.load_state_dict(torch.load(finetune["pretrained_ckpt_name"], map_location=device))
diff --git a/auto3dseg/algorithm_templates/dints/scripts/validate.py b/auto3dseg/algorithm_templates/dints/scripts/validate.py
index ef1a2b17..9426f414 100644
--- a/auto3dseg/algorithm_templates/dints/scripts/validate.py
+++ b/auto3dseg/algorithm_templates/dints/scripts/validate.py
@@ -113,6 +113,7 @@ def pre_operation(config_file, **override):
                     _factor *= 96.0 / float(_patch_size[2])
 
                     _factor /= 6.0
+                    _factor /= 6.0 # further reduce training time
 
                     _factor = max(1.0, _factor)
                     _estimated_epochs = 400.0

From 21e44f57d8f24f5de107fec675b2ccffde44cbe4 Mon Sep 17 00:00:00 2001
From: dongy
Date: Tue, 11 Jul 2023 10:35:27 -0700
Subject: [PATCH 02/14] update

Signed-off-by: dongy
---
 .../dints/configs/hyper_parameters.yaml        |  7 ++-----
 .../algorithm_templates/dints/scripts/train.py | 16 ++++++++++++++++
 2 files changed, 18 insertions(+), 5 deletions(-)

diff --git a/auto3dseg/algorithm_templates/dints/configs/hyper_parameters.yaml b/auto3dseg/algorithm_templates/dints/configs/hyper_parameters.yaml
index 008a77e6..8e266e5f 100644
--- a/auto3dseg/algorithm_templates/dints/configs/hyper_parameters.yaml
+++ b/auto3dseg/algorithm_templates/dints/configs/hyper_parameters.yaml
@@ -77,11 +77,7 @@ finetune:
   pretrained_ckpt_name: "$@bundle_root + '/model_fold' + str(@fold) + '/best_metric_model.pt'"
 
   overwrite:
-    learning_rate: 0.0005
-    optimizer:
-      _target_: torch.optim.Adam
-      lr: "@training#learning_rate"
-      weight_decay: 4.0e-05
+    learning_rate: 0.001
     lr_scheduler:
       _target_: torch.optim.lr_scheduler.ConstantLR
       optimizer: "$@training#optimizer"
       factor: 1.0
       total_iters: '$@training#num_epochs // @training#num_epochs_per_validation + 1'
     adapt_valid_mode: false
     early_stop_mode: false
+    num_epochs: 20
     num_epochs_per_validation: 1
 
 # validation
diff --git a/auto3dseg/algorithm_templates/dints/scripts/train.py b/auto3dseg/algorithm_templates/dints/scripts/train.py
index 9290b860..4d34f4ca 100644
--- a/auto3dseg/algorithm_templates/dints/scripts/train.py
+++ b/auto3dseg/algorithm_templates/dints/scripts/train.py
@@ -480,6 +480,22 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override):
 
     best_metric = -1
     best_metric_epoch = -1
+
+    if finetune["activate_finetune"] and os.path.isfile(os.path.join(ckpt_path, "progress.yaml")):
+        with open(os.path.join(ckpt_path, "progress.yaml"), "r") as in_file:
+            _progress = yaml.safe_load(in_file)
+
+        if isinstance(_progress, list):
+            for _i in range(len(_progress)):
+                _result = _progress[-1 - _i]
+                if _result["inverted_best_validation"] == False:
+                    best_metric = _result["best_avg_dice_score"]
+                    best_metric = float(best_metric)
+                    best_metric_epoch = _result["best_avg_dice_score_epoch"]
+                    best_metric_epoch = int(best_metric_epoch)
+                    logger.debug(f"The optimal checkpoints to date have been successfully loaded, boasting a peak metric of {best_metric:.3f}.")
+                    break
+
     idx_iter = 0
     metric_dim = output_classes - 1 if softmax else output_classes
     val_devices_input = {}

From 11230a2bcbf3bd2e3f201b5712a0ef7707d3cd9b Mon Sep 17 00:00:00 2001
From: monai-bot
Date: Wed, 12 Jul 2023 22:04:49 +0000
Subject: [PATCH 03/14] [MONAI] code formatting

Signed-off-by: monai-bot
---
 auto3dseg/algorithm_templates/dints/scripts/infer.py    |  2 +-
 auto3dseg/algorithm_templates/dints/scripts/train.py    | 10 ++++++----
 .../algorithm_templates/dints/scripts/validate.py       |  2 +-
 3 files changed, 8 insertions(+), 6 deletions(-)

diff --git a/auto3dseg/algorithm_templates/dints/scripts/infer.py 
b/auto3dseg/algorithm_templates/dints/scripts/infer.py index bac4b124..b96294a9 100644 --- a/auto3dseg/algorithm_templates/dints/scripts/infer.py +++ b/auto3dseg/algorithm_templates/dints/scripts/infer.py @@ -111,7 +111,7 @@ def pre_operation(config_file, **override): _factor *= 96.0 / float(_patch_size[2]) _factor /= 6.0 - _factor /= 6.0 # further reduce training time + _factor /= 6.0 # further reduce training time _factor = max(1.0, _factor) _estimated_epochs = 400.0 diff --git a/auto3dseg/algorithm_templates/dints/scripts/train.py b/auto3dseg/algorithm_templates/dints/scripts/train.py index 4d34f4ca..437ad867 100644 --- a/auto3dseg/algorithm_templates/dints/scripts/train.py +++ b/auto3dseg/algorithm_templates/dints/scripts/train.py @@ -173,7 +173,7 @@ def pre_operation(config_file, **override): _factor *= 96.0 / float(_patch_size[2]) _factor /= 6.0 - _factor /= 6.0 # further reduce training time + _factor /= 6.0 # further reduce training time _factor = max(1.0, _factor) _estimated_epochs = 400.0 @@ -216,7 +216,7 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): parser.read_config(config_file_) parser.update(pairs=_args) - if parser["finetune"]["activate_finetune"] == True and "overwrite" in parser["finetune"]: + if parser["finetune"]["activate_finetune"] and "overwrite" in parser["finetune"]: parser["training"].update(parser["finetune"]["overwrite"]) parser["finetune"].pop("overwrite") @@ -488,12 +488,14 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): if isinstance(_progress, list): for _i in range(len(_progress)): _result = _progress[-1 - _i] - if _result["inverted_best_validation"] == False: + if not _result["inverted_best_validation"]: best_metric = _result["best_avg_dice_score"] best_metric = float(best_metric) best_metric_epoch = _result["best_avg_dice_score_epoch"] best_metric_epoch = int(best_metric_epoch) - logger.debug(f"The optimal checkpoints to date have been successfully loaded, boasting a peak metric of {best_metric:.3f}.") + logger.debug( + f"The optimal checkpoints to date have been successfully loaded, boasting a peak metric of {best_metric:.3f}." 
+ ) break idx_iter = 0 diff --git a/auto3dseg/algorithm_templates/dints/scripts/validate.py b/auto3dseg/algorithm_templates/dints/scripts/validate.py index 9426f414..e0f7915e 100644 --- a/auto3dseg/algorithm_templates/dints/scripts/validate.py +++ b/auto3dseg/algorithm_templates/dints/scripts/validate.py @@ -113,7 +113,7 @@ def pre_operation(config_file, **override): _factor *= 96.0 / float(_patch_size[2]) _factor /= 6.0 - _factor /= 6.0 # further reduce training time + _factor /= 6.0 # further reduce training time _factor = max(1.0, _factor) _estimated_epochs = 400.0 From 017caae021a6185a90cff19971c1fde4eb0f44bf Mon Sep 17 00:00:00 2001 From: dongy Date: Mon, 7 Aug 2023 22:23:17 -0700 Subject: [PATCH 04/14] update dints Signed-off-by: dongy --- .../dints/configs/hyper_parameters.yaml | 1 + .../dints/scripts/infer.py | 29 +++++++++++++++++-- 2 files changed, 28 insertions(+), 2 deletions(-) diff --git a/auto3dseg/algorithm_templates/dints/configs/hyper_parameters.yaml b/auto3dseg/algorithm_templates/dints/configs/hyper_parameters.yaml index 8e266e5f..6c33a6fe 100644 --- a/auto3dseg/algorithm_templates/dints/configs/hyper_parameters.yaml +++ b/auto3dseg/algorithm_templates/dints/configs/hyper_parameters.yaml @@ -99,6 +99,7 @@ validate: # inference infer: ckpt_name: "$@bundle_root + '/model_fold' + str(@fold) + '/best_metric_model.pt'" + save_prob: false fast: true data_list_key: testing log_output_file: "$@bundle_root + '/model_fold' + str(@fold) + '/inference.log'" diff --git a/auto3dseg/algorithm_templates/dints/scripts/infer.py b/auto3dseg/algorithm_templates/dints/scripts/infer.py index b96294a9..37212d1f 100644 --- a/auto3dseg/algorithm_templates/dints/scripts/infer.py +++ b/auto3dseg/algorithm_templates/dints/scripts/infer.py @@ -152,6 +152,7 @@ def __init__(self, config_file: Optional[Union[str, Sequence[str]]] = None, **ov ckpt_name = parser.get_parsed_content("infer")["ckpt_name"] data_list_key = parser.get_parsed_content("infer")["data_list_key"] output_path = parser.get_parsed_content("infer")["output_path"] + save_prob = parser.get_parsed_content("infer#save_prob") if not os.path.exists(output_path): os.makedirs(output_path, exist_ok=True) @@ -199,6 +200,29 @@ def __init__(self, config_file: Optional[Union[str, Sequence[str]]] = None, **ov ] self.post_transforms_prob = transforms.Compose(post_transforms) + if save_prob: + post_transforms += [ + transforms.CopyItemsd( + keys="pred", + times=1, + names="prob", + ), + transforms.Lambdad( + keys="prob", + func=lambda x: torch.floor(x * 255.0).type(torch.uint8) + ), + transforms.SaveImaged( + keys="prob", + meta_keys="pred_meta_dict", + output_dir=os.path.join(output_path, "prob"), + output_postfix="", + resample=False, + print_log=False, + data_root_dir=data_file_base_dir, + output_dtype=np.uint8, + ), + ] + if softmax: post_transforms += [transforms.AsDiscreted(keys="pred", argmax=True)] else: @@ -209,11 +233,12 @@ def __init__(self, config_file: Optional[Union[str, Sequence[str]]] = None, **ov keys="pred", meta_keys="pred_meta_dict", output_dir=output_path, - output_postfix="seg", + output_postfix="", resample=False, print_log=False, data_root_dir=data_file_base_dir, - ) + output_dtype=np.uint8, + ), ] self.post_transforms = transforms.Compose(post_transforms) From a42ef8739876d1f97fd7102cb2b83c4f11a9b3b3 Mon Sep 17 00:00:00 2001 From: dongy Date: Fri, 11 Aug 2023 09:21:35 -0700 Subject: [PATCH 05/14] update dints Signed-off-by: dongy --- .../dints/configs/hyper_parameters.yaml | 3 + .../dints/scripts/train.py | 399 
++++++++++++------ 2 files changed, 266 insertions(+), 136 deletions(-) diff --git a/auto3dseg/algorithm_templates/dints/configs/hyper_parameters.yaml b/auto3dseg/algorithm_templates/dints/configs/hyper_parameters.yaml index 6c33a6fe..ea68a494 100644 --- a/auto3dseg/algorithm_templates/dints/configs/hyper_parameters.yaml +++ b/auto3dseg/algorithm_templates/dints/configs/hyper_parameters.yaml @@ -12,6 +12,7 @@ training: amp: true auto_scale_allowed: true data_list_key: null + epoch_divided_factor: 36 input_channels: null learning_rate: 0.2 log_output_file: "$@bundle_root + '/model_fold' + str(@fold) + '/training.log'" @@ -60,11 +61,13 @@ training: batch: true smooth_nr: 1.0e-05 smooth_dr: 1.0e-05 + optimizer: _target_: torch.optim.SGD lr: "@training#learning_rate" momentum: 0.9 weight_decay: 4.0e-05 + lr_scheduler: _target_: torch.optim.lr_scheduler.PolynomialLR optimizer: "$@training#optimizer" diff --git a/auto3dseg/algorithm_templates/dints/scripts/train.py b/auto3dseg/algorithm_templates/dints/scripts/train.py index 437ad867..a810f952 100644 --- a/auto3dseg/algorithm_templates/dints/scripts/train.py +++ b/auto3dseg/algorithm_templates/dints/scripts/train.py @@ -109,7 +109,8 @@ def __call__(self, val_acc): elif val_acc + self.delta < self.best_score: self.counter += 1 if self.verbose: - logger.debug(f"EarlyStopping counter: {self.counter} out of {self.patience}") + logger.debug( + f"EarlyStopping counter: {self.counter} out of {self.patience}") if self.counter >= self.patience: self.early_stop = True else: @@ -146,20 +147,27 @@ def pre_operation(config_file, **override): mem = min(mem) if isinstance(mem, list) else mem mem = float(mem) / (1024.0**3) mem = max(1.0, mem - 1.0) - mem_bs2 = 6.0 + (20.0 - 6.0) * (output_classes - 2) / (105 - 2) - mem_bs9 = 24.0 + (74.0 - 24.0) * (output_classes - 2) / (105 - 2) - batch_size = 2 + (9 - 2) * (mem - mem_bs2) / (mem_bs9 - mem_bs2) + mem_bs2 = 6.0 + (20.0 - 6.0) * \ + (output_classes - 2) / (105 - 2) + mem_bs9 = 24.0 + (74.0 - 24.0) * \ + (output_classes - 2) / (105 - 2) + batch_size = 2 + (9 - 2) * \ + (mem - mem_bs2) / (mem_bs9 - mem_bs2) batch_size = int(batch_size) batch_size = max(batch_size, 1) - parser["training"].update({"num_patches_per_iter": batch_size}) - parser["training"].update({"num_patches_per_image": 2 * batch_size}) + parser["training"].update( + {"num_patches_per_iter": batch_size}) + parser["training"].update( + {"num_patches_per_image": 2 * batch_size}) - # estimate data size based on number of images and image size + # estimate data size based on number of images and image + # size _factor = 1.0 try: - _factor *= 1251.0 / float(parser["stats_summary"]["n_cases"]) + _factor *= 1251.0 / \ + float(parser["stats_summary"]["n_cases"]) _mean_shape = parser["stats_summary"]["image_stats"]["shape"]["mean"] _factor *= float(_mean_shape[0]) / 240.0 _factor *= float(_mean_shape[1]) / 240.0 @@ -172,16 +180,23 @@ def pre_operation(config_file, **override): _factor *= 96.0 / float(_patch_size[1]) _factor *= 96.0 / float(_patch_size[2]) - _factor /= 6.0 - _factor /= 6.0 # further reduce training time + if "training#epoch_divided_factor" in override: + epoch_divided_factor = override["training#epoch_divided_factor"] + else: + epoch_divided_factor = parser["training"]["epoch_divided_factor"] + epoch_divided_factor = float(epoch_divided_factor) + _factor /= epoch_divided_factor + _factor = max(1.0, _factor) _estimated_epochs = 400.0 _estimated_epochs *= _factor - parser["training"].update({"num_epochs": int(_estimated_epochs / 
float(batch_size))}) + parser["training"].update( + {"num_epochs": int(_estimated_epochs / float(batch_size))}) - ConfigParser.export_config_file(parser.get(), _file, fmt="yaml", default_flow_style=None) + ConfigParser.export_config_file( + parser.get(), _file, fmt="yaml", default_flow_style=None) return @@ -192,8 +207,13 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): # pre-operations logger.debug(f"number of GPUs: {torch.cuda.device_count()}") if torch.cuda.device_count() > 1: - logging.getLogger("torch.distributed.distributed_c10d").setLevel(logging.WARNING) - dist.init_process_group(backend="nccl", init_method="env://", timeout=timedelta(seconds=7200)) + logging.getLogger("torch.distributed.distributed_c10d").setLevel( + logging.WARNING) + dist.init_process_group( + backend="nccl", + init_method="env://", + timeout=timedelta( + seconds=7200)) world_size = dist.get_world_size() else: world_size = 1 @@ -225,23 +245,28 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): ckpt_path = parser.get_parsed_content("ckpt_path") data_file_base_dir = parser.get_parsed_content("data_file_base_dir") data_list_file_path = parser.get_parsed_content("data_list_file_path") - finetune = parser.get_parsed_content("finetune") fold = parser.get_parsed_content("fold") log_output_file = parser.get_parsed_content("training#log_output_file") - num_images_per_batch = parser.get_parsed_content("training#num_images_per_batch") + num_images_per_batch = parser.get_parsed_content( + "training#num_images_per_batch") num_epochs = parser.get_parsed_content("training#num_epochs") - num_epochs_per_validation = parser.get_parsed_content("training#num_epochs_per_validation") - num_patches_per_iter = parser.get_parsed_content("training#num_patches_per_iter") + num_epochs_per_validation = parser.get_parsed_content( + "training#num_epochs_per_validation") + num_patches_per_iter = parser.get_parsed_content( + "training#num_patches_per_iter") num_sw_batch_size = parser.get_parsed_content("training#num_sw_batch_size") output_classes = parser.get_parsed_content("training#output_classes") overlap_ratio = parser.get_parsed_content("training#overlap_ratio") - overlap_ratio_train = parser.get_parsed_content("training#overlap_ratio_train") + overlap_ratio_train = parser.get_parsed_content( + "training#overlap_ratio_train") patch_size_valid = parser.get_parsed_content("training#patch_size_valid") random_seed = parser.get_parsed_content("training#random_seed") softmax = parser.get_parsed_content("training#softmax") sw_input_on_cpu = parser.get_parsed_content("training#sw_input_on_cpu") - valid_at_orig_resolution_at_last = parser.get_parsed_content("training#valid_at_orig_resolution_at_last") - valid_at_orig_resolution_only = parser.get_parsed_content("training#valid_at_orig_resolution_only") + valid_at_orig_resolution_at_last = parser.get_parsed_content( + "training#valid_at_orig_resolution_at_last") + valid_at_orig_resolution_only = parser.get_parsed_content( + "training#valid_at_orig_resolution_only") if not valid_at_orig_resolution_only: train_transforms = parser.get_parsed_content("transforms_train") @@ -258,7 +283,9 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): ] ) - if "class_names" in parser and isinstance(parser["class_names"], list) and "index" in parser["class_names"][0]: + if "class_names" in parser and isinstance( + parser["class_names"], + list) and "index" in parser["class_names"][0]: class_index = [x["index"] for x in 
parser["class_names"]] infer_transforms = transforms.Compose( @@ -277,18 +304,23 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): try: class_names = parser.get_parsed_content("class_names") if isinstance(class_names[0], dict): - class_names = [class_names[_i]["name"] for _i in range(len(class_names))] + class_names = [class_names[_i]["name"] + for _i in range(len(class_names))] except BaseException: pass ad = parser.get_parsed_content("training#adapt_valid_mode") if ad: - ad_progress_percentages = parser.get_parsed_content("training#adapt_valid_progress_percentages") - ad_num_epochs_per_validation = parser.get_parsed_content("training#adapt_valid_num_epochs_per_validation") + ad_progress_percentages = parser.get_parsed_content( + "training#adapt_valid_progress_percentages") + ad_num_epochs_per_validation = parser.get_parsed_content( + "training#adapt_valid_num_epochs_per_validation") sorted_indices = np.argsort(ad_progress_percentages) - ad_progress_percentages = [ad_progress_percentages[_i] for _i in sorted_indices] - ad_num_epochs_per_validation = [ad_num_epochs_per_validation[_i] for _i in sorted_indices] + ad_progress_percentages = [ + ad_progress_percentages[_i] for _i in sorted_indices] + ad_num_epochs_per_validation = [ + ad_num_epochs_per_validation[_i] for _i in sorted_indices] es = parser.get_parsed_content("training#early_stop_mode") if es: @@ -297,12 +329,16 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): ad = parser.get_parsed_content("training#adapt_valid_mode") if ad: - ad_progress_percentages = parser.get_parsed_content("training#adapt_valid_progress_percentages") - ad_num_epochs_per_validation = parser.get_parsed_content("training#adapt_valid_num_epochs_per_validation") + ad_progress_percentages = parser.get_parsed_content( + "training#adapt_valid_progress_percentages") + ad_num_epochs_per_validation = parser.get_parsed_content( + "training#adapt_valid_num_epochs_per_validation") sorted_indices = np.argsort(ad_progress_percentages) - ad_progress_percentages = [ad_progress_percentages[_i] for _i in sorted_indices] - ad_num_epochs_per_validation = [ad_num_epochs_per_validation[_i] for _i in sorted_indices] + ad_progress_percentages = [ + ad_progress_percentages[_i] for _i in sorted_indices] + ad_num_epochs_per_validation = [ + ad_num_epochs_per_validation[_i] for _i in sorted_indices] es = parser.get_parsed_content("training#early_stop_mode") if es: @@ -312,7 +348,12 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): if not os.path.exists(ckpt_path): os.makedirs(ckpt_path, exist_ok=True) - if random_seed is not None and (isinstance(random_seed, int) or isinstance(random_seed, float)): + if random_seed is not None and ( + isinstance( + random_seed, + int) or isinstance( + random_seed, + float)): set_determinism(seed=random_seed) CONFIG["handlers"]["file"]["filename"] = log_output_file @@ -322,33 +363,41 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): valid_data_list_key = parser.get_parsed_content("validate#data_list_key") if valid_data_list_key is not None: train_files, _ = datafold_read( - datalist=data_list_file_path, basedir=data_file_base_dir, fold=-1, key=train_data_list_key - ) + datalist=data_list_file_path, basedir=data_file_base_dir, fold=-1, key=train_data_list_key) val_files, _ = datafold_read( - datalist=data_list_file_path, basedir=data_file_base_dir, fold=-1, key=valid_data_list_key - ) + datalist=data_list_file_path, 
basedir=data_file_base_dir, fold=-1, key=valid_data_list_key) else: - train_files, val_files = datafold_read(datalist=data_list_file_path, basedir=data_file_base_dir, fold=fold) + train_files, val_files = datafold_read( + datalist=data_list_file_path, basedir=data_file_base_dir, fold=fold) random.shuffle(train_files) if torch.cuda.device_count() > 1: - train_files = partition_dataset(data=train_files, shuffle=True, num_partitions=world_size, even_divisible=True)[ - dist.get_rank() - ] + train_files = partition_dataset( + data=train_files, + shuffle=True, + num_partitions=world_size, + even_divisible=True)[ + dist.get_rank()] logger.debug(f"train_files: {len(train_files)}") if torch.cuda.device_count() > 1: if len(val_files) < world_size: - val_files = val_files * math.ceil(float(world_size) / float(len(val_files))) + val_files = val_files * \ + math.ceil(float(world_size) / float(len(val_files))) - val_files = partition_dataset(data=val_files, shuffle=False, num_partitions=world_size, even_divisible=False)[ - dist.get_rank() - ] + val_files = partition_dataset( + data=val_files, + shuffle=False, + num_partitions=world_size, + even_divisible=False)[ + dist.get_rank()] logger.debug(f"val_files: {len(val_files)}") - train_cache_rate = float(parser.get_parsed_content("training#train_cache_rate")) - validate_cache_rate = float(parser.get_parsed_content("training#validate_cache_rate")) + train_cache_rate = float( + parser.get_parsed_content("training#train_cache_rate")) + validate_cache_rate = float( + parser.get_parsed_content("training#validate_cache_rate")) with warnings.catch_warnings(): warnings.simplefilter(action="ignore", category=FutureWarning) @@ -356,7 +405,8 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): if not valid_at_orig_resolution_only: train_ds = monai.data.CacheDataset( - data=train_files * num_epochs_per_validation, + data=train_files * + num_epochs_per_validation, transform=train_transforms, cache_rate=train_cache_rate, hash_as_key=True, @@ -373,7 +423,8 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): ) if valid_at_orig_resolution_at_last or valid_at_orig_resolution_only: - orig_val_ds = monai.data.Dataset(data=val_files, transform=infer_transforms) + orig_val_ds = monai.data.Dataset( + data=val_files, transform=infer_transforms) if not valid_at_orig_resolution_only: train_loader = DataLoader( @@ -384,17 +435,18 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): persistent_workers=True, pin_memory=True, ) - val_loader = DataLoader( - val_ds, - num_workers=parser.get_parsed_content("training#num_workers_validation"), - batch_size=1, - shuffle=False, - ) + val_loader = DataLoader(val_ds, num_workers=parser.get_parsed_content( + "training#num_workers_validation"), batch_size=1, shuffle=False, ) if valid_at_orig_resolution_at_last or valid_at_orig_resolution_only: - orig_val_loader = DataLoader(orig_val_ds, num_workers=2, batch_size=1, shuffle=False) + orig_val_loader = DataLoader( + orig_val_ds, + num_workers=2, + batch_size=1, + shuffle=False) - device = torch.device(f"cuda:{os.environ['LOCAL_RANK']}") if world_size > 1 else torch.device("cuda:0") + device = torch.device( + f"cuda:{os.environ['LOCAL_RANK']}") if world_size > 1 else torch.device("cuda:0") if world_size > 1: parser["training_network"]["dints_space"]["device"] = device @@ -407,7 +459,8 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): model = 
torch.nn.SyncBatchNorm.convert_sync_batchnorm(model) if softmax: - post_pred = transforms.Compose([transforms.EnsureType(), transforms.AsDiscrete(argmax=True, to_onehot=None)]) + post_pred = transforms.Compose( + [transforms.EnsureType(), transforms.AsDiscrete(argmax=True, to_onehot=None)]) else: post_pred = transforms.Compose( [ @@ -432,18 +485,27 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): ] if softmax: - post_transforms += [transforms.AsDiscreted(keys="pred", argmax=True)] + post_transforms += [ + transforms.AsDiscreted( + keys="pred", argmax=True)] else: post_transforms += [ - transforms.Activationsd(keys="pred", sigmoid=True), - transforms.AsDiscreted(keys="pred", threshold=0.5 + np.finfo(np.float32).eps), + transforms.Activationsd( + keys="pred", + sigmoid=True), + transforms.AsDiscreted( + keys="pred", + threshold=0.5 + + np.finfo( + np.float32).eps), ] post_transforms = transforms.Compose(post_transforms) loss_function = parser.get_parsed_content("training#loss") - optimizer_part = parser.get_parsed_content("training#optimizer", instantiate=False) + optimizer_part = parser.get_parsed_content( + "training#optimizer", instantiate=False) optimizer = optimizer_part.instantiate(params=model.parameters()) if torch.cuda.device_count() == 1 or dist.get_rank() == 0: @@ -451,23 +513,35 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): logger.debug(f"num_epochs_per_validation: {num_epochs_per_validation}") # patch fix to support PolynomialLR use in PyTorch <= 1.12 - if "PolynomialLR" in parser.get("training#lr_scheduler#_target_") and not pytorch_after(1, 13): + if "PolynomialLR" in parser.get( + "training#lr_scheduler#_target_") and not pytorch_after(1, 13): dints_dir = os.path.dirname(os.path.dirname(__file__)) sys.path.insert(0, dints_dir) parser["training#lr_scheduler#_target_"] = "scripts.utils.PolynomialLR" - lr_scheduler_part = parser.get_parsed_content("training#lr_scheduler", instantiate=False) + lr_scheduler_part = parser.get_parsed_content( + "training#lr_scheduler", instantiate=False) lr_scheduler = lr_scheduler_part.instantiate(optimizer=optimizer) if torch.cuda.device_count() > 1: - model = DistributedDataParallel(model, device_ids=[device], find_unused_parameters=True) - - if finetune["activate_finetune"] and os.path.isfile(finetune["pretrained_ckpt_name"]): - logger.debug("fine-tuning pre-trained checkpoint {:s}".format(finetune["pretrained_ckpt_name"])) + model = DistributedDataParallel( + model, device_ids=[device], find_unused_parameters=True) + + if parser["finetune"]["activate_finetune"] and os.path.isfile( + parser["finetune"]["pretrained_ckpt_name"]): + logger.debug( + "fine-tuning pre-trained checkpoint {:s}".format( + parser["finetune"]["pretrained_ckpt_name"])) if torch.cuda.device_count() > 1: - model.module.load_state_dict(torch.load(finetune["pretrained_ckpt_name"], map_location=device)) + model.module.load_state_dict( + torch.load( + parser["finetune"]["pretrained_ckpt_name"], + map_location=device)) else: - model.load_state_dict(torch.load(finetune["pretrained_ckpt_name"], map_location=device)) + model.load_state_dict( + torch.load( + parser["finetune"]["pretrained_ckpt_name"], + map_location=device)) else: logger.debug("training from scratch") @@ -481,7 +555,8 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): best_metric = -1 best_metric_epoch = -1 - if finetune["activate_finetune"] and os.path.isfile(os.path.join(ckpt_path, "progress.yaml")): + if 
parser["finetune"]["activate_finetune"] and os.path.isfile( + os.path.join(ckpt_path, "progress.yaml")): with open(os.path.join(ckpt_path, "progress.yaml"), "r") as in_file: _progress = yaml.safe_load(in_file) @@ -514,11 +589,15 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): if es: # instantiate the early stopping object - early_stopping = EarlyStopping(patience=es_patience, delta=es_delta, verbose=True) + early_stopping = EarlyStopping( + patience=es_patience, delta=es_delta, verbose=True) start_time = time.time() - num_rounds = int(np.ceil(float(num_epochs) // float(num_epochs_per_validation))) + num_rounds = int( + np.ceil( + float(num_epochs) // + float(num_epochs_per_validation))) with warnings.catch_warnings(): warnings.simplefilter(action="ignore", category=FutureWarning) @@ -527,15 +606,18 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): if not valid_at_orig_resolution_only: if torch.cuda.device_count() == 1 or dist.get_rank() == 0: progress_bar = tqdm( - range(num_rounds), desc=f"{os.path.basename(bundle_root)} - training ...", unit="round" - ) + range(num_rounds), + desc=f"{os.path.basename(bundle_root)} - training ...", + unit="round") - for _round in range(num_rounds) if torch.cuda.device_count() > 1 and dist.get_rank() != 0 else progress_bar: + for _round in range(num_rounds) if torch.cuda.device_count( + ) > 1 and dist.get_rank() != 0 else progress_bar: epoch = (_round + 1) * num_epochs_per_validation lr = lr_scheduler.get_last_lr()[0] if torch.cuda.device_count() == 1 or dist.get_rank() == 0: logger.debug("----------") - logger.debug(f"epoch {_round * num_epochs_per_validation + 1}/{num_epochs}") + logger.debug( + f"epoch {_round * num_epochs_per_validation + 1}/{num_epochs}") logger.debug(f"learning rate is set to {lr}") model.train() @@ -547,23 +629,27 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): step += 1 inputs_l = ( - batch_data["image"].as_tensor() - if isinstance(batch_data["image"], monai.data.MetaTensor) - else batch_data["image"] - ) + batch_data["image"].as_tensor() if isinstance( + batch_data["image"], + monai.data.MetaTensor) else batch_data["image"]) labels_l = ( - batch_data["label"].as_tensor() - if isinstance(batch_data["label"], monai.data.MetaTensor) - else batch_data["label"] - ) + batch_data["label"].as_tensor() if isinstance( + batch_data["label"], + monai.data.MetaTensor) else batch_data["label"]) _idx = torch.randperm(inputs_l.shape[0]) inputs_l = inputs_l[_idx] labels_l = labels_l[_idx] for _k in range(inputs_l.shape[0] // num_patches_per_iter): - inputs = inputs_l[_k * num_patches_per_iter : (_k + 1) * num_patches_per_iter, ...] - labels = labels_l[_k * num_patches_per_iter : (_k + 1) * num_patches_per_iter, ...] + inputs = inputs_l[_k * + num_patches_per_iter: (_k + + 1) * + num_patches_per_iter, ...] + labels = labels_l[_k * + num_patches_per_iter: (_k + + 1) * + num_patches_per_iter, ...] 
inputs = inputs.to(device) labels = labels.to(device) @@ -586,7 +672,8 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): loss = loss_function(outputs.float(), labels) loss.backward() - torch.nn.utils.clip_grad_norm_(model.parameters(), 0.5) + torch.nn.utils.clip_grad_norm_( + model.parameters(), 0.5) optimizer.step() epoch_loss += loss.item() @@ -597,22 +684,23 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): if torch.cuda.device_count() == 1 or dist.get_rank() == 0: logger.debug( - f"[{str(datetime.now())[:19]}] " + f"{step}/{epoch_len}, train_loss: {loss.item():.4f}" - ) - writer.add_scalar("train/loss", loss.item(), epoch_len * _round + step) + f"[{str(datetime.now())[:19]}] " + + f"{step}/{epoch_len}, train_loss: {loss.item():.4f}") + writer.add_scalar( + "train/loss", loss.item(), epoch_len * _round + step) lr_scheduler.step() if torch.cuda.device_count() > 1: - dist.all_reduce(loss_torch, op=torch.distributed.ReduceOp.SUM) + dist.all_reduce( + loss_torch, op=torch.distributed.ReduceOp.SUM) loss_torch = loss_torch.tolist() if torch.cuda.device_count() == 1 or dist.get_rank() == 0: loss_torch_epoch = loss_torch[0] / loss_torch[1] logger.debug( f"epoch {epoch} average loss: {loss_torch_epoch:.4f}, " - f"best mean dice: {best_metric:.4f} at epoch {best_metric_epoch}" - ) + f"best mean dice: {best_metric:.4f} at epoch {best_metric_epoch}") del inputs, labels, outputs torch.cuda.empty_cache() @@ -631,13 +719,16 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): target_num_epochs_per_validation = ad_num_epochs_per_validation[-1 - _j] break - if target_num_epochs_per_validation > 0 and (_round + 1) < num_rounds: - if (_round + 1) % (target_num_epochs_per_validation // num_epochs_per_validation) != 0: + if target_num_epochs_per_validation > 0 and ( + _round + 1) < num_rounds: + if (_round + 1) % (target_num_epochs_per_validation // + num_epochs_per_validation) != 0: continue model.eval() with torch.no_grad(): - metric = torch.zeros(metric_dim * 2, dtype=torch.float, device=device) + metric = torch.zeros( + metric_dim * 2, dtype=torch.float, device=device) _index = 0 for val_data in val_loader: @@ -653,10 +744,13 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): device_list_input = [device, device, "cpu"] device_list_output = [device, "cpu", "cpu"] elif val_filename in val_devices_input and val_filename in val_devices_output: - device_list_input = [val_devices_input[val_filename]] - device_list_output = [val_devices_output[val_filename]] + device_list_input = [ + val_devices_input[val_filename]] + device_list_output = [ + val_devices_output[val_filename]] - for _device_in, _device_out in zip(device_list_input, device_list_output): + for _device_in, _device_out in zip( + device_list_input, device_list_output): try: val_devices_input[val_filename] = _device_in val_devices_output[val_filename] = _device_out @@ -684,7 +778,8 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): finished = True except RuntimeError as e: - if not any(x in str(e).lower() for x in ("memory", "cuda", "cudnn")): + if not any(x in str(e).lower() + for x in ("memory", "cuda", "cudnn")): raise e finished = False @@ -714,10 +809,12 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): include_background=not softmax, ) else: - value = compute_dice(y_pred=val_outputs, y=val_labels, include_background=not softmax) + value = compute_dice( + 
y_pred=val_outputs, y=val_labels, include_background=not softmax) value = value.to(device) - logger.debug(f"{_index + 1} / {len(val_loader)}: {value}") + logger.debug( + f"{_index + 1} / {len(val_loader)}: {value}") del val_labels, val_outputs torch.cuda.empty_cache() @@ -732,18 +829,21 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): _index += 1 if torch.cuda.device_count() > 1: - dist.all_reduce(metric, op=torch.distributed.ReduceOp.SUM) + dist.all_reduce( + metric, op=torch.distributed.ReduceOp.SUM) metric = metric.tolist() if torch.cuda.device_count() == 1 or dist.get_rank() == 0: for _c in range(metric_dim): - logger.debug(f"evaluation metric - class {_c + 1}: {metric[2 * _c] / metric[2 * _c + 1]}") + logger.debug( + f"evaluation metric - class {_c + 1}: {metric[2 * _c] / metric[2 * _c + 1]}") try: writer.add_scalar( f"val_class/acc_{class_names[_c]}", metric[2 * _c] / metric[2 * _c + 1], epoch ) except BaseException: - writer.add_scalar(f"val_class/acc_{_c}", metric[2 * _c] / metric[2 * _c + 1], epoch) + writer.add_scalar( + f"val_class/acc_{_c}", metric[2 * _c] / metric[2 * _c + 1], epoch) avg_metric = 0 for _c in range(metric_dim): @@ -757,37 +857,40 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): best_metric = avg_metric best_metric_epoch = epoch if torch.cuda.device_count() > 1: - torch.save(model.module.state_dict(), os.path.join(ckpt_path, "best_metric_model.pt")) + torch.save( + model.module.state_dict(), os.path.join( + ckpt_path, "best_metric_model.pt")) else: - torch.save(model.state_dict(), os.path.join(ckpt_path, "best_metric_model.pt")) + torch.save( + model.state_dict(), os.path.join( + ckpt_path, "best_metric_model.pt")) logger.debug("saved new best metric model") dict_file = {} - dict_file["best_avg_dice_score"] = float(best_metric) - dict_file["best_avg_dice_score_epoch"] = int(best_metric_epoch) - dict_file["best_avg_dice_score_iteration"] = int(idx_iter) + dict_file["best_avg_dice_score"] = float( + best_metric) + dict_file["best_avg_dice_score_epoch"] = int( + best_metric_epoch) + dict_file["best_avg_dice_score_iteration"] = int( + idx_iter) dict_file["inverted_best_validation"] = False with open(os.path.join(ckpt_path, "progress.yaml"), "a") as out_file: yaml.dump([dict_file], stream=out_file) logger.debug( "current epoch: {} current mean dice: {:.4f} best mean dice: {:.4f} at epoch {}".format( - epoch, avg_metric, best_metric, best_metric_epoch - ) - ) + epoch, avg_metric, best_metric, best_metric_epoch)) current_time = time.time() elapsed_time = (current_time - start_time) / 60.0 with open(os.path.join(ckpt_path, "accuracy_history.csv"), "a") as f: - f.write( - "{:d}\t{:.5f}\t{:.5f}\t{:.5f}\t{:.1f}\t{:d}\n".format( - epoch, avg_metric, loss_torch_epoch, lr, elapsed_time, idx_iter - ) - ) + f.write("{:d}\t{:.5f}\t{:.5f}\t{:.5f}\t{:.1f}\t{:d}\n".format( + epoch, avg_metric, loss_torch_epoch, lr, elapsed_time, idx_iter)) if es: early_stopping(val_acc=avg_metric) - stop_train = torch.tensor(early_stopping.early_stop).to(device) + stop_train = torch.tensor( + early_stopping.early_stop).to(device) if torch.cuda.device_count() > 1: dist.barrier() @@ -803,20 +906,30 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): if valid_at_orig_resolution_at_last or valid_at_orig_resolution_only: if torch.cuda.device_count() == 1 or dist.get_rank() == 0: - print(f"{os.path.basename(bundle_root)} - validation at original spacing/resolution") + print( + f"{os.path.basename(bundle_root)} - 
validation at original spacing/resolution") logger.debug("validation at original spacing/resolution") if torch.cuda.device_count() > 1: model.module.load_state_dict( - torch.load(os.path.join(ckpt_path, "best_metric_model.pt"), map_location=device) - ) + torch.load( + os.path.join( + ckpt_path, + "best_metric_model.pt"), + map_location=device)) else: - model.load_state_dict(torch.load(os.path.join(ckpt_path, "best_metric_model.pt"), map_location=device)) + model.load_state_dict( + torch.load( + os.path.join( + ckpt_path, + "best_metric_model.pt"), + map_location=device)) logger.debug("checkpoints loaded") model.eval() with torch.no_grad(): - metric = torch.zeros(metric_dim * 2, dtype=torch.float, device=device) + metric = torch.zeros( + metric_dim * 2, dtype=torch.float, device=device) _index = 0 for val_data in orig_val_loader: @@ -831,7 +944,8 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): device_list_input = [device, device, "cpu"] device_list_output = [device, "cpu", "cpu"] - for _device_in, _device_out in zip(device_list_input, device_list_output): + for _device_in, _device_out in zip( + device_list_input, device_list_output): try: val_images = val_data["image"].to(_device_in) val_labels = val_data["label"].to(_device_out) @@ -856,7 +970,8 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): finished = True except RuntimeError as e: - if not any(x in str(e).lower() for x in ("memory", "cuda", "cudnn")): + if not any(x in str(e).lower() + for x in ("memory", "cuda", "cudnn")): raise e finished = False @@ -871,7 +986,8 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): torch.cuda.empty_cache() gc.collect() - val_data = [post_transforms(i) for i in monai.data.decollate_batch(val_data)] + val_data = [ + post_transforms(i) for i in monai.data.decollate_batch(val_data)] val_outputs = val_data[0]["pred"][None, ...] 
val_labels = val_labels.to(_device_in) @@ -890,9 +1006,13 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): include_background=not softmax, ) else: - value = compute_dice(y_pred=val_outputs, y=val_labels, include_background=not softmax) + value = compute_dice( + y_pred=val_outputs, + y=val_labels, + include_background=not softmax) - logger.debug(f"validation Dice score at original spacing/resolution: {value}") + logger.debug( + f"validation Dice score at original spacing/resolution: {value}") for _c in range(metric_dim): val0 = torch.nan_to_num(value[0, _c], nan=0.0) @@ -916,15 +1036,18 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): for _c in range(metric_dim): avg_metric += metric[2 * _c] / metric[2 * _c + 1] avg_metric = avg_metric / float(metric_dim) - logger.debug(f"avg_metric at original spacing/resolution: {avg_metric}") + logger.debug( + f"avg_metric at original spacing/resolution: {avg_metric}") with open(os.path.join(ckpt_path, "progress.yaml"), "r") as out_file: progress = yaml.safe_load(out_file) dict_file = {} dict_file["best_avg_dice_score"] = float(avg_metric) - dict_file["best_avg_dice_score_epoch"] = int(progress[-1]["best_avg_dice_score_epoch"]) - dict_file["best_avg_dice_score_iteration"] = int(progress[-1]["best_avg_dice_score_iteration"]) + dict_file["best_avg_dice_score_epoch"] = int( + progress[-1]["best_avg_dice_score_epoch"]) + dict_file["best_avg_dice_score_iteration"] = int( + progress[-1]["best_avg_dice_score_iteration"]) dict_file["inverted_best_validation"] = True with open(os.path.join(ckpt_path, "progress.yaml"), "a") as out_file: yaml.dump([dict_file], stream=out_file) @@ -933,16 +1056,20 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): dist.barrier() if torch.cuda.device_count() == 1 or dist.get_rank() == 0: - logger.debug(f"train completed, best_metric: {best_metric:.4f} at epoch: {best_metric_epoch}") + logger.debug( + f"train completed, best_metric: {best_metric:.4f} at epoch: {best_metric_epoch}") writer.flush() writer.close() if torch.cuda.device_count() == 1 or dist.get_rank() == 0: - if (not valid_at_orig_resolution_only) and es and (_round + 1) < num_rounds: - logger.warning(f"{os.path.basename(bundle_root)} - training: finished with early stop") + if (not valid_at_orig_resolution_only) and es and ( + _round + 1) < num_rounds: + logger.warning( + f"{os.path.basename(bundle_root)} - training: finished with early stop") else: - logger.warning(f"{os.path.basename(bundle_root)} - training: finished") + logger.warning( + f"{os.path.basename(bundle_root)} - training: finished") if torch.cuda.device_count() > 1: dist.destroy_process_group() From efc98abd84807339b63f949632d11a2bbdde2471 Mon Sep 17 00:00:00 2001 From: dongy Date: Sun, 13 Aug 2023 17:08:29 -0700 Subject: [PATCH 06/14] update Signed-off-by: dongy --- auto3dseg/algorithm_templates/dints/scripts/train.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/auto3dseg/algorithm_templates/dints/scripts/train.py b/auto3dseg/algorithm_templates/dints/scripts/train.py index a810f952..8e5f5836 100644 --- a/auto3dseg/algorithm_templates/dints/scripts/train.py +++ b/auto3dseg/algorithm_templates/dints/scripts/train.py @@ -528,19 +528,19 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): model, device_ids=[device], find_unused_parameters=True) if parser["finetune"]["activate_finetune"] and os.path.isfile( - parser["finetune"]["pretrained_ckpt_name"]): + 
parser.get_parsed_content("finetune#pretrained_ckpt_name")): logger.debug( "fine-tuning pre-trained checkpoint {:s}".format( - parser["finetune"]["pretrained_ckpt_name"])) + parser.get_parsed_content("finetune#pretrained_ckpt_name"))) if torch.cuda.device_count() > 1: model.module.load_state_dict( torch.load( - parser["finetune"]["pretrained_ckpt_name"], + parser.get_parsed_content("finetune#pretrained_ckpt_name"), map_location=device)) else: model.load_state_dict( torch.load( - parser["finetune"]["pretrained_ckpt_name"], + parser.get_parsed_content("finetune#pretrained_ckpt_name"), map_location=device)) else: logger.debug("training from scratch") From 79635aaa6594f086286b81d92cc3937c4683c702 Mon Sep 17 00:00:00 2001 From: dongy Date: Mon, 14 Aug 2023 13:40:35 -0700 Subject: [PATCH 07/14] update Signed-off-by: dongy --- auto3dseg/algorithm_templates/dints/scripts/train.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/auto3dseg/algorithm_templates/dints/scripts/train.py b/auto3dseg/algorithm_templates/dints/scripts/train.py index 8e5f5836..c03ec4e7 100644 --- a/auto3dseg/algorithm_templates/dints/scripts/train.py +++ b/auto3dseg/algorithm_templates/dints/scripts/train.py @@ -853,6 +853,15 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): writer.add_scalar("val/acc", avg_metric, epoch) + if torch.cuda.device_count() > 1: + torch.save( + model.module.state_dict(), os.path.join( + ckpt_path, "current_model.pt")) + else: + torch.save( + model.state_dict(), os.path.join( + ckpt_path, "current_model.pt")) + if avg_metric > best_metric: best_metric = avg_metric best_metric_epoch = epoch From 6307c61d01df16d40de850ce63990b096c46b8aa Mon Sep 17 00:00:00 2001 From: dongy Date: Tue, 15 Aug 2023 15:11:52 -0700 Subject: [PATCH 08/14] update Signed-off-by: dongy --- .../dints/scripts/infer.py | 36 +++++++++++++------ .../dints/scripts/train.py | 2 +- .../dints/scripts/validate.py | 36 +++++++++++++------ 3 files changed, 51 insertions(+), 23 deletions(-) diff --git a/auto3dseg/algorithm_templates/dints/scripts/infer.py b/auto3dseg/algorithm_templates/dints/scripts/infer.py index 37212d1f..db9c97cc 100644 --- a/auto3dseg/algorithm_templates/dints/scripts/infer.py +++ b/auto3dseg/algorithm_templates/dints/scripts/infer.py @@ -84,20 +84,27 @@ def pre_operation(config_file, **override): mem = min(mem) if isinstance(mem, list) else mem mem = float(mem) / (1024.0**3) mem = max(1.0, mem - 1.0) - mem_bs2 = 6.0 + (20.0 - 6.0) * (output_classes - 2) / (105 - 2) - mem_bs9 = 24.0 + (74.0 - 24.0) * (output_classes - 2) / (105 - 2) - batch_size = 2 + (9 - 2) * (mem - mem_bs2) / (mem_bs9 - mem_bs2) + mem_bs2 = 6.0 + (20.0 - 6.0) * \ + (output_classes - 2) / (105 - 2) + mem_bs9 = 24.0 + (74.0 - 24.0) * \ + (output_classes - 2) / (105 - 2) + batch_size = 2 + (9 - 2) * \ + (mem - mem_bs2) / (mem_bs9 - mem_bs2) batch_size = int(batch_size) batch_size = max(batch_size, 1) - parser["training"].update({"num_patches_per_iter": batch_size}) - parser["training"].update({"num_patches_per_image": 2 * batch_size}) + parser["training"].update( + {"num_patches_per_iter": batch_size}) + parser["training"].update( + {"num_patches_per_image": 2 * batch_size}) - # estimate data size based on number of images and image size + # estimate data size based on number of images and image + # size _factor = 1.0 try: - _factor *= 1251.0 / float(parser["stats_summary"]["n_cases"]) + _factor *= 1251.0 / \ + float(parser["stats_summary"]["n_cases"]) _mean_shape = 
parser["stats_summary"]["image_stats"]["shape"]["mean"] _factor *= float(_mean_shape[0]) / 240.0 _factor *= float(_mean_shape[1]) / 240.0 @@ -110,16 +117,23 @@ def pre_operation(config_file, **override): _factor *= 96.0 / float(_patch_size[1]) _factor *= 96.0 / float(_patch_size[2]) - _factor /= 6.0 - _factor /= 6.0 # further reduce training time + if "training#epoch_divided_factor" in override: + epoch_divided_factor = override["training#epoch_divided_factor"] + else: + epoch_divided_factor = parser["training"]["epoch_divided_factor"] + epoch_divided_factor = float(epoch_divided_factor) + _factor /= epoch_divided_factor + _factor = max(1.0, _factor) _estimated_epochs = 400.0 _estimated_epochs *= _factor - parser["training"].update({"num_epochs": int(_estimated_epochs / float(batch_size))}) + parser["training"].update( + {"num_epochs": int(_estimated_epochs / float(batch_size))}) - ConfigParser.export_config_file(parser.get(), _file, fmt="yaml", default_flow_style=None) + ConfigParser.export_config_file( + parser.get(), _file, fmt="yaml", default_flow_style=None) return diff --git a/auto3dseg/algorithm_templates/dints/scripts/train.py b/auto3dseg/algorithm_templates/dints/scripts/train.py index c03ec4e7..0105bb43 100644 --- a/auto3dseg/algorithm_templates/dints/scripts/train.py +++ b/auto3dseg/algorithm_templates/dints/scripts/train.py @@ -61,7 +61,7 @@ p_value = ctypes.cast((ctypes.c_int * 1)(), ctypes.POINTER(ctypes.c_int)) _libcudart.cudaDeviceSetLimit(ctypes.c_int(0x05), ctypes.c_int(128)) _libcudart.cudaDeviceGetLimit(p_value, ctypes.c_int(0x05)) - assert p_value.contents.value == 128 + # assert p_value.contents.value == 128 torch.backends.cudnn.benchmark = True diff --git a/auto3dseg/algorithm_templates/dints/scripts/validate.py b/auto3dseg/algorithm_templates/dints/scripts/validate.py index e0f7915e..df0d04d1 100644 --- a/auto3dseg/algorithm_templates/dints/scripts/validate.py +++ b/auto3dseg/algorithm_templates/dints/scripts/validate.py @@ -86,20 +86,27 @@ def pre_operation(config_file, **override): mem = min(mem) if isinstance(mem, list) else mem mem = float(mem) / (1024.0**3) mem = max(1.0, mem - 1.0) - mem_bs2 = 6.0 + (20.0 - 6.0) * (output_classes - 2) / (105 - 2) - mem_bs9 = 24.0 + (74.0 - 24.0) * (output_classes - 2) / (105 - 2) - batch_size = 2 + (9 - 2) * (mem - mem_bs2) / (mem_bs9 - mem_bs2) + mem_bs2 = 6.0 + (20.0 - 6.0) * \ + (output_classes - 2) / (105 - 2) + mem_bs9 = 24.0 + (74.0 - 24.0) * \ + (output_classes - 2) / (105 - 2) + batch_size = 2 + (9 - 2) * \ + (mem - mem_bs2) / (mem_bs9 - mem_bs2) batch_size = int(batch_size) batch_size = max(batch_size, 1) - parser["training"].update({"num_patches_per_iter": batch_size}) - parser["training"].update({"num_patches_per_image": 2 * batch_size}) + parser["training"].update( + {"num_patches_per_iter": batch_size}) + parser["training"].update( + {"num_patches_per_image": 2 * batch_size}) - # estimate data size based on number of images and image size + # estimate data size based on number of images and image + # size _factor = 1.0 try: - _factor *= 1251.0 / float(parser["stats_summary"]["n_cases"]) + _factor *= 1251.0 / \ + float(parser["stats_summary"]["n_cases"]) _mean_shape = parser["stats_summary"]["image_stats"]["shape"]["mean"] _factor *= float(_mean_shape[0]) / 240.0 _factor *= float(_mean_shape[1]) / 240.0 @@ -112,16 +119,23 @@ def pre_operation(config_file, **override): _factor *= 96.0 / float(_patch_size[1]) _factor *= 96.0 / float(_patch_size[2]) - _factor /= 6.0 - _factor /= 6.0 # further reduce training time + if 
"training#epoch_divided_factor" in override: + epoch_divided_factor = override["training#epoch_divided_factor"] + else: + epoch_divided_factor = parser["training"]["epoch_divided_factor"] + epoch_divided_factor = float(epoch_divided_factor) + _factor /= epoch_divided_factor + _factor = max(1.0, _factor) _estimated_epochs = 400.0 _estimated_epochs *= _factor - parser["training"].update({"num_epochs": int(_estimated_epochs / float(batch_size))}) + parser["training"].update( + {"num_epochs": int(_estimated_epochs / float(batch_size))}) - ConfigParser.export_config_file(parser.get(), _file, fmt="yaml", default_flow_style=None) + ConfigParser.export_config_file( + parser.get(), _file, fmt="yaml", default_flow_style=None) return From 6f2bca4b03b6bd6c1738d7307c30cbd6710e4953 Mon Sep 17 00:00:00 2001 From: dongy Date: Fri, 18 Aug 2023 09:33:43 -0700 Subject: [PATCH 09/14] update Signed-off-by: dongy --- .../algorithm_templates/dints/scripts/infer.py | 13 +++++++++---- .../algorithm_templates/dints/scripts/train.py | 11 ++++++++--- .../algorithm_templates/dints/scripts/validate.py | 11 ++++++++--- 3 files changed, 25 insertions(+), 10 deletions(-) diff --git a/auto3dseg/algorithm_templates/dints/scripts/infer.py b/auto3dseg/algorithm_templates/dints/scripts/infer.py index db9c97cc..da0c12ce 100644 --- a/auto3dseg/algorithm_templates/dints/scripts/infer.py +++ b/auto3dseg/algorithm_templates/dints/scripts/infer.py @@ -80,9 +80,14 @@ def pre_operation(config_file, **override): if auto_scale_allowed: output_classes = parser["training"]["output_classes"] - mem = get_mem_from_visible_gpus() - mem = min(mem) if isinstance(mem, list) else mem - mem = float(mem) / (1024.0**3) + + try: + mem = get_mem_from_visible_gpus() + mem = min(mem) if isinstance(mem, list) else mem + mem = float(mem) / (1024.0**3) + except BaseException: + mem = 16.0 + mem = max(1.0, mem - 1.0) mem_bs2 = 6.0 + (20.0 - 6.0) * \ (output_classes - 2) / (105 - 2) @@ -262,7 +267,7 @@ def __init__(self, config_file: Optional[Union[str, Sequence[str]]] = None, **ov def infer(self, image_file, save_mask=False): self.model.eval() - batch_data = self.infer_transforms(image_file) + batch_data = self.infer_transforms({"image": image_file}) batch_data = list_data_collate([batch_data]) finished = None diff --git a/auto3dseg/algorithm_templates/dints/scripts/train.py b/auto3dseg/algorithm_templates/dints/scripts/train.py index 0105bb43..f0cd13b2 100644 --- a/auto3dseg/algorithm_templates/dints/scripts/train.py +++ b/auto3dseg/algorithm_templates/dints/scripts/train.py @@ -143,9 +143,14 @@ def pre_operation(config_file, **override): if auto_scale_allowed: output_classes = parser["training"]["output_classes"] - mem = get_mem_from_visible_gpus() - mem = min(mem) if isinstance(mem, list) else mem - mem = float(mem) / (1024.0**3) + + try: + mem = get_mem_from_visible_gpus() + mem = min(mem) if isinstance(mem, list) else mem + mem = float(mem) / (1024.0**3) + except BaseException: + mem = 16.0 + mem = max(1.0, mem - 1.0) mem_bs2 = 6.0 + (20.0 - 6.0) * \ (output_classes - 2) / (105 - 2) diff --git a/auto3dseg/algorithm_templates/dints/scripts/validate.py b/auto3dseg/algorithm_templates/dints/scripts/validate.py index df0d04d1..d5d65eb0 100644 --- a/auto3dseg/algorithm_templates/dints/scripts/validate.py +++ b/auto3dseg/algorithm_templates/dints/scripts/validate.py @@ -82,9 +82,14 @@ def pre_operation(config_file, **override): if auto_scale_allowed: output_classes = parser["training"]["output_classes"] - mem = get_mem_from_visible_gpus() - mem = min(mem) if 
isinstance(mem, list) else mem - mem = float(mem) / (1024.0**3) + + try: + mem = get_mem_from_visible_gpus() + mem = min(mem) if isinstance(mem, list) else mem + mem = float(mem) / (1024.0**3) + except BaseException: + mem = 16.0 + mem = max(1.0, mem - 1.0) mem_bs2 = 6.0 + (20.0 - 6.0) * \ (output_classes - 2) / (105 - 2) From 61bfd351d91a202fcf5284d83bd7c9cb99600d85 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 18 Aug 2023 16:33:57 +0000 Subject: [PATCH 10/14] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- auto3dseg/algorithm_templates/dints/scripts/infer.py | 2 +- auto3dseg/algorithm_templates/dints/scripts/train.py | 2 +- auto3dseg/algorithm_templates/dints/scripts/validate.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/auto3dseg/algorithm_templates/dints/scripts/infer.py b/auto3dseg/algorithm_templates/dints/scripts/infer.py index da0c12ce..0fdc0a8b 100644 --- a/auto3dseg/algorithm_templates/dints/scripts/infer.py +++ b/auto3dseg/algorithm_templates/dints/scripts/infer.py @@ -80,7 +80,7 @@ def pre_operation(config_file, **override): if auto_scale_allowed: output_classes = parser["training"]["output_classes"] - + try: mem = get_mem_from_visible_gpus() mem = min(mem) if isinstance(mem, list) else mem diff --git a/auto3dseg/algorithm_templates/dints/scripts/train.py b/auto3dseg/algorithm_templates/dints/scripts/train.py index f0cd13b2..23ba64b0 100644 --- a/auto3dseg/algorithm_templates/dints/scripts/train.py +++ b/auto3dseg/algorithm_templates/dints/scripts/train.py @@ -143,7 +143,7 @@ def pre_operation(config_file, **override): if auto_scale_allowed: output_classes = parser["training"]["output_classes"] - + try: mem = get_mem_from_visible_gpus() mem = min(mem) if isinstance(mem, list) else mem diff --git a/auto3dseg/algorithm_templates/dints/scripts/validate.py b/auto3dseg/algorithm_templates/dints/scripts/validate.py index d5d65eb0..37df0205 100644 --- a/auto3dseg/algorithm_templates/dints/scripts/validate.py +++ b/auto3dseg/algorithm_templates/dints/scripts/validate.py @@ -82,7 +82,7 @@ def pre_operation(config_file, **override): if auto_scale_allowed: output_classes = parser["training"]["output_classes"] - + try: mem = get_mem_from_visible_gpus() mem = min(mem) if isinstance(mem, list) else mem From cad5804aae9728a4117b16f7a7b7860a307d1b12 Mon Sep 17 00:00:00 2001 From: dongy Date: Fri, 18 Aug 2023 09:35:01 -0700 Subject: [PATCH 11/14] update Signed-off-by: dongy --- auto3dseg/algorithm_templates/dints/scripts/infer.py | 2 +- auto3dseg/algorithm_templates/dints/scripts/train.py | 2 +- auto3dseg/algorithm_templates/dints/scripts/validate.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/auto3dseg/algorithm_templates/dints/scripts/infer.py b/auto3dseg/algorithm_templates/dints/scripts/infer.py index da0c12ce..0fdc0a8b 100644 --- a/auto3dseg/algorithm_templates/dints/scripts/infer.py +++ b/auto3dseg/algorithm_templates/dints/scripts/infer.py @@ -80,7 +80,7 @@ def pre_operation(config_file, **override): if auto_scale_allowed: output_classes = parser["training"]["output_classes"] - + try: mem = get_mem_from_visible_gpus() mem = min(mem) if isinstance(mem, list) else mem diff --git a/auto3dseg/algorithm_templates/dints/scripts/train.py b/auto3dseg/algorithm_templates/dints/scripts/train.py index f0cd13b2..23ba64b0 100644 --- a/auto3dseg/algorithm_templates/dints/scripts/train.py +++ 
b/auto3dseg/algorithm_templates/dints/scripts/train.py @@ -143,7 +143,7 @@ def pre_operation(config_file, **override): if auto_scale_allowed: output_classes = parser["training"]["output_classes"] - + try: mem = get_mem_from_visible_gpus() mem = min(mem) if isinstance(mem, list) else mem diff --git a/auto3dseg/algorithm_templates/dints/scripts/validate.py b/auto3dseg/algorithm_templates/dints/scripts/validate.py index d5d65eb0..37df0205 100644 --- a/auto3dseg/algorithm_templates/dints/scripts/validate.py +++ b/auto3dseg/algorithm_templates/dints/scripts/validate.py @@ -82,7 +82,7 @@ def pre_operation(config_file, **override): if auto_scale_allowed: output_classes = parser["training"]["output_classes"] - + try: mem = get_mem_from_visible_gpus() mem = min(mem) if isinstance(mem, list) else mem From 2ec42fbc9a50e4d815f05e2373398b0eb8c4446a Mon Sep 17 00:00:00 2001 From: dongy Date: Mon, 21 Aug 2023 20:23:58 -0700 Subject: [PATCH 12/14] update Signed-off-by: dongy --- auto3dseg/algorithm_templates/dints/scripts/train.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/auto3dseg/algorithm_templates/dints/scripts/train.py b/auto3dseg/algorithm_templates/dints/scripts/train.py index 23ba64b0..c48568a1 100644 --- a/auto3dseg/algorithm_templates/dints/scripts/train.py +++ b/auto3dseg/algorithm_templates/dints/scripts/train.py @@ -947,6 +947,10 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): _index = 0 for val_data in orig_val_loader: + filename = val_data["image"].meta["filename_or_obj"] + if isinstance(filename, list): + filename = filename[0] + finished = None device_list_input = None device_list_output = None @@ -1026,7 +1030,7 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): include_background=not softmax) logger.debug( - f"validation Dice score at original spacing/resolution: {value}") + f"validation Dice score at original spacing/resolution: {value}; file name: {filename}") for _c in range(metric_dim): val0 = torch.nan_to_num(value[0, _c], nan=0.0) From 7cd80c4bff7f181d52b188ec948ebf98801ed495 Mon Sep 17 00:00:00 2001 From: dongy Date: Mon, 21 Aug 2023 20:27:03 -0700 Subject: [PATCH 13/14] update Signed-off-by: dongy --- auto3dseg/algorithm_templates/dints/scripts/train.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/auto3dseg/algorithm_templates/dints/scripts/train.py b/auto3dseg/algorithm_templates/dints/scripts/train.py index c48568a1..ed850ee0 100644 --- a/auto3dseg/algorithm_templates/dints/scripts/train.py +++ b/auto3dseg/algorithm_templates/dints/scripts/train.py @@ -1030,7 +1030,7 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override): include_background=not softmax) logger.debug( - f"validation Dice score at original spacing/resolution: {value}; file name: {filename}") + f"validation Dice score at original spacing/resolution: {value}; filename: {filename}") for _c in range(metric_dim): val0 = torch.nan_to_num(value[0, _c], nan=0.0) From 546f6112f7ac65d26a40a42cea464fd5339f5039 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 21 Sep 2023 06:39:51 +0000 Subject: [PATCH 14/14] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- prostate-mri-lesion-seg/build_and_run.ipynb | 1156 +++++++++---------- 1 file changed, 578 insertions(+), 578 deletions(-) diff --git a/prostate-mri-lesion-seg/build_and_run.ipynb 
b/prostate-mri-lesion-seg/build_and_run.ipynb index f6325fa5..4104251a 100644 --- a/prostate-mri-lesion-seg/build_and_run.ipynb +++ b/prostate-mri-lesion-seg/build_and_run.ipynb @@ -1,580 +1,580 @@ { - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[2023-07-25 10:53:42,712] [DEBUG] (app.AIProstateLesionSegApp) - Begin compose\n", - "[2023-07-25 10:53:42,714] [DEBUG] (app.AIProstateLesionSegApp) - End compose\n", - "[2023-07-25 10:53:44,300] [DEBUG] (app_packager) - FROM nvcr.io/nvidia/pytorch:22.08-py3\n", - "\n", - " LABEL base=\"nvcr.io/nvidia/pytorch:22.08-py3\"\n", - " LABEL tag=\"lesion_seg_workflow_app:latest\"\n", - " LABEL version=\"0.5.1\"\n", - " LABEL sdk_version=\"0.5.1\"\n", - "\n", - " ENV DEBIAN_FRONTEND=noninteractive\n", - " ENV TERM=xterm-256color\n", - " ENV MONAI_INPUTPATH=/var/monai/input\n", - " ENV MONAI_OUTPUTPATH=/var/monai/output\n", - " ENV MONAI_WORKDIR=/var/monai/\n", - " ENV MONAI_APPLICATION=/opt/monai/app\n", - " ENV MONAI_TIMEOUT=0\n", - " ENV MONAI_MODELPATH=/opt/monai/models\n", - "\n", - " RUN apt update \\\n", - " && apt upgrade -y --no-install-recommends \\\n", - " && apt install -y --no-install-recommends \\\n", - " curl \\\n", - " unzip \\\n", - " && apt autoremove -y \\\n", - " && rm -rf /var/lib/apt/lists/*\n", - " \n", - " USER root\n", - "\n", - " RUN pip install --no-cache-dir --upgrade setuptools==59.5.0 pip==22.3 wheel==0.37.1 numpy>=1.21.6\n", - "\n", - " RUN mkdir -p /etc/monai/ \\\n", - " && mkdir -p /opt/monai/ \\\n", - " && mkdir -p /var/monai/ \\\n", - " && mkdir -p /opt/monai/app \\\n", - " && mkdir -p /opt/monai/executor \\\n", - " && mkdir -p /var/monai/input \\\n", - " && mkdir -p /var/monai/output \\\n", - " && mkdir -p /opt/monai/models\n", - "\n", - " RUN mkdir -p /opt/monai/models\n", - "COPY ./models /opt/monai/models\n", - "\n", - "\n", - " COPY ./pip/requirements.txt /opt/monai/app/requirements.txt\n", - "\n", - " RUN curl https://globalcdn.nuget.org/packages/monai.deploy.executor.0.1.0-prealpha.4.nupkg -o /opt/monai/executor/executor.zip \\\n", - " && unzip /opt/monai/executor/executor.zip -d /opt/monai/executor/executor_pkg \\\n", - " && mv /opt/monai/executor/executor_pkg/lib/native/linux-x64/* /opt/monai/executor \\\n", - " && rm -f /opt/monai/executor/executor.zip \\\n", - " && rm -rf /opt/monai/executor/executor_pkg \\\n", - " && chmod +x /opt/monai/executor/monai-exec\n", - "\n", - " ENV PATH=/root/.local/bin:$PATH\n", - "\n", - " RUN pip install --no-cache-dir --user -r /opt/monai/app/requirements.txt\n", - "\n", - " # Override monai-deploy-app-sdk module\n", - " COPY ./monai-deploy-app-sdk /root/.local/lib/python3.8/site-packages/monai/deploy/\n", - " RUN echo \"User site package location: $(python3 -m site --user-site)\" \\\n", - " && [ \"$(python3 -m site --user-site)\" != \"/root/.local/lib/python3.8/site-packages\" ] \\\n", - " && mkdir -p $(python3 -m site --user-site)/monai/deploy \\\n", - " && cp -r /root/.local/lib/python3.8/site-packages/monai/deploy/* $(python3 -m site --user-site)/monai/deploy/ \\\n", - " || true\n", - "\n", - " COPY ./map/app.json /etc/monai/\n", - " COPY ./map/pkg.json /etc/monai/\n", - "\n", - " COPY ./app /opt/monai/app\n", - "\n", - " # Set the working directory\n", - " WORKDIR /var/monai/\n", - "\n", - " ENTRYPOINT [ \"/opt/monai/executor/monai-exec\" ]\n", - "\n", - "[2023-07-25 10:53:44,321] [DEBUG] (app_packager) - Docker image build command: docker build -f 
'./monai_tmp_5ke51i4/dockerfile' -t lesion_seg_workflow_app:latest './monai_tmp_5ke51i4' --build-arg MONAI_UID=$(id -u) --build-arg MONAI_GID=$(id -g)\n", - "Building MONAI Application Package... -[2023-07-25 10:53:47,041] [DEBUG] (app_packager) - Sending build context to Docker daemon 654.1MB\n", - "\n", - "[2023-07-25 10:53:47,054] [DEBUG] (app_packager) - Step 1/30 : FROM nvcr.io/nvidia/pytorch:22.08-py3\n", - "\n", - "[2023-07-25 10:53:47,055] [DEBUG] (app_packager) - ---> b3d16c039217\n", - "\n", - "[2023-07-25 10:53:47,055] [DEBUG] (app_packager) - Step 2/30 : LABEL base=\"nvcr.io/nvidia/pytorch:22.08-py3\"\n", - "\n", - "[2023-07-25 10:53:47,057] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,057] [DEBUG] (app_packager) - ---> cc80685c6c3b\n", - "\n", - "[2023-07-25 10:53:47,057] [DEBUG] (app_packager) - Step 3/30 : LABEL tag=\"lesion_seg_workflow_app:latest\"\n", - "\n", - "[2023-07-25 10:53:47,063] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,063] [DEBUG] (app_packager) - ---> 1c092212ea50\n", - "\n", - "[2023-07-25 10:53:47,063] [DEBUG] (app_packager) - Step 4/30 : LABEL version=\"0.5.1\"\n", - "\n", - "[2023-07-25 10:53:47,069] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,069] [DEBUG] (app_packager) - ---> e250de6ab88b\n", - "\n", - "[2023-07-25 10:53:47,069] [DEBUG] (app_packager) - Step 5/30 : LABEL sdk_version=\"0.5.1\"\n", - "\n", - "[2023-07-25 10:53:47,070] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,070] [DEBUG] (app_packager) - ---> c3fb1c64336a\n", - "\n", - "[2023-07-25 10:53:47,070] [DEBUG] (app_packager) - Step 6/30 : ENV DEBIAN_FRONTEND=noninteractive\n", - "\n", - "[2023-07-25 10:53:47,072] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,072] [DEBUG] (app_packager) - ---> bd72d677a6f2\n", - "\n", - "[2023-07-25 10:53:47,072] [DEBUG] (app_packager) - Step 7/30 : ENV TERM=xterm-256color\n", - "\n", - "[2023-07-25 10:53:47,073] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,073] [DEBUG] (app_packager) - ---> c7597ae96a98\n", - "\n", - "[2023-07-25 10:53:47,074] [DEBUG] (app_packager) - Step 8/30 : ENV MONAI_INPUTPATH=/var/monai/input\n", - "\n", - "[2023-07-25 10:53:47,075] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,075] [DEBUG] (app_packager) - ---> 05ccfe0c25fa\n", - "\n", - "[2023-07-25 10:53:47,075] [DEBUG] (app_packager) - Step 9/30 : ENV MONAI_OUTPUTPATH=/var/monai/output\n", - "\n", - "[2023-07-25 10:53:47,077] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,077] [DEBUG] (app_packager) - ---> 5a84ea4b3b03\n", - "\n", - "[2023-07-25 10:53:47,077] [DEBUG] (app_packager) - Step 10/30 : ENV MONAI_WORKDIR=/var/monai/\n", - "\n", - "[2023-07-25 10:53:47,078] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,078] [DEBUG] (app_packager) - ---> 11da23172a0d\n", - "\n", - "[2023-07-25 10:53:47,078] [DEBUG] (app_packager) - Step 11/30 : ENV MONAI_APPLICATION=/opt/monai/app\n", - "\n", - "[2023-07-25 10:53:47,080] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,080] [DEBUG] (app_packager) - ---> bd70e3df1914\n", - "\n", - "[2023-07-25 10:53:47,080] [DEBUG] (app_packager) - Step 12/30 : ENV MONAI_TIMEOUT=0\n", - "\n", - "[2023-07-25 10:53:47,082] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,082] [DEBUG] (app_packager) - ---> 
6235860cb56c\n", - "\n", - "[2023-07-25 10:53:47,082] [DEBUG] (app_packager) - Step 13/30 : ENV MONAI_MODELPATH=/opt/monai/models\n", - "\n", - "[2023-07-25 10:53:47,083] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,083] [DEBUG] (app_packager) - ---> 14ac90b4a489\n", - "\n", - "[2023-07-25 10:53:47,083] [DEBUG] (app_packager) - Step 14/30 : RUN apt update && apt upgrade -y --no-install-recommends && apt install -y --no-install-recommends curl unzip && apt autoremove -y && rm -rf /var/lib/apt/lists/*\n", - "\n", - "[2023-07-25 10:53:47,085] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,085] [DEBUG] (app_packager) - ---> ba5b71d0b138\n", - "\n", - "[2023-07-25 10:53:47,085] [DEBUG] (app_packager) - Step 15/30 : USER root\n", - "\n", - "[2023-07-25 10:53:47,086] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,087] [DEBUG] (app_packager) - ---> 0e9eb9961405\n", - "\n", - "[2023-07-25 10:53:47,087] [DEBUG] (app_packager) - Step 16/30 : RUN pip install --no-cache-dir --upgrade setuptools==59.5.0 pip==22.3 wheel==0.37.1 numpy>=1.21.6\n", - "\n", - "[2023-07-25 10:53:47,088] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,088] [DEBUG] (app_packager) - ---> 4a6f5f65fcfb\n", - "\n", - "[2023-07-25 10:53:47,088] [DEBUG] (app_packager) - Step 17/30 : RUN mkdir -p /etc/monai/ && mkdir -p /opt/monai/ && mkdir -p /var/monai/ && mkdir -p /opt/monai/app && mkdir -p /opt/monai/executor && mkdir -p /var/monai/input && mkdir -p /var/monai/output && mkdir -p /opt/monai/models\n", - "\n", - "[2023-07-25 10:53:47,090] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,090] [DEBUG] (app_packager) - ---> 1e5513d5ef4a\n", - "\n", - "[2023-07-25 10:53:47,090] [DEBUG] (app_packager) - Step 18/30 : RUN mkdir -p /opt/monai/models\n", - "\n", - "[2023-07-25 10:53:47,091] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,091] [DEBUG] (app_packager) - ---> 6ae160285e74\n", - "\n", - "[2023-07-25 10:53:47,091] [DEBUG] (app_packager) - Step 19/30 : COPY ./models /opt/monai/models\n", - "\n", - "[2023-07-25 10:53:47,093] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,093] [DEBUG] (app_packager) - ---> 6238e429e44f\n", - "\n", - "[2023-07-25 10:53:47,093] [DEBUG] (app_packager) - Step 20/30 : COPY ./pip/requirements.txt /opt/monai/app/requirements.txt\n", - "\n", - "[2023-07-25 10:53:47,099] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,100] [DEBUG] (app_packager) - ---> 214ddb9b9e56\n", - "\n", - "[2023-07-25 10:53:47,100] [DEBUG] (app_packager) - Step 21/30 : RUN curl https://globalcdn.nuget.org/packages/monai.deploy.executor.0.1.0-prealpha.4.nupkg -o /opt/monai/executor/executor.zip && unzip /opt/monai/executor/executor.zip -d /opt/monai/executor/executor_pkg && mv /opt/monai/executor/executor_pkg/lib/native/linux-x64/* /opt/monai/executor && rm -f /opt/monai/executor/executor.zip && rm -rf /opt/monai/executor/executor_pkg && chmod +x /opt/monai/executor/monai-exec\n", - "\n", - "[2023-07-25 10:53:47,101] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,101] [DEBUG] (app_packager) - ---> d6f21887e194\n", - "\n", - "[2023-07-25 10:53:47,101] [DEBUG] (app_packager) - Step 22/30 : ENV PATH=/root/.local/bin:$PATH\n", - "\n", - "[2023-07-25 10:53:47,102] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,102] [DEBUG] (app_packager) - 
---> 61a1ec2a5045\n", - "\n", - "[2023-07-25 10:53:47,102] [DEBUG] (app_packager) - Step 23/30 : RUN pip install --no-cache-dir --user -r /opt/monai/app/requirements.txt\n", - "\n", - "[2023-07-25 10:53:47,103] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,103] [DEBUG] (app_packager) - ---> fc7b3fc8ee51\n", - "\n", - "[2023-07-25 10:53:47,103] [DEBUG] (app_packager) - Step 24/30 : COPY ./monai-deploy-app-sdk /root/.local/lib/python3.8/site-packages/monai/deploy/\n", - "\n", - "[2023-07-25 10:53:47,106] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,106] [DEBUG] (app_packager) - ---> c4c2745da875\n", - "\n", - "[2023-07-25 10:53:47,106] [DEBUG] (app_packager) - Step 25/30 : RUN echo \"User site package location: $(python3 -m site --user-site)\" && [ \"$(python3 -m site --user-site)\" != \"/root/.local/lib/python3.8/site-packages\" ] && mkdir -p $(python3 -m site --user-site)/monai/deploy && cp -r /root/.local/lib/python3.8/site-packages/monai/deploy/* $(python3 -m site --user-site)/monai/deploy/ || true\n", - "\n", - "[2023-07-25 10:53:47,107] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,107] [DEBUG] (app_packager) - ---> 7fc9322a0401\n", - "\n", - "[2023-07-25 10:53:47,107] [DEBUG] (app_packager) - Step 26/30 : COPY ./map/app.json /etc/monai/\n", - "\n", - "[2023-07-25 10:53:47,108] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,108] [DEBUG] (app_packager) - ---> ec5f10bfc404\n", - "\n", - "[2023-07-25 10:53:47,108] [DEBUG] (app_packager) - Step 27/30 : COPY ./map/pkg.json /etc/monai/\n", - "\n", - "[2023-07-25 10:53:47,109] [DEBUG] (app_packager) - ---> Using cache\n", - "\n", - "[2023-07-25 10:53:47,109] [DEBUG] (app_packager) - ---> 18827ed8e1d0\n", - "\n", - "[2023-07-25 10:53:47,109] [DEBUG] (app_packager) - Step 28/30 : COPY ./app /opt/monai/app\n", - "\n", - "\b\\[2023-07-25 10:54:08,872] [DEBUG] (app_packager) - ---> 57dd8d7c2880\n", - "\n", - "[2023-07-25 10:54:08,872] [DEBUG] (app_packager) - Step 29/30 : WORKDIR /var/monai/\n", - "\n", - "\b-[2023-07-25 10:54:11,104] [DEBUG] (app_packager) - ---> Running in 7763b9724919\n", - "\n", - "\b|[2023-07-25 10:54:12,986] [DEBUG] (app_packager) - Removing intermediate container 7763b9724919\n", - "\n", - "[2023-07-25 10:54:12,986] [DEBUG] (app_packager) - ---> 54003e6b8432\n", - "\n", - "[2023-07-25 10:54:12,986] [DEBUG] (app_packager) - Step 30/30 : ENTRYPOINT [ \"/opt/monai/executor/monai-exec\" ]\n", - "\n", - "\b-[2023-07-25 10:54:15,173] [DEBUG] (app_packager) - ---> Running in fa50988db30b\n", - "\n", - "\b\\[2023-07-25 10:54:16,804] [DEBUG] (app_packager) - Removing intermediate container fa50988db30b\n", - "\n", - "[2023-07-25 10:54:16,804] [DEBUG] (app_packager) - ---> f214f9f3ed0a\n", - "\n", - "[2023-07-25 10:54:16,804] [DEBUG] (app_packager) - [Warning] One or more build-args [MONAI_UID MONAI_GID] were not consumed\n", - "\n", - "[2023-07-25 10:54:16,888] [DEBUG] (app_packager) - Successfully built f214f9f3ed0a\n", - "\n", - "\b|[2023-07-25 10:54:17,106] [DEBUG] (app_packager) - Successfully tagged lesion_seg_workflow_app:latest\n", - "\n", - "[2023-07-25 10:54:17,109] [DEBUG] (app_packager) - \n", - "\bDone\n", - "[2023-07-25 10:54:17,120] [INFO] (app_packager) - Successfully built lesion_seg_workflow_app:latest\n" - ] - } - ], - "source": [ - "# Package code into MAP\n", - "!monai-deploy package -l DEBUG -b nvcr.io/nvidia/pytorch:22.08-py3 ./prostate_mri_lesion_seg_app --tag 
lesion_seg_workflow_app:latest -m prostate_mri_lesion_seg_app/models/" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Checking dependencies...\n", - "--> Verifying if \"docker\" is installed...\n", - "\n", - "--> Verifying if \"lesion_seg_workflow_app:latest\" is available...\n", - "\n", - "Checking for MAP \"lesion_seg_workflow_app:latest\" locally\n", - "\"lesion_seg_workflow_app:latest\" found.\n", - "\n", - "Reading MONAI App Package manifest...\n", - "-------------------application manifest-------------------\n", - "{\n", - " \"api-version\": \"0.1.0\",\n", - " \"command\": \"python3 -u /opt/monai/app/app.py\",\n", - " \"environment\": {},\n", - " \"input\": {\n", - " \"formats\": [],\n", - " \"path\": \"input\"\n", - " },\n", - " \"output\": {\n", - " \"format\": {},\n", - " \"path\": \"output\"\n", - " },\n", - " \"sdk-version\": \"0.5.1\",\n", - " \"timeout\": 0,\n", - " \"version\": \"0.5.1\",\n", - " \"working-directory\": \"/var/monai/\"\n", - "}\n", - "----------------------------------------------\n", - "\n", - "-------------------package manifest-------------------\n", - "{\n", - " \"api-version\": \"0.1.0\",\n", - " \"application-root\": \"/var/monai/\",\n", - " \"models\": [\n", - " {\n", - " \"name\": \"organ-5b6bbe150488ed6e34ac61d113aeb4786ec1c4cda9bd18304f6a8a891e93e79d\",\n", - " \"path\": \"/opt/monai/models/organ/model.ts\"\n", - " }\n", - " ],\n", - " \"resources\": {\n", - " \"cpu\": 1,\n", - " \"gpu\": 1,\n", - " \"memory\": \"7168Mi\"\n", - " },\n", - " \"sdk-version\": \"0.5.1\",\n", - " \"version\": \"0.5.1\"\n", - "}\n", - "----------------------------------------------\n", - "\n", - "--> Verifying if \"nvidia-docker\" is installed...\n", - "\n", - "Executing command: nvidia-docker run --rm -a STDERR -a STDOUT -e MONAI_INPUTPATH=\"/var/monai/input\" -e MONAI_OUTPUTPATH=\"/var/monai/output\" -e MONAI_MODELPATH=/opt/monai/models -v \"/media/jtetreault/Data/Datasets/ProstateX/PROSTATEx/ProstateX-0004\":\"/var/monai/input\" -v \"/home/jtetreault/Projects/monai-projects/prostate-mri/public/output\":\"/var/monai/output\" --shm-size=1g --entrypoint \"/bin/bash\" \"lesion_seg_workflow_app:latest\" -c \"python3 -u /opt/monai/app/app.py\"\n", - "\u001b[34mGoing to initiate execution of operator DICOMDataLoaderOperator\u001b[39m\n", - "\u001b[32mExecuting operator DICOMDataLoaderOperator \u001b[33m(Process ID: 1, Operator ID: 24998906-0f20-4b83-acce-ca50bf6f9b3e)\u001b[39m\n", - "\u001b[34mDone performing execution of operator DICOMDataLoaderOperator\n", - "\u001b[39m\n", - "\u001b[34mGoing to initiate execution of operator DICOMSeriesSelectorOperator\u001b[39m\n", - "\u001b[32mExecuting operator DICOMSeriesSelectorOperator \u001b[33m(Process ID: 1, Operator ID: 64da4a5b-9979-487c-b95a-96c6703db10b)\u001b[39m\n", - "[2023-07-25 14:55:46,449] [INFO] (root) - Finding series for Selection named: t2\n", - "[2023-07-25 14:55:46,449] [INFO] (root) - Searching study, : 1.3.6.1.4.1.14519.5.2.1.7311.5101.170561193612723093192571245493\n", - " # of series: 7\n", - "[2023-07-25 14:55:46,449] [INFO] (root) - Working on series, instance UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.278358228783511961204087191158\n", - "[2023-07-25 14:55:46,449] [INFO] (root) - On attribute: 'Modality' to match value: 'MR'\n", - "[2023-07-25 14:55:46,449] [INFO] (root) - Series attribute Modality value: MR\n", - "[2023-07-25 14:55:46,449] [INFO] (root) - On attribute: 'ImageType' to match value: 
['ORIGINAL', 'PRIMARY']\n", - "[2023-07-25 14:55:46,449] [INFO] (root) - Series attribute ImageType value: None\n", - "[2023-07-25 14:55:46,449] [INFO] (root) - This series does not match the selection conditions.\n", - "[2023-07-25 14:55:46,449] [INFO] (root) - Working on series, instance UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.473042633347744664896320600386\n", - "[2023-07-25 14:55:46,449] [INFO] (root) - On attribute: 'Modality' to match value: 'MR'\n", - "[2023-07-25 14:55:46,450] [INFO] (root) - Series attribute Modality value: MR\n", - "[2023-07-25 14:55:46,450] [INFO] (root) - On attribute: 'ImageType' to match value: ['ORIGINAL', 'PRIMARY']\n", - "[2023-07-25 14:55:46,450] [INFO] (root) - Series attribute ImageType value: None\n", - "[2023-07-25 14:55:46,450] [INFO] (root) - On attribute: 'SeriesDescription' to match value: '((?=AX T2)|(?=AX T2 6 NSA)|(?=AX T2 FAST)|(?=AX T2 . voxel 7x.8)|(?=AX T2 B1 Default)|(?=AX T2 CS)|(?=AX T2 cs 3.0)|(?=AX T2 FAST)|(?=AX T2 FRFSE)|(?=AX T2 N/S)|(?=AX T2 No sense)|(?=AX T2 NS)|(?=AX T2 NSA 3)|(?=AX T2 NSA 4)|(?=AX T2 NSA 5)|(?=AX T2 PROP)|(?=Ax T2 PROSTATE)|(?=AX T2 SMALL FOV)|(?=Ax T2 thin FRFSE)|(?=sT2 TSE ax no post)|(?=T2 AX)|(?=T2 AX SMALL FOV[*]? IF MOTION REPEAT[*]?)|(?=T2 AXIAL 3MM)|(?=T2 TRA 3mm)|(?=T2 TSE Ax)|(?=T2 TSE ax cs)|(?=T2 TSE ax hi)|(?=T2 TSE ax hi sense)|(?=T2 TSE ax no sense)|(?=T2 TSE ax NS)|(?=T2 TSE ax NSA 3)|(?=t2_tse_tra)|(?=t2_tse_tra_320_p2)|(?=t2_tse_tra_3mm _SFOV_TE 92)|(?=t2_tse_tra_Grappa3)|(?=T2W_TSE)|(?=T2W_TSE_ax)|(?=T2W_TSE_ax PSS Refoc 52)|(?=T2W_TSE_ax zoom PSS Refoc))'\n", - "[2023-07-25 14:55:46,450] [INFO] (root) - Series attribute SeriesDescription value: t2_tse_sag\n", - "[2023-07-25 14:55:46,450] [INFO] (root) - Series attribute string value did not match. Try regEx.\n", - "[2023-07-25 14:55:46,452] [INFO] (root) - This series does not match the selection conditions.\n", - "[2023-07-25 14:55:46,452] [INFO] (root) - Working on series, instance UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.206828891270520544417996275680\n", - "[2023-07-25 14:55:46,452] [INFO] (root) - On attribute: 'Modality' to match value: 'MR'\n", - "[2023-07-25 14:55:46,452] [INFO] (root) - Series attribute Modality value: MR\n", - "[2023-07-25 14:55:46,452] [INFO] (root) - On attribute: 'ImageType' to match value: ['ORIGINAL', 'PRIMARY']\n", - "[2023-07-25 14:55:46,452] [INFO] (root) - Series attribute ImageType value: None\n", - "[2023-07-25 14:55:46,453] [INFO] (root) - On attribute: 'SeriesDescription' to match value: '((?=AX T2)|(?=AX T2 6 NSA)|(?=AX T2 FAST)|(?=AX T2 . voxel 7x.8)|(?=AX T2 B1 Default)|(?=AX T2 CS)|(?=AX T2 cs 3.0)|(?=AX T2 FAST)|(?=AX T2 FRFSE)|(?=AX T2 N/S)|(?=AX T2 No sense)|(?=AX T2 NS)|(?=AX T2 NSA 3)|(?=AX T2 NSA 4)|(?=AX T2 NSA 5)|(?=AX T2 PROP)|(?=Ax T2 PROSTATE)|(?=AX T2 SMALL FOV)|(?=Ax T2 thin FRFSE)|(?=sT2 TSE ax no post)|(?=T2 AX)|(?=T2 AX SMALL FOV[*]? IF MOTION REPEAT[*]?)|(?=T2 AXIAL 3MM)|(?=T2 TRA 3mm)|(?=T2 TSE Ax)|(?=T2 TSE ax cs)|(?=T2 TSE ax hi)|(?=T2 TSE ax hi sense)|(?=T2 TSE ax no sense)|(?=T2 TSE ax NS)|(?=T2 TSE ax NSA 3)|(?=t2_tse_tra)|(?=t2_tse_tra_320_p2)|(?=t2_tse_tra_3mm _SFOV_TE 92)|(?=t2_tse_tra_Grappa3)|(?=T2W_TSE)|(?=T2W_TSE_ax)|(?=T2W_TSE_ax PSS Refoc 52)|(?=T2W_TSE_ax zoom PSS Refoc))'\n", - "[2023-07-25 14:55:46,453] [INFO] (root) - Series attribute SeriesDescription value: t2_tse_tra\n", - "[2023-07-25 14:55:46,453] [INFO] (root) - Series attribute string value did not match. 
Try regEx.\n", - "[2023-07-25 14:55:46,453] [INFO] (root) - Selected Series, UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.206828891270520544417996275680\n", - "\u001b[34mDone performing execution of operator DICOMSeriesSelectorOperator\n", - "\u001b[39m\n", - "\u001b[34mGoing to initiate execution of operator DICOMSeriesSelectorOperator\u001b[39m\n", - "\u001b[32mExecuting operator DICOMSeriesSelectorOperator \u001b[33m(Process ID: 1, Operator ID: c78be533-dced-4fe8-88e5-1e894b87ec9c)\u001b[39m\n", - "[2023-07-25 14:55:46,453] [INFO] (root) - Finding series for Selection named: adc\n", - "[2023-07-25 14:55:46,453] [INFO] (root) - Searching study, : 1.3.6.1.4.1.14519.5.2.1.7311.5101.170561193612723093192571245493\n", - " # of series: 7\n", - "[2023-07-25 14:55:46,453] [INFO] (root) - Working on series, instance UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.278358228783511961204087191158\n", - "[2023-07-25 14:55:46,453] [INFO] (root) - On attribute: 'Modality' to match value: 'MR'\n", - "[2023-07-25 14:55:46,454] [INFO] (root) - Series attribute Modality value: MR\n", - "[2023-07-25 14:55:46,454] [INFO] (root) - On attribute: 'ImageType' to match value: ['DIFFUSION', 'ADC']\n", - "[2023-07-25 14:55:46,454] [INFO] (root) - Series attribute ImageType value: None\n", - "[2023-07-25 14:55:46,454] [INFO] (root) - On attribute: 'SeriesDescription' to match value: '((?=ADC (10^-6 mm²/s))|(?=Apparent Diffusion Coefficient (mm2/s))|(?=AX DIFFUSION_ADC_DFC_MIX)|(?=.*AX DWI (50,1500)_ADC.*)|(?=AX DWI_ADC_DFC_MIX)|(?=b_1500 prostate_ADC)|(?=b_2000 prostate_ADC)|(?=d3B ADC 3B 750 ERC SSh_DWI FAST SENSE)|(?=dADC)|(?=dADC 0_1500)|(?=dADC 100 400 600)|(?=dADC 2)|(?=dADC 3)|(?=dADC ALL)|(?=dADC b 0 1000 2000)|(?=dADC from 0_1500)|(?=dADC from b0_600)|(?=dADC from B0-1500)|(?=dADC Map)|(?=dADC map 1)|(?=dADC MAP 2)|(?=dADC_1 axial)|(?=dADC_b375_750_1150)|(?=ddADC MAP)|(?=DIFF bv1400_ADC)|(?=diff tra b 50 500 800 WIP511b alle spoelen_ADC)|(?=diffusie-3Scan-4bval_fs_ADC)|(?=dReg - WIP SSh_DWI FAST SENSE)|(?=dSSh_DWI SENSE)|(?=DWI PROSTATE_ADC)|(?=dWIP 3B 600 w ERC SSh_DWI S2Ovs2)|(?=dWIP 3B ADC 3B 600 w/o ERC SSh_DWI FAST SENSE)|(?=dWIP SSh_DWI FAST SENSE)|(?=ep2d_diff_new 16 measipat_ADC)|(?=ep2d_DIFF_tra_b50_500_800_1400_alle_spoelen_ADC)|(?=ep2d_diff_tra_DYNDIST_ADC)|(?=ep2d_diff_tra_DYNDIST_MIX_ADC)|(?=ep2d_diff_tra2x2_Noise0_FS_DYNDIST_ADC)|(?=ep2d-advdiff-3Scan-4bval_spair_511b_ADC))'\n", - "[2023-07-25 14:55:46,454] [INFO] (root) - Series attribute SeriesDescription value: ep2d_diff_tra_DYNDIST_MIX_ADC\n", - "[2023-07-25 14:55:46,454] [INFO] (root) - Series attribute string value did not match. 
Try regEx.\n", - "[2023-07-25 14:55:46,457] [INFO] (root) - Selected Series, UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.278358228783511961204087191158\n", - "\u001b[34mDone performing execution of operator DICOMSeriesSelectorOperator\n", - "\u001b[39m\n", - "\u001b[34mGoing to initiate execution of operator DICOMSeriesSelectorOperator\u001b[39m\n", - "\u001b[32mExecuting operator DICOMSeriesSelectorOperator \u001b[33m(Process ID: 1, Operator ID: dacef65e-7b0a-4fa6-b6ee-d6cc8e5bcbf1)\u001b[39m\n", - "[2023-07-25 14:55:46,458] [INFO] (root) - Finding series for Selection named: highb\n", - "[2023-07-25 14:55:46,458] [INFO] (root) - Searching study, : 1.3.6.1.4.1.14519.5.2.1.7311.5101.170561193612723093192571245493\n", - " # of series: 7\n", - "[2023-07-25 14:55:46,458] [INFO] (root) - Working on series, instance UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.278358228783511961204087191158\n", - "[2023-07-25 14:55:46,458] [INFO] (root) - On attribute: 'Modality' to match value: 'MR'\n", - "[2023-07-25 14:55:46,458] [INFO] (root) - Series attribute Modality value: MR\n", - "[2023-07-25 14:55:46,458] [INFO] (root) - On attribute: 'ImageType' to match value: ['DIFFUSION', 'TRACEW']\n", - "[2023-07-25 14:55:46,458] [INFO] (root) - Series attribute ImageType value: None\n", - "[2023-07-25 14:55:46,458] [INFO] (root) - This series does not match the selection conditions.\n", - "[2023-07-25 14:55:46,458] [INFO] (root) - Working on series, instance UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.473042633347744664896320600386\n", - "[2023-07-25 14:55:46,458] [INFO] (root) - On attribute: 'Modality' to match value: 'MR'\n", - "[2023-07-25 14:55:46,458] [INFO] (root) - Series attribute Modality value: MR\n", - "[2023-07-25 14:55:46,458] [INFO] (root) - On attribute: 'ImageType' to match value: ['DIFFUSION', 'TRACEW']\n", - "[2023-07-25 14:55:46,459] [INFO] (root) - Series attribute ImageType value: None\n", - "[2023-07-25 14:55:46,459] [INFO] (root) - This series does not match the selection conditions.\n", - "[2023-07-25 14:55:46,459] [INFO] (root) - Working on series, instance UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.206828891270520544417996275680\n", - "[2023-07-25 14:55:46,459] [INFO] (root) - On attribute: 'Modality' to match value: 'MR'\n", - "[2023-07-25 14:55:46,459] [INFO] (root) - Series attribute Modality value: MR\n", - "[2023-07-25 14:55:46,459] [INFO] (root) - On attribute: 'ImageType' to match value: ['DIFFUSION', 'TRACEW']\n", - "[2023-07-25 14:55:46,459] [INFO] (root) - Series attribute ImageType value: None\n", - "[2023-07-25 14:55:46,459] [INFO] (root) - This series does not match the selection conditions.\n", - "[2023-07-25 14:55:46,459] [INFO] (root) - Working on series, instance UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.212145009248667341607386363070\n", - "[2023-07-25 14:55:46,459] [INFO] (root) - On attribute: 'Modality' to match value: 'MR'\n", - "[2023-07-25 14:55:46,459] [INFO] (root) - Series attribute Modality value: MR\n", - "[2023-07-25 14:55:46,459] [INFO] (root) - On attribute: 'ImageType' to match value: ['DIFFUSION', 'TRACEW']\n", - "[2023-07-25 14:55:46,459] [INFO] (root) - Series attribute ImageType value: None\n", - "[2023-07-25 14:55:46,459] [INFO] (root) - This series does not match the selection conditions.\n", - "[2023-07-25 14:55:46,459] [INFO] (root) - Working on series, instance UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.196637652681902975508118013414\n", - "[2023-07-25 14:55:46,460] [INFO] (root) - On attribute: 'Modality' to match value: 'MR'\n", - "[2023-07-25 14:55:46,460] [INFO] 
(root) - Series attribute Modality value: MR\n", - "[2023-07-25 14:55:46,460] [INFO] (root) - On attribute: 'ImageType' to match value: ['DIFFUSION', 'TRACEW']\n", - "[2023-07-25 14:55:46,460] [INFO] (root) - Series attribute ImageType value: None\n", - "[2023-07-25 14:55:46,460] [INFO] (root) - On attribute: 'SeriesDescription' to match value: '((?=3B 2000 w ERC SSh_DWI)|(?=AX DIFFUSION_CALC_BVAL_DFC_MIX)|(?=AX DWI)|(?=.*AX DWI (50,1500).*)|(?=Ax DWI BH)|(?=AX DWI_TRACEW_DFC_MIX)|(?=Axial FOCUS DWI 1400)|(?=b_1500 prostate)|(?=b_2000 prostate)|(?=DIFF bv1400)|(?=diff tra b 50 500 800 WIP511b alle spoelenCALC_BVAL)|(?=diffusie-3Scan-4bval_fsCALC_BVAL)|(?=DW_Synthetic: Ax DWI All B-50-800 Synthetic B-1400)|(?=.*DW_Synthetic: Ax Focus 50,500,800,1400,2000.*)|(?=DWI PROSTATE)|(?=DWI_5b_0_1500)|(?=DWI_b2000)|(?=DWI_b2000_new)|(?=DWI_b2000_new SENSE)|(?=DWI_b2000_NSA6 SENSE)|(?=ep2d_diff_b1400_new 32 measipat)|(?=ep2d_diff_tra_DYNDIST_MIXCALC_BVAL$)|(?=ep2d_diff_tra_DYNDISTCALC_BVAL)|(?=ep2d_diff_tra2x2_Noise0_FS_DYNDISTCALC_BVAL)|(?=ep2d-advdiff-3Scan-high bvalue 1400)|(?=sb_1500)|(?=sb_2000)|(?=sB1400)|(?=sb1500)|(?=sb-1500)|(?=sb1500 r5 only)|(?=sb-2000)|(?=sDWI_b_2000)|(?=sDWI_b2000))'\n", - "[2023-07-25 14:55:46,460] [INFO] (root) - Series attribute SeriesDescription value: ep2d_diff_tra_DYNDIST_MIX\n", - "[2023-07-25 14:55:46,460] [INFO] (root) - Series attribute string value did not match. Try regEx.\n", - "[2023-07-25 14:55:46,463] [INFO] (root) - This series does not match the selection conditions.\n", - "[2023-07-25 14:55:46,463] [INFO] (root) - Working on series, instance UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.154156364083745277068656620138\n", - "[2023-07-25 14:55:46,463] [INFO] (root) - On attribute: 'Modality' to match value: 'MR'\n", - "[2023-07-25 14:55:46,463] [INFO] (root) - Series attribute Modality value: MR\n", - "[2023-07-25 14:55:46,463] [INFO] (root) - On attribute: 'ImageType' to match value: ['DIFFUSION', 'TRACEW']\n", - "[2023-07-25 14:55:46,463] [INFO] (root) - Series attribute ImageType value: None\n", - "[2023-07-25 14:55:46,463] [INFO] (root) - On attribute: 'SeriesDescription' to match value: '((?=3B 2000 w ERC SSh_DWI)|(?=AX DIFFUSION_CALC_BVAL_DFC_MIX)|(?=AX DWI)|(?=.*AX DWI (50,1500).*)|(?=Ax DWI BH)|(?=AX DWI_TRACEW_DFC_MIX)|(?=Axial FOCUS DWI 1400)|(?=b_1500 prostate)|(?=b_2000 prostate)|(?=DIFF bv1400)|(?=diff tra b 50 500 800 WIP511b alle spoelenCALC_BVAL)|(?=diffusie-3Scan-4bval_fsCALC_BVAL)|(?=DW_Synthetic: Ax DWI All B-50-800 Synthetic B-1400)|(?=.*DW_Synthetic: Ax Focus 50,500,800,1400,2000.*)|(?=DWI PROSTATE)|(?=DWI_5b_0_1500)|(?=DWI_b2000)|(?=DWI_b2000_new)|(?=DWI_b2000_new SENSE)|(?=DWI_b2000_NSA6 SENSE)|(?=ep2d_diff_b1400_new 32 measipat)|(?=ep2d_diff_tra_DYNDIST_MIXCALC_BVAL$)|(?=ep2d_diff_tra_DYNDISTCALC_BVAL)|(?=ep2d_diff_tra2x2_Noise0_FS_DYNDISTCALC_BVAL)|(?=ep2d-advdiff-3Scan-high bvalue 1400)|(?=sb_1500)|(?=sb_2000)|(?=sB1400)|(?=sb1500)|(?=sb-1500)|(?=sb1500 r5 only)|(?=sb-2000)|(?=sDWI_b_2000)|(?=sDWI_b2000))'\n", - "[2023-07-25 14:55:46,463] [INFO] (root) - Series attribute SeriesDescription value: ep2d_diff_tra_DYNDIST_MIXCALC_BVAL\n", - "[2023-07-25 14:55:46,463] [INFO] (root) - Series attribute string value did not match. 
Try regEx.\n", - "[2023-07-25 14:55:46,463] [INFO] (root) - Selected Series, UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.154156364083745277068656620138\n", - "\u001b[34mDone performing execution of operator DICOMSeriesSelectorOperator\n", - "\u001b[39m\n", - "\u001b[34mGoing to initiate execution of operator DICOMSeriesToVolumeOperator\u001b[39m\n", - "\u001b[32mExecuting operator DICOMSeriesToVolumeOperator \u001b[33m(Process ID: 1, Operator ID: cd6f330b-640c-42cc-b4c2-15bc8cf1a6cd)\u001b[39m\n", - "\u001b[34mDone performing execution of operator DICOMSeriesToVolumeOperator\n", - "\u001b[39m\n", - "\u001b[34mGoing to initiate execution of operator DICOMSeriesToVolumeOperator\u001b[39m\n", - "\u001b[32mExecuting operator DICOMSeriesToVolumeOperator \u001b[33m(Process ID: 1, Operator ID: 2b3e9177-806f-407c-86fb-41dd4e5d2521)\u001b[39m\n", - "\u001b[34mDone performing execution of operator DICOMSeriesToVolumeOperator\n", - "\u001b[39m\n", - "\u001b[34mGoing to initiate execution of operator DICOMSeriesToVolumeOperator\u001b[39m\n", - "\u001b[32mExecuting operator DICOMSeriesToVolumeOperator \u001b[33m(Process ID: 1, Operator ID: c291a66c-9ba3-4fb0-b379-f220ca9b5cb8)\u001b[39m\n", - "\u001b[34mDone performing execution of operator DICOMSeriesToVolumeOperator\n", - "\u001b[39m\n", - "\u001b[34mGoing to initiate execution of operator ProstateSegOperator\u001b[39m\n", - "\u001b[32mExecuting operator ProstateSegOperator \u001b[33m(Process ID: 1, Operator ID: 33ab5a0b-107a-499b-946c-3b141425a127)\u001b[39m\n", - "/root/.local/lib/python3.8/site-packages/monai/utils/deprecate_utils.py:321: FutureWarning: monai.transforms.io.dictionary LoadImaged.__init__:image_only: Current default value of argument `image_only=False` has been deprecated since version 1.1. It will be changed to `image_only=True` in version 1.3.\n", - " warn_deprecated(argname, msg, warning_category)\n", - "Converted Image object metadata:\n", - "SeriesInstanceUID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.206828891270520544417996275680, type \n", - "SeriesDate: 20111018, type \n", - "SeriesTime: 115341.437000, type \n", - "Modality: MR, type \n", - "SeriesDescription: t2_tse_tra, type \n", - "PatientPosition: FFS, type \n", - "SeriesNumber: 5, type \n", - "row_pixel_spacing: 0.5, type \n", - "col_pixel_spacing: 0.5, type \n", - "depth_pixel_spacing: 4.244812427751082, type \n", - "row_direction_cosine: [1.0, -1.99011e-10, 4.9619e-11], type \n", - "col_direction_cosine: [2.051034e-10, 0.97029572703367, -0.2419218925608], type \n", - "depth_direction_cosine: [1.4080733701821592e-17, 0.2419218925608, 0.97029572703367], type \n", - "dicom_affine_transform: [[ 5.00000000e-01 1.02551700e-10 0.00000000e+00 -1.17913254e+02]\n", - " [-9.95055000e-11 4.85147864e-01 1.08864853e+00 -8.51499185e+01]\n", - " [ 2.48095000e-11 -1.20960946e-01 4.36633089e+00 1.28399916e+01]\n", - " [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 1.00000000e+00]], type \n", - "nifti_affine_transform: [[-5.00000000e-01 -1.02551700e-10 -0.00000000e+00 1.17913254e+02]\n", - " [ 9.95055000e-11 -4.85147864e-01 -1.08864853e+00 8.51499185e+01]\n", - " [ 2.48095000e-11 -1.20960946e-01 4.36633089e+00 1.28399916e+01]\n", - " [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 1.00000000e+00]], type \n", - "StudyInstanceUID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.170561193612723093192571245493, type \n", - "StudyID: , type \n", - "StudyDate: 20111018, type \n", - "StudyTime: 113950.828000, type \n", - "StudyDescription: MR prostaat kanker detectie WDS_mc MCAPRODETW, type \n", - "AccessionNumber: 
1566645557835538, type \n", - "selection_name: t2, type \n", - "[2023-07-25 14:55:47,007] [INFO] (Loaded image) - Data statistics:\n", - "Type: torch.float32\n", - "Shape: torch.Size([384, 384, 19])\n", - "Value range: (0.0, 1432.0)\n", - "[2023-07-25 14:55:47,018] [INFO] (Channel-first image) - Data statistics:\n", - "Type: torch.float32\n", - "Shape: torch.Size([1, 384, 384, 19])\n", - "Value range: (0.0, 1432.0)\n", - "[2023-07-25 14:55:47,829] [INFO] (Resampled and normalized image) - Data statistics:\n", - "Type: torch.float32\n", - "Shape: torch.Size([1, 192, 193, 82])\n", - "Value range: (-1.5098271369934082, 6.934866905212402)\n", - "[2023-07-25 14:56:07,830] [INFO] (Model output) - Data statistics:\n", - "Type: torch.float32\n", - "Shape: torch.Size([2, 192, 193, 82])\n", - "Value range: (3.5554221788913765e-09, 1.0)\n", - "[2023-07-25 14:56:08,196] [INFO] (Inverted output) - Data statistics:\n", - "Type: torch.float32\n", - "Shape: torch.Size([2, 384, 384, 19])\n", - "Value range: (4.7424646432148165e-09, 1.0)\n", - "[2023-07-25 14:56:08,199] [INFO] (AsDiscrete output) - Data statistics:\n", - "Type: torch.float32\n", - "Shape: torch.Size([1, 384, 384, 19])\n", - "Value range: (0.0, 1.0)\n", - "Output Seg image numpy array shaped: (19, 384, 384)\n", - "Output Seg image pixel max value: 1\n", - "Output Seg image pixel min value: 0\n", - "\u001b[34mDone performing execution of operator ProstateSegOperator\n", - "\u001b[39m\n", - "\u001b[34mGoing to initiate execution of operator CustomProstateLesionSegOperator\u001b[39m\n", - "\u001b[32mExecuting operator CustomProstateLesionSegOperator \u001b[33m(Process ID: 1, Operator ID: e31373ab-4b5d-4cbd-b5d0-71f9cbc0f5f4)\u001b[39m\n", - "2023-07-25 14:56:08,224 INFO image_writer.py:197 - writing: /var/monai/output/t2/t2.nii.gz\n", - "2023-07-25 14:56:08,584 INFO image_writer.py:197 - writing: /var/monai/output/adc/adc.nii.gz\n", - "2023-07-25 14:56:08,613 INFO image_writer.py:197 - writing: /var/monai/output/highb/highb.nii.gz\n", - "2023-07-25 14:56:08,638 INFO image_writer.py:197 - writing: /var/monai/output/organ/organ.nii.gz\n", - "Loading input...\n", - "Pre-processing input image...\n", - "inputs_shape: (206, 187, 170)\n", - "output filename: /var/monai/output/lesion/fold0_lesion_prob.nii.gz\n", - "output filename: /var/monai/output/lesion/fold1_lesion_prob.nii.gz\n", - "output filename: /var/monai/output/lesion/fold2_lesion_prob.nii.gz\n", - "output filename: /var/monai/output/lesion/fold3_lesion_prob.nii.gz\n", - "output filename: /var/monai/output/lesion/fold4_lesion_prob.nii.gz\n", - "\u001b[34mDone performing execution of operator CustomProstateLesionSegOperator\n", - "\u001b[39m\n" - ] - } - ], - "source": [ - "# Run MAP\n", - "!monai-deploy run -l DEBUG lesion_seg_workflow_app:latest /media/jtetreault/Data/Datasets/ProstateX/PROSTATEx/ProstateX-0004 output" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python (monai)", - "language": "python", - "name": "monai" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.12" - }, - "orig_nbformat": 4, - "vscode": { - "interpreter": { - "hash": "3cfcbab7e585f21ccb16282632415ba659597b9539aa6575a8dff698ec65888b" - } - } - }, - "nbformat": 4, - "nbformat_minor": 2 + 
"cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[2023-07-25 10:53:42,712] [DEBUG] (app.AIProstateLesionSegApp) - Begin compose\n", + "[2023-07-25 10:53:42,714] [DEBUG] (app.AIProstateLesionSegApp) - End compose\n", + "[2023-07-25 10:53:44,300] [DEBUG] (app_packager) - FROM nvcr.io/nvidia/pytorch:22.08-py3\n", + "\n", + " LABEL base=\"nvcr.io/nvidia/pytorch:22.08-py3\"\n", + " LABEL tag=\"lesion_seg_workflow_app:latest\"\n", + " LABEL version=\"0.5.1\"\n", + " LABEL sdk_version=\"0.5.1\"\n", + "\n", + " ENV DEBIAN_FRONTEND=noninteractive\n", + " ENV TERM=xterm-256color\n", + " ENV MONAI_INPUTPATH=/var/monai/input\n", + " ENV MONAI_OUTPUTPATH=/var/monai/output\n", + " ENV MONAI_WORKDIR=/var/monai/\n", + " ENV MONAI_APPLICATION=/opt/monai/app\n", + " ENV MONAI_TIMEOUT=0\n", + " ENV MONAI_MODELPATH=/opt/monai/models\n", + "\n", + " RUN apt update \\\n", + " && apt upgrade -y --no-install-recommends \\\n", + " && apt install -y --no-install-recommends \\\n", + " curl \\\n", + " unzip \\\n", + " && apt autoremove -y \\\n", + " && rm -rf /var/lib/apt/lists/*\n", + " \n", + " USER root\n", + "\n", + " RUN pip install --no-cache-dir --upgrade setuptools==59.5.0 pip==22.3 wheel==0.37.1 numpy>=1.21.6\n", + "\n", + " RUN mkdir -p /etc/monai/ \\\n", + " && mkdir -p /opt/monai/ \\\n", + " && mkdir -p /var/monai/ \\\n", + " && mkdir -p /opt/monai/app \\\n", + " && mkdir -p /opt/monai/executor \\\n", + " && mkdir -p /var/monai/input \\\n", + " && mkdir -p /var/monai/output \\\n", + " && mkdir -p /opt/monai/models\n", + "\n", + " RUN mkdir -p /opt/monai/models\n", + "COPY ./models /opt/monai/models\n", + "\n", + "\n", + " COPY ./pip/requirements.txt /opt/monai/app/requirements.txt\n", + "\n", + " RUN curl https://globalcdn.nuget.org/packages/monai.deploy.executor.0.1.0-prealpha.4.nupkg -o /opt/monai/executor/executor.zip \\\n", + " && unzip /opt/monai/executor/executor.zip -d /opt/monai/executor/executor_pkg \\\n", + " && mv /opt/monai/executor/executor_pkg/lib/native/linux-x64/* /opt/monai/executor \\\n", + " && rm -f /opt/monai/executor/executor.zip \\\n", + " && rm -rf /opt/monai/executor/executor_pkg \\\n", + " && chmod +x /opt/monai/executor/monai-exec\n", + "\n", + " ENV PATH=/root/.local/bin:$PATH\n", + "\n", + " RUN pip install --no-cache-dir --user -r /opt/monai/app/requirements.txt\n", + "\n", + " # Override monai-deploy-app-sdk module\n", + " COPY ./monai-deploy-app-sdk /root/.local/lib/python3.8/site-packages/monai/deploy/\n", + " RUN echo \"User site package location: $(python3 -m site --user-site)\" \\\n", + " && [ \"$(python3 -m site --user-site)\" != \"/root/.local/lib/python3.8/site-packages\" ] \\\n", + " && mkdir -p $(python3 -m site --user-site)/monai/deploy \\\n", + " && cp -r /root/.local/lib/python3.8/site-packages/monai/deploy/* $(python3 -m site --user-site)/monai/deploy/ \\\n", + " || true\n", + "\n", + " COPY ./map/app.json /etc/monai/\n", + " COPY ./map/pkg.json /etc/monai/\n", + "\n", + " COPY ./app /opt/monai/app\n", + "\n", + " # Set the working directory\n", + " WORKDIR /var/monai/\n", + "\n", + " ENTRYPOINT [ \"/opt/monai/executor/monai-exec\" ]\n", + "\n", + "[2023-07-25 10:53:44,321] [DEBUG] (app_packager) - Docker image build command: docker build -f './monai_tmp_5ke51i4/dockerfile' -t lesion_seg_workflow_app:latest './monai_tmp_5ke51i4' --build-arg MONAI_UID=$(id -u) --build-arg MONAI_GID=$(id -g)\n", + "Building MONAI Application Package... 
-[2023-07-25 10:53:47,041] [DEBUG] (app_packager) - Sending build context to Docker daemon 654.1MB\n", + "\n", + "[2023-07-25 10:53:47,054] [DEBUG] (app_packager) - Step 1/30 : FROM nvcr.io/nvidia/pytorch:22.08-py3\n", + "\n", + "[2023-07-25 10:53:47,055] [DEBUG] (app_packager) - ---> b3d16c039217\n", + "\n", + "[2023-07-25 10:53:47,055] [DEBUG] (app_packager) - Step 2/30 : LABEL base=\"nvcr.io/nvidia/pytorch:22.08-py3\"\n", + "\n", + "[2023-07-25 10:53:47,057] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,057] [DEBUG] (app_packager) - ---> cc80685c6c3b\n", + "\n", + "[2023-07-25 10:53:47,057] [DEBUG] (app_packager) - Step 3/30 : LABEL tag=\"lesion_seg_workflow_app:latest\"\n", + "\n", + "[2023-07-25 10:53:47,063] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,063] [DEBUG] (app_packager) - ---> 1c092212ea50\n", + "\n", + "[2023-07-25 10:53:47,063] [DEBUG] (app_packager) - Step 4/30 : LABEL version=\"0.5.1\"\n", + "\n", + "[2023-07-25 10:53:47,069] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,069] [DEBUG] (app_packager) - ---> e250de6ab88b\n", + "\n", + "[2023-07-25 10:53:47,069] [DEBUG] (app_packager) - Step 5/30 : LABEL sdk_version=\"0.5.1\"\n", + "\n", + "[2023-07-25 10:53:47,070] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,070] [DEBUG] (app_packager) - ---> c3fb1c64336a\n", + "\n", + "[2023-07-25 10:53:47,070] [DEBUG] (app_packager) - Step 6/30 : ENV DEBIAN_FRONTEND=noninteractive\n", + "\n", + "[2023-07-25 10:53:47,072] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,072] [DEBUG] (app_packager) - ---> bd72d677a6f2\n", + "\n", + "[2023-07-25 10:53:47,072] [DEBUG] (app_packager) - Step 7/30 : ENV TERM=xterm-256color\n", + "\n", + "[2023-07-25 10:53:47,073] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,073] [DEBUG] (app_packager) - ---> c7597ae96a98\n", + "\n", + "[2023-07-25 10:53:47,074] [DEBUG] (app_packager) - Step 8/30 : ENV MONAI_INPUTPATH=/var/monai/input\n", + "\n", + "[2023-07-25 10:53:47,075] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,075] [DEBUG] (app_packager) - ---> 05ccfe0c25fa\n", + "\n", + "[2023-07-25 10:53:47,075] [DEBUG] (app_packager) - Step 9/30 : ENV MONAI_OUTPUTPATH=/var/monai/output\n", + "\n", + "[2023-07-25 10:53:47,077] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,077] [DEBUG] (app_packager) - ---> 5a84ea4b3b03\n", + "\n", + "[2023-07-25 10:53:47,077] [DEBUG] (app_packager) - Step 10/30 : ENV MONAI_WORKDIR=/var/monai/\n", + "\n", + "[2023-07-25 10:53:47,078] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,078] [DEBUG] (app_packager) - ---> 11da23172a0d\n", + "\n", + "[2023-07-25 10:53:47,078] [DEBUG] (app_packager) - Step 11/30 : ENV MONAI_APPLICATION=/opt/monai/app\n", + "\n", + "[2023-07-25 10:53:47,080] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,080] [DEBUG] (app_packager) - ---> bd70e3df1914\n", + "\n", + "[2023-07-25 10:53:47,080] [DEBUG] (app_packager) - Step 12/30 : ENV MONAI_TIMEOUT=0\n", + "\n", + "[2023-07-25 10:53:47,082] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,082] [DEBUG] (app_packager) - ---> 6235860cb56c\n", + "\n", + "[2023-07-25 10:53:47,082] [DEBUG] (app_packager) - Step 13/30 : ENV MONAI_MODELPATH=/opt/monai/models\n", + "\n", + "[2023-07-25 10:53:47,083] [DEBUG] (app_packager) - ---> 
Using cache\n", + "\n", + "[2023-07-25 10:53:47,083] [DEBUG] (app_packager) - ---> 14ac90b4a489\n", + "\n", + "[2023-07-25 10:53:47,083] [DEBUG] (app_packager) - Step 14/30 : RUN apt update && apt upgrade -y --no-install-recommends && apt install -y --no-install-recommends curl unzip && apt autoremove -y && rm -rf /var/lib/apt/lists/*\n", + "\n", + "[2023-07-25 10:53:47,085] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,085] [DEBUG] (app_packager) - ---> ba5b71d0b138\n", + "\n", + "[2023-07-25 10:53:47,085] [DEBUG] (app_packager) - Step 15/30 : USER root\n", + "\n", + "[2023-07-25 10:53:47,086] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,087] [DEBUG] (app_packager) - ---> 0e9eb9961405\n", + "\n", + "[2023-07-25 10:53:47,087] [DEBUG] (app_packager) - Step 16/30 : RUN pip install --no-cache-dir --upgrade setuptools==59.5.0 pip==22.3 wheel==0.37.1 numpy>=1.21.6\n", + "\n", + "[2023-07-25 10:53:47,088] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,088] [DEBUG] (app_packager) - ---> 4a6f5f65fcfb\n", + "\n", + "[2023-07-25 10:53:47,088] [DEBUG] (app_packager) - Step 17/30 : RUN mkdir -p /etc/monai/ && mkdir -p /opt/monai/ && mkdir -p /var/monai/ && mkdir -p /opt/monai/app && mkdir -p /opt/monai/executor && mkdir -p /var/monai/input && mkdir -p /var/monai/output && mkdir -p /opt/monai/models\n", + "\n", + "[2023-07-25 10:53:47,090] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,090] [DEBUG] (app_packager) - ---> 1e5513d5ef4a\n", + "\n", + "[2023-07-25 10:53:47,090] [DEBUG] (app_packager) - Step 18/30 : RUN mkdir -p /opt/monai/models\n", + "\n", + "[2023-07-25 10:53:47,091] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,091] [DEBUG] (app_packager) - ---> 6ae160285e74\n", + "\n", + "[2023-07-25 10:53:47,091] [DEBUG] (app_packager) - Step 19/30 : COPY ./models /opt/monai/models\n", + "\n", + "[2023-07-25 10:53:47,093] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,093] [DEBUG] (app_packager) - ---> 6238e429e44f\n", + "\n", + "[2023-07-25 10:53:47,093] [DEBUG] (app_packager) - Step 20/30 : COPY ./pip/requirements.txt /opt/monai/app/requirements.txt\n", + "\n", + "[2023-07-25 10:53:47,099] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,100] [DEBUG] (app_packager) - ---> 214ddb9b9e56\n", + "\n", + "[2023-07-25 10:53:47,100] [DEBUG] (app_packager) - Step 21/30 : RUN curl https://globalcdn.nuget.org/packages/monai.deploy.executor.0.1.0-prealpha.4.nupkg -o /opt/monai/executor/executor.zip && unzip /opt/monai/executor/executor.zip -d /opt/monai/executor/executor_pkg && mv /opt/monai/executor/executor_pkg/lib/native/linux-x64/* /opt/monai/executor && rm -f /opt/monai/executor/executor.zip && rm -rf /opt/monai/executor/executor_pkg && chmod +x /opt/monai/executor/monai-exec\n", + "\n", + "[2023-07-25 10:53:47,101] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,101] [DEBUG] (app_packager) - ---> d6f21887e194\n", + "\n", + "[2023-07-25 10:53:47,101] [DEBUG] (app_packager) - Step 22/30 : ENV PATH=/root/.local/bin:$PATH\n", + "\n", + "[2023-07-25 10:53:47,102] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,102] [DEBUG] (app_packager) - ---> 61a1ec2a5045\n", + "\n", + "[2023-07-25 10:53:47,102] [DEBUG] (app_packager) - Step 23/30 : RUN pip install --no-cache-dir --user -r /opt/monai/app/requirements.txt\n", + "\n", + "[2023-07-25 
10:53:47,103] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,103] [DEBUG] (app_packager) - ---> fc7b3fc8ee51\n", + "\n", + "[2023-07-25 10:53:47,103] [DEBUG] (app_packager) - Step 24/30 : COPY ./monai-deploy-app-sdk /root/.local/lib/python3.8/site-packages/monai/deploy/\n", + "\n", + "[2023-07-25 10:53:47,106] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,106] [DEBUG] (app_packager) - ---> c4c2745da875\n", + "\n", + "[2023-07-25 10:53:47,106] [DEBUG] (app_packager) - Step 25/30 : RUN echo \"User site package location: $(python3 -m site --user-site)\" && [ \"$(python3 -m site --user-site)\" != \"/root/.local/lib/python3.8/site-packages\" ] && mkdir -p $(python3 -m site --user-site)/monai/deploy && cp -r /root/.local/lib/python3.8/site-packages/monai/deploy/* $(python3 -m site --user-site)/monai/deploy/ || true\n", + "\n", + "[2023-07-25 10:53:47,107] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,107] [DEBUG] (app_packager) - ---> 7fc9322a0401\n", + "\n", + "[2023-07-25 10:53:47,107] [DEBUG] (app_packager) - Step 26/30 : COPY ./map/app.json /etc/monai/\n", + "\n", + "[2023-07-25 10:53:47,108] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,108] [DEBUG] (app_packager) - ---> ec5f10bfc404\n", + "\n", + "[2023-07-25 10:53:47,108] [DEBUG] (app_packager) - Step 27/30 : COPY ./map/pkg.json /etc/monai/\n", + "\n", + "[2023-07-25 10:53:47,109] [DEBUG] (app_packager) - ---> Using cache\n", + "\n", + "[2023-07-25 10:53:47,109] [DEBUG] (app_packager) - ---> 18827ed8e1d0\n", + "\n", + "[2023-07-25 10:53:47,109] [DEBUG] (app_packager) - Step 28/30 : COPY ./app /opt/monai/app\n", + "\n", + "\b\\[2023-07-25 10:54:08,872] [DEBUG] (app_packager) - ---> 57dd8d7c2880\n", + "\n", + "[2023-07-25 10:54:08,872] [DEBUG] (app_packager) - Step 29/30 : WORKDIR /var/monai/\n", + "\n", + "\b-[2023-07-25 10:54:11,104] [DEBUG] (app_packager) - ---> Running in 7763b9724919\n", + "\n", + "\b|[2023-07-25 10:54:12,986] [DEBUG] (app_packager) - Removing intermediate container 7763b9724919\n", + "\n", + "[2023-07-25 10:54:12,986] [DEBUG] (app_packager) - ---> 54003e6b8432\n", + "\n", + "[2023-07-25 10:54:12,986] [DEBUG] (app_packager) - Step 30/30 : ENTRYPOINT [ \"/opt/monai/executor/monai-exec\" ]\n", + "\n", + "\b-[2023-07-25 10:54:15,173] [DEBUG] (app_packager) - ---> Running in fa50988db30b\n", + "\n", + "\b\\[2023-07-25 10:54:16,804] [DEBUG] (app_packager) - Removing intermediate container fa50988db30b\n", + "\n", + "[2023-07-25 10:54:16,804] [DEBUG] (app_packager) - ---> f214f9f3ed0a\n", + "\n", + "[2023-07-25 10:54:16,804] [DEBUG] (app_packager) - [Warning] One or more build-args [MONAI_UID MONAI_GID] were not consumed\n", + "\n", + "[2023-07-25 10:54:16,888] [DEBUG] (app_packager) - Successfully built f214f9f3ed0a\n", + "\n", + "\b|[2023-07-25 10:54:17,106] [DEBUG] (app_packager) - Successfully tagged lesion_seg_workflow_app:latest\n", + "\n", + "[2023-07-25 10:54:17,109] [DEBUG] (app_packager) - \n", + "\bDone\n", + "[2023-07-25 10:54:17,120] [INFO] (app_packager) - Successfully built lesion_seg_workflow_app:latest\n" + ] + } + ], + "source": [ + "# Package code into MAP\n", + "!monai-deploy package -l DEBUG -b nvcr.io/nvidia/pytorch:22.08-py3 ./prostate_mri_lesion_seg_app --tag lesion_seg_workflow_app:latest -m prostate_mri_lesion_seg_app/models/" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + 
"text": [ + "Checking dependencies...\n", + "--> Verifying if \"docker\" is installed...\n", + "\n", + "--> Verifying if \"lesion_seg_workflow_app:latest\" is available...\n", + "\n", + "Checking for MAP \"lesion_seg_workflow_app:latest\" locally\n", + "\"lesion_seg_workflow_app:latest\" found.\n", + "\n", + "Reading MONAI App Package manifest...\n", + "-------------------application manifest-------------------\n", + "{\n", + " \"api-version\": \"0.1.0\",\n", + " \"command\": \"python3 -u /opt/monai/app/app.py\",\n", + " \"environment\": {},\n", + " \"input\": {\n", + " \"formats\": [],\n", + " \"path\": \"input\"\n", + " },\n", + " \"output\": {\n", + " \"format\": {},\n", + " \"path\": \"output\"\n", + " },\n", + " \"sdk-version\": \"0.5.1\",\n", + " \"timeout\": 0,\n", + " \"version\": \"0.5.1\",\n", + " \"working-directory\": \"/var/monai/\"\n", + "}\n", + "----------------------------------------------\n", + "\n", + "-------------------package manifest-------------------\n", + "{\n", + " \"api-version\": \"0.1.0\",\n", + " \"application-root\": \"/var/monai/\",\n", + " \"models\": [\n", + " {\n", + " \"name\": \"organ-5b6bbe150488ed6e34ac61d113aeb4786ec1c4cda9bd18304f6a8a891e93e79d\",\n", + " \"path\": \"/opt/monai/models/organ/model.ts\"\n", + " }\n", + " ],\n", + " \"resources\": {\n", + " \"cpu\": 1,\n", + " \"gpu\": 1,\n", + " \"memory\": \"7168Mi\"\n", + " },\n", + " \"sdk-version\": \"0.5.1\",\n", + " \"version\": \"0.5.1\"\n", + "}\n", + "----------------------------------------------\n", + "\n", + "--> Verifying if \"nvidia-docker\" is installed...\n", + "\n", + "Executing command: nvidia-docker run --rm -a STDERR -a STDOUT -e MONAI_INPUTPATH=\"/var/monai/input\" -e MONAI_OUTPUTPATH=\"/var/monai/output\" -e MONAI_MODELPATH=/opt/monai/models -v \"/media/jtetreault/Data/Datasets/ProstateX/PROSTATEx/ProstateX-0004\":\"/var/monai/input\" -v \"/home/jtetreault/Projects/monai-projects/prostate-mri/public/output\":\"/var/monai/output\" --shm-size=1g --entrypoint \"/bin/bash\" \"lesion_seg_workflow_app:latest\" -c \"python3 -u /opt/monai/app/app.py\"\n", + "\u001b[34mGoing to initiate execution of operator DICOMDataLoaderOperator\u001b[39m\n", + "\u001b[32mExecuting operator DICOMDataLoaderOperator \u001b[33m(Process ID: 1, Operator ID: 24998906-0f20-4b83-acce-ca50bf6f9b3e)\u001b[39m\n", + "\u001b[34mDone performing execution of operator DICOMDataLoaderOperator\n", + "\u001b[39m\n", + "\u001b[34mGoing to initiate execution of operator DICOMSeriesSelectorOperator\u001b[39m\n", + "\u001b[32mExecuting operator DICOMSeriesSelectorOperator \u001b[33m(Process ID: 1, Operator ID: 64da4a5b-9979-487c-b95a-96c6703db10b)\u001b[39m\n", + "[2023-07-25 14:55:46,449] [INFO] (root) - Finding series for Selection named: t2\n", + "[2023-07-25 14:55:46,449] [INFO] (root) - Searching study, : 1.3.6.1.4.1.14519.5.2.1.7311.5101.170561193612723093192571245493\n", + " # of series: 7\n", + "[2023-07-25 14:55:46,449] [INFO] (root) - Working on series, instance UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.278358228783511961204087191158\n", + "[2023-07-25 14:55:46,449] [INFO] (root) - On attribute: 'Modality' to match value: 'MR'\n", + "[2023-07-25 14:55:46,449] [INFO] (root) - Series attribute Modality value: MR\n", + "[2023-07-25 14:55:46,449] [INFO] (root) - On attribute: 'ImageType' to match value: ['ORIGINAL', 'PRIMARY']\n", + "[2023-07-25 14:55:46,449] [INFO] (root) - Series attribute ImageType value: None\n", + "[2023-07-25 14:55:46,449] [INFO] (root) - This series does not match the selection conditions.\n", + 
"[2023-07-25 14:55:46,449] [INFO] (root) - Working on series, instance UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.473042633347744664896320600386\n", + "[2023-07-25 14:55:46,449] [INFO] (root) - On attribute: 'Modality' to match value: 'MR'\n", + "[2023-07-25 14:55:46,450] [INFO] (root) - Series attribute Modality value: MR\n", + "[2023-07-25 14:55:46,450] [INFO] (root) - On attribute: 'ImageType' to match value: ['ORIGINAL', 'PRIMARY']\n", + "[2023-07-25 14:55:46,450] [INFO] (root) - Series attribute ImageType value: None\n", + "[2023-07-25 14:55:46,450] [INFO] (root) - On attribute: 'SeriesDescription' to match value: '((?=AX T2)|(?=AX T2 6 NSA)|(?=AX T2 FAST)|(?=AX T2 . voxel 7x.8)|(?=AX T2 B1 Default)|(?=AX T2 CS)|(?=AX T2 cs 3.0)|(?=AX T2 FAST)|(?=AX T2 FRFSE)|(?=AX T2 N/S)|(?=AX T2 No sense)|(?=AX T2 NS)|(?=AX T2 NSA 3)|(?=AX T2 NSA 4)|(?=AX T2 NSA 5)|(?=AX T2 PROP)|(?=Ax T2 PROSTATE)|(?=AX T2 SMALL FOV)|(?=Ax T2 thin FRFSE)|(?=sT2 TSE ax no post)|(?=T2 AX)|(?=T2 AX SMALL FOV[*]? IF MOTION REPEAT[*]?)|(?=T2 AXIAL 3MM)|(?=T2 TRA 3mm)|(?=T2 TSE Ax)|(?=T2 TSE ax cs)|(?=T2 TSE ax hi)|(?=T2 TSE ax hi sense)|(?=T2 TSE ax no sense)|(?=T2 TSE ax NS)|(?=T2 TSE ax NSA 3)|(?=t2_tse_tra)|(?=t2_tse_tra_320_p2)|(?=t2_tse_tra_3mm _SFOV_TE 92)|(?=t2_tse_tra_Grappa3)|(?=T2W_TSE)|(?=T2W_TSE_ax)|(?=T2W_TSE_ax PSS Refoc 52)|(?=T2W_TSE_ax zoom PSS Refoc))'\n", + "[2023-07-25 14:55:46,450] [INFO] (root) - Series attribute SeriesDescription value: t2_tse_sag\n", + "[2023-07-25 14:55:46,450] [INFO] (root) - Series attribute string value did not match. Try regEx.\n", + "[2023-07-25 14:55:46,452] [INFO] (root) - This series does not match the selection conditions.\n", + "[2023-07-25 14:55:46,452] [INFO] (root) - Working on series, instance UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.206828891270520544417996275680\n", + "[2023-07-25 14:55:46,452] [INFO] (root) - On attribute: 'Modality' to match value: 'MR'\n", + "[2023-07-25 14:55:46,452] [INFO] (root) - Series attribute Modality value: MR\n", + "[2023-07-25 14:55:46,452] [INFO] (root) - On attribute: 'ImageType' to match value: ['ORIGINAL', 'PRIMARY']\n", + "[2023-07-25 14:55:46,452] [INFO] (root) - Series attribute ImageType value: None\n", + "[2023-07-25 14:55:46,453] [INFO] (root) - On attribute: 'SeriesDescription' to match value: '((?=AX T2)|(?=AX T2 6 NSA)|(?=AX T2 FAST)|(?=AX T2 . voxel 7x.8)|(?=AX T2 B1 Default)|(?=AX T2 CS)|(?=AX T2 cs 3.0)|(?=AX T2 FAST)|(?=AX T2 FRFSE)|(?=AX T2 N/S)|(?=AX T2 No sense)|(?=AX T2 NS)|(?=AX T2 NSA 3)|(?=AX T2 NSA 4)|(?=AX T2 NSA 5)|(?=AX T2 PROP)|(?=Ax T2 PROSTATE)|(?=AX T2 SMALL FOV)|(?=Ax T2 thin FRFSE)|(?=sT2 TSE ax no post)|(?=T2 AX)|(?=T2 AX SMALL FOV[*]? IF MOTION REPEAT[*]?)|(?=T2 AXIAL 3MM)|(?=T2 TRA 3mm)|(?=T2 TSE Ax)|(?=T2 TSE ax cs)|(?=T2 TSE ax hi)|(?=T2 TSE ax hi sense)|(?=T2 TSE ax no sense)|(?=T2 TSE ax NS)|(?=T2 TSE ax NSA 3)|(?=t2_tse_tra)|(?=t2_tse_tra_320_p2)|(?=t2_tse_tra_3mm _SFOV_TE 92)|(?=t2_tse_tra_Grappa3)|(?=T2W_TSE)|(?=T2W_TSE_ax)|(?=T2W_TSE_ax PSS Refoc 52)|(?=T2W_TSE_ax zoom PSS Refoc))'\n", + "[2023-07-25 14:55:46,453] [INFO] (root) - Series attribute SeriesDescription value: t2_tse_tra\n", + "[2023-07-25 14:55:46,453] [INFO] (root) - Series attribute string value did not match. 
Try regEx.\n", + "[2023-07-25 14:55:46,453] [INFO] (root) - Selected Series, UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.206828891270520544417996275680\n", + "\u001b[34mDone performing execution of operator DICOMSeriesSelectorOperator\n", + "\u001b[39m\n", + "\u001b[34mGoing to initiate execution of operator DICOMSeriesSelectorOperator\u001b[39m\n", + "\u001b[32mExecuting operator DICOMSeriesSelectorOperator \u001b[33m(Process ID: 1, Operator ID: c78be533-dced-4fe8-88e5-1e894b87ec9c)\u001b[39m\n", + "[2023-07-25 14:55:46,453] [INFO] (root) - Finding series for Selection named: adc\n", + "[2023-07-25 14:55:46,453] [INFO] (root) - Searching study, : 1.3.6.1.4.1.14519.5.2.1.7311.5101.170561193612723093192571245493\n", + " # of series: 7\n", + "[2023-07-25 14:55:46,453] [INFO] (root) - Working on series, instance UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.278358228783511961204087191158\n", + "[2023-07-25 14:55:46,453] [INFO] (root) - On attribute: 'Modality' to match value: 'MR'\n", + "[2023-07-25 14:55:46,454] [INFO] (root) - Series attribute Modality value: MR\n", + "[2023-07-25 14:55:46,454] [INFO] (root) - On attribute: 'ImageType' to match value: ['DIFFUSION', 'ADC']\n", + "[2023-07-25 14:55:46,454] [INFO] (root) - Series attribute ImageType value: None\n", + "[2023-07-25 14:55:46,454] [INFO] (root) - On attribute: 'SeriesDescription' to match value: '((?=ADC (10^-6 mm\u00b2/s))|(?=Apparent Diffusion Coefficient (mm2/s))|(?=AX DIFFUSION_ADC_DFC_MIX)|(?=.*AX DWI (50,1500)_ADC.*)|(?=AX DWI_ADC_DFC_MIX)|(?=b_1500 prostate_ADC)|(?=b_2000 prostate_ADC)|(?=d3B ADC 3B 750 ERC SSh_DWI FAST SENSE)|(?=dADC)|(?=dADC 0_1500)|(?=dADC 100 400 600)|(?=dADC 2)|(?=dADC 3)|(?=dADC ALL)|(?=dADC b 0 1000 2000)|(?=dADC from 0_1500)|(?=dADC from b0_600)|(?=dADC from B0-1500)|(?=dADC Map)|(?=dADC map 1)|(?=dADC MAP 2)|(?=dADC_1 axial)|(?=dADC_b375_750_1150)|(?=ddADC MAP)|(?=DIFF bv1400_ADC)|(?=diff tra b 50 500 800 WIP511b alle spoelen_ADC)|(?=diffusie-3Scan-4bval_fs_ADC)|(?=dReg - WIP SSh_DWI FAST SENSE)|(?=dSSh_DWI SENSE)|(?=DWI PROSTATE_ADC)|(?=dWIP 3B 600 w ERC SSh_DWI S2Ovs2)|(?=dWIP 3B ADC 3B 600 w/o ERC SSh_DWI FAST SENSE)|(?=dWIP SSh_DWI FAST SENSE)|(?=ep2d_diff_new 16 measipat_ADC)|(?=ep2d_DIFF_tra_b50_500_800_1400_alle_spoelen_ADC)|(?=ep2d_diff_tra_DYNDIST_ADC)|(?=ep2d_diff_tra_DYNDIST_MIX_ADC)|(?=ep2d_diff_tra2x2_Noise0_FS_DYNDIST_ADC)|(?=ep2d-advdiff-3Scan-4bval_spair_511b_ADC))'\n", + "[2023-07-25 14:55:46,454] [INFO] (root) - Series attribute SeriesDescription value: ep2d_diff_tra_DYNDIST_MIX_ADC\n", + "[2023-07-25 14:55:46,454] [INFO] (root) - Series attribute string value did not match. 
Try regEx.\n", + "[2023-07-25 14:55:46,457] [INFO] (root) - Selected Series, UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.278358228783511961204087191158\n", + "\u001b[34mDone performing execution of operator DICOMSeriesSelectorOperator\n", + "\u001b[39m\n", + "\u001b[34mGoing to initiate execution of operator DICOMSeriesSelectorOperator\u001b[39m\n", + "\u001b[32mExecuting operator DICOMSeriesSelectorOperator \u001b[33m(Process ID: 1, Operator ID: dacef65e-7b0a-4fa6-b6ee-d6cc8e5bcbf1)\u001b[39m\n", + "[2023-07-25 14:55:46,458] [INFO] (root) - Finding series for Selection named: highb\n", + "[2023-07-25 14:55:46,458] [INFO] (root) - Searching study, : 1.3.6.1.4.1.14519.5.2.1.7311.5101.170561193612723093192571245493\n", + " # of series: 7\n", + "[2023-07-25 14:55:46,458] [INFO] (root) - Working on series, instance UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.278358228783511961204087191158\n", + "[2023-07-25 14:55:46,458] [INFO] (root) - On attribute: 'Modality' to match value: 'MR'\n", + "[2023-07-25 14:55:46,458] [INFO] (root) - Series attribute Modality value: MR\n", + "[2023-07-25 14:55:46,458] [INFO] (root) - On attribute: 'ImageType' to match value: ['DIFFUSION', 'TRACEW']\n", + "[2023-07-25 14:55:46,458] [INFO] (root) - Series attribute ImageType value: None\n", + "[2023-07-25 14:55:46,458] [INFO] (root) - This series does not match the selection conditions.\n", + "[2023-07-25 14:55:46,458] [INFO] (root) - Working on series, instance UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.473042633347744664896320600386\n", + "[2023-07-25 14:55:46,458] [INFO] (root) - On attribute: 'Modality' to match value: 'MR'\n", + "[2023-07-25 14:55:46,458] [INFO] (root) - Series attribute Modality value: MR\n", + "[2023-07-25 14:55:46,458] [INFO] (root) - On attribute: 'ImageType' to match value: ['DIFFUSION', 'TRACEW']\n", + "[2023-07-25 14:55:46,459] [INFO] (root) - Series attribute ImageType value: None\n", + "[2023-07-25 14:55:46,459] [INFO] (root) - This series does not match the selection conditions.\n", + "[2023-07-25 14:55:46,459] [INFO] (root) - Working on series, instance UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.206828891270520544417996275680\n", + "[2023-07-25 14:55:46,459] [INFO] (root) - On attribute: 'Modality' to match value: 'MR'\n", + "[2023-07-25 14:55:46,459] [INFO] (root) - Series attribute Modality value: MR\n", + "[2023-07-25 14:55:46,459] [INFO] (root) - On attribute: 'ImageType' to match value: ['DIFFUSION', 'TRACEW']\n", + "[2023-07-25 14:55:46,459] [INFO] (root) - Series attribute ImageType value: None\n", + "[2023-07-25 14:55:46,459] [INFO] (root) - This series does not match the selection conditions.\n", + "[2023-07-25 14:55:46,459] [INFO] (root) - Working on series, instance UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.212145009248667341607386363070\n", + "[2023-07-25 14:55:46,459] [INFO] (root) - On attribute: 'Modality' to match value: 'MR'\n", + "[2023-07-25 14:55:46,459] [INFO] (root) - Series attribute Modality value: MR\n", + "[2023-07-25 14:55:46,459] [INFO] (root) - On attribute: 'ImageType' to match value: ['DIFFUSION', 'TRACEW']\n", + "[2023-07-25 14:55:46,459] [INFO] (root) - Series attribute ImageType value: None\n", + "[2023-07-25 14:55:46,459] [INFO] (root) - This series does not match the selection conditions.\n", + "[2023-07-25 14:55:46,459] [INFO] (root) - Working on series, instance UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.196637652681902975508118013414\n", + "[2023-07-25 14:55:46,460] [INFO] (root) - On attribute: 'Modality' to match value: 'MR'\n", + "[2023-07-25 14:55:46,460] [INFO] 
(root) - Series attribute Modality value: MR\n", + "[2023-07-25 14:55:46,460] [INFO] (root) - On attribute: 'ImageType' to match value: ['DIFFUSION', 'TRACEW']\n", + "[2023-07-25 14:55:46,460] [INFO] (root) - Series attribute ImageType value: None\n", + "[2023-07-25 14:55:46,460] [INFO] (root) - On attribute: 'SeriesDescription' to match value: '((?=3B 2000 w ERC SSh_DWI)|(?=AX DIFFUSION_CALC_BVAL_DFC_MIX)|(?=AX DWI)|(?=.*AX DWI (50,1500).*)|(?=Ax DWI BH)|(?=AX DWI_TRACEW_DFC_MIX)|(?=Axial FOCUS DWI 1400)|(?=b_1500 prostate)|(?=b_2000 prostate)|(?=DIFF bv1400)|(?=diff tra b 50 500 800 WIP511b alle spoelenCALC_BVAL)|(?=diffusie-3Scan-4bval_fsCALC_BVAL)|(?=DW_Synthetic: Ax DWI All B-50-800 Synthetic B-1400)|(?=.*DW_Synthetic: Ax Focus 50,500,800,1400,2000.*)|(?=DWI PROSTATE)|(?=DWI_5b_0_1500)|(?=DWI_b2000)|(?=DWI_b2000_new)|(?=DWI_b2000_new SENSE)|(?=DWI_b2000_NSA6 SENSE)|(?=ep2d_diff_b1400_new 32 measipat)|(?=ep2d_diff_tra_DYNDIST_MIXCALC_BVAL$)|(?=ep2d_diff_tra_DYNDISTCALC_BVAL)|(?=ep2d_diff_tra2x2_Noise0_FS_DYNDISTCALC_BVAL)|(?=ep2d-advdiff-3Scan-high bvalue 1400)|(?=sb_1500)|(?=sb_2000)|(?=sB1400)|(?=sb1500)|(?=sb-1500)|(?=sb1500 r5 only)|(?=sb-2000)|(?=sDWI_b_2000)|(?=sDWI_b2000))'\n", + "[2023-07-25 14:55:46,460] [INFO] (root) - Series attribute SeriesDescription value: ep2d_diff_tra_DYNDIST_MIX\n", + "[2023-07-25 14:55:46,460] [INFO] (root) - Series attribute string value did not match. Try regEx.\n", + "[2023-07-25 14:55:46,463] [INFO] (root) - This series does not match the selection conditions.\n", + "[2023-07-25 14:55:46,463] [INFO] (root) - Working on series, instance UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.154156364083745277068656620138\n", + "[2023-07-25 14:55:46,463] [INFO] (root) - On attribute: 'Modality' to match value: 'MR'\n", + "[2023-07-25 14:55:46,463] [INFO] (root) - Series attribute Modality value: MR\n", + "[2023-07-25 14:55:46,463] [INFO] (root) - On attribute: 'ImageType' to match value: ['DIFFUSION', 'TRACEW']\n", + "[2023-07-25 14:55:46,463] [INFO] (root) - Series attribute ImageType value: None\n", + "[2023-07-25 14:55:46,463] [INFO] (root) - On attribute: 'SeriesDescription' to match value: '((?=3B 2000 w ERC SSh_DWI)|(?=AX DIFFUSION_CALC_BVAL_DFC_MIX)|(?=AX DWI)|(?=.*AX DWI (50,1500).*)|(?=Ax DWI BH)|(?=AX DWI_TRACEW_DFC_MIX)|(?=Axial FOCUS DWI 1400)|(?=b_1500 prostate)|(?=b_2000 prostate)|(?=DIFF bv1400)|(?=diff tra b 50 500 800 WIP511b alle spoelenCALC_BVAL)|(?=diffusie-3Scan-4bval_fsCALC_BVAL)|(?=DW_Synthetic: Ax DWI All B-50-800 Synthetic B-1400)|(?=.*DW_Synthetic: Ax Focus 50,500,800,1400,2000.*)|(?=DWI PROSTATE)|(?=DWI_5b_0_1500)|(?=DWI_b2000)|(?=DWI_b2000_new)|(?=DWI_b2000_new SENSE)|(?=DWI_b2000_NSA6 SENSE)|(?=ep2d_diff_b1400_new 32 measipat)|(?=ep2d_diff_tra_DYNDIST_MIXCALC_BVAL$)|(?=ep2d_diff_tra_DYNDISTCALC_BVAL)|(?=ep2d_diff_tra2x2_Noise0_FS_DYNDISTCALC_BVAL)|(?=ep2d-advdiff-3Scan-high bvalue 1400)|(?=sb_1500)|(?=sb_2000)|(?=sB1400)|(?=sb1500)|(?=sb-1500)|(?=sb1500 r5 only)|(?=sb-2000)|(?=sDWI_b_2000)|(?=sDWI_b2000))'\n", + "[2023-07-25 14:55:46,463] [INFO] (root) - Series attribute SeriesDescription value: ep2d_diff_tra_DYNDIST_MIXCALC_BVAL\n", + "[2023-07-25 14:55:46,463] [INFO] (root) - Series attribute string value did not match. 
Try regEx.\n", + "[2023-07-25 14:55:46,463] [INFO] (root) - Selected Series, UID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.154156364083745277068656620138\n", + "\u001b[34mDone performing execution of operator DICOMSeriesSelectorOperator\n", + "\u001b[39m\n", + "\u001b[34mGoing to initiate execution of operator DICOMSeriesToVolumeOperator\u001b[39m\n", + "\u001b[32mExecuting operator DICOMSeriesToVolumeOperator \u001b[33m(Process ID: 1, Operator ID: cd6f330b-640c-42cc-b4c2-15bc8cf1a6cd)\u001b[39m\n", + "\u001b[34mDone performing execution of operator DICOMSeriesToVolumeOperator\n", + "\u001b[39m\n", + "\u001b[34mGoing to initiate execution of operator DICOMSeriesToVolumeOperator\u001b[39m\n", + "\u001b[32mExecuting operator DICOMSeriesToVolumeOperator \u001b[33m(Process ID: 1, Operator ID: 2b3e9177-806f-407c-86fb-41dd4e5d2521)\u001b[39m\n", + "\u001b[34mDone performing execution of operator DICOMSeriesToVolumeOperator\n", + "\u001b[39m\n", + "\u001b[34mGoing to initiate execution of operator DICOMSeriesToVolumeOperator\u001b[39m\n", + "\u001b[32mExecuting operator DICOMSeriesToVolumeOperator \u001b[33m(Process ID: 1, Operator ID: c291a66c-9ba3-4fb0-b379-f220ca9b5cb8)\u001b[39m\n", + "\u001b[34mDone performing execution of operator DICOMSeriesToVolumeOperator\n", + "\u001b[39m\n", + "\u001b[34mGoing to initiate execution of operator ProstateSegOperator\u001b[39m\n", + "\u001b[32mExecuting operator ProstateSegOperator \u001b[33m(Process ID: 1, Operator ID: 33ab5a0b-107a-499b-946c-3b141425a127)\u001b[39m\n", + "/root/.local/lib/python3.8/site-packages/monai/utils/deprecate_utils.py:321: FutureWarning: monai.transforms.io.dictionary LoadImaged.__init__:image_only: Current default value of argument `image_only=False` has been deprecated since version 1.1. It will be changed to `image_only=True` in version 1.3.\n", + " warn_deprecated(argname, msg, warning_category)\n", + "Converted Image object metadata:\n", + "SeriesInstanceUID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.206828891270520544417996275680, type \n", + "SeriesDate: 20111018, type \n", + "SeriesTime: 115341.437000, type \n", + "Modality: MR, type \n", + "SeriesDescription: t2_tse_tra, type \n", + "PatientPosition: FFS, type \n", + "SeriesNumber: 5, type \n", + "row_pixel_spacing: 0.5, type \n", + "col_pixel_spacing: 0.5, type \n", + "depth_pixel_spacing: 4.244812427751082, type \n", + "row_direction_cosine: [1.0, -1.99011e-10, 4.9619e-11], type \n", + "col_direction_cosine: [2.051034e-10, 0.97029572703367, -0.2419218925608], type \n", + "depth_direction_cosine: [1.4080733701821592e-17, 0.2419218925608, 0.97029572703367], type \n", + "dicom_affine_transform: [[ 5.00000000e-01 1.02551700e-10 0.00000000e+00 -1.17913254e+02]\n", + " [-9.95055000e-11 4.85147864e-01 1.08864853e+00 -8.51499185e+01]\n", + " [ 2.48095000e-11 -1.20960946e-01 4.36633089e+00 1.28399916e+01]\n", + " [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 1.00000000e+00]], type \n", + "nifti_affine_transform: [[-5.00000000e-01 -1.02551700e-10 -0.00000000e+00 1.17913254e+02]\n", + " [ 9.95055000e-11 -4.85147864e-01 -1.08864853e+00 8.51499185e+01]\n", + " [ 2.48095000e-11 -1.20960946e-01 4.36633089e+00 1.28399916e+01]\n", + " [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 1.00000000e+00]], type \n", + "StudyInstanceUID: 1.3.6.1.4.1.14519.5.2.1.7311.5101.170561193612723093192571245493, type \n", + "StudyID: , type \n", + "StudyDate: 20111018, type \n", + "StudyTime: 113950.828000, type \n", + "StudyDescription: MR prostaat kanker detectie WDS_mc MCAPRODETW, type \n", + "AccessionNumber: 
1566645557835538, type \n", + "selection_name: t2, type \n", + "[2023-07-25 14:55:47,007] [INFO] (Loaded image) - Data statistics:\n", + "Type: torch.float32\n", + "Shape: torch.Size([384, 384, 19])\n", + "Value range: (0.0, 1432.0)\n", + "[2023-07-25 14:55:47,018] [INFO] (Channel-first image) - Data statistics:\n", + "Type: torch.float32\n", + "Shape: torch.Size([1, 384, 384, 19])\n", + "Value range: (0.0, 1432.0)\n", + "[2023-07-25 14:55:47,829] [INFO] (Resampled and normalized image) - Data statistics:\n", + "Type: torch.float32\n", + "Shape: torch.Size([1, 192, 193, 82])\n", + "Value range: (-1.5098271369934082, 6.934866905212402)\n", + "[2023-07-25 14:56:07,830] [INFO] (Model output) - Data statistics:\n", + "Type: torch.float32\n", + "Shape: torch.Size([2, 192, 193, 82])\n", + "Value range: (3.5554221788913765e-09, 1.0)\n", + "[2023-07-25 14:56:08,196] [INFO] (Inverted output) - Data statistics:\n", + "Type: torch.float32\n", + "Shape: torch.Size([2, 384, 384, 19])\n", + "Value range: (4.7424646432148165e-09, 1.0)\n", + "[2023-07-25 14:56:08,199] [INFO] (AsDiscrete output) - Data statistics:\n", + "Type: torch.float32\n", + "Shape: torch.Size([1, 384, 384, 19])\n", + "Value range: (0.0, 1.0)\n", + "Output Seg image numpy array shaped: (19, 384, 384)\n", + "Output Seg image pixel max value: 1\n", + "Output Seg image pixel min value: 0\n", + "\u001b[34mDone performing execution of operator ProstateSegOperator\n", + "\u001b[39m\n", + "\u001b[34mGoing to initiate execution of operator CustomProstateLesionSegOperator\u001b[39m\n", + "\u001b[32mExecuting operator CustomProstateLesionSegOperator \u001b[33m(Process ID: 1, Operator ID: e31373ab-4b5d-4cbd-b5d0-71f9cbc0f5f4)\u001b[39m\n", + "2023-07-25 14:56:08,224 INFO image_writer.py:197 - writing: /var/monai/output/t2/t2.nii.gz\n", + "2023-07-25 14:56:08,584 INFO image_writer.py:197 - writing: /var/monai/output/adc/adc.nii.gz\n", + "2023-07-25 14:56:08,613 INFO image_writer.py:197 - writing: /var/monai/output/highb/highb.nii.gz\n", + "2023-07-25 14:56:08,638 INFO image_writer.py:197 - writing: /var/monai/output/organ/organ.nii.gz\n", + "Loading input...\n", + "Pre-processing input image...\n", + "inputs_shape: (206, 187, 170)\n", + "output filename: /var/monai/output/lesion/fold0_lesion_prob.nii.gz\n", + "output filename: /var/monai/output/lesion/fold1_lesion_prob.nii.gz\n", + "output filename: /var/monai/output/lesion/fold2_lesion_prob.nii.gz\n", + "output filename: /var/monai/output/lesion/fold3_lesion_prob.nii.gz\n", + "output filename: /var/monai/output/lesion/fold4_lesion_prob.nii.gz\n", + "\u001b[34mDone performing execution of operator CustomProstateLesionSegOperator\n", + "\u001b[39m\n" + ] + } + ], + "source": [ + "# Run MAP\n", + "!monai-deploy run -l DEBUG lesion_seg_workflow_app:latest /media/jtetreault/Data/Datasets/ProstateX/PROSTATEx/ProstateX-0004 output" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python (monai)", + "language": "python", + "name": "monai" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.12" + }, + "orig_nbformat": 4, + "vscode": { + "interpreter": { + "hash": "3cfcbab7e585f21ccb16282632415ba659597b9539aa6575a8dff698ec65888b" + } + } + }, + "nbformat": 4, + "nbformat_minor": 2 }