From eaa7e6160b0c975c4472d77938ca4b9e0023a74d Mon Sep 17 00:00:00 2001 From: Nikolay Date: Tue, 9 May 2023 12:37:16 +0200 Subject: [PATCH 1/6] explicitly set batch=1 for NNCF in order to avoid issue with Wav2Vec2 --- optimum/intel/openvino/configuration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/optimum/intel/openvino/configuration.py b/optimum/intel/openvino/configuration.py index e98a833251..b9f6cc2649 100644 --- a/optimum/intel/openvino/configuration.py +++ b/optimum/intel/openvino/configuration.py @@ -59,7 +59,7 @@ def __init__( def add_input_info(self, model_inputs: Dict): self.input_info = [ { - "sample_size": list(value.shape), + "sample_size": [1] + list(value.shape[1:]), "type": "long" if value.dtype is torch.int64 else "float", "keyword": name, } From 8f67a2e34e73e733760820e00b9293cb19916001 Mon Sep 17 00:00:00 2001 From: Nikolay Date: Tue, 9 May 2023 17:37:23 +0200 Subject: [PATCH 2/6] More safe changes. affects only pruning scenario --- optimum/intel/openvino/configuration.py | 4 ++-- optimum/intel/openvino/trainer.py | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/optimum/intel/openvino/configuration.py b/optimum/intel/openvino/configuration.py index b9f6cc2649..31109c1b37 100644 --- a/optimum/intel/openvino/configuration.py +++ b/optimum/intel/openvino/configuration.py @@ -56,10 +56,10 @@ def __init__( self._enable_standard_onnx_export_option() self.optimum_version = kwargs.pop("optimum_version", None) - def add_input_info(self, model_inputs: Dict): + def add_input_info(self, model_inputs: Dict, force_single_batch: bool = False): self.input_info = [ { - "sample_size": [1] + list(value.shape[1:]), + "sample_size": [1] + list(value.shape[1:] if force_single_batch else value.shape), "type": "long" if value.dtype is torch.int64 else "float", "keyword": name, } diff --git a/optimum/intel/openvino/trainer.py b/optimum/intel/openvino/trainer.py index 3e5b2b2ccc..a7055d3417 100644 --- 
a/optimum/intel/openvino/trainer.py +++ b/optimum/intel/openvino/trainer.py @@ -168,7 +168,8 @@ def __init__( model_inputs = next(iter(train_dataloader)) for label_name in self.label_names: model_inputs.pop(label_name) - self.ov_config.add_input_info(model_inputs) + force_single_batch = self._should_apply_pruning_transform() + self.ov_config.add_input_info(model_inputs, force_single_batch) nncf_config = NNCFConfig.from_dict(self.ov_config.__dict__) nncf_config.register_extra_structs( [ From 16b855c34801afab1c0c79b561b8d5ff87823615 Mon Sep 17 00:00:00 2001 From: Nikolay Date: Wed, 10 May 2023 11:00:38 +0200 Subject: [PATCH 3/6] renamed variable --- optimum/intel/openvino/configuration.py | 4 ++-- optimum/intel/openvino/trainer.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/optimum/intel/openvino/configuration.py b/optimum/intel/openvino/configuration.py index 31109c1b37..56715da885 100644 --- a/optimum/intel/openvino/configuration.py +++ b/optimum/intel/openvino/configuration.py @@ -56,10 +56,10 @@ def __init__( self._enable_standard_onnx_export_option() self.optimum_version = kwargs.pop("optimum_version", None) - def add_input_info(self, model_inputs: Dict, force_single_batch: bool = False): + def add_input_info(self, model_inputs: Dict, force_batch_one: bool = False): self.input_info = [ { - "sample_size": [1] + list(value.shape[1:] if force_single_batch else value.shape), + "sample_size": [1] + list(value.shape[1:] if force_batch_one else value.shape), "type": "long" if value.dtype is torch.int64 else "float", "keyword": name, } diff --git a/optimum/intel/openvino/trainer.py b/optimum/intel/openvino/trainer.py index a7055d3417..d55bf40625 100644 --- a/optimum/intel/openvino/trainer.py +++ b/optimum/intel/openvino/trainer.py @@ -168,8 +168,8 @@ def __init__( model_inputs = next(iter(train_dataloader)) for label_name in self.label_names: model_inputs.pop(label_name) - force_single_batch = self._should_apply_pruning_transform() - 
self.ov_config.add_input_info(model_inputs, force_single_batch) + force_batch_one = self._should_apply_pruning_transform() + self.ov_config.add_input_info(model_inputs, force_batch_one) nncf_config = NNCFConfig.from_dict(self.ov_config.__dict__) nncf_config.register_extra_structs( [ From 436b6cbb88fded4d82ce01ee99bc222faa806340 Mon Sep 17 00:00:00 2001 From: Nikolay Date: Wed, 10 May 2023 17:13:46 +0200 Subject: [PATCH 4/6] Corrections --- optimum/intel/openvino/configuration.py | 2 +- optimum/intel/openvino/trainer.py | 11 ++++++++++- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/optimum/intel/openvino/configuration.py b/optimum/intel/openvino/configuration.py index 56715da885..4efc4a3dcb 100644 --- a/optimum/intel/openvino/configuration.py +++ b/optimum/intel/openvino/configuration.py @@ -59,7 +59,7 @@ def __init__( def add_input_info(self, model_inputs: Dict, force_batch_one: bool = False): self.input_info = [ { - "sample_size": [1] + list(value.shape[1:] if force_batch_one else value.shape), + "sample_size": [1] + list(value.shape[1:]) if force_batch_one else value.shape, "type": "long" if value.dtype is torch.int64 else "float", "keyword": name, } diff --git a/optimum/intel/openvino/trainer.py b/optimum/intel/openvino/trainer.py index d55bf40625..fe5d02154e 100644 --- a/optimum/intel/openvino/trainer.py +++ b/optimum/intel/openvino/trainer.py @@ -168,7 +168,7 @@ def __init__( model_inputs = next(iter(train_dataloader)) for label_name in self.label_names: model_inputs.pop(label_name) - force_batch_one = self._should_apply_pruning_transform() + force_batch_one = self._is_pruning_enabled() self.ov_config.add_input_info(model_inputs, force_batch_one) nncf_config = NNCFConfig.from_dict(self.ov_config.__dict__) nncf_config.register_extra_structs( @@ -767,3 +767,12 @@ def _set_task(self): if self.task is None: raise ValueError("The model task defining the model topology needs to be specified for the ONNX export.") self.task = 
_TASK_ALIASES.get(self.task, self.task) + + def _is_pruning_enabled(compression: Union[Dict, List, None]): + if isinstance(compression, dict) and compression["algorithm"] == "movement_pruning": + return True + if isinstance(compression, list): + for algo_config in compression: + if algo_config["algorithm"] == "movement_pruning": + return True + return False \ No newline at end of file From fd9d9ddf7e394f7f17d838f4468dd132b813d529 Mon Sep 17 00:00:00 2001 From: Nikolay Date: Wed, 10 May 2023 18:27:05 +0200 Subject: [PATCH 5/6] correction --- optimum/intel/openvino/configuration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/optimum/intel/openvino/configuration.py b/optimum/intel/openvino/configuration.py index 4efc4a3dcb..31aad18426 100644 --- a/optimum/intel/openvino/configuration.py +++ b/optimum/intel/openvino/configuration.py @@ -59,7 +59,7 @@ def __init__( def add_input_info(self, model_inputs: Dict, force_batch_one: bool = False): self.input_info = [ { - "sample_size": [1] + list(value.shape[1:]) if force_batch_one else value.shape, + "sample_size": [1] + list(value.shape[1:]) if force_batch_one else list(value.shape), "type": "long" if value.dtype is torch.int64 else "float", "keyword": name, } From d016f1477e0cdb11fcfe317341ca167998e59f2a Mon Sep 17 00:00:00 2001 From: Nikolay Date: Wed, 7 Jun 2023 11:23:00 +0200 Subject: [PATCH 6/6] fixed style --- optimum/intel/openvino/trainer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/optimum/intel/openvino/trainer.py b/optimum/intel/openvino/trainer.py index 99cda7643c..f5b724b950 100644 --- a/optimum/intel/openvino/trainer.py +++ b/optimum/intel/openvino/trainer.py @@ -779,4 +779,4 @@ def _is_pruning_enabled(compression: Union[Dict, List, None]): for algo_config in compression: if algo_config["algorithm"] == "movement_pruning": return True - return False \ No newline at end of file + return False