diff --git a/optimum/intel/openvino/configuration.py b/optimum/intel/openvino/configuration.py
index e98a833251..31aad18426 100644
--- a/optimum/intel/openvino/configuration.py
+++ b/optimum/intel/openvino/configuration.py
@@ -56,10 +56,10 @@ def __init__(
         self._enable_standard_onnx_export_option()
         self.optimum_version = kwargs.pop("optimum_version", None)

-    def add_input_info(self, model_inputs: Dict):
+    def add_input_info(self, model_inputs: Dict, force_batch_one: bool = False):
         self.input_info = [
             {
-                "sample_size": list(value.shape),
+                "sample_size": [1] + list(value.shape[1:]) if force_batch_one else list(value.shape),
                 "type": "long" if value.dtype is torch.int64 else "float",
                 "keyword": name,
             }
diff --git a/optimum/intel/openvino/trainer.py b/optimum/intel/openvino/trainer.py
index 20b9f1f550..f5b724b950 100644
--- a/optimum/intel/openvino/trainer.py
+++ b/optimum/intel/openvino/trainer.py
@@ -168,7 +168,8 @@ def __init__(
             model_inputs = next(iter(train_dataloader))
             for label_name in self.label_names:
                 model_inputs.pop(label_name)
-            self.ov_config.add_input_info(model_inputs)
+            force_batch_one = self._is_pruning_enabled()
+            self.ov_config.add_input_info(model_inputs, force_batch_one)
             nncf_config = NNCFConfig.from_dict(self.ov_config.__dict__)
             nncf_config.register_extra_structs(
                 [
@@ -770,3 +771,12 @@ def _set_task(self):
         if self.task is None:
             raise ValueError("The model task defining the model topology needs to be specified for the ONNX export.")
         self.task = _TASK_ALIASES.get(self.task, self.task)
+
+    def _is_pruning_enabled(self):
+        # True when a movement_pruning algorithm is present in the compression config.
+        compression = self.ov_config.compression
+        if isinstance(compression, dict) and compression.get("algorithm") == "movement_pruning":
+            return True
+        if isinstance(compression, list):
+            return any(algo_config.get("algorithm") == "movement_pruning" for algo_config in compression)
+        return False
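
To illustrate what the new `force_batch_one` flag does to the input metadata handed to NNCF, here is a standalone sketch, not part of the patch: `build_input_info` is a hypothetical free-function mirror of `OVConfig.add_input_info`, and the tensor name and shape in the demo batch are made up.

```python
import torch


def build_input_info(model_inputs, force_batch_one=False):
    # Mirrors OVConfig.add_input_info: when force_batch_one is set,
    # the batch dimension of every sample shape is pinned to 1.
    return [
        {
            "sample_size": [1] + list(value.shape[1:]) if force_batch_one else list(value.shape),
            "type": "long" if value.dtype is torch.int64 else "float",
            "keyword": name,
        }
        for name, value in model_inputs.items()
    ]


batch = {"input_ids": torch.ones(8, 128, dtype=torch.int64)}
print(build_input_info(batch)[0]["sample_size"])                        # [8, 128]
print(build_input_info(batch, force_batch_one=True)[0]["sample_size"])  # [1, 128]
```

With the trainer change above, the batch dimension is pinned to 1 only when a `movement_pruning` algorithm appears in the compression config, so pruning traces the model with a fixed batch of 1 while other algorithms keep the dataloader's actual batch size.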