diff --git a/intel_extension_for_transformers/neural_chat/pipeline/plugins/retrieval/parser/parser.py b/intel_extension_for_transformers/neural_chat/pipeline/plugins/retrieval/parser/parser.py
index 7c2750bc4d1..31ef0c3d3e9 100644
--- a/intel_extension_for_transformers/neural_chat/pipeline/plugins/retrieval/parser/parser.py
+++ b/intel_extension_for_transformers/neural_chat/pipeline/plugins/retrieval/parser/parser.py
@@ -49,7 +49,8 @@ def load(self, input, **kwargs):
             self.min_chuck_size = kwargs['min_chuck_size']
         if 'process' in kwargs:
             self.process = kwargs['process']
-        self.table_summary_model_name_or_path = kwargs['table_summary_model_name_or_path'] if 'table_summary_model_name_or_path' in kwargs else None
+        self.table_summary_model_name_or_path = kwargs['table_summary_model_name_or_path'] \
+            if 'table_summary_model_name_or_path' in kwargs else None
         self.table_summary_mode = kwargs['table_summary_mode'] if 'table_summary_mode' in kwargs else None
         if isinstance(input, str):