Skip to content

Commit

Permalink
Replace mutable defaults in Transformer.__init__
Browse files Browse the repository at this point in the history
  • Loading branch information
aphedges committed Nov 15, 2023
1 parent c5f93f7 commit 12d8f38
Showing 1 changed file with 7 additions and 2 deletions.
9 changes: 7 additions & 2 deletions sentence_transformers/models/Transformer.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,13 +18,18 @@ class Transformer(nn.Module):
:param tokenizer_name_or_path: Name or path of the tokenizer. When None, then model_name_or_path is used
"""
def __init__(self, model_name_or_path: str, max_seq_length: Optional[int] = None,
model_args: Dict = {}, cache_dir: Optional[str] = None,
tokenizer_args: Dict = {}, do_lower_case: bool = False,
model_args: Optional[Dict] = None, cache_dir: Optional[str] = None,
tokenizer_args: Optional[Dict] = None, do_lower_case: bool = False,
tokenizer_name_or_path : str = None):
super(Transformer, self).__init__()
self.config_keys = ['max_seq_length', 'do_lower_case']
self.do_lower_case = do_lower_case

if model_args is None:
model_args = {}
if tokenizer_args is None:
tokenizer_args = {}

config = AutoConfig.from_pretrained(model_name_or_path, **model_args, cache_dir=cache_dir)
self._load_model(model_name_or_path, config, cache_dir, **model_args)

Expand Down

0 comments on commit 12d8f38

Please sign in to comment.