
Commit 8aac808
fix: remove redundant torch_dtype parameter in LMUtil constructor
voidful committed Jun 17, 2024
1 parent 2f96d3c · commit 8aac808
Showing 2 changed files with 3 additions and 3 deletions.
nlp2/lm.py (2 additions, 2 deletions)

@@ -14,7 +14,6 @@ def __init__(self, model_name="gpt2",
                  tokenizer=None,
                  model=None,
                  device=None,
-                 torch_dtype=torch.float16,
                  device_map="auto"):
         if not tokenizer:
             self.tokenizer = AutoTokenizer.from_pretrained(model_name)
@@ -25,7 +24,8 @@
         else:
             self.device = device
         if not model:
-            self.model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch_dtype,
+            self.model = AutoModelForCausalLM.from_pretrained(model_name,
+                                                              torch_dtype=torch.float16,
                                                               device_map=device_map)
         else:
             self.model = model
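In practical terms, LMUtil no longer exposes a torch_dtype constructor argument after this commit: whenever it loads the model itself, the internal from_pretrained call hard-codes torch.float16, and callers who want a different precision can pass a ready-made model instead. Below is a minimal usage sketch under that reading; the nlp2.lm import path is inferred from the file shown above, and the float32 fallback is an illustration rather than part of the commit.

    import torch
    from transformers import AutoModelForCausalLM

    from nlp2.lm import LMUtil  # import path inferred from nlp2/lm.py

    # Default path after this commit: the model is loaded in float16
    # internally; there is no constructor argument left to change that.
    lm = LMUtil(model_name="gpt2", device_map="auto")

    # Callers that need another dtype can still build the model themselves
    # and hand it in, since the constructor skips from_pretrained when a
    # model is supplied (illustrative, not part of this commit).
    fp32_model = AutoModelForCausalLM.from_pretrained("gpt2", torch_dtype=torch.float32)
    lm_fp32 = LMUtil(model=fp32_model)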
setup.py (1 addition, 1 deletion)

@@ -2,7 +2,7 @@

 setup(
     name='nlp2',
-    version='1.9.1',
+    version='1.9.2',
     description='Tool for NLP - handle file and text',
     long_description="Github : https://github.com/voidful/nlp2",
     url='https://github.com/voidful/nlp2',
