Skip to content

Commit

Permalink
changed generate_output to return the LLM prompt
Browse files · Browse the repository at this point in the history
  • Loading branch information
eisenzopf committed May 14, 2024
1 parent e8887fb commit 365c5b3
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 4 deletions.
6 changes: 3 additions & 3 deletions llm_eval/handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ def generate_output(self, text):
outputs = self.model.generate(**inputs, max_new_tokens=self.max_new_tokens, do_sample=True, temperature=self.temperature, top_p=self.top_p)
responses = [self.tokenizer.decode(output, skip_special_tokens=True) for output in outputs]
responses = ' '.join(responses)
return responses
return prompt, responses

def load_dataset(self, dataset):
"""Loads an external CSV dataset via URL and prepares a dataframe for storing the output"""
Expand Down Expand Up @@ -102,8 +102,8 @@ def process_dataset(self):
for col in group.columns:
if col.endswith('.input'):
output_col = col.replace('.input', '.output')
output = self.generate_output(row[col])
output = output[len(row[col]):]
prompt, output = self.generate_output(row[col])
output = output[len(prompt):]
df.at[index, output_col] = output
self.unload_model(model_name)
return df
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "llm-eval"
version = "0.3.12"
version = "0.3.13"
authors = [
{name = "Jonathan Eisenzopf", email = "[email protected]"},
]
Expand Down

0 comments on commit 365c5b3

Please sign in to comment.