Skip to content

Commit

Permalink
fix cuda memory issue
Browse files Browse the repository at this point in the history
  • Loading branch information
qnguyen3 committed Oct 30, 2024
1 parent 08db482 commit aa85caa
Showing 1 changed file with 5 additions and 1 deletion.
6 changes: 5 additions & 1 deletion src/evaluator/reward_model_evaluator.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,17 +23,21 @@ async def get_score(self, instruction: str, response: str) -> float:
{"role": "assistant", "content": response}
]
loop = asyncio.get_event_loop()
return await loop.run_in_executor(self.executor, self.model.get_score, self.tokenizer, chat)
score = await loop.run_in_executor(self.executor, self.model.get_score, self.tokenizer, chat)
torch.cuda.empty_cache()
return score

async def evaluate(self, instructions: List[str], responses: List[str]) -> float:
    """Score each (instruction, response) pair concurrently and return the mean score.

    Pairs are formed by zipping `instructions` with `responses`; any surplus
    items in the longer list are silently dropped (zip semantics).

    Args:
        instructions: Prompts given to the model.
        responses: One response per instruction.

    Returns:
        The arithmetic mean of the per-pair reward scores.

    Raises:
        ValueError: If zipping produces no pairs — instead of the opaque
            ZeroDivisionError the bare mean computation would raise.
    """
    tasks = [self.get_score(instruction, response)
             for instruction, response in zip(instructions, responses)]
    if not tasks:
        raise ValueError("evaluate() requires at least one (instruction, response) pair")
    scores = await asyncio.gather(*tasks)
    # Release cached CUDA allocator blocks after the batch (the point of this
    # commit); a no-op when CUDA is not initialized.
    torch.cuda.empty_cache()
    return sum(scores) / len(scores)

async def select_best_method(self, methods: List[str], instructions: List[str], responses: List[List[str]]) -> tuple:
    """Evaluate every candidate method's responses and pick the highest-scoring one.

    Args:
        methods: Names of the candidate generation methods, parallel to `responses`.
        instructions: The shared prompts each method responded to.
        responses: One list of responses per method.

    Returns:
        A `(method_name, score)` tuple for the best-scoring method (first one
        on ties, matching `max` semantics).
    """
    # Launch one evaluation per candidate method and await them all together.
    evaluation_tasks = [self.evaluate(instructions, method_responses)
                        for method_responses in responses]
    scores = await asyncio.gather(*evaluation_tasks)
    torch.cuda.empty_cache()

    # Argmax over (index, score) pairs; max keeps the first maximal entry.
    best_index, best_score = max(enumerate(scores), key=lambda pair: pair[1])
    return methods[best_index], best_score

0 comments on commit aa85caa

Please sign in to comment.