
Commit

fix max token handling
ulya-tkch committed Sep 26, 2024
1 parent ce0b1fb commit 3c5a208
Showing 1 changed file with 1 addition and 1 deletion.

tests/tlm/conftest.py (2 changes: 1 addition & 1 deletion)

@@ -89,7 +89,7 @@ def _get_options_dictionary(model: Optional[str]) -> dict:
     add_log_perplexity_score = np.random.choice([True, False])

     if add_max_tokens:
-        max_tokens = _TLM_MAX_TOKEN_RANGE[options.get("model", _TLM_DEFAULT_MODEL)]
+        max_tokens = _TLM_MAX_TOKEN_RANGE[options.get("model", _TLM_DEFAULT_MODEL)][1]
         options["max_tokens"] = int(np.random.randint(64, max_tokens))
     if add_use_self_reflection:
         options["use_self_reflection"] = random.choice([True, False])
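
For context, a minimal sketch of why the one-character change matters. Assuming (this is not shown in the diff) that `_TLM_MAX_TOKEN_RANGE` maps each model name to a `(min, max)` tuple, the old code passed the whole tuple to `np.random.randint` as the upper bound; indexing `[1]` selects just the maximum. The constant values and model name below are placeholders, not the real ones from `tests/tlm/conftest.py`:

```python
import numpy as np

# Hypothetical stand-ins for the constants in tests/tlm/conftest.py
# (their real values are not shown in the diff).
_TLM_DEFAULT_MODEL = "model-a"
_TLM_MAX_TOKEN_RANGE = {
    "model-a": (64, 4096),  # assumed (min, max) bounds for max_tokens
}

options: dict = {}

# Before the fix, max_tokens was the whole (min, max) tuple, so
# np.random.randint(64, (64, 4096)) broadcast to a 2-element array and
# int(...) raised "only size-1 arrays can be converted to Python scalars".
# Indexing [1] takes the upper bound, giving randint a scalar high value.
max_tokens = _TLM_MAX_TOKEN_RANGE[options.get("model", _TLM_DEFAULT_MODEL)][1]
options["max_tokens"] = int(np.random.randint(64, max_tokens))

print(options)  # e.g. {'max_tokens': 1234}
```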
