
Commit 4655ead

Hotfix: remove DPO hyperparameter validations (#329)

* hotfix: remove validations
* bump version

Co-authored-by: Soroush Bassam <[email protected]>

Parent: 2d8a0d2

File tree: 2 files changed (+1, -19 lines)

pyproject.toml (1 addition, 1 deletion)

@@ -12,7 +12,7 @@ build-backend = "poetry.masonry.api"
 
 [tool.poetry]
 name = "together"
-version = "1.5.15"
+version = "1.5.16"
 authors = ["Together AI <[email protected]>"]
 description = "Python client for Together's Cloud Platform!"
 readme = "README.md"

src/together/resources/finetune.py (0 additions, 18 deletions)

@@ -183,24 +183,6 @@ def create_finetune_request(
         )
         train_on_inputs = "auto"
 
-    if dpo_beta is not None and training_method != "dpo":
-        raise ValueError("dpo_beta is only supported for DPO training")
-    if dpo_normalize_logratios_by_length and training_method != "dpo":
-        raise ValueError(
-            "dpo_normalize_logratios_by_length=True is only supported for DPO training"
-        )
-    if rpo_alpha is not None:
-        if training_method != "dpo":
-            raise ValueError("rpo_alpha is only supported for DPO training")
-        if not rpo_alpha >= 0.0:
-            raise ValueError(f"rpo_alpha should be non-negative (got {rpo_alpha})")
-
-    if simpo_gamma is not None:
-        if training_method != "dpo":
-            raise ValueError("simpo_gamma is only supported for DPO training")
-        if not simpo_gamma >= 0.0:
-            raise ValueError(f"simpo_gamma should be non-negative (got {simpo_gamma})")
-
     lr_scheduler: FinetuneLRScheduler
     if lr_scheduler_type == "cosine":
         if scheduler_num_cycles <= 0.0:
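
For context, a minimal sketch of the behavior change. The model and training_file values are hypothetical placeholders, and it is an assumption that create_finetune_request accepts them as keyword arguments; only training_method and the DPO-related parameters appear in the diff:

    from together.resources.finetune import create_finetune_request

    # In 1.5.15 this call failed client-side with
    #   ValueError: dpo_beta is only supported for DPO training
    # As of 1.5.16 the request is built without these checks, so any
    # validation of DPO-only hyperparameters is left to the API.
    request = create_finetune_request(
        model="example-model",        # hypothetical placeholder
        training_file="file-abc123",  # hypothetical placeholder
        training_method="sft",        # a non-DPO training method
        dpo_beta=0.1,                 # previously rejected for non-DPO methods
    )

Callers that relied on these early client-side errors will presumably see the same mistakes surface as server-side responses instead.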
