2 files changed: +1 −19 lines

pyproject.toml
@@ -12,7 +12,7 @@ build-backend = "poetry.masonry.api"
 [tool.poetry]
 name = "together"
-version = "1.5.15"
+version = "1.5.16"
 authors = ["Together AI <[email protected]>"]
 description = "Python client for Together's Cloud Platform!"
 readme = "README.md"
@@ -183,24 +183,6 @@ def create_finetune_request(
     )
     train_on_inputs = "auto"

-    if dpo_beta is not None and training_method != "dpo":
-        raise ValueError("dpo_beta is only supported for DPO training")
-    if dpo_normalize_logratios_by_length and training_method != "dpo":
-        raise ValueError(
-            "dpo_normalize_logratios_by_length=True is only supported for DPO training"
-        )
-    if rpo_alpha is not None:
-        if training_method != "dpo":
-            raise ValueError("rpo_alpha is only supported for DPO training")
-        if not rpo_alpha >= 0.0:
-            raise ValueError(f"rpo_alpha should be non-negative (got {rpo_alpha})")
-
-    if simpo_gamma is not None:
-        if training_method != "dpo":
-            raise ValueError("simpo_gamma is only supported for DPO training")
-        if not simpo_gamma >= 0.0:
-            raise ValueError(f"simpo_gamma should be non-negative (got {simpo_gamma})")
-
     lr_scheduler: FinetuneLRScheduler
     if lr_scheduler_type == "cosine":
         if scheduler_num_cycles <= 0.0:
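Note: this hunk drops the client-side guards for the DPO-only arguments (dpo_beta, dpo_normalize_logratios_by_length, rpo_alpha, simpo_gamma). If downstream code still wants equivalent validation before building a fine-tune request, the removed checks can be kept as a small local helper. The sketch below only reuses the parameter names visible in the diff; validate_dpo_args is a hypothetical name and is not part of the together client.

# Hedged sketch: a caller-side helper reproducing the removed checks.
# validate_dpo_args is a hypothetical name, not a together library API.
from typing import Optional

def validate_dpo_args(
    training_method: str,
    dpo_beta: Optional[float] = None,
    dpo_normalize_logratios_by_length: bool = False,
    rpo_alpha: Optional[float] = None,
    simpo_gamma: Optional[float] = None,
) -> None:
    # Each of these arguments only applies to DPO training.
    if dpo_beta is not None and training_method != "dpo":
        raise ValueError("dpo_beta is only supported for DPO training")
    if dpo_normalize_logratios_by_length and training_method != "dpo":
        raise ValueError(
            "dpo_normalize_logratios_by_length=True is only supported for DPO training"
        )
    if rpo_alpha is not None:
        if training_method != "dpo":
            raise ValueError("rpo_alpha is only supported for DPO training")
        if rpo_alpha < 0.0:
            raise ValueError(f"rpo_alpha should be non-negative (got {rpo_alpha})")
    if simpo_gamma is not None:
        if training_method != "dpo":
            raise ValueError("simpo_gamma is only supported for DPO training")
        if simpo_gamma < 0.0:
            raise ValueError(f"simpo_gamma should be non-negative (got {simpo_gamma})")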