Skip to content

Commit 901b83c

Browse files
committed
hotfix: remove validations
1 parent 2d8a0d2 commit 901b83c

1 file changed

Lines changed: 0 additions & 18 deletions

File tree

src/together/resources/finetune.py

Lines changed: 0 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -183,24 +183,6 @@ def create_finetune_request(
183183
)
184184
train_on_inputs = "auto"
185185

186-
if dpo_beta is not None and training_method != "dpo":
187-
raise ValueError("dpo_beta is only supported for DPO training")
188-
if dpo_normalize_logratios_by_length and training_method != "dpo":
189-
raise ValueError(
190-
"dpo_normalize_logratios_by_length=True is only supported for DPO training"
191-
)
192-
if rpo_alpha is not None:
193-
if training_method != "dpo":
194-
raise ValueError("rpo_alpha is only supported for DPO training")
195-
if not rpo_alpha >= 0.0:
196-
raise ValueError(f"rpo_alpha should be non-negative (got {rpo_alpha})")
197-
198-
if simpo_gamma is not None:
199-
if training_method != "dpo":
200-
raise ValueError("simpo_gamma is only supported for DPO training")
201-
if not simpo_gamma >= 0.0:
202-
raise ValueError(f"simpo_gamma should be non-negative (got {simpo_gamma})")
203-
204186
lr_scheduler: FinetuneLRScheduler
205187
if lr_scheduler_type == "cosine":
206188
if scheduler_num_cycles <= 0.0:

0 commit comments

Comments
 (0)