[feature] Support MPO (#8930)

This commit is contained in:
Kingsley
2025-08-15 15:09:59 +08:00
committed by GitHub
parent 41648020db
commit 936f4fd78e
3 changed files with 62 additions and 6 deletions

View File

@@ -134,6 +134,10 @@ class RLHFArguments:
default=0.0,
metadata={"help": "The supervised fine-tuning loss coefficient in DPO training."},
)
# Coefficient for the BCO (Binary Classifier Optimization) term mixed into the
# DPO loss, alongside the SFT coefficient field above. Default is 0.0 —
# presumably a zero weight disables the BCO term entirely; confirm in the
# loss computation that consumes this argument.
pref_bco_weight: float = field(
default=0.0,
metadata={"help": "The Binary Classifier Optimization coefficient in DPO training."},
)
pref_loss: Literal["sigmoid", "hinge", "ipo", "kto_pair", "orpo", "simpo"] = field(
default="sigmoid",
metadata={"help": "The type of DPO loss to use."},