fix ppo dataset bug #4012
Former-commit-id: 7fc51b2e93698ae5e012566af8481f4d861c873d
@@ -130,7 +130,7 @@ def get_dataset(
     model_args: "ModelArguments",
     data_args: "DataArguments",
     training_args: "Seq2SeqTrainingArguments",
-    stage: Literal["pt", "sft", "rm", "kto"],
+    stage: Literal["pt", "sft", "rm", "ppo", "kto"],
     tokenizer: "PreTrainedTokenizer",
     processor: Optional["ProcessorMixin"] = None,
 ) -> Union["Dataset", "IterableDataset"]:
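For context, a minimal sketch of why this one-token change matters. This is not the project's actual loader; the body of get_dataset here is a hypothetical stand-in. Before the fix, passing stage="ppo" was rejected by static type checkers because "ppo" was missing from the Literal of allowed stages.

from typing import Literal

# "ppo" is the value added to the allowed stages by this commit.
Stage = Literal["pt", "sft", "rm", "ppo", "kto"]

def get_dataset(stage: Stage) -> str:
    # Illustrative stand-in body: dispatch on the training stage.
    # With the widened Literal, stage="ppo" now type-checks.
    return f"dataset prepared for stage: {stage}"

if __name__ == "__main__":
    print(get_dataset("ppo"))  # accepted after the fix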