fix baichuan templates
Former-commit-id: f48a49e835b32f3991cfad8874c7b9c78953809f
```diff
@@ -76,7 +76,7 @@ class PPOPeftTrainer(PPOTrainer, PeftTrainer):
 
         # Keyword arguments for `model.generate`
         gen_kwargs = self.generating_args.to_dict()
-        gen_kwargs["eos_token_id"] = list(set([self.tokenizer.eos_token_id] + self.tokenizer.additional_special_tokens_ids))
+        gen_kwargs["eos_token_id"] = [self.tokenizer.eos_token_id] + self.tokenizer.additional_special_tokens_ids
         gen_kwargs["pad_token_id"] = self.tokenizer.pad_token_id
         gen_kwargs["logits_processor"] = get_logits_processor()
 
```
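Why the new right-hand side matters: `model.generate` in recent transformers versions accepts a list for `eos_token_id` and stops decoding at the first occurrence of any id in it. Baichuan-style chat templates end turns with additional special tokens, so those ids must be included or generation runs past the turn boundary. The sketch below is illustrative, not part of this commit; the checkpoint name and prompt are placeholders.

```python
# Minimal sketch, assuming a Baichuan-style checkpoint whose chat template
# relies on additional special tokens (the model name is a placeholder).
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("baichuan-inc/Baichuan-13B-Chat", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained("baichuan-inc/Baichuan-13B-Chat", trust_remote_code=True)

# Same construction as the new line in the diff above: every id that
# should terminate a generation turn, base EOS plus template tokens.
stop_ids = [tokenizer.eos_token_id] + tokenizer.additional_special_tokens_ids

inputs = tokenizer("Hello", return_tensors="pt")
output = model.generate(
    **inputs,
    max_new_tokens=64,
    eos_token_id=stop_ids,               # any id in the list ends decoding
    pad_token_id=tokenizer.pad_token_id, # may be None for some checkpoints
)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```

Note that the new line also drops the `list(set(...))` wrapper: deduplication is presumably unnecessary here, since repeated ids do not change when generation halts, and the plain concatenation keeps the id order deterministic.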
```diff
@@ -6,7 +6,6 @@ from torch.optim import AdamW
 from typing import TYPE_CHECKING, Optional, List
 from transformers import DataCollatorForSeq2Seq
 from transformers.optimization import get_scheduler
 from transformers.utils.versions import require_version
 
 from llmtuner.dsets import get_dataset, preprocess_dataset
 from llmtuner.extras.ploting import plot_loss
```
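For context on the imports kept above, here is a self-contained sketch of the usual AdamW-plus-`get_scheduler` wiring in a PPO-style training loop. The toy model, schedule name, learning rate, and step counts are assumptions for illustration, not values from this repository.

```python
# Minimal sketch: an AdamW optimizer stepped alongside a transformers
# learning-rate scheduler, as the imports in this hunk suggest.
import torch
from torch.optim import AdamW
from transformers.optimization import get_scheduler

model = torch.nn.Linear(8, 8)  # stand-in for the fine-tuned model

# Only trainable parameters go to the optimizer.
optimizer = AdamW(filter(lambda p: p.requires_grad, model.parameters()), lr=1e-5)
lr_scheduler = get_scheduler(
    "cosine",                  # schedule name accepted by get_scheduler
    optimizer=optimizer,
    num_warmup_steps=0,
    num_training_steps=1000,
)

for _ in range(3):             # training-loop skeleton
    optimizer.step()
    lr_scheduler.step()
    optimizer.zero_grad()
```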