support function calling
Former-commit-id: 66533b3f65babf2429c92c0f8fafe4eff5e0ff63

llmtuner/train/rm/__init__.py
@@ -1 +1,4 @@
-from llmtuner.train.rm.workflow import run_rm
+from .workflow import run_rm
+
+
+__all__ = ["run_rm"]
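
For readers skimming the diff: the new __init__ re-exports run_rm at the package level. A minimal sketch of the caller side, assuming the package is importable as llmtuner (hypothetical caller code, not part of the commit):

# The "from .workflow import run_rm" line above is what makes the package-level
# import work; __all__ additionally marks run_rm as the name exported by
# "from llmtuner.train.rm import *".
from llmtuner.train.rm import run_rm            # package-level import
from llmtuner.train.rm.workflow import run_rm   # direct module path still works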

llmtuner/train/rm/trainer.py
@@ -4,7 +4,7 @@ import torch
 from typing import TYPE_CHECKING, Dict, List, Optional, Tuple, Union
 from transformers import Trainer
 
-from llmtuner.extras.logging import get_logger
+from ...extras.logging import get_logger
 
 if TYPE_CHECKING:
     from transformers.trainer import PredictionOutput
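
The only change in this hunk is switching an absolute import to a relative one. A sketch of how the dots resolve, assuming this file sits three packages deep at llmtuner/train/rm/trainer.py (the depth implied by the three dots):

# Each extra leading dot climbs one package level from the current package,
# llmtuner.train.rm:
#   .   -> llmtuner.train.rm
#   ..  -> llmtuner.train
#   ... -> llmtuner
from ...extras.logging import get_logger  # same target as: from llmtuner.extras.logging import get_logger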

llmtuner/train/rm/workflow.py
@@ -3,19 +3,19 @@
 from typing import TYPE_CHECKING, Optional, List
 from transformers import Seq2SeqTrainingArguments
 
-from llmtuner.data import get_dataset, preprocess_dataset, split_dataset
-from llmtuner.extras.callbacks import FixValueHeadModelCallback
-from llmtuner.extras.misc import fix_valuehead_checkpoint
-from llmtuner.extras.ploting import plot_loss
-from llmtuner.model import load_model_and_tokenizer
-from llmtuner.train.rm.collator import PairwiseDataCollatorWithPadding
-from llmtuner.train.rm.metric import compute_accuracy
-from llmtuner.train.rm.trainer import PairwiseTrainer
-from llmtuner.train.utils import create_modelcard_and_push
+from ...data import get_dataset, split_dataset
+from ...extras.callbacks import FixValueHeadModelCallback
+from ...extras.misc import fix_valuehead_checkpoint
+from ...extras.ploting import plot_loss
+from ...model import load_model_and_tokenizer
+from ...train.rm.collator import PairwiseDataCollatorWithPadding
+from ...train.rm.metric import compute_accuracy
+from ...train.rm.trainer import PairwiseTrainer
+from ...train.utils import create_modelcard_and_push
 
 if TYPE_CHECKING:
     from transformers import TrainerCallback
-    from llmtuner.hparams import ModelArguments, DataArguments, FinetuningArguments
+    from ...hparams import ModelArguments, DataArguments, FinetuningArguments
 
 
 def run_rm(
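
Note that preprocess_dataset is dropped from the import list above rather than converted to a relative import; the next hunk shows why, as its work is folded into the new get_dataset signature.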

@@ -25,9 +25,8 @@ def run_rm(
     finetuning_args: "FinetuningArguments",
     callbacks: Optional[List["TrainerCallback"]] = None
 ):
-    dataset = get_dataset(model_args, data_args)
     model, tokenizer = load_model_and_tokenizer(model_args, finetuning_args, training_args.do_train, add_valuehead=True)
-    dataset = preprocess_dataset(dataset, tokenizer, data_args, training_args, stage="rm")
+    dataset = get_dataset(model_args, data_args, tokenizer, training_args, stage="rm")
     data_collator = PairwiseDataCollatorWithPadding(tokenizer, pad_to_multiple_of=8)
 
     # Update arguments
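
The behavioral change in this final hunk: dataset loading and preprocessing are merged into a single get_dataset call that now takes the tokenizer, so load_model_and_tokenizer must run first. A before/after sketch using only names that appear in the diff:

# Before: build the raw dataset, then tokenize it in a separate pass.
dataset = get_dataset(model_args, data_args)
model, tokenizer = load_model_and_tokenizer(model_args, finetuning_args, training_args.do_train, add_valuehead=True)
dataset = preprocess_dataset(dataset, tokenizer, data_args, training_args, stage="rm")

# After: one call; the returned dataset is already preprocessed for the
# reward-model ("rm") stage.
model, tokenizer = load_model_and_tokenizer(model_args, finetuning_args, training_args.do_train, add_valuehead=True)
dataset = get_dataset(model_args, data_args, tokenizer, training_args, stage="rm")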