Former-commit-id: f121d5c4f94af9f165132c4309cb9bdc8217d985
hiyouga
2024-06-10 21:24:15 +08:00
parent 0ecf0d51e3
commit 784088db3f
6 changed files with 41 additions and 54 deletions

@@ -6,14 +6,14 @@ from llamafactory.hparams import get_train_args
 from llamafactory.model import load_model, load_tokenizer


-TINY_LLAMA = os.environ.get("TINY_LLAMA", "llamafactory/tiny-random-LlamaForCausalLM")
+TINY_LLAMA = os.environ.get("TINY_LLAMA", "llamafactory/tiny-random-Llama-3")

-TRAINING_ARGS = {
+TRAIN_ARGS = {
     "model_name_or_path": TINY_LLAMA,
     "stage": "sft",
     "do_train": True,
     "finetuning_type": "full",
-    "dataset": "llamafactory/tiny_dataset",
+    "dataset": "llamafactory/tiny-supervised-dataset",
     "dataset_dir": "ONLINE",
     "template": "llama3",
     "cutoff_len": 1024,
@@ -25,7 +25,7 @@ TRAINING_ARGS = {


 def test_full():
-    model_args, _, _, finetuning_args, _ = get_train_args(TRAINING_ARGS)
+    model_args, _, _, finetuning_args, _ = get_train_args(TRAIN_ARGS)
     tokenizer_module = load_tokenizer(model_args)
     model = load_model(tokenizer_module["tokenizer"], model_args, finetuning_args, is_trainable=True)
     for param in model.parameters():
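
The hunk cuts off inside the parameter loop, so the assertion body of test_full is not shown. As a minimal sketch of how a check of this shape typically completes for "finetuning_type": "full" (the helper name and the specific assertions below are assumptions, not part of the diff):

# Sketch only: the loop body is truncated in the hunk above, so these
# assertions are an assumption about what a full fine-tuning test verifies.
import torch

def check_all_params_trainable(model):  # hypothetical helper name
    for param in model.parameters():
        # with full fine-tuning, every parameter should be trainable
        assert param.requires_grad is True
        # trainable weights are typically kept in fp32 during training
        assert param.dtype == torch.float32

Assuming the file sits in the repository's test suite (the file path header was lost above), a test like this would be run with pytest, for example via pytest -k test_full.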