[deps] adapt to transformers v5 (#10147)

Co-authored-by: frozenleaves <frozen@Mac.local>
Co-authored-by: hiyouga <hiyouga@buaa.edu.cn>
Authored by 浮梦 on 2026-02-02 12:07:19 +08:00, committed by GitHub
commit bf04ca6af8 (parent 762b480131)
23 changed files with 149 additions and 120 deletions


@@ -103,7 +103,9 @@ class FixValueHeadModelCallback(TrainerCallback):
         if args.should_save:
             output_dir = os.path.join(args.output_dir, f"{PREFIX_CHECKPOINT_DIR}-{state.global_step}")
             fix_valuehead_checkpoint(
-                model=kwargs.pop("model"), output_dir=output_dir, safe_serialization=args.save_safetensors
+                model=kwargs.pop("model"),
+                output_dir=output_dir,
+                safe_serialization=getattr(args, "save_safetensors", True),
             )
@@ -137,7 +139,7 @@ class PissaConvertCallback(TrainerCallback):
             if isinstance(model, PeftModel):
                 init_lora_weights = getattr(model.peft_config["default"], "init_lora_weights")
                 setattr(model.peft_config["default"], "init_lora_weights", True)
-                model.save_pretrained(pissa_init_dir, safe_serialization=args.save_safetensors)
+                model.save_pretrained(pissa_init_dir, safe_serialization=getattr(args, "save_safetensors", True))
                 setattr(model.peft_config["default"], "init_lora_weights", init_lora_weights)

     @override
@@ -155,11 +157,11 @@
             if isinstance(model, PeftModel):
                 init_lora_weights = getattr(model.peft_config["default"], "init_lora_weights")
                 setattr(model.peft_config["default"], "init_lora_weights", True)
-                model.save_pretrained(pissa_backup_dir, safe_serialization=args.save_safetensors)
+                model.save_pretrained(pissa_backup_dir, safe_serialization=getattr(args, "save_safetensors", True))
                 setattr(model.peft_config["default"], "init_lora_weights", init_lora_weights)
                 model.save_pretrained(
                     pissa_convert_dir,
-                    safe_serialization=args.save_safetensors,
+                    safe_serialization=getattr(args, "save_safetensors", True),
                     path_initial_model_for_weight_conversion=pissa_init_dir,
                 )
                 model.load_adapter(pissa_backup_dir, "default", is_trainable=True)
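
Note: the recurring change in these hunks replaces direct access to `args.save_safetensors` with `getattr(args, "save_safetensors", True)`, so the callbacks keep working if the attribute is missing from the args object under transformers v5. The sketch below illustrates that defensive-access pattern only; the helper name and the stand-in args class are illustrative and not part of this commit.

```python
# Minimal sketch of the defensive attribute access used throughout this commit.
# Assumption: the args object may or may not expose `save_safetensors`; when it
# is absent, the code falls back to True (serialize with safetensors).


def resolve_safe_serialization(args) -> bool:
    """Hypothetical helper: read args.save_safetensors if present, else default to True."""
    return getattr(args, "save_safetensors", True)


class _Args:
    """Stand-in for a training-arguments object in this sketch."""


if __name__ == "__main__":
    args = _Args()
    print(resolve_safe_serialization(args))  # True: attribute missing, default applies

    args.save_safetensors = False
    print(resolve_safe_serialization(args))  # False: an explicit setting still wins
```

The resolved value is then passed to `save_pretrained(..., safe_serialization=...)` or `fix_valuehead_checkpoint(...)` exactly as in the hunks above, regardless of whether the training arguments expose the flag.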