[model] add qwen2.5 vl 32b & upgrade peft (#7469)

* add qwen2.5 vl 32b

* fix ci

* upgrade peft to 0.15

* fix ci

* fix ci
hoshi-hiyouga
2025-03-25 12:15:58 +08:00
committed by GitHub
parent ec6a261568
commit 0583d06676
10 changed files with 29 additions and 26 deletions


@@ -20,9 +20,9 @@ Level:
 Dependency graph:
   main:
     transformers>=4.41.2,<=4.50.0,!=4.46.*,!=4.47.*,!=4.48.0
-    datasets>=2.16.0,<=3.3.2
-    accelerate>=0.34.0,<=1.4.0
-    peft>=0.11.1,<=0.12.0
+    datasets>=2.16.0,<=3.4.1
+    accelerate>=0.34.0,<=1.5.2
+    peft>=0.14.0,<=0.15.0
     trl>=0.8.6,<=0.9.6
   attention:
     transformers>=4.42.4 (gemma+fa2)
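
Note: the pins above are plain PEP 508 requirement strings. A minimal sketch, assuming the `packaging` library and that the package is installed, of checking an installed distribution against such a range (`meets` is a hypothetical helper, not part of the project):

from importlib.metadata import version
from packaging.requirements import Requirement

def meets(requirement: str) -> bool:
    # Parse the PEP 508 string, e.g. "peft>=0.14.0,<=0.15.0".
    req = Requirement(requirement)
    # SpecifierSet membership handles ranges and wildcard pins like !=4.46.*
    return version(req.name) in req.specifier

print(meets("peft>=0.14.0,<=0.15.0"))  # True iff the installed peft falls in the range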


@@ -2346,6 +2346,10 @@ register_model_group(
         DownloadSource.DEFAULT: "Qwen/Qwen2.5-VL-7B-Instruct",
         DownloadSource.MODELSCOPE: "Qwen/Qwen2.5-VL-7B-Instruct",
     },
+    "Qwen2.5-VL-32B-Instruct": {
+        DownloadSource.DEFAULT: "Qwen/Qwen2.5-VL-32B-Instruct",
+        DownloadSource.MODELSCOPE: "Qwen/Qwen2.5-VL-32B-Instruct",
+    },
     "Qwen2.5-VL-72B-Instruct": {
         DownloadSource.DEFAULT: "Qwen/Qwen2.5-VL-72B-Instruct",
         DownloadSource.MODELSCOPE: "Qwen/Qwen2.5-VL-72B-Instruct",
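
Note: the hunk above follows the project's model-registry pattern. A simplified, self-contained sketch of that pattern (the enum values and registry shape here are illustrative; the actual register_model_group in constants.py does additional bookkeeping):

from enum import Enum, unique

@unique
class DownloadSource(str, Enum):
    DEFAULT = "hf"      # Hugging Face Hub
    MODELSCOPE = "ms"   # ModelScope mirror

SUPPORTED_MODELS: dict[str, dict[DownloadSource, str]] = {}

def register_model_group(models: dict[str, dict[DownloadSource, str]]) -> None:
    # Map each model name to its repository path on every supported hub.
    SUPPORTED_MODELS.update(models)

register_model_group({
    "Qwen2.5-VL-32B-Instruct": {
        DownloadSource.DEFAULT: "Qwen/Qwen2.5-VL-32B-Instruct",
        DownloadSource.MODELSCOPE: "Qwen/Qwen2.5-VL-32B-Instruct",
    },
})

Both hubs host this repo under the same path, so the two entries coincide; they differ for models mirrored under other namespaces.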


@@ -89,9 +89,9 @@ def check_version(requirement: str, mandatory: bool = False) -> None:
 def check_dependencies() -> None:
     r"""Check the version of the required packages."""
     check_version("transformers>=4.41.2,<=4.50.0,!=4.46.0,!=4.46.1,!=4.46.2,!=4.46.3,!=4.47.0,!=4.47.1,!=4.48.0")
-    check_version("datasets>=2.16.0,<=3.3.2")
-    check_version("accelerate>=0.34.0,<=1.4.0")
-    check_version("peft>=0.11.1,<=0.15.0")
+    check_version("datasets>=2.16.0,<=3.4.1")
+    check_version("accelerate>=0.34.0,<=1.5.2")
+    check_version("peft>=0.14.0,<=0.15.0")
     check_version("trl>=0.8.6,<=0.9.6")
     if is_transformers_version_greater_than("4.46.0") and not is_transformers_version_greater_than("4.48.1"):
         logger.warning_rank0_once("There are known bugs in transformers v4.46.0-v4.48.0, please use other versions.")
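
Note: the trailing guard warns on a half-open version window (4.46.0 <= v < 4.48.1). A sketch of the same check written against `packaging` directly; `in_window` is an illustrative helper, not the project's is_transformers_version_greater_than:

from importlib.metadata import version
from packaging.version import Version

def in_window(pkg: str, low: str, high: str) -> bool:
    # True iff low <= installed version < high.
    v = Version(version(pkg))
    return Version(low) <= v < Version(high)

if in_window("transformers", "4.46.0", "4.48.1"):
    print("There are known bugs in transformers v4.46.0-v4.48.0, please use other versions.")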


@@ -161,13 +161,12 @@ class PissaConvertCallback(TrainerCallback):
                 model.save_pretrained(pissa_backup_dir, safe_serialization=args.save_safetensors)
                 setattr(model.peft_config["default"], "init_lora_weights", init_lora_weights)
                 model.save_pretrained(
-                    pissa_convert_dir, safe_serialization=args.save_safetensors, convert_pissa_to_lora=pissa_init_dir
-                )  # TODO: use `path_initial_model_for_weight_conversion` (peft>=0.12.0)
+                    pissa_convert_dir,
+                    safe_serialization=args.save_safetensors,
+                    path_initial_model_for_weight_conversion=pissa_init_dir,
+                )
                 model.load_adapter(pissa_backup_dir, "default", is_trainable=True)
                 model.set_adapter("default")
-                if "pissa_init" in model.peft_config.keys():  # backward compatibility (peft<0.12.0)
-                    model.delete_adapter("pissa_init")
                 setattr(model.peft_config["default"], "init_lora_weights", init_lora_weights)
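
Note: with peft now pinned to >=0.14.0, the deprecated `convert_pissa_to_lora` keyword and its pre-0.12 "pissa_init" adapter cleanup can be dropped. For reference, a hedged sketch of the peft>=0.12 PiSSA workflow the new keyword belongs to; the base model, target modules, and directories are placeholders, and the training step is elided:

from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM

base = AutoModelForCausalLM.from_pretrained("facebook/opt-125m")
config = LoraConfig(init_lora_weights="pissa", r=16, lora_alpha=16, target_modules=["q_proj", "v_proj"])
model = get_peft_model(base, config)

# 1) Snapshot the freshly initialized PiSSA adapter before any training.
model.save_pretrained("pissa_init")

# ... fine-tune the adapter ...

# 2) Saving with `path_initial_model_for_weight_conversion` (peft>=0.12.0)
#    converts the trained PiSSA adapter into an equivalent plain LoRA
#    adapter by subtracting the initial weights; it replaces the
#    deprecated `convert_pissa_to_lora` argument the old code used.
model.save_pretrained("pissa_converted", path_initial_model_for_weight_conversion="pissa_init")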