fix some errors due to inconsistency of model cards
Former-commit-id: dd83265b9b8768eb8732f59ace128dfe4aac1c47
@@ -168,7 +168,7 @@ class HuggingfaceEngine(BaseEngine):
         for key, value in mm_inputs.items():
             value = value if isinstance(value, torch.Tensor) else torch.tensor(value)
             gen_kwargs[key] = value.to(model.device)
 
         return gen_kwargs, prompt_length
 
     @staticmethod
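For context, a minimal standalone sketch of the conversion in this hunk, using made-up input values: multimodal processors can return plain Python lists alongside tensors, so every entry is normalized to a tensor and moved to the model device before being passed as a generation kwarg.

import torch

# Hypothetical inputs: one value is already a tensor, the other is a nested list.
mm_inputs = {"pixel_values": torch.randn(1, 3, 336, 336), "image_sizes": [[336, 336]]}
device = torch.device("cpu")  # stand-in for model.device

gen_kwargs = {}
for key, value in mm_inputs.items():
    value = value if isinstance(value, torch.Tensor) else torch.tensor(value)
    gen_kwargs[key] = value.to(device)  # every entry ends up as a tensor on the target device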
@@ -1164,12 +1164,12 @@ register_model_group(
 
 register_model_group(
     models={
-        "Pixtral-12B": {
+        "Pixtral-12B-Instruct": {
             DownloadSource.DEFAULT: "mistral-community/pixtral-12b",
             DownloadSource.MODELSCOPE: "AI-ModelScope/pixtral-12b",
         }
     },
-    template="mistral",
+    template="pixtral",
     vision=True
 )
 
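As a rough sketch (assuming the registered DEFAULT checkpoint follows the Hugging Face llava-style layout and a recent transformers release), the renamed entry can be loaded with the generic llava classes rather than a Pixtral-specific one:

import torch
from transformers import AutoProcessor, LlavaForConditionalGeneration

model_id = "mistral-community/pixtral-12b"  # DownloadSource.DEFAULT above
processor = AutoProcessor.from_pretrained(model_id)
model = LlavaForConditionalGeneration.from_pretrained(
    model_id, torch_dtype=torch.bfloat16, device_map="auto"
)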
@@ -92,12 +92,10 @@ def autocast_projector_dtype(model: "PreTrainedModel", model_args: "ModelArgumen
 
     if getattr(model, "quantization_method", None):
         model_type = getattr(model.config, "model_type", None)
-        if model_type in ["llava", "llava_next", "llava_next_video", "paligemma", "video_llava"]:
+        if model_type in ["llava", "llava_next", "llava_next_video", "paligemma", "pixtral", "video_llava"]:
             mm_projector: "torch.nn.Module" = getattr(model, "multi_modal_projector")
         elif model_type == "qwen2_vl":
             mm_projector: "torch.nn.Module" = getattr(getattr(model, "visual"), "merger")
-        elif model_type == "pixtral":
-            mm_projector: "torch.nn.Module" = getattr(model, "vision_language_adapte")
         else:
             return
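The branch above only resolves which submodule holds the multimodal projector; the surrounding function then presumably keeps that projector's output in the training compute dtype when the backbone is quantized. A minimal sketch of the same idea, with a hypothetical _cast_output_hook helper and bfloat16 hard-coded as the compute dtype:

import torch

def _cast_output_hook(module: torch.nn.Module, args, output: torch.Tensor) -> torch.Tensor:
    # Hypothetical helper: cast the projector output back to the compute dtype.
    return output.to(torch.bfloat16)

def attach_projector_cast(model: torch.nn.Module) -> None:
    # Same dispatch as the patched branch: llava-style models (now including pixtral)
    # expose `multi_modal_projector`, while qwen2_vl keeps its merger under `visual`.
    model_type = getattr(model.config, "model_type", None)
    if model_type in ["llava", "llava_next", "llava_next_video", "paligemma", "pixtral", "video_llava"]:
        mm_projector = getattr(model, "multi_modal_projector")
    elif model_type == "qwen2_vl":
        mm_projector = getattr(getattr(model, "visual"), "merger")
    else:
        return
    mm_projector.register_forward_hook(_cast_output_hook)  # standard PyTorch forward post-hook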
@@ -133,7 +131,6 @@ def get_forbidden_modules(config: "PretrainedConfig", finetuning_args: "Finetuni
     if model_type in ["llava", "llava_next", "llava_next_video", "paligemma", "pixtral", "video_llava"]:
         if finetuning_args.freeze_vision_tower:
             forbidden_modules.add("vision_tower")
-            forbidden_modules.add("vision_encoder")
 
         if finetuning_args.train_mm_proj_only:
             forbidden_modules.add("language_model")
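These names appear to act as a filter when trainable (e.g. LoRA target) modules are collected: anything living under a forbidden prefix such as vision_tower or language_model is skipped. A rough sketch of that filtering, with a hypothetical find_linear_targets helper:

import torch

def find_linear_targets(model: torch.nn.Module, forbidden_modules: set) -> list:
    # Hypothetical helper: collect linear-layer names, skipping forbidden submodules.
    target_modules = []
    for name, module in model.named_modules():
        if any(forbidden in name for forbidden in forbidden_modules):
            continue  # e.g. frozen vision tower, or the language model when training the projector only
        if isinstance(module, torch.nn.Linear):
            target_modules.append(name)
    return target_modules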