add some
Former-commit-id: fede563aeb716ba5d1e368fd3e1182e4e580d248
This commit is contained in:
@@ -100,7 +100,7 @@ def load_tokenizer(model_args: "ModelArguments") -> "TokenizerModule":
         processor = AutoProcessor.from_pretrained(model_args.model_name_or_path, **init_kwargs)
         patch_processor(processor, config, tokenizer, model_args)
     except Exception as e:
-        logger.debug(f"Processor was not found: {e}.")
+        logger.info(f"Processor was not found: {e}.")
         processor = None

     # Avoid load tokenizer, see:
||||
@@ -46,6 +46,9 @@ def find_all_linear_modules(model: "PreTrainedModel", freeze_vision_tower: bool)
         forbidden_modules.add("visual")
     elif model_type in ["minicpmv"]:
         forbidden_modules.add("vpm")
+        forbidden_modules.add("apm")
+        forbidden_modules.add("resampler")
+        forbidden_modules.add("tts")
     else:
         forbidden_modules.add("vision_tower")
||||
@@ -145,7 +145,11 @@ def get_forbidden_modules(config: "PretrainedConfig", finetuning_args: "Finetuni
     elif model_type == "minicpmv":
         if finetuning_args.freeze_vision_tower:
+            print("******************", model_type)
+            forbidden_modules.add("vpm")
+            forbidden_modules.add("apm")
+            forbidden_modules.add("resampler")
+            forbidden_modules.add("tts")

     return forbidden_modules
Reference in New Issue
Block a user