Fix ModelScope data hub

Former-commit-id: 5b63e8c22538a4788e4b6c8df50e6e6be93ceeac
This commit is contained in:
hiyouga
2023-12-12 18:33:06 +08:00
parent 7c9f37c83d
commit c27675f70d
5 changed files with 49 additions and 38 deletions

View File

@@ -44,12 +44,12 @@ def _verify_model_args(model_args: "ModelArguments", finetuning_args: "Finetunin
if model_args.quantization_bit is not None and finetuning_args.finetuning_type != "lora":
raise ValueError("Quantization is only compatible with the LoRA method.")
if (
model_args.checkpoint_dir is not None
and len(model_args.checkpoint_dir) != 1
and finetuning_args.finetuning_type != "lora"
):
raise ValueError("Multiple checkpoints are only available for LoRA tuning.")
if model_args.checkpoint_dir is not None and len(model_args.checkpoint_dir) != 1:
if finetuning_args.finetuning_type != "lora":
raise ValueError("Multiple checkpoints are only available for LoRA tuning.")
if model_args.quantization_bit is not None:
raise ValueError("Quantized model only accepts a single checkpoint. Merge them first.")
def parse_train_args(args: Optional[Dict[str, Any]] = None) -> _TRAIN_CLS: