fix modelscope data hub

Former-commit-id: 5b63e8c22538a4788e4b6c8df50e6e6be93ceeac
Author: hiyouga
Date: 2023-12-12 18:33:06 +08:00
Parent: 7c9f37c83d
Commit: c27675f70d
5 changed files with 49 additions and 38 deletions


@@ -66,8 +66,8 @@ def init_adapter(
         if model_args.checkpoint_dir is not None:
             is_mergeable = True
-            if getattr(model, "quantization_method", None) == "gptq":
-                assert len(model_args.checkpoint_dir) == 1, "GPTQ quantized model only accepts a single checkpoint."
+            if getattr(model, "quantization_method", None):  # merge lora in quantized model is unstable
+                assert len(model_args.checkpoint_dir) == 1, "Quantized model only accepts a single checkpoint."
                 is_mergeable = False
 
             if (is_trainable and finetuning_args.resume_lora_training) or (not is_mergeable):
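The hunk above widens the single-checkpoint guard from GPTQ-only to any quantized model, since merging LoRA weights into a quantized base is unstable. Below is a minimal sketch of the resulting behavior; check_mergeable and the SimpleNamespace objects are illustrative stand-ins, not part of the project's code.

# Hedged sketch of the broadened guard; "model" objects are stand-ins.
from types import SimpleNamespace

def check_mergeable(model, checkpoint_dir):
    is_mergeable = True
    if getattr(model, "quantization_method", None):  # any method: gptq, bnb, awq, ...
        assert len(checkpoint_dir) == 1, "Quantized model only accepts a single checkpoint."
        is_mergeable = False  # keep LoRA weights separate instead of merging
    return is_mergeable

print(check_mergeable(SimpleNamespace(quantization_method="bitsandbytes"), ["ckpt-1"]))       # False
print(check_mergeable(SimpleNamespace(quantization_method=None), ["ckpt-1", "ckpt-2"]))       # True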


@@ -1,4 +1,3 @@
-import os
 import math
 import torch
 from types import MethodType
@@ -13,7 +12,6 @@ from transformers import (
     PreTrainedModel,
     PreTrainedTokenizerBase
 )
-from transformers.models.llama import modeling_llama as LlamaModule
 from transformers.utils.versions import require_version
 from trl import AutoModelForCausalLMWithValueHead


@@ -44,12 +44,12 @@ def _verify_model_args(model_args: "ModelArguments", finetuning_args: "FinetuningArguments"
     if model_args.quantization_bit is not None and finetuning_args.finetuning_type != "lora":
         raise ValueError("Quantization is only compatible with the LoRA method.")
 
-    if (
-        model_args.checkpoint_dir is not None
-        and len(model_args.checkpoint_dir) != 1
-        and finetuning_args.finetuning_type != "lora"
-    ):
-        raise ValueError("Multiple checkpoints are only available for LoRA tuning.")
+    if model_args.checkpoint_dir is not None and len(model_args.checkpoint_dir) != 1:
+        if finetuning_args.finetuning_type != "lora":
+            raise ValueError("Multiple checkpoints are only available for LoRA tuning.")
+
+        if model_args.quantization_bit is not None:
+            raise ValueError("Quantized model only accepts a single checkpoint. Merge them first.")
 
 
 def parse_train_args(args: Optional[Dict[str, Any]] = None) -> _TRAIN_CLS:
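The rewritten check above still limits multiple checkpoints to LoRA tuning and now also rejects multiple checkpoints when the model is quantized. A rough, self-contained sketch of that branching follows; the ModelArgs and FinetuneArgs dataclasses are minimal stand-ins for the project's ModelArguments and FinetuningArguments, not its actual API.

# Hedged sketch of the new validation; the dataclasses below are illustrative stand-ins.
from dataclasses import dataclass
from typing import List, Optional

@dataclass
class ModelArgs:
    checkpoint_dir: Optional[List[str]] = None
    quantization_bit: Optional[int] = None

@dataclass
class FinetuneArgs:
    finetuning_type: str = "lora"

def verify(model_args, finetuning_args):
    if model_args.checkpoint_dir is not None and len(model_args.checkpoint_dir) != 1:
        if finetuning_args.finetuning_type != "lora":
            raise ValueError("Multiple checkpoints are only available for LoRA tuning.")
        if model_args.quantization_bit is not None:
            raise ValueError("Quantized model only accepts a single checkpoint. Merge them first.")

verify(ModelArgs(checkpoint_dir=["a", "b"]), FinetuneArgs())  # multiple LoRA checkpoints still pass
try:
    verify(ModelArgs(checkpoint_dir=["a", "b"], quantization_bit=4), FinetuneArgs())  # now raises
except ValueError as err:
    print(err)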