lora modules: all by default

Former-commit-id: 52c4ae87c7f4312704c31ef26b079b2c5b95ea5f
Author: hiyouga
Date: 2024-06-06 03:53:28 +08:00
Parent: abc2a73a33
Commit: 937f49ec3d
23 changed files with 78 additions and 118 deletions


@@ -8,7 +8,6 @@ from yaml import safe_dump, safe_load
 from ..extras.constants import (
     CHECKPOINT_NAMES,
     DATA_CONFIG,
-    DEFAULT_MODULE,
     DEFAULT_TEMPLATE,
     PEFT_METHODS,
     STAGES_USE_PAIR_DATA,
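
For context, DEFAULT_MODULE was a registry in extras.constants mapping a model-family prefix to its default LoRA target modules; dropping the import here goes hand in hand with removing the get_module() lookup below. A rough sketch of the shape of such a registry follows; the entries shown are illustrative assumptions, not the actual contents of the constant.

# Rough sketch of the kind of mapping DEFAULT_MODULE provided; the entries
# below are illustrative examples, not the real table.
DEFAULT_MODULE = {
    "LLaMA2": "q_proj,v_proj",      # attention projections only
    "ChatGLM3": "query_key_value",  # fused QKV projection
}
# The old get_module() fell back to "all" for unknown prefixes; after this
# commit, "all" is simply used for every model.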
@@ -118,13 +117,6 @@ def get_model_info(model_name: str) -> Tuple[str, str, bool]:
     return get_model_path(model_name), get_template(model_name), get_visual(model_name)
 
 
-def get_module(model_name: str) -> str:
-    r"""
-    Gets the LoRA modules of this model.
-    """
-    return DEFAULT_MODULE.get(get_prefix(model_name), "all")
-
-
 def get_template(model_name: str) -> str:
     r"""
     Gets the template name if the model is a chat model.
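
With get_module() removed, the Web UI no longer selects per-model target modules; lora_target defaults to the string "all", which is resolved to every linear layer when the LoRA adapter is configured. Below is a minimal sketch of how such an "all" convention can be resolved; it is an illustration only, the function name and the lm_head exclusion rule are assumptions, not LLaMA-Factory's internal resolver.

# Minimal sketch (assumed names, not the project's code): resolving the
# "all" convention into concrete LoRA target module names.
import torch.nn as nn


def resolve_lora_targets(model: nn.Module, lora_target: str = "all") -> list:
    if lora_target != "all":
        # Explicit spec, e.g. "q_proj,v_proj" -> ["q_proj", "v_proj"]
        return [name.strip() for name in lora_target.split(",")]

    # "all": take the leaf name of every nn.Linear, skipping the output head
    # so the LM head itself is not wrapped with LoRA.
    names = set()
    for full_name, module in model.named_modules():
        if isinstance(module, nn.Linear) and "lm_head" not in full_name:
            names.add(full_name.split(".")[-1])
    return sorted(names)

The resulting leaf names (for example "q_proj", "k_proj", "down_proj") are the kind of list a PEFT LoraConfig receives as target_modules.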


@@ -8,7 +8,7 @@ from transformers.trainer import TRAINING_ARGS_NAME
 
 from ..extras.constants import PEFT_METHODS, TRAINING_STAGES
 from ..extras.misc import is_gpu_or_npu_available, torch_gc
 from ..extras.packages import is_gradio_available
-from .common import DEFAULT_CACHE_DIR, get_module, get_save_dir, load_config
+from .common import DEFAULT_CACHE_DIR, get_save_dir, load_config
 from .locales import ALERTS
 from .utils import abort_leaf_process, gen_cmd, get_eval_results, get_trainer_info, load_args, save_args, save_cmd
@@ -159,7 +159,7 @@ class Runner:
         args["create_new_adapter"] = get("train.create_new_adapter")
         args["use_rslora"] = get("train.use_rslora")
         args["use_dora"] = get("train.use_dora")
-        args["lora_target"] = get("train.lora_target") or get_module(model_name)
+        args["lora_target"] = get("train.lora_target") or "all"
         args["additional_target"] = get("train.additional_target") or None
 
         if args["use_llama_pro"]: