update arg name
Former-commit-id: 1509ed550b2060f946ce20e3c5a9e5c49e86e3ab
@@ -29,11 +29,12 @@ from .model_utils.checkpointing import prepare_model_for_training
 from .model_utils.embedding import resize_embedding_layer
 from .model_utils.longlora import configure_longlora
 from .model_utils.moe import add_z3_leaf_module, configure_moe
+from .model_utils.packing import configure_packing
 from .model_utils.quantization import configure_quantization
 from .model_utils.rope import configure_rope
 from .model_utils.valuehead import prepare_valuehead_model
 from .model_utils.visual import autocast_projector_dtype, configure_visual_model
-from .model_utils.packing import configure_packing
+
 
 if TYPE_CHECKING:
     from transformers import PretrainedConfig, PreTrainedTokenizer
@@ -73,6 +74,7 @@ def patch_config(
     configure_quantization(config, tokenizer, model_args, init_kwargs)
     configure_moe(config, model_args, is_trainable)
     configure_visual_model(config)
+    configure_packing(config, model_args, is_trainable)
 
     if model_args.use_cache and not is_trainable:
         setattr(config, "use_cache", True)
@@ -101,9 +103,6 @@ def patch_config(
 
     if init_kwargs.get("device_map", None) == "auto":
         init_kwargs["offload_folder"] = model_args.offload_folder
 
-    if model_args.efficient_packing:
-        configure_packing(config, model_args)
-
 
 def patch_model(
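For context, a minimal sketch of what the updated configure_packing in model_utils/packing.py plausibly looks like after this change, assuming the efficient_packing gate simply moved from patch_config into the helper along with the new is_trainable argument; the body and import paths below are illustrative, not part of this diff:

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from transformers import PretrainedConfig

    from ...hparams import ModelArguments  # import path assumed


def configure_packing(config: "PretrainedConfig", model_args: "ModelArguments", is_trainable: bool) -> None:
    # Hypothetical sketch, not part of this commit: the caller no longer
    # checks model_args.efficient_packing, so the helper is assumed to
    # gate on that flag and on is_trainable itself before doing anything.
    if not is_trainable or not getattr(model_args, "efficient_packing", False):
        return

    # ... patch the attention implementation for packed sequences here ...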