[trainer] Add Muon Optimizer (#7749)
Co-authored-by: hoshi-hiyouga <hiyouga@buaa.edu.cn>
@@ -411,6 +411,10 @@ class FinetuningArguments(
         default=False,
         metadata={"help": "Whether or not to use the Adam-mini optimizer."},
     )
+    use_muon: bool = field(
+        default=False,
+        metadata={"help": "Whether or not to use the Muon optimizer."},
+    )
     freeze_vision_tower: bool = field(
         default=True,
         metadata={"help": "Whether ot not to freeze the vision tower in MLLM training."},
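The new use_muon flag mirrors the existing optimizer switches such as use_adam_mini: it is a plain boolean on FinetuningArguments, off by default, that downstream trainer code can branch on when building the optimizer. The sketch below shows one way such a branch could look. It is illustrative only: the create_optimizers helper, the "from muon import Muon" import path, and the Muon constructor arguments are assumptions, not the code added by this commit.

import torch
from torch.optim import AdamW


def create_optimizers(model: torch.nn.Module, finetuning_args, lr: float = 1e-4):
    """Illustrative only: one way trainer code could branch on the new flag."""
    if not finetuning_args.use_muon:
        return [AdamW(model.parameters(), lr=lr)]

    # Muon-style optimizers are usually applied only to 2-D weight matrices,
    # while embeddings, norms, and biases stay on a conventional AdamW.
    matrix_params = [p for p in model.parameters() if p.requires_grad and p.ndim >= 2]
    other_params = [p for p in model.parameters() if p.requires_grad and p.ndim < 2]

    from muon import Muon  # hypothetical import; the real package and API may differ

    return [Muon(matrix_params, lr=lr), AdamW(other_params, lr=lr)]

Splitting the parameters this way is the usual reason Muon is exposed as an opt-in flag rather than a drop-in replacement: it complements AdamW instead of replacing it outright.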
@@ -153,7 +153,7 @@ def _check_extra_dependencies(
     elif model_args.infer_backend == EngineName.SGLANG:
         check_version("sglang>=0.4.4")
         check_version("sglang", mandatory=True)

     if finetuning_args.use_galore:
         check_version("galore_torch", mandatory=True)
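For context, check_version in the hunk above takes a requirement string (optionally version-pinned, e.g. "sglang>=0.4.4") and a mandatory flag. The helper below is a self-contained sketch with the same call shape, written from scratch for illustration; LLaMA-Factory's own implementation and its exact mandatory/non-mandatory policy may differ.

import importlib.metadata
import logging

from packaging.requirements import Requirement
from packaging.version import Version

logger = logging.getLogger(__name__)


def check_version(requirement: str, mandatory: bool = False) -> None:
    """Sketch of a requirement check; assumed policy: raise when mandatory, warn otherwise."""
    req = Requirement(requirement)
    try:
        installed = Version(importlib.metadata.version(req.name))
    except importlib.metadata.PackageNotFoundError:
        if mandatory:
            raise ImportError(f"{req.name} is required but not installed.")
        logger.warning("%s is not installed.", req.name)
        return

    if req.specifier and installed not in req.specifier:
        message = f"{req.name}{req.specifier} is required, but found {installed}."
        if mandatory:
            raise ImportError(message)
        logger.warning(message)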