Merge pull request #4352 from Ledzy/main
[Enhancement] Support ZeRO-3 when using BAdam

Former-commit-id: 0dc75275efa7d7540b472783a52ea6aeaa503c0b
@@ -214,13 +214,15 @@ def get_train_args(args: Optional[Dict[str, Any]] = None) -> _TRAIN_CLS:
     if (
         finetuning_args.use_badam
-        and finetuning_args.badam_mode == "layer"
-        and training_args.parallel_mode == ParallelMode.DISTRIBUTED
+        and training_args.parallel_mode.value == "distributed"
     ):
-        raise ValueError("Layer-wise BAdam does not yet support distributed training, use ratio-wise BAdam.")
+        if finetuning_args.badam_mode == "ratio":
+            raise ValueError("Ratio-wise BAdam does not yet support distributed training, use layer-wise BAdam: --badam_mode layer")
+        if finetuning_args.badam_mode == "layer" and not is_deepspeed_zero3_enabled():
+            raise ValueError("Layer-wise BAdam only supports DeepSpeed ZeRO 3 stage.")
 
-    if (finetuning_args.use_galore or finetuning_args.use_badam) and training_args.deepspeed is not None:
-        raise ValueError("GaLore and BAdam are incompatible with DeepSpeed yet.")
+    if finetuning_args.use_galore and training_args.deepspeed is not None:
+        raise ValueError("GaLore is incompatible with DeepSpeed yet.")
 
     if model_args.infer_backend == "vllm":
         raise ValueError("vLLM backend is only available for API, CLI and Web.")
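The guard added here is easy to exercise in isolation. Below is a minimal, hedged sketch of the same validation flow, assuming only the standard transformers helpers (is_deepspeed_zero3_enabled, ParallelMode) that the parser already imports; the function name check_badam_distributed and its flat-argument signature are illustrative, not part of this PR:

from transformers.integrations import is_deepspeed_zero3_enabled
from transformers.training_args import ParallelMode

def check_badam_distributed(use_badam: bool, badam_mode: str, parallel_mode: ParallelMode) -> None:
    """Mirror the new guard: distributed BAdam is allowed only layer-wise under DeepSpeed ZeRO-3."""
    if use_badam and parallel_mode.value == "distributed":
        # Ratio-wise block selection still has no distributed implementation.
        if badam_mode == "ratio":
            raise ValueError(
                "Ratio-wise BAdam does not yet support distributed training, "
                "use layer-wise BAdam: --badam_mode layer"
            )
        # Layer-wise BAdam relies on ZeRO-3 parameter partitioning.
        if badam_mode == "layer" and not is_deepspeed_zero3_enabled():
            raise ValueError("Layer-wise BAdam only supports DeepSpeed ZeRO 3 stage.")

As I understand it, is_deepspeed_zero3_enabled() reflects the DeepSpeed config that TrainingArguments has already processed, so passing a ZeRO-3 config (one whose zero_optimization stage is 3) via --deepspeed is what makes the layer-wise path legal here.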