add moe aux loss control #3085

Former-commit-id: c9187ebc944e2de454ace3304b7d28eabb1b1a81
Author: hiyouga
Date: 2024-04-02 14:26:31 +08:00
Parent: 03e20bb5c6
Commit: 117b67ea30
4 changed files with 23 additions and 16 deletions
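
The hunks below only show the loader refactor in this file; the MoE aux-loss control itself lands in the other changed files, which are not part of this excerpt. For context, here is a minimal sketch of how an auxiliary router-loss coefficient is typically exposed for a Mixtral-style model in Hugging Face Transformers. The `ModelArguments.moe_aux_loss_coef` field and the `apply_moe_aux_loss` helper are hypothetical names used for illustration only; `output_router_logits` and `router_aux_loss_coef` are real `MixtralConfig` fields.

from dataclasses import dataclass
from typing import Optional

from transformers import MixtralConfig


@dataclass
class ModelArguments:
    # Hypothetical user-facing knob; None means "keep the config default".
    moe_aux_loss_coef: Optional[float] = None


def apply_moe_aux_loss(config: MixtralConfig, model_args: ModelArguments, is_trainable: bool) -> None:
    # The load-balancing (auxiliary) loss only matters during training, and it
    # requires the model to return per-layer router logits from forward().
    if not is_trainable:
        return
    config.output_router_logits = True
    if model_args.moe_aux_loss_coef is not None:
        # Weight of the auxiliary loss that is added to the language-modeling loss.
        config.router_aux_loss_coef = model_args.moe_aux_loss_coef


config = MixtralConfig()
apply_moe_aux_loss(config, ModelArguments(moe_aux_loss_coef=0.001), is_trainable=True)
print(config.output_router_logits, config.router_aux_loss_coef)  # True 0.001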


@@ -20,6 +20,7 @@ logger = get_logger(__name__)
 def _get_init_kwargs(model_args: "ModelArguments") -> Dict[str, Any]:
+    model_args.model_name_or_path = try_download_model_from_ms(model_args)
     return {
         "trust_remote_code": True,
         "cache_dir": model_args.cache_dir,
@@ -34,9 +35,7 @@ def load_tokenizer(model_args: "ModelArguments") -> "PreTrainedTokenizer":
     Note: including inplace operation of model_args.
     """
-    try_download_model_from_ms(model_args)
     init_kwargs = _get_init_kwargs(model_args)
     tokenizer = AutoTokenizer.from_pretrained(
         model_args.model_name_or_path,
         use_fast=model_args.use_fast_tokenizer,
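
Taken together, the two hunks consolidate checkpoint resolution: `_get_init_kwargs` now resolves `model_name_or_path` through the ModelScope download helper and returns the kwargs shared by every `from_pretrained` call, so the standalone `try_download_model_from_ms` call in `load_tokenizer` goes away. Below is a condensed, self-contained sketch of the resulting pattern, with a trimmed-down stand-in for the real `ModelArguments` dataclass; the exact set of init kwargs beyond those shown in the hunk is illustrative.

from dataclasses import dataclass
from typing import Any, Dict, Optional

from transformers import AutoTokenizer, PreTrainedTokenizer


@dataclass
class ModelArguments:
    model_name_or_path: str
    cache_dir: Optional[str] = None
    model_revision: str = "main"
    hf_hub_token: Optional[str] = None
    use_fast_tokenizer: bool = True


def _get_init_kwargs(model_args: ModelArguments) -> Dict[str, Any]:
    # One place that prepares the kwargs shared by every from_pretrained call;
    # the real helper also re-resolves model_name_or_path via ModelScope first.
    return {
        "trust_remote_code": True,
        "cache_dir": model_args.cache_dir,
        "revision": model_args.model_revision,
        "token": model_args.hf_hub_token,
    }


def load_tokenizer(model_args: ModelArguments) -> PreTrainedTokenizer:
    init_kwargs = _get_init_kwargs(model_args)
    return AutoTokenizer.from_pretrained(
        model_args.model_name_or_path,
        use_fast=model_args.use_fast_tokenizer,
        **init_kwargs,
    )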