update loader

Former-commit-id: 080d8eab858217ca58bffe719d5ffde7579c5bda
hiyouga
2023-12-24 19:10:23 +08:00
parent 940403720a
commit 921f593632
6 changed files with 67 additions and 68 deletions


@@ -20,11 +20,11 @@ class ModelArguments:
     )
     use_fast_tokenizer: Optional[bool] = field(
         default=False,
-        metadata={"help": "Whether to use one of the fast tokenizer (backed by the tokenizers library) or not."}
+        metadata={"help": "Whether or not to use one of the fast tokenizer (backed by the tokenizers library)."}
     )
     resize_vocab: Optional[bool] = field(
         default=False,
-        metadata={"help": "Whether to resize the tokenizer vocab and the embedding layers."}
+        metadata={"help": "Whether or not to resize the tokenizer vocab and the embedding layers."}
     )
     split_special_tokens: Optional[bool] = field(
         default=False,
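
These Optional[bool] fields follow the field(metadata={"help": ...}) pattern that transformers' HfArgumentParser consumes: each dataclass field becomes a command-line flag, and its help string becomes the flag's --help text. A minimal, self-contained sketch; the single-field dataclass and the flag value are illustrative, not the project's full ModelArguments:

    from dataclasses import dataclass, field
    from typing import Optional

    from transformers import HfArgumentParser


    @dataclass
    class ModelArguments:
        use_fast_tokenizer: Optional[bool] = field(
            default=False,
            metadata={"help": "Whether or not to use one of the fast tokenizer (backed by the tokenizers library)."}
        )


    # Each dataclass field becomes a CLI flag; the metadata "help" string
    # becomes that flag's description in --help output.
    parser = HfArgumentParser(ModelArguments)
    (model_args,) = parser.parse_args_into_dataclasses(args=["--use_fast_tokenizer", "true"])
    print(model_args.use_fast_tokenizer)  # True
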
@@ -44,11 +44,11 @@ class ModelArguments:
     )
     double_quantization: Optional[bool] = field(
         default=True,
-        metadata={"help": "Whether to use double quantization in int4 training or not."}
+        metadata={"help": "Whether or not to use double quantization in int4 training."}
     )
     rope_scaling: Optional[Literal["linear", "dynamic"]] = field(
         default=None,
-        metadata={"help": "Adopt scaled rotary positional embeddings."}
+        metadata={"help": "Which scaling strategy should be adopted for the RoPE embeddings."}
     )
     flash_attn: Optional[bool] = field(
         default=False,
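
The reworded rope_scaling help string matches how such a field is typically consumed: the chosen strategy is written into the model config's rope_scaling dict that transformers reads at load time. A rough sketch, assuming a Llama-style config; the model name and the scaling factor of 2.0 are illustrative, since the real factor would be derived from the target versus pretrained context length:

    from transformers import AutoConfig

    config = AutoConfig.from_pretrained("meta-llama/Llama-2-7b-hf")  # illustrative model
    rope_scaling = "linear"  # or "dynamic", per the Literal above

    # transformers expects a dict naming the strategy and a scaling factor.
    setattr(config, "rope_scaling", {"type": rope_scaling, "factor": 2.0})
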
@@ -60,7 +60,15 @@ class ModelArguments:
     )
     use_unsloth: Optional[bool] = field(
         default=False,
-        metadata={"help": "Whether to use unsloth's optimization for LoRA training."}
+        metadata={"help": "Whether or not to use unsloth's optimization for the LoRA training."}
     )
+    disable_gradient_checkpointing: Optional[bool] = field(
+        default=False,
+        metadata={"help": "Whether or not to disable gradient checkpointing."}
+    )
+    upcast_layernorm: Optional[bool] = field(
+        default=False,
+        metadata={"help": "Whether or not to upcast the layernorm weights in fp32."}
+    )
     hf_hub_token: Optional[str] = field(
         default=None,
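
The two added fields gate loader behavior rather than define it here: disable_gradient_checkpointing presumably skips the usual model.gradient_checkpointing_enable() call, and upcast_layernorm requests the common mixed-precision recipe of casting layernorm weights to fp32 for numerical stability. A minimal sketch of that recipe, where the name patterns "norm" and "ln" are assumptions about how the layers are named:

    import torch

    LAYERNORM_NAMES = ("norm", "ln")  # assumed substrings of layernorm parameter names


    def upcast_layernorm(model: torch.nn.Module) -> None:
        # Layernorm weights and biases are 1-D tensors; cast only those to
        # fp32 so the rest of the model keeps its (b)f16 dtype.
        for name, param in model.named_parameters():
            if param.ndim == 1 and any(key in name for key in LAYERNORM_NAMES):
                param.data = param.data.to(torch.float32)
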