fix resize vocab at inference #3022

Former-commit-id: c243720b89eec0af2872fa3c7980a0026d893f4d
This commit is contained in:
hiyouga
2024-04-03 18:14:24 +08:00
parent f6530222f7
commit 1348f7d860
9 changed files with 31 additions and 40 deletions

View File

@@ -1,10 +1,9 @@
from .loader import load_model, load_model_and_tokenizer, load_tokenizer
from .loader import load_model, load_tokenizer
from .utils import find_all_linear_modules, load_valuehead_params
__all__ = [
"load_model",
"load_model_and_tokenizer",
"load_tokenizer",
"load_valuehead_params",
"find_all_linear_modules",

View File

@@ -1,4 +1,4 @@
from typing import TYPE_CHECKING, Any, Dict, Tuple
from typing import TYPE_CHECKING, Any, Dict
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer
from trl import AutoModelForCausalLMWithValueHead
@@ -133,17 +133,3 @@ def load_model(
)
return model
def load_model_and_tokenizer(
    model_args: "ModelArguments",
    finetuning_args: "FinetuningArguments",
    is_trainable: bool = False,
    add_valuehead: bool = False,
) -> Tuple["PreTrainedModel", "PreTrainedTokenizer"]:
    r"""
    Loads pretrained model and tokenizer.

    Convenience wrapper: builds the tokenizer first, then constructs the
    model on top of it, and hands both back to the caller as a pair.
    """
    # The tokenizer must exist before the model, since load_model needs it.
    tokenizer = load_tokenizer(model_args)
    return (
        load_model(tokenizer, model_args, finetuning_args, is_trainable, add_valuehead),
        tokenizer,
    )