fix bug: ignore a cached model path that duplicates an official path in get_model_path
Former-commit-id: 6d483e76141420e0cb577541e6e1794c20f025f6
@@ -11,7 +11,7 @@ from transformers.utils import (
     ADAPTER_SAFE_WEIGHTS_NAME
 )
 
-from llmtuner.extras.constants import DEFAULT_MODULE, DEFAULT_TEMPLATE, SUPPORTED_MODELS, TRAINING_STAGES
+from llmtuner.extras.constants import DEFAULT_MODULE, DEFAULT_TEMPLATE, SUPPORTED_MODELS, ALL_OFFICIAL_MODELS, TRAINING_STAGES
 
 
 DEFAULT_CACHE_DIR = "cache"
@@ -58,7 +58,10 @@ def save_config(lang: str, model_name: Optional[str] = None, model_path: Optiona
 
 def get_model_path(model_name: str) -> str:
     user_config = load_config()
-    return user_config["path_dict"].get(model_name, None) or SUPPORTED_MODELS.get(model_name, "")
+    cached_path = user_config["path_dict"].get(model_name, None)
+    if cached_path in ALL_OFFICIAL_MODELS.get(model_name, []):
+        cached_path = None
+    return cached_path or SUPPORTED_MODELS.get(model_name, "")
 
 
 def get_prefix(model_name: str) -> str:
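For illustration, a self-contained sketch of the fixed lookup. The SUPPORTED_MODELS / ALL_OFFICIAL_MODELS entries and the inline user_config below are hypothetical stand-ins for the real tables in llmtuner.extras.constants and the config returned by load_config(); only the shapes implied by the diff (name -> path string, name -> list of official paths) are assumed.

from typing import Dict, List

# Hypothetical stand-ins: SUPPORTED_MODELS maps a model name to its current
# default hub path; ALL_OFFICIAL_MODELS maps it to every official path.
SUPPORTED_MODELS: Dict[str, str] = {"LLaMA2-7B": "meta-llama/Llama-2-7b-hf"}
ALL_OFFICIAL_MODELS: Dict[str, List[str]] = {
    "LLaMA2-7B": ["Llama-2-7b-hf", "meta-llama/Llama-2-7b-hf"]
}

# Stand-in for load_config(): the user's cache holds a stale official path.
user_config = {"path_dict": {"LLaMA2-7B": "Llama-2-7b-hf"}}

def get_model_path(model_name: str) -> str:
    cached_path = user_config["path_dict"].get(model_name, None)
    # The fix: a cached path that merely repeats an official path is
    # discarded, so an updated SUPPORTED_MODELS entry takes precedence.
    if cached_path in ALL_OFFICIAL_MODELS.get(model_name, []):
        cached_path = None
    return cached_path or SUPPORTED_MODELS.get(model_name, "")

print(get_model_path("LLaMA2-7B"))
# With the fix: "meta-llama/Llama-2-7b-hf" (the current official path).
# Before the fix, the stale cached "Llama-2-7b-hf" would have been returned.
# A genuinely custom local path is still honored, since it never appears in
# the official list.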