[webui] fix abort finish (#8569)

Yaowei Zheng
2025-07-07 23:07:46 +08:00
committed by GitHub
parent 12ed792db9
commit 7f8e5f52f9
3 changed files with 23 additions and 16 deletions

@@ -23,6 +23,7 @@ from typing import TYPE_CHECKING, Any, Literal, Union
 import torch
 import torch.distributed as dist
 import transformers.dynamic_module_utils
+from huggingface_hub.utils import WeakFileLock
 from transformers import InfNanRemoveLogitsProcessor, LogitsProcessorList
 from transformers.dynamic_module_utils import get_relative_imports
 from transformers.utils import (
@@ -277,21 +278,27 @@ def try_download_model_from_other_hub(model_args: "ModelArguments") -> str:
         api.login(model_args.ms_hub_token)
 
         revision = "master" if model_args.model_revision == "main" else model_args.model_revision
-        return snapshot_download(
-            model_args.model_name_or_path,
-            revision=revision,
-            cache_dir=model_args.cache_dir,
-        )
+        with WeakFileLock(os.path.abspath(os.path.expanduser("~/.cache/llamafactory/modelscope.lock"))):
+            model_path = snapshot_download(
+                model_args.model_name_or_path,
+                revision=revision,
+                cache_dir=model_args.cache_dir,
+            )
+
+        return model_path
 
     if use_openmind():
         check_version("openmind>=0.8.0", mandatory=True)
         from openmind.utils.hub import snapshot_download  # type: ignore
 
-        return snapshot_download(
-            model_args.model_name_or_path,
-            revision=model_args.model_revision,
-            cache_dir=model_args.cache_dir,
-        )
+        with WeakFileLock(os.path.abspath(os.path.expanduser("~/.cache/llamafactory/openmind.lock"))):
+            model_path = snapshot_download(
+                model_args.model_name_or_path,
+                revision=model_args.model_revision,
+                cache_dir=model_args.cache_dir,
+            )
+
+        return model_path
 
 
 def use_modelscope() -> bool:
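
The change above wraps each third-party hub download in a WeakFileLock so that processes launched together (for example, one per GPU rank) do not race to populate the same cache directory. Below is a minimal, self-contained sketch of the same pattern; it uses huggingface_hub's own snapshot_download for brevity rather than the ModelScope/openMind helpers patched in the diff, and the helper name and lock path are illustrative only, not part of this commit.

import os
from typing import Optional

from huggingface_hub import snapshot_download
from huggingface_hub.utils import WeakFileLock


def locked_snapshot_download(repo_id: str, cache_dir: Optional[str] = None) -> str:
    # Illustrative lock path; any path shared by the cooperating processes works.
    lock_file = os.path.abspath(os.path.expanduser("~/.cache/llamafactory/hf.lock"))
    os.makedirs(os.path.dirname(lock_file), exist_ok=True)  # make sure the lock directory exists
    with WeakFileLock(lock_file):  # the first process downloads; the others block here
        model_path = snapshot_download(repo_id, cache_dir=cache_dir)

    # The lock is released when the block exits, so waiting processes re-run the
    # download call and hit the now-populated cache instead of fetching again.
    return model_path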