[deps] upgrade vllm (#6857)
Former-commit-id: 4bd50f65a3d62528768561019fda2723d045c7fd
@@ -24,7 +24,7 @@ from .chat.chat_model import run_chat
 from .eval.evaluator import run_eval
 from .extras import logging
 from .extras.env import VERSION, print_env
-from .extras.misc import get_device_count, use_ray
+from .extras.misc import get_device_count, is_env_enabled, use_ray
 from .train.tuner import export_model, run_exp
 from .webui.interface import run_web_demo, run_web_ui

@@ -86,7 +86,7 @@ def main():
     elif command == Command.EXPORT:
         export_model()
     elif command == Command.TRAIN:
-        force_torchrun = os.getenv("FORCE_TORCHRUN", "0").lower() in ["true", "1"]
+        force_torchrun = is_env_enabled("FORCE_TORCHRUN")
         if force_torchrun or (get_device_count() > 1 and not use_ray()):
             master_addr = os.getenv("MASTER_ADDR", "127.0.0.1")
             master_port = os.getenv("MASTER_PORT", str(random.randint(20001, 29999)))
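Note: the `is_env_enabled` helper imported from `extras.misc` is not shown in this diff. As a minimal sketch, assuming it only generalizes the inline truthy-string check it replaces (the `default` parameter and the exact accepted values are assumptions, not confirmed by this commit), it could look like:

import os


def is_env_enabled(env_var: str, default: str = "0") -> bool:
    # Sketch only: treat the variable as enabled when it holds a truthy string,
    # mirroring the removed expression
    # os.getenv("FORCE_TORCHRUN", "0").lower() in ["true", "1"].
    return os.getenv(env_var, default).lower() in ["true", "1"]

Centralizing the check in one helper lets other call sites reuse the same environment-flag convention instead of repeating the string comparison.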