add llamafactory-cli env

Former-commit-id: 1df077184845ff5f394b9324d46f8c382869e590
This commit is contained in:
hiyouga
2024-06-06 01:28:14 +08:00
parent 556a4aa972
commit 875eef45f3
4 changed files with 65 additions and 5 deletions

View File

@@ -0,0 +1,54 @@
import platform
import accelerate
import datasets
import peft
import torch
import transformers
import trl
from transformers.utils import is_bitsandbytes_available, is_torch_cuda_available, is_torch_npu_available
from .packages import is_deepspeed_available, is_vllm_available
VERSION = "0.7.2.dev0"
def print_env() -> None:
    """Print the versions of llamafactory and its key dependencies.

    Emits one markdown-style bullet per entry (e.g. for pasting into bug
    reports). Hardware-specific entries (GPU/NPU) and optional packages
    (DeepSpeed, bitsandbytes, vLLM) are appended only when available.
    """
    env_report = {
        "`llamafactory` version": VERSION,
        "Platform": platform.platform(),
        "Python version": platform.python_version(),
        "PyTorch version": torch.__version__,
        "Transformers version": transformers.__version__,
        "Datasets version": datasets.__version__,
        "Accelerate version": accelerate.__version__,
        "PEFT version": peft.__version__,
        "TRL version": trl.__version__,
    }

    if is_torch_cuda_available():
        # Tag the torch entry so the report shows which accelerator build is in use.
        env_report["PyTorch version"] += " (GPU)"
        env_report["GPU type"] = torch.cuda.get_device_name()

    if is_torch_npu_available():
        env_report["PyTorch version"] += " (NPU)"
        env_report["NPU type"] = torch.npu.get_device_name()
        env_report["CANN version"] = torch.version.cann

    # Optional third-party packages: imported lazily so their absence is harmless.
    if is_deepspeed_available():
        import deepspeed  # type: ignore

        env_report["DeepSpeed version"] = deepspeed.__version__

    if is_bitsandbytes_available():
        import bitsandbytes

        env_report["Bitsandbytes version"] = bitsandbytes.__version__

    if is_vllm_available():
        import vllm

        env_report["vLLM version"] = vllm.__version__

    bullet_lines = ["- {}: {}".format(key, value) for key, value in env_report.items()]
    print("\n".join(bullet_lines) + "\n")

View File

@@ -20,6 +20,10 @@ def _get_package_version(name: str) -> "Version":
return version.parse("0.0.0")
def is_deepspeed_available():
    """Return True if the `deepspeed` package is installed."""
    found = _is_package_available("deepspeed")
    return found
def is_fastapi_available():
    """Return True if the `fastapi` package is installed."""
    found = _is_package_available("fastapi")
    return found