support rank0 logger
Former-commit-id: 84528eabe560091bfd866b6a0ca864085af7529b
@@ -19,6 +19,7 @@ from typing import Any, Dict, Optional, Tuple
 
 from yaml import safe_dump, safe_load
 
+from ..extras import logging
 from ..extras.constants import (
     CHECKPOINT_NAMES,
     DATA_CONFIG,
@@ -30,7 +31,6 @@ from ..extras.constants import (
     VISION_MODELS,
     DownloadSource,
 )
-from ..extras.logging import get_logger
 from ..extras.misc import use_modelscope, use_openmind
 from ..extras.packages import is_gradio_available
 
@@ -39,7 +39,7 @@ if is_gradio_available():
     import gradio as gr
 
 
-logger = get_logger(__name__)
+logger = logging.get_logger(__name__)
 
 
 DEFAULT_CACHE_DIR = "cache"
@@ -56,7 +56,7 @@ def get_save_dir(*paths: str) -> os.PathLike:
     Gets the path to saved model checkpoints.
     """
     if os.path.sep in paths[-1]:
-        logger.warning("Found complex path, some features may be not available.")
+        logger.warning_rank0("Found complex path, some features may be not available.")
         return paths[-1]
 
     paths = (path.replace(" ", "").strip() for path in paths)
@@ -172,14 +172,14 @@ def load_dataset_info(dataset_dir: str) -> Dict[str, Dict[str, Any]]:
    Loads dataset_info.json.
     """
     if dataset_dir == "ONLINE" or dataset_dir.startswith("REMOTE:"):
-        logger.info(f"dataset_dir is {dataset_dir}, using online dataset.")
+        logger.info_rank0(f"dataset_dir is {dataset_dir}, using online dataset.")
         return {}
 
     try:
         with open(os.path.join(dataset_dir, DATA_CONFIG), encoding="utf-8") as f:
             return json.load(f)
     except Exception as err:
-        logger.warning(f"Cannot open {os.path.join(dataset_dir, DATA_CONFIG)} due to {str(err)}.")
+        logger.warning_rank0(f"Cannot open {os.path.join(dataset_dir, DATA_CONFIG)} due to {str(err)}.")
         return {}
 
 
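The hunks above replace the plain `get_logger` import with the `..extras.logging` module and switch the WebUI messages to `info_rank0` / `warning_rank0`, so they are emitted only once in multi-process (DDP) launches. The implementation of `..extras.logging` is not part of this diff; the following is a minimal sketch of how such a rank0-aware logger could look, assuming the process rank is exposed via torchrun-style `RANK` / `LOCAL_RANK` environment variables (names and structure here are illustrative, not the project's actual code).

```python
# Illustrative sketch only: the real ..extras.logging module is not shown in this diff.
import logging
import os


def _rank() -> int:
    # Assumes torchrun-style env vars; falls back to 0 for single-process runs.
    return int(os.environ.get("RANK", os.environ.get("LOCAL_RANK", "0")))


class Rank0Logger(logging.LoggerAdapter):
    """Adds *_rank0 variants that only emit on the main (rank 0) process."""

    def info_rank0(self, msg, *args, **kwargs) -> None:
        if _rank() == 0:
            self.info(msg, *args, **kwargs)

    def warning_rank0(self, msg, *args, **kwargs) -> None:
        if _rank() == 0:
            self.warning(msg, *args, **kwargs)


def get_logger(name: str) -> Rank0Logger:
    # Drop-in replacement: logger.info(...) keeps its usual behavior on every
    # rank, while logger.info_rank0(...) is a no-op on non-zero ranks.
    return Rank0Logger(logging.getLogger(name), {})
```

With a logger of this shape, the call sites changed above (`logger.warning_rank0(...)`, `logger.info_rank0(...)`) keep ordinary logging semantics on rank 0 and stay silent elsewhere.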
@@ -22,7 +22,7 @@ from transformers.trainer import TRAINING_ARGS_NAME
 
 from ..extras.constants import LLAMABOARD_CONFIG, PEFT_METHODS, TRAINING_STAGES
 from ..extras.misc import is_gpu_or_npu_available, torch_gc
-from ..extras.packages import is_gradio_available
+from ..extras.packages import is_gradio_available, is_transformers_version_equal_to_4_46
 from .common import DEFAULT_CACHE_DIR, DEFAULT_CONFIG_DIR, QUANTIZATION_BITS, get_save_dir, load_config
 from .locales import ALERTS, LOCALES
 from .utils import abort_process, gen_cmd, get_eval_results, get_trainer_info, load_args, save_args, save_cmd
@@ -152,7 +152,7 @@ class Runner:
             pure_bf16=(get("train.compute_type") == "pure_bf16"),
             plot_loss=True,
             ddp_timeout=180000000,
-            include_num_input_tokens_seen=True,
+            include_num_input_tokens_seen=False if is_transformers_version_equal_to_4_46() else True, # FIXME
             **json.loads(get("train.extra_args")),
         )
 
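The Runner hunk force-disables `include_num_input_tokens_seen` on transformers 4.46 as a temporary workaround (hence the FIXME). The body of the version check is not part of this diff; a minimal sketch of such a gate, assuming it only needs to match the 4.46.x series, might be:

```python
# Illustrative sketch only: the real helper lives in ..extras.packages.
from packaging import version

import transformers


def is_transformers_version_equal_to_4_46() -> bool:
    # True for any 4.46.x release of transformers.
    v = version.parse(transformers.__version__)
    return (v.major, v.minor) == (4, 46)
```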