add swanlab

Former-commit-id: c85a77c8a8824a56a67d56b97b4877fcd6edeb3d
hiyouga
2024-12-19 07:12:31 +00:00
parent eca06531c3
commit 7eeeffdb8a
6 changed files with 60 additions and 3 deletions

View File

@@ -29,7 +29,7 @@ from ...extras import logging
 from ...extras.constants import IGNORE_INDEX
 from ...extras.packages import is_transformers_version_equal_to_4_46, is_transformers_version_greater_than
 from ..callbacks import PissaConvertCallback, SaveProcessorCallback
-from ..trainer_utils import create_custom_optimizer, create_custom_scheduler
+from ..trainer_utils import create_custom_optimizer, create_custom_scheduler, get_swanlab_callback


 if TYPE_CHECKING:
@@ -71,6 +71,9 @@ class CustomSeq2SeqTrainer(Seq2SeqTrainer):
             self.accelerator.clip_grad_norm_ = MethodType(clip_grad_norm_old_version, self.accelerator)
             self.add_callback(BAdamCallback)

+        if finetuning_args.use_swanlab:
+            self.add_callback(get_swanlab_callback(finetuning_args))
+
     @override
     def create_optimizer(self) -> "torch.optim.Optimizer":
         if self.optimizer is None:
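Both registrations above go through the same Trainer.add_callback API, which accepts either a TrainerCallback subclass (as with BAdamCallback) or an instance (as with the SwanLab callback returned by get_swanlab_callback). A minimal sketch of that interface, using an illustrative EchoCallback that is not part of this commit:

    from transformers import TrainerCallback


    class EchoCallback(TrainerCallback):
        """Toy logger: prints each metrics dict the Trainer emits."""

        def on_log(self, args, state, control, logs=None, **kwargs):
            print(f"step {state.global_step}: {logs}")

    # Either form works:
    #     trainer.add_callback(EchoCallback)    # pass the class
    #     trainer.add_callback(EchoCallback())  # or an instance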

View File

@@ -40,7 +40,7 @@ if is_galore_available():
 if TYPE_CHECKING:
-    from transformers import PreTrainedModel, Seq2SeqTrainingArguments
+    from transformers import PreTrainedModel, Seq2SeqTrainingArguments, TrainerCallback
     from trl import AutoModelForCausalLMWithValueHead

     from ..hparams import DataArguments
@@ -457,3 +457,12 @@ def get_batch_logps(
     labels[labels == label_pad_token_id] = 0  # dummy token
     per_token_logps = torch.gather(logits.log_softmax(-1), dim=2, index=labels.unsqueeze(2)).squeeze(2)
     return (per_token_logps * loss_mask).sum(-1), loss_mask.sum(-1)
+
+
+def get_swanlab_callback(finetuning_args: "FinetuningArguments") -> "TrainerCallback":
+    r"""
+    Gets the callback for logging to SwanLab.
+    """
+    from swanlab.integration.huggingface import SwanLabCallback
+
+    return SwanLabCallback()
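Taken together, the new flag and helper follow a guard-and-lazy-import pattern: swanlab is only imported when use_swanlab is enabled, so it stays an optional dependency. A self-contained sketch of the same pattern, using a hypothetical stand-in dataclass rather than LLaMA-Factory's real FinetuningArguments (the swanlab import path is the one used in this commit):

    from dataclasses import dataclass

    from transformers import TrainerCallback


    @dataclass
    class Args:  # hypothetical stand-in for FinetuningArguments
        use_swanlab: bool = False


    def logging_callbacks(args: Args) -> list[TrainerCallback]:
        callbacks: list[TrainerCallback] = []
        if args.use_swanlab:
            # Lazy import: swanlab is only required when the flag is set.
            from swanlab.integration.huggingface import SwanLabCallback

            callbacks.append(SwanLabCallback())
        return callbacks


    print(logging_callbacks(Args()))  # -> [] when the flag is off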