fix: apply hiyouga's suggestion

Former-commit-id: 41195f1bc69e4b5da7a265369d368b06754362cf
ZeYi Lin
2024-12-20 16:43:03 +08:00
parent e5d9d8c55d
commit 8a41c96761
7 changed files with 23 additions and 8 deletions


@@ -31,7 +31,7 @@ from typing_extensions import override
 from ...extras.constants import IGNORE_INDEX
 from ...extras.packages import is_transformers_version_equal_to_4_46
 from ..callbacks import PissaConvertCallback, SaveProcessorCallback
-from ..trainer_utils import create_custom_optimizer, create_custom_scheduler, get_batch_logps
+from ..trainer_utils import create_custom_optimizer, create_custom_scheduler, get_batch_logps, get_swanlab_callback
 
 if TYPE_CHECKING:
@@ -106,6 +106,9 @@ class CustomDPOTrainer(DPOTrainer):
             self.accelerator.clip_grad_norm_ = MethodType(clip_grad_norm_old_version, self.accelerator)
             self.add_callback(BAdamCallback)
 
+        if finetuning_args.use_swanlab:
+            self.add_callback(get_swanlab_callback(finetuning_args))
+
     @override
     def create_optimizer(self) -> "torch.optim.Optimizer":
         if self.optimizer is None:
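
For context, below is a minimal sketch of what a helper like get_swanlab_callback could return, based on SwanLab's transformers integration. The import path, the constructor keywords, and the attribute names swanlab_project and swanlab_run_name are assumptions for illustration only; the diff above confirms only that the helper lives in ..trainer_utils and is gated by finetuning_args.use_swanlab.

from transformers import TrainerCallback


def get_swanlab_callback(finetuning_args) -> "TrainerCallback":
    """Hypothetical sketch: build a SwanLab logging callback from the fine-tuning args."""
    # Assumed import path from the swanlab package's transformers integration.
    from swanlab.integration.transformers import SwanLabCallback

    # `swanlab_project` and `swanlab_run_name` are illustrative attribute names,
    # not fields confirmed by this commit.
    return SwanLabCallback(
        project=getattr(finetuning_args, "swanlab_project", None),
        experiment_name=getattr(finetuning_args, "swanlab_run_name", None),
    )

The trainer then registers the returned callback exactly as the second hunk shows, via self.add_callback(get_swanlab_callback(finetuning_args)), so SwanLab logging only activates when use_swanlab is enabled.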