[breaking] bump transformers to 4.45.0 & improve ci (#7746)

* update ci

* fix

* fix

* fix

* fix

* fix
Commit: 86ebb219d6 (parent: d222f63cb7)
Author: hoshi-hiyouga
Date: 2025-04-17 02:36:48 +08:00
Committed by: GitHub
23 changed files with 211 additions and 140 deletions
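Since the bump is breaking, the minimum supported transformers version moves up to 4.45.0. As a quick sanity check on an existing environment, the new floor can be verified like this (a minimal sketch, not part of this commit):

# Illustrative only: verify that the installed transformers satisfies
# the new 4.45.0 minimum introduced by this commit.
import transformers
from packaging import version

assert version.parse(transformers.__version__) >= version.parse("4.45.0"), (
    f"transformers {transformers.__version__} predates the required 4.45.0"
)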

src/llamafactory/model/model_utils/packing.py

@@ -43,7 +43,6 @@ import torch
 import torch.nn.functional as F
 
 from ...extras import logging
-from ...extras.misc import check_version
 from ...extras.packages import is_transformers_version_greater_than
@@ -117,6 +116,5 @@ def configure_packing(model_args: "ModelArguments", is_trainable: bool) -> None:
     if not is_trainable or not model_args.block_diag_attn:
         return
 
-    check_version("transformers>=4.43.0")
     transformers.modeling_flash_attention_utils._get_unpad_data = get_unpad_data
     logger.info_rank0("Using block diagonal attention for sequence packing without cross-attention.")
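
For context on the second hunk: with the version floor now at 4.45.0, the runtime check_version("transformers>=4.43.0") guard is redundant and gets dropped, while the monkey-patch of transformers' _get_unpad_data with LLaMA-Factory's get_unpad_data remains the mechanism behind block-diagonal attention for packed sequences. A minimal sketch of what such a replacement computes, assuming the packed attention mask tags tokens of the k-th sub-sequence in a row with the integer k and padding with 0 (same (indices, cu_seqlens, max_seqlen) contract as the stock helper, not the verbatim implementation):

import torch
import torch.nn.functional as F

def get_unpad_data(attention_mask: torch.Tensor) -> tuple[torch.Tensor, torch.Tensor, int]:
    # Sketch of a packing-aware replacement for
    # transformers.modeling_flash_attention_utils._get_unpad_data.
    # attention_mask: (batch, seq_len), with 1, 2, 3, ... labeling the
    # packed sub-sequences of each row and 0 marking padding.
    bsz = attention_mask.size(0)
    max_num = int(attention_mask.max())
    counts = torch.zeros(bsz, max_num, dtype=torch.int32, device=attention_mask.device)
    for k in range(max_num):  # length of the k-th sub-sequence in every row
        counts[:, k] = (attention_mask == k + 1).sum(dim=-1)

    seqlens_in_batch = counts.flatten()
    seqlens_in_batch = seqlens_in_batch[seqlens_in_batch.nonzero().squeeze(-1)]

    # Same return contract as the stock helper: flat indices of non-pad
    # tokens, cumulative sub-sequence lengths for the varlen flash
    # attention kernels, and the longest sub-sequence length.
    indices = torch.nonzero(attention_mask.flatten(), as_tuple=False).flatten()
    cu_seqlens = F.pad(torch.cumsum(seqlens_in_batch, dim=0, dtype=torch.int32), (1, 0))
    max_seqlen_in_batch = int(seqlens_in_batch.max())
    return indices, cu_seqlens, max_seqlen_in_batch

Because the varlen flash attention kernels consume cu_seqlens directly, each token can only attend within its own sub-sequence, which is exactly the "block diagonal attention for sequence packing without cross-attention" the log line refers to.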