support multiple modules in freeze training #1514

Former-commit-id: 60abac70dfd778df2ae8b3a2e960ed8b607d7ab6
hiyouga
2023-11-15 17:08:18 +08:00
parent 8079584143
commit b2ac8376e1
2 changed files with 19 additions and 11 deletions


@@ -46,7 +46,11 @@ def init_adapter(
         else: # fine-tuning the first n layers if num_layer_trainable < 0
             trainable_layer_ids = [k for k in range(-finetuning_args.num_layer_trainable)]
 
-        trainable_layers = ["{:d}.{}".format(idx, finetuning_args.name_module_trainable) for idx in trainable_layer_ids]
+        trainable_layers = []
+        for module_name in finetuning_args.name_module_trainable:
+            for idx in trainable_layer_ids:
+                trainable_layers.append("{:d}.{}".format(idx, module_name))
+
         for name, param in model.named_parameters():
             if not any(trainable_layer in name for trainable_layer in trainable_layers):
                 param.requires_grad_(False)
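
For context, the following is a minimal, self-contained sketch (not the repository's code) of what the patched freeze-tuning logic does once `name_module_trainable` holds a list of module names instead of a single string. The toy model, the `num_layers = 4` setting, and the positive-`num_layer_trainable` branch (training the last n layers) are illustrative assumptions; only the nested loop and the `requires_grad_(False)` filter mirror the diff above.

# Sketch only: toy stand-in for a decoder stack with 4 layers,
# each containing a "self_attn" and an "mlp" submodule.
import torch.nn as nn

model = nn.ModuleDict({
    str(i): nn.ModuleDict({"self_attn": nn.Linear(8, 8), "mlp": nn.Linear(8, 8)})
    for i in range(4)
})

# Hypothetical settings mirroring finetuning_args: train the last 2 layers,
# and (after this commit) multiple module names at once.
num_layers = 4
num_layer_trainable = 2
name_module_trainable = ["self_attn", "mlp"]

if num_layer_trainable > 0:   # fine-tune the last n layers (assumed branch)
    trainable_layer_ids = [num_layers - k - 1 for k in range(num_layer_trainable)]
else:                         # fine-tune the first n layers (branch shown in the diff)
    trainable_layer_ids = [k for k in range(-num_layer_trainable)]

# Build one "layer_index.module_name" pattern per (module, layer) pair,
# as the new nested loop in the diff does.
trainable_layers = []
for module_name in name_module_trainable:
    for idx in trainable_layer_ids:
        trainable_layers.append("{:d}.{}".format(idx, module_name))

# Freeze every parameter whose name matches none of the patterns.
for name, param in model.named_parameters():
    if not any(trainable_layer in name for trainable_layer in trainable_layers):
        param.requires_grad_(False)

# Only parameters under layers 2 and 3 remain trainable here.
print([n for n, p in model.named_parameters() if p.requires_grad])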