Fix checkpoint loading

Former-commit-id: d31aa5c2c0bcb6a4ef4a62e21693548dd9acaae6
This commit is contained in:
hiyouga
2023-05-29 17:43:16 +08:00
parent 35d04a2c05
commit 304be6dc28
4 changed files with 56 additions and 23 deletions

View File

@@ -194,7 +194,8 @@ class FinetuningArguments:
if self.name_module_trainable == "mlp":
self.trainable_layers = ["layers.{:d}.mlp".format(idx) for idx in trainable_layer_ids]
elif self.name_module_trainable == "qkv":
self.trainable_layers = ["layers.{:d}.attention.query_key_value".format(idx) for idx in trainable_layer_ids]
self.trainable_layers = ["layers.{:d}.self_attn.{}".format(idx, proj) \
for proj in ["k_proj", "q_proj", "v_proj", "o_proj"] for idx in trainable_layer_ids]
assert self.finetuning_type in ["none", "freeze", "lora", "full"], "Invalid fine-tuning method."