Merge pull request #1553 from hannlp/hans

Change the default argument settings for PPO training

Former-commit-id: 1b64678fa4979485f67c3bb1420dfdff6fcbc6e7
Author: hoshi-hiyouga
Date: 2023-11-20 20:32:55 +08:00
Committed by: GitHub
4 changed files with 9 additions and 1 deletion

@@ -313,6 +313,8 @@ CUDA_VISIBLE_DEVICES=0 python src/train_bash.py \
 --per_device_train_batch_size 2 \
 --gradient_accumulation_steps 4 \
 --lr_scheduler_type cosine \
+--top_k 0 \
+--top_p 0.9 \
 --logging_steps 10 \
 --save_steps 1000 \
 --learning_rate 1e-5 \
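
The two added flags set the sampling behavior used when the policy generates responses during PPO: top_k 0 disables top-k filtering, and top_p 0.9 enables nucleus sampling. Below is a minimal sketch of how such flags commonly map onto Hugging Face generation settings; the exact wiring inside src/train_bash.py is not shown in this diff and is assumed here for illustration.

# Minimal sketch (assumed mapping of the CLI flags onto rollout generation
# settings; the actual plumbing in src/train_bash.py is not part of this diff).
from transformers import GenerationConfig

generation_config = GenerationConfig(
    do_sample=True,   # PPO rollouts sample from the policy instead of decoding greedily
    top_k=0,          # 0 disables top-k filtering, so truncation comes from top-p alone
    top_p=0.9,        # nucleus sampling: keep the smallest token set with cumulative prob >= 0.9
)

With top_k set to 0, the only truncation applied to the sampling distribution is the nucleus (top-p) cutoff, which keeps rollout generations diverse while discarding the low-probability tail.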