Merge pull request #2525 from stephen-nju/main
update project_kwargs for ppo config

Former-commit-id: e7a6910141cc8d8dd966c1f54388d9ef764418d0
@@ -61,6 +61,7 @@ def run_ppo(
         use_score_norm=finetuning_args.ppo_score_norm,
         whiten_rewards=finetuning_args.ppo_whiten_rewards,
         accelerator_kwargs={"step_scheduler_with_optimizer": False},
+        project_kwargs={"logging_dir": training_args.logging_dir},
     )

     # Create optimizer and scheduler
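For context, below is a minimal sketch of the resulting PPOConfig call after this change, assuming a trl 0.7.x-style API in which PPOConfig forwards project_kwargs to accelerate's ProjectConfiguration. The literal values are illustrative placeholders standing in for the original model_args / finetuning_args / training_args attributes, not part of the patch.

# Sketch only: placeholder values, trl 0.7.x-style PPOConfig assumed.
from trl import PPOConfig

ppo_config = PPOConfig(
    log_with="tensorboard",              # tracker backend (illustrative)
    use_score_norm=True,                 # stands in for finetuning_args.ppo_score_norm
    whiten_rewards=False,                # stands in for finetuning_args.ppo_whiten_rewards
    accelerator_kwargs={"step_scheduler_with_optimizer": False},
    # New in this PR: route tracker output (e.g. TensorBoard event files)
    # to the directory configured via training_args.logging_dir.
    project_kwargs={"logging_dir": "./saves/ppo/runs"},  # stands in for training_args.logging_dir
)

Without this kwarg, the tracker writes to accelerate's default project location rather than the logging directory configured in the Hugging Face TrainingArguments.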