fix name of loss for epoch

This commit is contained in:
Kohya S
2023-02-16 22:21:36 +09:00
parent d01d953262
commit ffdfd5f615

View File

@@ -309,7 +309,7 @@ def train(args):
break
if args.logging_dir is not None:
-                logs = {"epoch_loss": loss_total / len(loss_list)}
+                logs = {"loss/epoch": loss_total / len(loss_list)}
accelerator.log(logs, step=epoch+1)
accelerator.wait_for_everyone()