diff --git a/fine_tune.py b/fine_tune.py
index c5e99ad4..27d64739 100644
--- a/fine_tune.py
+++ b/fine_tune.py
@@ -406,7 +406,7 @@ def train(args):
                 accelerator.log(logs, step=global_step)
 
             loss_recorder.add(epoch=epoch, step=step, loss=current_loss)
-            avr_loss: float = loss_recorder.get_moving_average()
+            avr_loss: float = loss_recorder.moving_average
             logs = {"avr_loss": avr_loss}  # , "lr": lr_scheduler.get_last_lr()[0]}
             progress_bar.set_postfix(**logs)
 
@@ -414,7 +414,7 @@ def train(args):
                 break
 
         if args.logging_dir is not None:
-            logs = {"loss/epoch": loss_recorder.get_moving_average()}
+            logs = {"loss/epoch": loss_recorder.moving_average}
             accelerator.log(logs, step=epoch + 1)
 
         accelerator.wait_for_everyone()
diff --git a/library/train_util.py b/library/train_util.py
index 7f7190b3..e86293e3 100644
--- a/library/train_util.py
+++ b/library/train_util.py
@@ -4700,5 +4700,6 @@ class LossRecorder:
             self.loss_list[step] = loss
         self.loss_total += loss
 
-    def get_moving_average(self) -> float:
+    @property
+    def moving_average(self) -> float:
         return self.loss_total / len(self.loss_list)
diff --git a/sdxl_train.py b/sdxl_train.py
index 096c89e9..9017d7b8 100644
--- a/sdxl_train.py
+++ b/sdxl_train.py
@@ -633,7 +633,7 @@ def train(args):
                 accelerator.log(logs, step=global_step)
 
             loss_recorder.add(epoch=epoch, step=step, loss=current_loss)
-            avr_loss: float = loss_recorder.get_moving_average()
+            avr_loss: float = loss_recorder.moving_average
             logs = {"avr_loss": avr_loss}  # , "lr": lr_scheduler.get_last_lr()[0]}
             progress_bar.set_postfix(**logs)
 
@@ -641,7 +641,7 @@ def train(args):
                 break
 
         if args.logging_dir is not None:
-            logs = {"loss/epoch": loss_recorder.get_moving_average()}
+            logs = {"loss/epoch": loss_recorder.moving_average}
             accelerator.log(logs, step=epoch + 1)
 
         accelerator.wait_for_everyone()
diff --git a/train_db.py b/train_db.py
index 11230349..aa741794 100644
--- a/train_db.py
+++ b/train_db.py
@@ -392,7 +392,7 @@ def train(args):
                 accelerator.log(logs, step=global_step)
 
             loss_recorder.add(epoch=epoch, step=step, loss=current_loss)
-            avr_loss: float = loss_recorder.get_moving_average()
+            avr_loss: float = loss_recorder.moving_average
             logs = {"avr_loss": avr_loss}  # , "lr": lr_scheduler.get_last_lr()[0]}
             progress_bar.set_postfix(**logs)
 
@@ -400,7 +400,7 @@ def train(args):
                 break
 
         if args.logging_dir is not None:
-            logs = {"loss/epoch": loss_recorder.get_moving_average()}
+            logs = {"loss/epoch": loss_recorder.moving_average}
             accelerator.log(logs, step=epoch + 1)
 
         accelerator.wait_for_everyone()
diff --git a/train_network.py b/train_network.py
index 58f7e445..c81aeff8 100644
--- a/train_network.py
+++ b/train_network.py
@@ -854,7 +854,7 @@ class NetworkTrainer:
                 current_loss = loss.detach().item()
 
                 loss_recorder.add(epoch=epoch, step=step, loss=current_loss)
-                avr_loss: float = loss_recorder.get_moving_average()
+                avr_loss: float = loss_recorder.moving_average
                 logs = {"avr_loss": avr_loss}  # , "lr": lr_scheduler.get_last_lr()[0]}
                 progress_bar.set_postfix(**logs)
 
@@ -869,7 +869,7 @@ class NetworkTrainer:
                     break
 
             if args.logging_dir is not None:
-                logs = {"loss/epoch": loss_recorder.get_moving_average()}
+                logs = {"loss/epoch": loss_recorder.moving_average}
                 accelerator.log(logs, step=epoch + 1)
 
             accelerator.wait_for_everyone()