From d6f158ddf6a3631df7db10ac97453b12de8eadbe Mon Sep 17 00:00:00 2001 From: rockerBOO Date: Wed, 8 Jan 2025 18:48:05 -0500 Subject: [PATCH] Fix incorrect destructuring for load_arbitrary_dataset --- fine_tune.py | 3 ++- flux_train_control_net.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/fine_tune.py b/fine_tune.py index 6be2f98c..e1ed4749 100644 --- a/fine_tune.py +++ b/fine_tune.py @@ -93,7 +93,8 @@ def train(args): blueprint = blueprint_generator.generate(user_config, args) train_dataset_group, val_dataset_group = config_util.generate_dataset_group_by_blueprint(blueprint.dataset_group) else: - train_dataset_group, val_dataset_group = train_util.load_arbitrary_dataset(args) + train_dataset_group = train_util.load_arbitrary_dataset(args) + val_dataset_group = None current_epoch = Value("i", 0) current_step = Value("i", 0) diff --git a/flux_train_control_net.py b/flux_train_control_net.py index 54dec2a7..cecd0001 100644 --- a/flux_train_control_net.py +++ b/flux_train_control_net.py @@ -128,7 +128,8 @@ def train(args): blueprint = blueprint_generator.generate(user_config, args) train_dataset_group, val_dataset_group = config_util.generate_dataset_group_by_blueprint(blueprint.dataset_group) else: - train_dataset_group, val_dataset_group = train_util.load_arbitrary_dataset(args) + train_dataset_group = train_util.load_arbitrary_dataset(args) + val_dataset_group = None current_epoch = Value("i", 0) current_step = Value("i", 0)