From 85bc371ebc3ab311b2e6b75a98bdf9fb310516f7 Mon Sep 17 00:00:00 2001 From: DukeG Date: Fri, 26 Jan 2024 18:58:47 +0800 Subject: [PATCH 1/3] test --- train_network.py | 1 + 1 file changed, 1 insertion(+) diff --git a/train_network.py b/train_network.py index 8b6c395c..2e06262f 100644 --- a/train_network.py +++ b/train_network.py @@ -774,6 +774,7 @@ class NetworkTrainer: else: raise NotImplementedError("multipliers for each sample is not supported yet") # print(f"set multiplier: {multipliers}") + print(network) network.set_multiplier(multipliers) with torch.set_grad_enabled(train_text_encoder), accelerator.autocast(): From 50f631c768eb3db578c98224106236cf3abc03f9 Mon Sep 17 00:00:00 2001 From: DukeG Date: Fri, 26 Jan 2024 20:02:48 +0800 Subject: [PATCH 2/3] test --- train_network.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/train_network.py b/train_network.py index 2e06262f..d64d7a1b 100644 --- a/train_network.py +++ b/train_network.py @@ -774,7 +774,8 @@ class NetworkTrainer: else: raise NotImplementedError("multipliers for each sample is not supported yet") # print(f"set multiplier: {multipliers}") - print(network) + print(type(network)) + network.set_multiplier(multipliers) with torch.set_grad_enabled(train_text_encoder), accelerator.autocast(): From 4e67fb8444b54deb0d867cc3d622f694a62b5c2b Mon Sep 17 00:00:00 2001 From: DukeG Date: Fri, 26 Jan 2024 20:22:49 +0800 Subject: [PATCH 3/3] test --- train_network.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/train_network.py b/train_network.py index d64d7a1b..8d102ae8 100644 --- a/train_network.py +++ b/train_network.py @@ -774,9 +774,7 @@ class NetworkTrainer: else: raise NotImplementedError("multipliers for each sample is not supported yet") # print(f"set multiplier: {multipliers}") - print(type(network)) - - network.set_multiplier(multipliers) + accelerator.unwrap_model(network).set_multiplier(multipliers) with torch.set_grad_enabled(train_text_encoder), 
accelerator.autocast():
            # Get the text embedding for conditioning