From 1e4512b2c86fca4411777a11cfc85ae21da9f190 Mon Sep 17 00:00:00 2001 From: Kohya S Date: Sat, 29 Jul 2023 14:19:25 +0900 Subject: [PATCH] support ckpt without position id in sd v1 #687 --- library/model_util.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/library/model_util.py b/library/model_util.py index 26f72235..70a8c752 100644 --- a/library/model_util.py +++ b/library/model_util.py @@ -540,6 +540,11 @@ def convert_ldm_clip_checkpoint_v1(checkpoint): for key in keys: if key.startswith("cond_stage_model.transformer"): text_model_dict[key[len("cond_stage_model.transformer.") :]] = checkpoint[key] + + # support checkpoint without position_ids (invalid checkpoint) + if "text_model.embeddings.position_ids" not in text_model_dict: + text_model_dict["text_model.embeddings.position_ids"] = torch.arange(77).unsqueeze(0) # 77 is CLIP's maximum token sequence length (max_position_embeddings) + return text_model_dict