From 7ac14154b5a4cfa0075cc9cd2c58b1f38da47eee Mon Sep 17 00:00:00 2001 From: hdeezy <82070413+hdeezy@users.noreply.github.com> Date: Thu, 5 Jan 2023 08:46:23 -0600 Subject: [PATCH] Fix --resume_text_encoder in train_lora_dreambooth.py This attempted fix results in the following error. Putting it here in case someone can see the problem with inject_trainable_lora. Any help appreciated, thanks! (NOTE: the TypeError below shows that inject_trainable_lora assigns the result of loras.pop(0) — a raw torch.HalfTensor — directly to lora_up.weight, but nn.Module.__setattr__ only accepts a torch.nn.Parameter or None for a registered parameter; the loaded tensors likely need to be wrapped in torch.nn.Parameter inside lora.py rather than changed at this call site.) Traceback (most recent call last): File "C:\Users\\lora\train_lora_dreambooth.py", line 1044, in main(args) File "C:\Users\\lora\train_lora_dreambooth.py", line 631, in main unet_lora_params, _ = inject_trainable_lora( File "C:\Users\\lora\lora_diffusion\lora.py", line 176, in inject_trainable_lora _module._modules[name].lora_up.weight = loras.pop(0) File "F:\ANACONDA\envs\sd\lib\site-packages\torch\nn\modules\module.py", line 1228, in __setattr__ raise TypeError("cannot assign '{}' as parameter '{}' " TypeError: cannot assign 'torch.HalfTensor' as parameter 'weight' (torch.nn.Parameter or None expected) --- train_lora_dreambooth.py | 1 + 1 file changed, 1 insertion(+) diff --git a/train_lora_dreambooth.py b/train_lora_dreambooth.py index aadfdaf..72e0075 100644 --- a/train_lora_dreambooth.py +++ b/train_lora_dreambooth.py @@ -627,6 +627,7 @@ def main(args): text_encoder, target_replace_module=["CLIPAttention"], r=args.lora_rank, + loras=args.resume_text_encoder, ) for _up, _down in extract_lora_ups_down( text_encoder, target_replace_module=["CLIPAttention"]