From a95191060c8ee04ffec064213b554ccc20009039 Mon Sep 17 00:00:00 2001
From: Arun
Date: Sat, 27 Jan 2024 20:59:36 +0530
Subject: [PATCH] Fix initialization of lora_A and lora_B

---
 loralib/layers.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/loralib/layers.py b/loralib/layers.py
index 0e54a64b..80e45b54 100644
--- a/loralib/layers.py
+++ b/loralib/layers.py
@@ -56,8 +56,8 @@ def reset_parameters(self):
         nn.Embedding.reset_parameters(self)
         if hasattr(self, 'lora_A'):
             # initialize A the same way as the default for nn.Linear and B to zero
-            nn.init.zeros_(self.lora_A)
-            nn.init.normal_(self.lora_B)
+            nn.init.normal_(self.lora_A)
+            nn.init.zeros_(self.lora_B)
 
     def train(self, mode: bool = True):
         nn.Embedding.train(self, mode)
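
Note (not part of the patch itself): a minimal sketch of the convention this change
enforces. With one LoRA factor zero-initialized, the low-rank product B @ A is zero,
so the adapted layer reproduces the pretrained output exactly at step 0, while training
can still move both factors because the gradient of B depends on the normal-initialized
A. The comment in the code describes this convention (A ~ normal, B = 0); the patch
makes the code match it. The shapes and names below are illustrative assumptions,
chosen to mirror the (r, num_embeddings) / (embedding_dim, r) layout of loralib's
Embedding layer, not code taken from the repository.

    import torch
    import torch.nn as nn

    # Illustrative sizes, not values from loralib.
    num_embeddings, embedding_dim, r = 100, 64, 8

    # Stand-ins for the lora_A / lora_B parameters of the Embedding layer.
    lora_A = nn.Parameter(torch.empty(r, num_embeddings))
    lora_B = nn.Parameter(torch.empty(embedding_dim, r))

    # After this patch: A gets the normal init (as for nn.Linear), B is zeroed.
    nn.init.normal_(lora_A)
    nn.init.zeros_(lora_B)

    # The low-rank update is exactly zero at initialization, so merging it
    # into the frozen embedding weight changes nothing before training.
    delta_w = (lora_B @ lora_A).T  # (num_embeddings, embedding_dim)
    assert torch.all(delta_w == 0)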