From 7c33481e28975b019319018057e234c1ed54b20d Mon Sep 17 00:00:00 2001
From: Daniel Petti
Date: Fri, 17 Oct 2025 13:12:07 -0400
Subject: [PATCH] Avoid use of lambdas so that LoRA layers are pickleable.

---
 loralib/layers.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/loralib/layers.py b/loralib/layers.py
index 63786560..bbc365ef 100644
--- a/loralib/layers.py
+++ b/loralib/layers.py
@@ -23,7 +23,7 @@ def __init__(
         if lora_dropout > 0.:
             self.lora_dropout = nn.Dropout(p=lora_dropout)
         else:
-            self.lora_dropout = lambda x: x
+            self.lora_dropout = nn.Identity()
         # Mark the weight as unmerged
         self.merged = False
         self.merge_weights = merge_weights
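
The motivation for the change: a lambda assigned to a module attribute cannot be serialized by Python's pickle (which `torch.save` uses by default), while `nn.Identity()` is an ordinary `nn.Module` and pickles normally. Below is a minimal sketch, separate from the patch itself, that reproduces the failure and the fix; the `WithLambda` and `WithIdentity` class names are illustrative, not part of loralib.

```python
import pickle

import torch.nn as nn


class WithLambda(nn.Module):
    """Mimics the old behavior: a lambda stored on the module."""
    def __init__(self):
        super().__init__()
        self.lora_dropout = lambda x: x  # locally defined lambda cannot be pickled


class WithIdentity(nn.Module):
    """Mimics the patched behavior: a picklable no-op module."""
    def __init__(self):
        super().__init__()
        self.lora_dropout = nn.Identity()


try:
    pickle.dumps(WithLambda())
except (pickle.PicklingError, AttributeError) as err:
    print("lambda version fails to pickle:", err)

# The nn.Identity() version serializes without error.
print("identity version pickles to", len(pickle.dumps(WithIdentity())), "bytes")
```

`nn.Identity()` also behaves identically to `lambda x: x` in the forward pass, so the change is a drop-in replacement with no effect on training or inference.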