From 5f7fdb0c9fd78c8ff5bc0349fd89e595134b7d2e Mon Sep 17 00:00:00 2001
From: Benjamin Bossan
Date: Tue, 29 Aug 2023 16:18:30 +0200
Subject: [PATCH] FIX: Error in forward of 4bit linear lora layer

This was introduced during the refactoring of the forward function. It
should now be fixed and be equivalent to the forward function before the
refactoring:

https://github.com/huggingface/peft/blob/4df9c5a243194b03e703c1dd526d64163f9b4fd2/src/peft/tuners/lora.py#L1207

Bug reported by @jiqing-feng

---
 src/peft/tuners/lora/bnb.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/src/peft/tuners/lora/bnb.py b/src/peft/tuners/lora/bnb.py
index d60f56a8a1..ac0433e950 100644
--- a/src/peft/tuners/lora/bnb.py
+++ b/src/peft/tuners/lora/bnb.py
@@ -189,8 +189,6 @@ def forward(self, x: torch.Tensor) -> torch.Tensor:
                     expected_dtype = result.dtype
                     x = x.to(lora_A.weight.dtype)
 
-                x = x.to(lora_A.weight.dtype)
-                result += lora_B(lora_A(dropout(x))) * scaling
                 output = lora_B(lora_A(dropout(x)))
                 if requires_conversion:
                     output = output.to(expected_dtype)
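
For illustration, below is a minimal, self-contained sketch of the post-fix control flow. It is not the actual peft Linear4bit implementation: an ordinary nn.Linear stands in for the quantized 4bit base layer, and the function signature, helper names, and the small usage example at the bottom are assumptions for the sketch; only lora_A, lora_B, dropout, scaling, and the dtype-conversion pattern are taken from the diff above.

    import torch
    import torch.nn as nn

    # Sketch of the post-fix LoRA forward path (assumed structure, not peft's
    # actual Linear4bit module). `base_layer` stands in for the 4bit linear.
    def lora_forward_sketch(
        base_layer: nn.Linear,
        lora_A: nn.Linear,
        lora_B: nn.Linear,
        dropout: nn.Module,
        scaling: float,
        x: torch.Tensor,
    ) -> torch.Tensor:
        result = base_layer(x)

        # The base layer may run in a lower precision than the LoRA weights,
        # so convert the input once and remember the expected output dtype.
        requires_conversion = not torch.is_autocast_enabled()
        if requires_conversion:
            expected_dtype = result.dtype
            x = x.to(lora_A.weight.dtype)

        # Compute the LoRA delta exactly once; the bug was a leftover
        # `result += lora_B(lora_A(dropout(x))) * scaling` line that
        # accumulated the delta a second time.
        output = lora_B(lora_A(dropout(x)))
        if requires_conversion:
            output = output.to(expected_dtype)

        # Accumulate the scaled delta into the base result.
        result += output * scaling
        return result

    if __name__ == "__main__":
        base = nn.Linear(16, 8)
        lora_A = nn.Linear(16, 4, bias=False)
        lora_B = nn.Linear(4, 8, bias=False)
        out = lora_forward_sketch(
            base, lora_A, lora_B, nn.Identity(), 0.5, torch.randn(2, 16)
        )
        print(out.shape)  # torch.Size([2, 8])

With the two removed lines present, the LoRA contribution was added to `result` twice (once without the dtype conversion applied to the output), which is what the patch eliminates.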