Commit 49d9bd7

reverted groups fix change (this will be a separate PR)
1 parent cde0624 commit 49d9bd7

File tree

1 file changed: +1, -8 lines changed


src/peft/tuners/lora/layer.py

Lines changed: 1 addition & 8 deletions
@@ -904,13 +904,7 @@ def update_layer(self, adapter_name, r, lora_alpha, lora_dropout, init_lora_weights
         conv_layer = type(base_layer)
         out_kernel = out_stride = (1,) * (self._kernel_dim - 2)
         self.lora_A[adapter_name] = conv_layer(self.in_features, r, kernel_size, stride, padding, bias=False)
-
-        if use_dora:
-            # this ensures correct dimensions for layers using the groups argument
-            self.lora_B[adapter_name] = conv_layer(r, int(self.out_features / self.base_layer.groups), out_kernel,
-                                                   out_stride,bias=False)
-        else:
-            self.lora_B[adapter_name] = conv_layer(r, self.out_features, out_kernel, out_stride, bias=False)
+        self.lora_B[adapter_name] = conv_layer(r, self.out_features, out_kernel, out_stride, bias=False)
 
         if use_rslora:
             self.scaling[adapter_name] = lora_alpha / math.sqrt(r)

@@ -1096,7 +1090,6 @@ def get_delta_weight(self, adapter) -> torch.Tensor:
     def forward(self, x: torch.Tensor, *args, **kwargs) -> torch.Tensor:
         self._check_forward_args(x, *args, **kwargs)
         adapter_names = kwargs.pop("adapter_names", None)
-
         if self.disable_adapters:
             if self.merged:
                 self.unmerge()
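
For context on the reverted branch: a grouped Conv2d stores its weight with shape (out_channels, in_channels // groups, *kernel_size), so a delta weight assembled from lora_A/lora_B factors that ignore groups does not line up with the base weight, which is presumably the dimension problem the removed comment refers to. The sketch below is plain PyTorch rather than PEFT code, with made-up sizes and the usual 3x3 lora_A / 1x1 lora_B factorization, just to reproduce the mismatch:

# Minimal sketch (plain PyTorch, not PEFT code) of the shape issue behind the
# reverted branch. Sizes below are made up for illustration.
import torch
import torch.nn as nn

in_features, out_features, groups, r = 16, 32, 4, 8

# A grouped convolution stores its weight as (out_channels, in_channels // groups, kH, kW).
base = nn.Conv2d(in_features, out_features, kernel_size=3, padding=1, groups=groups)
print(tuple(base.weight.shape))   # (32, 4, 3, 3)

# LoRA factors as created by the line kept in this commit (no groups handling):
# lora_A mirrors the base kernel, lora_B is a 1x1 projection from rank r.
lora_A = nn.Conv2d(in_features, r, kernel_size=3, padding=1, bias=False)
lora_B = nn.Conv2d(r, out_features, kernel_size=1, bias=False)

# Composing the two factors into a single delta weight, as a merge would:
delta = (lora_B.weight.squeeze(-1).squeeze(-1) @ lora_A.weight.flatten(1)).view(
    out_features, in_features, 3, 3
)
print(tuple(delta.shape))         # (32, 16, 3, 3) -- does not match base.weight

Per the commit message, the groups handling itself is not dropped for good but will be resubmitted as a separate PR.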
