@@ -147,6 +147,18 @@ def resolve_lora_variant(self, *, config: LoraConfig, **kwargs) -> Optional[Lora
 
     def update_layer(
         self,
-        adapter_name: str,
-        r: int,
-        lora_alpha: int,
+        adapter_name,
+        r,
+        lora_alpha,
+        lora_dropout,
+        init_lora_weights,
+        use_rslora,
+        use_dora: bool = False,
+        use_alora: bool = False,
+        use_qalora: bool = False,
+        use_monteclora: bool = False,
+        lora_bias: bool = False,
+        arrow_config: ArrowConfig = None,
+        monteclora_config=None,
+        qalora_group_size: int = 32,
+        inference_mode: bool = False,
@@ -174,7 +186,15 @@ def update_layer(
                 PeftWarning,
             )
 
-        lora_variant = self.resolve_lora_variant(config=config)
+        lora_variant = self.resolve_lora_variant(
+            use_dora=use_dora,
+            use_alora=use_alora,
+            use_qalora=use_qalora,
+            use_monteclora=use_monteclora,
+            qalora_group_size=qalora_group_size,
+            arrow_config=arrow_config,
+            monteclora_config=monteclora_config,
+        )
         if lora_variant is not None:
             self.lora_variant[adapter_name] = lora_variant
 
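The hunk above switches `resolve_lora_variant` from receiving the whole `LoraConfig` to receiving explicit keyword flags, so each layer class can dispatch on exactly the options it supports. A minimal, self-contained sketch of that dispatch pattern (the classes here are stand-ins, not PEFT's real ones):

```python
from typing import Optional


class LoraVariant:
    """Stand-in for peft.tuners.lora.variants.LoraVariant."""


class DoraLinearVariant(LoraVariant):
    pass


class ALoraLinearVariant(LoraVariant):
    pass


def resolve_lora_variant(*, use_dora: bool = False, use_alora: bool = False, **kwargs) -> Optional[LoraVariant]:
    # Explicit flags replace attribute lookups such as config.use_dora;
    # plain LoRA (no variant) is signalled by returning None.
    if use_alora:
        return ALoraLinearVariant()
    if use_dora:
        return DoraLinearVariant()
    return None


assert resolve_lora_variant() is None
assert isinstance(resolve_lora_variant(use_dora=True), DoraLinearVariant)
```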
@@ -732,6 +752,14 @@ def __init__(
         r: int = 0,
         lora_alpha: int = 1,
         is_target_conv_1d_layer: bool = False,
+        init_lora_weights: Union[bool, str] = True,
+        use_rslora: bool = False,
+        use_dora: bool = False,
+        use_alora: bool = False,
+        use_monteclora: bool = False,
+        arrow_config: ArrowConfig = None,
+        monteclora_config=None,
+        lora_bias: bool = False,
         **kwargs,
     ) -> None:
         super().__init__()
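With these parameters on the `Linear` constructor, callers pass variant flags directly instead of a config object. A hypothetical construction call, assuming this PR is applied and PEFT's usual `base_layer`/`adapter_name` leading arguments (untested; check the full signature before relying on it):

```python
import torch.nn as nn

from peft.tuners.lora.layer import Linear  # module path as in PEFT's source tree

base_layer = nn.Linear(128, 128)
lora_linear = Linear(
    base_layer,
    adapter_name="default",
    r=8,
    lora_alpha=16,
    lora_dropout=0.0,
    use_monteclora=True,     # routes resolve_lora_variant to the MonteCLoRA branch
    monteclora_config=None,  # left untyped by this PR; a real call would presumably pass a MonteCLoRA config object
)
```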
@@ -743,24 +771,32 @@ def __init__(
             adapter_name,
             r,
             lora_alpha=lora_alpha,
-            config=config,
-            **kwargs,
+            lora_dropout=lora_dropout,
+            init_lora_weights=init_lora_weights,
+            use_rslora=use_rslora,
+            use_dora=use_dora,
+            use_alora=use_alora,
+            use_monteclora=use_monteclora,
+            lora_bias=lora_bias,
+            arrow_config=arrow_config,
+            monteclora_config=monteclora_config,
         )
         self.is_target_conv_1d_layer = is_target_conv_1d_layer
 
-    def resolve_lora_variant(self, config: LoraConfig, **kwargs) -> Optional[LoraVariant]:
-        if config.arrow_config is not None:
+    def resolve_lora_variant(
+        self, *, arrow_config: ArrowConfig, use_dora: bool, use_alora: bool, use_monteclora: bool = False, **kwargs
+    ) -> Optional[LoraVariant]:
+        if arrow_config is not None:
             from .variants import ArrowLinearVariant
 
             return ArrowLinearVariant()
 
-        if config.use_bdlora is not None:
-            from .variants import BdLoraLinearVariant
+        if use_monteclora:
+            from peft.tuners.monteclora.variant import MonteCLoraLinearVariant
 
-            return BdLoraLinearVariant()
+            return MonteCLoraLinearVariant()
 
-        use_alora = config.alora_invocation_tokens is not None
-        if not config.use_dora and not use_alora:
+        if not use_dora and not use_alora:
             return None
 
         from .variants import ALoraLinearVariant, DoraLinearVariant
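Unlike the other variants, `MonteCLoraLinearVariant` is imported from a dedicated `peft.tuners.monteclora.variant` module rather than from `.variants`. A hedged skeleton of what that class needs to provide, assuming the static-hook protocol that PEFT's `LoraVariant` base class uses for DoRA (the hook names below should be verified against `peft/tuners/lora/variants.py`):

```python
import torch

from peft.tuners.lora.variants import LoraVariant  # assumed base-class location


class MonteCLoraLinearVariant(LoraVariant):
    @staticmethod
    def init(module, adapter_name: str, **kwargs) -> None:
        # Attach MonteCLoRA-specific state to the LoraLayer when
        # update_layer() registers the adapter (assumption: init is the
        # registration hook, as it is for the DoRA variant).
        ...

    @staticmethod
    def forward(module, active_adapter: str, x: torch.Tensor, result: torch.Tensor) -> torch.Tensor:
        # Combine the variant's contribution with the base-layer result;
        # returning `result` unchanged keeps this skeleton a no-op.
        return result
```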