@@ -29,7 +29,7 @@ class FourierFTLayer(BaseTunerLayer):
2929 # All names of other parameters that may contain adapter-related parameters
3030 other_param_names = ("fourierft_n_frequency" , "fourierft_scaling" , "fourierft_random_loc_seed" )
3131
32- def __init__ (self , base_layer : nn .Module , ** kwargs ) -> None :
32+ def __init__ (self , base_layer : nn .Module , alpha , ** kwargs ) -> None :
3333 self .base_layer = base_layer
3434 self .fourierft_n_frequency = {}
3535 self .fourierft_scaling = {}
@@ -49,7 +49,8 @@ def __init__(self, base_layer: nn.Module, **kwargs) -> None:
4949 base_layer .weight .ds_shape if hasattr (base_layer .weight , "ds_shape" ) else base_layer .weight .shape
5050 )
5151 elif isinstance (base_layer , nn .Conv2d ):
52- pass
52+ self .in_features = base_layer .in_channels
53+ self .out_features = base_layer .out_channels
5354 else :
5455 raise ValueError (f"Unsupported layer type { type (base_layer )} " )
5556
@@ -104,14 +105,20 @@ def __init__(
104105 base_layer ,
105106 adapter_name : str ,
106107 n_frequency : int = 1000 ,
108+ alpha : float = None ,
107109 scaling : float = 150.0 ,
108110 fan_in_fan_out : bool = False , # Set this to True if the layer to replace stores weight like (fan_in, fan_out)
109111 init_weights : Union [bool , str ] = False ,
110112 random_loc_seed : int = 777 ,
111113 ** kwargs ,
112114 ) -> None :
113115 super ().__init__ ()
114- FourierFTLayer .__init__ (self , base_layer , ** kwargs )
116+ FourierFTLayer .__init__ (self , base_layer , alpha , ** kwargs )
117+
118+ # apply alpha patch
119+ if alpha :
120+ n_frequency = int (alpha * self .in_features * self .out_features )
121+
115122 self .fan_in_fan_out = fan_in_fan_out
116123 self ._active_adapter = adapter_name
117124 self .update_layer (adapter_name , n_frequency , scaling , init_weights , random_loc_seed )
@@ -201,18 +208,22 @@ def __init__(
201208 base_layer ,
202209 adapter_name : str ,
203210 n_frequency : int = 1000 ,
211+ alpha : float = None ,
204212 scaling : float = 150.0 ,
205213 fan_in_fan_out : bool = False , # Set this to True if the layer to replace stores weight like (fan_in, fan_out)
206214 init_weights : Union [bool , str ] = False ,
207215 random_loc_seed : int = 777 ,
208216 ** kwargs ,
209217 ) -> None :
210218 super ().__init__ ()
211- FourierFTLayer .__init__ (self , base_layer , ** kwargs )
219+ FourierFTLayer .__init__ (self , base_layer , alpha , ** kwargs )
220+
221+ # apply alpha patch
222+ if alpha :
223+ n_frequency = int (alpha * self .in_features * self .out_features )
224+
212225 self .fan_in_fan_out = fan_in_fan_out
213226 self ._active_adapter = adapter_name
214- self .in_features = base_layer .in_channels
215- self .out_features = base_layer .out_channels
216227 self .kW = base_layer .kernel_size [0 ]
217228 self .kH = base_layer .kernel_size [1 ]
218229 self .stride = base_layer .stride