Commit 214345e

ENH Check layers to transforms and layer pattern (#2159)
1 parent 9c730d7 commit 214345e

12 files changed: +60 -7 lines changed

src/peft/tuners/adalora/config.py

Lines changed: 3 additions & 3 deletions

@@ -68,9 +68,9 @@ def __post_init__(self):
         if isinstance(self.target_modules, str) and self.layers_to_transform is not None:
             raise ValueError("`layers_to_transform` cannot be used when `target_modules` is a str.")

-        # if target_modules is a regex expression, then layers_pattern should be None
-        if isinstance(self.target_modules, str) and self.layers_pattern is not None:
-            raise ValueError("`layers_pattern` cannot be used when `target_modules` is a str.")
+        # check for layers_to_transform and layers_pattern
+        if self.layers_pattern and not self.layers_to_transform:
+            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified. ")

         # Check if 'r' has been set to a non-default value
         if self.r != 8:  # 8 is the default value for 'r' in LoraConfig

src/peft/tuners/boft/config.py

Lines changed: 3 additions & 0 deletions

@@ -146,6 +146,9 @@ def __post_init__(self):
         self.exclude_modules = (
             set(self.exclude_modules) if isinstance(self.exclude_modules, list) else self.exclude_modules
         )
+        # check for layers_to_transform and layers_pattern
+        if self.layers_pattern and not self.layers_to_transform:
+            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified. ")
         if self.boft_block_size == 0 and self.boft_block_num == 0:
             raise ValueError(
                 f"Either `boft_block_size` or `boft_block_num` must be non-zero. Currently, boft_block_size = {self.boft_block_size} and boft_block_num = {self.boft_block_num}."

src/peft/tuners/fourierft/config.py

Lines changed: 3 additions & 0 deletions

@@ -199,3 +199,6 @@ def __post_init__(self):
         # if target_modules is a regex expression, then layers_pattern should be None
         if isinstance(self.target_modules, str) and self.layers_pattern is not None:
             raise ValueError("`layers_pattern` cannot be used when `target_modules` is a str.")
+        # check for layers_to_transform and layers_pattern
+        if self.layers_pattern and not self.layers_to_transform:
+            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified. ")

src/peft/tuners/hra/config.py

Lines changed: 4 additions & 0 deletions

@@ -129,3 +129,7 @@ def __post_init__(self):
         # if target_modules is a regex expression, then layers_pattern should be None
         if isinstance(self.target_modules, str) and self.layers_pattern is not None:
             raise ValueError("`layers_pattern` cannot be used when `target_modules` is a str.")
+
+        # check for layers_to_transform and layers_pattern
+        if self.layers_pattern and not self.layers_to_transform:
+            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified. ")

src/peft/tuners/loha/config.py

Lines changed: 3 additions & 0 deletions

@@ -133,3 +133,6 @@ def __post_init__(self):
         self.exclude_modules = (
             set(self.exclude_modules) if isinstance(self.exclude_modules, list) else self.exclude_modules
         )
+        # check for layers_to_transform and layers_pattern
+        if self.layers_pattern and not self.layers_to_transform:
+            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified. ")

src/peft/tuners/lokr/config.py

Lines changed: 3 additions & 0 deletions

@@ -142,3 +142,6 @@ def __post_init__(self):
         self.exclude_modules = (
             set(self.exclude_modules) if isinstance(self.exclude_modules, list) else self.exclude_modules
         )
+        # check for layers_to_transform and layers_pattern
+        if self.layers_pattern and not self.layers_to_transform:
+            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified. ")

src/peft/tuners/lora/config.py

Lines changed: 5 additions & 0 deletions

@@ -340,6 +340,7 @@ def __post_init__(self):
         self.exclude_modules = (
             set(self.exclude_modules) if isinstance(self.exclude_modules, list) else self.exclude_modules
         )
+
         # if target_modules is a regex expression, then layers_to_transform should be None
         if isinstance(self.target_modules, str) and self.layers_to_transform is not None:
             raise ValueError("`layers_to_transform` cannot be used when `target_modules` is a str.")
@@ -348,6 +349,10 @@ def __post_init__(self):
         if isinstance(self.target_modules, str) and self.layers_pattern is not None:
             raise ValueError("`layers_pattern` cannot be used when `target_modules` is a str.")

+        # check for layers_to_transform and layers_pattern
+        if self.layers_pattern and not self.layers_to_transform:
+            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified. ")
+
         if self.use_dora and self.megatron_config:
             raise ValueError("DoRA does not support megatron_core, please set `use_dora=False`.")

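The LoRA hunk above is representative of the whole commit: the same three-line guard is added to each config's __post_init__. As a minimal sketch of the user-facing effect (module names and the "layers" pattern are illustrative, not taken from the commit), setting layers_pattern without layers_to_transform now fails at config construction time:

from peft import LoraConfig

try:
    # `layers_pattern` given without `layers_to_transform`: the new guard in
    # __post_init__ raises a ValueError as soon as the config is built.
    LoraConfig(target_modules=["q_proj", "v_proj"], layers_pattern="layers")
except ValueError as err:
    print(err)
    # -> When `layers_pattern` is specified, `layers_to_transform` must also be specified.
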
src/peft/tuners/oft/config.py

Lines changed: 3 additions & 0 deletions

@@ -176,6 +176,9 @@ def __post_init__(self):
         self.exclude_modules = (
             set(self.exclude_modules) if isinstance(self.exclude_modules, list) else self.exclude_modules
         )
+        # check for layers_to_transform and layers_pattern
+        if self.layers_pattern and not self.layers_to_transform:
+            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified. ")
         if self.r == 0 and self.oft_block_size == 0:
             raise ValueError(
                 f"Either `r` or `oft_block_size` must be non-zero. Currently, r = {self.r} and oft_block_size = {self.oft_block_size}."

src/peft/tuners/vblora/config.py

Lines changed: 3 additions & 0 deletions

@@ -190,3 +190,6 @@ def __post_init__(self):
         self.exclude_modules = (
             set(self.exclude_modules) if isinstance(self.exclude_modules, list) else self.exclude_modules
         )
+        # check for layers_to_transform and layers_pattern
+        if self.layers_pattern and not self.layers_to_transform:
+            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified. ")

src/peft/tuners/vera/config.py

Lines changed: 3 additions & 1 deletion

@@ -150,7 +150,9 @@ def __post_init__(self):
         self.target_modules = (
             set(self.target_modules) if isinstance(self.target_modules, list) else self.target_modules
         )
-
+        # check for layers_to_transform and layers_pattern
+        if self.layers_pattern and not self.layers_to_transform:
+            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified. ")
         if not self.save_projection:
             warnings.warn(
                 "Specified to not save vera_A and vera_B within the state dictionary, instead they will be restored "

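Since every config touched by this commit receives the identical guard, the remedy is the same for all of these tuners: pass the layer indices together with the pattern. A sketch of a configuration that passes the new check (the module names, indices, and "layers" pattern are placeholders that depend on the base model):

from peft import LoraConfig

# Both fields supplied, so the new __post_init__ check passes; values are placeholders.
config = LoraConfig(
    target_modules=["q_proj", "v_proj"],   # modules to adapt (illustrative)
    layers_to_transform=[0, 1, 2],         # indices of the layers to adapt
    layers_pattern="layers",               # pattern matching the model's layer list, e.g. "layers" or "h"
)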