ENH Check layers to transforms and layer pattern (#2159)
JINO-ROHIT authored Oct 29, 2024
1 parent 9c730d7 commit 214345e
Showing 12 changed files with 60 additions and 7 deletions.
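
The same guard is added to the `__post_init__` of each tuner config: passing `layers_pattern` without `layers_to_transform` now raises a ValueError at construction time. A minimal sketch of the resulting behavior, assuming a PEFT version that includes this commit (the module names are illustrative, not taken from the diff):

    from peft import LoraConfig

    # layers_pattern alone is now rejected when the config is built
    try:
        LoraConfig(target_modules=["q_proj", "v_proj"], layers_pattern="layers")
    except ValueError as err:
        print(err)  # When `layers_pattern` is specified, `layers_to_transform` must also be specified.

    # supplying both parameters together is still accepted
    config = LoraConfig(
        target_modules=["q_proj", "v_proj"],
        layers_to_transform=[0, 1, 2],
        layers_pattern="layers",
    )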
6 changes: 3 additions & 3 deletions src/peft/tuners/adalora/config.py
@@ -68,9 +68,9 @@ def __post_init__(self):
         if isinstance(self.target_modules, str) and self.layers_to_transform is not None:
             raise ValueError("`layers_to_transform` cannot be used when `target_modules` is a str.")
 
-        # if target_modules is a regex expression, then layers_pattern should be None
-        if isinstance(self.target_modules, str) and self.layers_pattern is not None:
-            raise ValueError("`layers_pattern` cannot be used when `target_modules` is a str.")
+        # check for layers_to_transform and layers_pattern
+        if self.layers_pattern and not self.layers_to_transform:
+            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified. ")
 
         # Check if 'r' has been set to a non-default value
         if self.r != 8:  # 8 is the default value for 'r' in LoraConfig
3 changes: 3 additions & 0 deletions src/peft/tuners/boft/config.py
@@ -146,6 +146,9 @@ def __post_init__(self):
         self.exclude_modules = (
             set(self.exclude_modules) if isinstance(self.exclude_modules, list) else self.exclude_modules
         )
+        # check for layers_to_transform and layers_pattern
+        if self.layers_pattern and not self.layers_to_transform:
+            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified. ")
         if self.boft_block_size == 0 and self.boft_block_num == 0:
             raise ValueError(
                 f"Either `boft_block_size` or `boft_block_num` must be non-zero. Currently, boft_block_size = {self.boft_block_size} and boft_block_num = {self.boft_block_num}."
3 changes: 3 additions & 0 deletions src/peft/tuners/fourierft/config.py
@@ -199,3 +199,6 @@ def __post_init__(self):
         # if target_modules is a regex expression, then layers_pattern should be None
         if isinstance(self.target_modules, str) and self.layers_pattern is not None:
             raise ValueError("`layers_pattern` cannot be used when `target_modules` is a str.")
+        # check for layers_to_transform and layers_pattern
+        if self.layers_pattern and not self.layers_to_transform:
+            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified. ")
4 changes: 4 additions & 0 deletions src/peft/tuners/hra/config.py
@@ -129,3 +129,7 @@ def __post_init__(self):
         # if target_modules is a regex expression, then layers_pattern should be None
         if isinstance(self.target_modules, str) and self.layers_pattern is not None:
             raise ValueError("`layers_pattern` cannot be used when `target_modules` is a str.")
+
+        # check for layers_to_transform and layers_pattern
+        if self.layers_pattern and not self.layers_to_transform:
+            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified. ")
3 changes: 3 additions & 0 deletions src/peft/tuners/loha/config.py
@@ -133,3 +133,6 @@ def __post_init__(self):
         self.exclude_modules = (
             set(self.exclude_modules) if isinstance(self.exclude_modules, list) else self.exclude_modules
         )
+        # check for layers_to_transform and layers_pattern
+        if self.layers_pattern and not self.layers_to_transform:
+            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified. ")
3 changes: 3 additions & 0 deletions src/peft/tuners/lokr/config.py
@@ -142,3 +142,6 @@ def __post_init__(self):
         self.exclude_modules = (
             set(self.exclude_modules) if isinstance(self.exclude_modules, list) else self.exclude_modules
         )
+        # check for layers_to_transform and layers_pattern
+        if self.layers_pattern and not self.layers_to_transform:
+            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified. ")
5 changes: 5 additions & 0 deletions src/peft/tuners/lora/config.py
@@ -340,6 +340,7 @@ def __post_init__(self):
         self.exclude_modules = (
             set(self.exclude_modules) if isinstance(self.exclude_modules, list) else self.exclude_modules
         )
+
         # if target_modules is a regex expression, then layers_to_transform should be None
         if isinstance(self.target_modules, str) and self.layers_to_transform is not None:
             raise ValueError("`layers_to_transform` cannot be used when `target_modules` is a str.")
@@ -348,6 +349,10 @@ def __post_init__(self):
         if isinstance(self.target_modules, str) and self.layers_pattern is not None:
             raise ValueError("`layers_pattern` cannot be used when `target_modules` is a str.")
 
+        # check for layers_to_transform and layers_pattern
+        if self.layers_pattern and not self.layers_to_transform:
+            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified. ")
+
         if self.use_dora and self.megatron_config:
             raise ValueError("DoRA does not support megatron_core, please set `use_dora=False`.")
 
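
For context, `layers_to_transform` and `layers_pattern` are only usable when `target_modules` is given as a list (the existing checks above reject them for a regex string). A rough usage sketch; the checkpoint and module names are illustrative and not taken from this commit:

    from peft import LoraConfig, get_peft_model
    from transformers import AutoModelForCausalLM

    base_model = AutoModelForCausalLM.from_pretrained("facebook/opt-125m")  # example checkpoint

    config = LoraConfig(
        target_modules=["q_proj", "v_proj"],  # attention projections to adapt
        layers_to_transform=[0, 1],           # restrict LoRA to the first two decoder blocks
        layers_pattern="layers",              # name of the nn.ModuleList holding those blocks
    )
    peft_model = get_peft_model(base_model, config)
    peft_model.print_trainable_parameters()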
3 changes: 3 additions & 0 deletions src/peft/tuners/oft/config.py
@@ -176,6 +176,9 @@ def __post_init__(self):
         self.exclude_modules = (
             set(self.exclude_modules) if isinstance(self.exclude_modules, list) else self.exclude_modules
         )
+        # check for layers_to_transform and layers_pattern
+        if self.layers_pattern and not self.layers_to_transform:
+            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified. ")
         if self.r == 0 and self.oft_block_size == 0:
             raise ValueError(
                 f"Either `r` or `oft_block_size` must be non-zero. Currently, r = {self.r} and oft_block_size = {self.oft_block_size}."
3 changes: 3 additions & 0 deletions src/peft/tuners/vblora/config.py
@@ -190,3 +190,6 @@ def __post_init__(self):
         self.exclude_modules = (
             set(self.exclude_modules) if isinstance(self.exclude_modules, list) else self.exclude_modules
         )
+        # check for layers_to_transform and layers_pattern
+        if self.layers_pattern and not self.layers_to_transform:
+            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified. ")
4 changes: 3 additions & 1 deletion src/peft/tuners/vera/config.py
@@ -150,7 +150,9 @@ def __post_init__(self):
         self.target_modules = (
             set(self.target_modules) if isinstance(self.target_modules, list) else self.target_modules
         )
-
+        # check for layers_to_transform and layers_pattern
+        if self.layers_pattern and not self.layers_to_transform:
+            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified. ")
         if not self.save_projection:
             warnings.warn(
                 "Specified to not save vera_A and vera_B within the state dictionary, instead they will be restored "
27 changes: 27 additions & 0 deletions tests/test_config.py
@@ -351,3 +351,30 @@ def test_from_pretrained_sanity_check(self, config_class, tmp_path):
         msg = f"The config that is trying to be loaded is not a valid {config_class.__name__} config"
         with pytest.raises(TypeError, match=msg):
             config_class.from_pretrained(tmp_path)
+
+    def test_lora_config_layers_to_transform_validation(self):
+        """Test that specifying layers_pattern without layers_to_transform raises an error"""
+        with pytest.raises(
+            ValueError, match="When `layers_pattern` is specified, `layers_to_transform` must also be specified."
+        ):
+            LoraConfig(r=8, lora_alpha=16, target_modules=["query", "value"], layers_pattern="model.layers")
+
+        # Test that specifying both layers_to_transform and layers_pattern works fine
+        config = LoraConfig(
+            r=8,
+            lora_alpha=16,
+            target_modules=["query", "value"],
+            layers_to_transform=[0, 1, 2],
+            layers_pattern="model.layers",
+        )
+        assert config.layers_to_transform == [0, 1, 2]
+        assert config.layers_pattern == "model.layers"
+
+        # Test that not specifying either works fine
+        config = LoraConfig(
+            r=8,
+            lora_alpha=16,
+            target_modules=["query", "value"],
+        )
+        assert config.layers_to_transform is None
+        assert config.layers_pattern is None
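
To run just this new check locally, something like `pytest tests/test_config.py -k layers_to_transform_validation` should select it.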
3 changes: 0 additions & 3 deletions tests/test_tuners_utils.py
@@ -92,9 +92,6 @@
     ("foo.bar.1.baz", ["baz"], [0, 1, 2], ["bar"], True),
     ("foo.bar.1.baz", ["baz", "spam"], [1], ["bar"], True),
     ("foo.bar.1.baz", ["baz", "spam"], [0, 1, 2], ["bar"], True),
-    # empty layers_to_transform
-    ("foo.bar.7.baz", ["baz"], [], ["bar"], True),
-    ("foo.bar.7.baz", ["baz"], None, ["bar"], True),
     # empty layers_pattern
     ("foo.whatever.1.baz", ["baz"], [1], [], True),
     ("foo.whatever.1.baz", ["baz"], [0], [], False),
