19 changes: 10 additions & 9 deletions src/transformers/integrations/peft.py
@@ -654,17 +654,18 @@ def maybe_load_adapters(
     token_from_adapter_kwargs = adapter_kwargs.pop("token", None)

     if _adapter_model_path is None:
+        peft_kwargs = adapter_kwargs.copy()
+        for arg_name in ("cache_dir", "proxies", "subfolder"):  # don't override revision
+            if (arg_name not in peft_kwargs) and (arg_name in download_kwargs):
+                peft_kwargs[arg_name] = download_kwargs[arg_name]
+        if "commit_hash" in download_kwargs:
+            peft_kwargs["_commit_hash"] = download_kwargs["commit_hash"]
+        peft_kwargs["force_download"] = bool(download_kwargs.get("force_download", False))
+        peft_kwargs["local_files_only"] = bool(download_kwargs.get("local_files_only", False))
+        peft_kwargs["token"] = token or token_from_adapter_kwargs
         _adapter_model_path = find_adapter_config_file(
             pretrained_model_name_or_path,
-            cache_dir=download_kwargs.get("cache_dir"),
-            force_download=bool(download_kwargs.get("force_download", False)),
-            proxies=download_kwargs.get("proxies"),
-            token=token or token_from_adapter_kwargs,
-            revision=download_kwargs.get("revision"),
-            local_files_only=bool(download_kwargs.get("local_files_only", False)),
-            subfolder=download_kwargs.get("subfolder", ""),
-            _commit_hash=download_kwargs.get("commit_hash"),
-            **adapter_kwargs,
+            **peft_kwargs,
         )

     if _adapter_model_path is not None and os.path.isfile(_adapter_model_path):
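The new block builds `peft_kwargs` from a copy of `adapter_kwargs` and only fills in download settings (`cache_dir`, `proxies`, `subfolder`) that the adapter kwargs do not already provide, so an adapter-specific `revision` is never clobbered by the model-level download kwargs. A standalone sketch of that merge with made-up dictionary values, token and commit-hash handling omitted for brevity (illustrative only, not code from the PR):

download_kwargs = {"cache_dir": "/tmp/hf-cache", "revision": "v2.0", "force_download": False}
adapter_kwargs = {"revision": "adapter-branch"}  # adapter pins its own revision

peft_kwargs = adapter_kwargs.copy()
for arg_name in ("cache_dir", "proxies", "subfolder"):  # revision intentionally excluded
    if arg_name not in peft_kwargs and arg_name in download_kwargs:
        peft_kwargs[arg_name] = download_kwargs[arg_name]
peft_kwargs["force_download"] = bool(download_kwargs.get("force_download", False))
peft_kwargs["local_files_only"] = bool(download_kwargs.get("local_files_only", False))

print(peft_kwargs)
# {'revision': 'adapter-branch', 'cache_dir': '/tmp/hf-cache', 'force_download': False, 'local_files_only': False}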
2 changes: 1 addition & 1 deletion src/transformers/modeling_utils.py
@@ -3921,7 +3921,7 @@ def from_pretrained(
         subfolder = kwargs.pop("subfolder", "")
         commit_hash = kwargs.pop("_commit_hash", None)
         variant = kwargs.pop("variant", None)
-        adapter_kwargs = kwargs.pop("adapter_kwargs", {})
+        adapter_kwargs = (kwargs.pop("adapter_kwargs", {}) or {}).copy()
         adapter_name = kwargs.pop("adapter_name", "default")
         generation_config = kwargs.pop("generation_config", None)
         gguf_file = kwargs.pop("gguf_file", None)
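The `(kwargs.pop("adapter_kwargs", {}) or {}).copy()` pattern covers two cases at once: a caller passing `adapter_kwargs=None` (the `or {}` turns it into an empty dict) and a caller reusing the same dict across calls (the `.copy()` keeps later in-place additions from leaking back into it). A small self-contained illustration of the idiom, not the actual `from_pretrained` code; the function name and the placeholder token are invented:

def consume(**kwargs):
    # Same defensive pattern as above: tolerate None and never mutate the caller's dict.
    adapter_kwargs = (kwargs.pop("adapter_kwargs", {}) or {}).copy()
    adapter_kwargs["token"] = "hf_xxx"  # placeholder for an internally injected value
    return adapter_kwargs

user_kwargs = {"revision": "main"}
consume(adapter_kwargs=user_kwargs)
print(user_kwargs)                   # {'revision': 'main'}  -- caller's dict unchanged
print(consume(adapter_kwargs=None))  # {'token': 'hf_xxx'}   -- None is tolerated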
5 changes: 3 additions & 2 deletions src/transformers/models/auto/auto_factory.py
@@ -288,8 +288,9 @@ def from_pretrained(cls, pretrained_model_name_or_path: Union[str, os.PathLike[s
         if is_peft_available():
             if adapter_kwargs is None:
                 adapter_kwargs = {}
-            if token is not None:
-                adapter_kwargs["token"] = token
+            adapter_kwargs = adapter_kwargs.copy()  # avoid mutating original
+            if token is not None:
+                adapter_kwargs["token"] = token
Review comment (Member Author) on lines +292 to +293:
Note: This change is not directly related to the PR, but I think this is the correct way to handle the token.


             maybe_adapter_path = find_adapter_config_file(
                 pretrained_model_name_or_path, _commit_hash=commit_hash, **adapter_kwargs
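The added `.copy()` matters because a `token` is written into `adapter_kwargs` right before it is expanded into `find_adapter_config_file`; without the copy, the injected key ends up in the caller's own dict and is silently reused by any later call that passes the same dict. A hedged sketch of that failure mode, using an invented stand-in function rather than the real auto-class code:

def load(adapter_kwargs=None, token=None, *, copy_first=True):
    # Stand-in for the logic above: optionally copy before injecting `token`.
    if adapter_kwargs is None:
        adapter_kwargs = {}
    if copy_first:
        adapter_kwargs = adapter_kwargs.copy()  # avoid mutating original
    if token is not None:
        adapter_kwargs["token"] = token
    return adapter_kwargs  # forwarded downstream as **adapter_kwargs

shared = {"revision": "test"}
load(shared, token="secret", copy_first=False)
print(shared)  # {'revision': 'test', 'token': 'secret'}  -- caller's dict polluted

shared = {"revision": "test"}
load(shared, token="secret")
print(shared)  # {'revision': 'test'}  -- with the copy, caller's dict is untouched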
7 changes: 4 additions & 3 deletions tests/peft_integration/test_peft_integration.py
@@ -699,20 +699,21 @@ def test_peft_from_pretrained_hub_kwargs(self):
         with self.assertRaises(OSError):
             _ = AutoModelForCausalLM.from_pretrained(peft_model_id)

-        adapter_kwargs = {"revision": "test"}
-
         # This should work
+        adapter_kwargs = {"revision": "test"}
         model = AutoModelForCausalLM.from_pretrained(peft_model_id, adapter_kwargs=adapter_kwargs)
         self.assertTrue(self._check_lora_correctly_converted(model))

+        # note: always create new adapter_kwargs, avoid the test relying on the previous calls possibly mutating them
+        adapter_kwargs = {"revision": "test"}
         model = OPTForCausalLM.from_pretrained(peft_model_id, adapter_kwargs=adapter_kwargs)
         self.assertTrue(self._check_lora_correctly_converted(model))

         adapter_kwargs = {"revision": "main", "subfolder": "test_subfolder"}
-
         model = AutoModelForCausalLM.from_pretrained(peft_model_id, adapter_kwargs=adapter_kwargs)
         self.assertTrue(self._check_lora_correctly_converted(model))

+        adapter_kwargs = {"revision": "main", "subfolder": "test_subfolder"}
         model = OPTForCausalLM.from_pretrained(peft_model_id, adapter_kwargs=adapter_kwargs)
         self.assertTrue(self._check_lora_correctly_converted(model))
