2 files changed: +4 −3 lines changed

File 1:

```diff
@@ -26,7 +26,6 @@
     require_flash_attn,
     require_read_token,
     require_torch,
-    require_torch_gpu,
     require_torch_large_gpu,
     slow,
     torch_device,
@@ -285,7 +284,10 @@ def test_export_static_cache(self):
         if version.parse(torch.__version__) < version.parse("2.5.0"):
             self.skipTest(reason="This test requires torch >= 2.5 to run.")
 
-        from transformers.integrations.executorch import TorchExportableModuleWithStaticCache, convert_and_export_with_cache
+        from transformers.integrations.executorch import (
+            TorchExportableModuleWithStaticCache,
+            convert_and_export_with_cache,
+        )
 
         model_id = "CohereForAI/c4ai-command-r7b-12-2024"
         EXPECTED_TEXT_COMPLETION = [
```
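For context, the import that the second hunk reformats (splitting it across parenthesized lines to satisfy the line-length limit) is the ExecuTorch export entry point exercised by `test_export_static_cache`. Below is a minimal sketch of how the two imported symbols are typically combined, based on the public `transformers.integrations.executorch` API; the `GenerationConfig` values, prompt, and cache sizes are illustrative assumptions, not the test's actual parameters:

```python
# Sketch only (not the test body): export a model with a static KV cache
# via torch.export, then generate from the exported program.
# Requires torch >= 2.5, matching the skipTest guard above.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig
from transformers.integrations.executorch import (
    TorchExportableModuleWithStaticCache,
    convert_and_export_with_cache,
)

model_id = "CohereForAI/c4ai-command-r7b-12-2024"  # same checkpoint as the test
max_len = 30  # assumed generation budget for this sketch

tokenizer = AutoTokenizer.from_pretrained(model_id)
# The exportable wrapper expects a static cache_implementation on the
# model's generation_config.
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,
    generation_config=GenerationConfig(
        use_cache=True,
        cache_implementation="static",
        max_length=max_len,
        cache_config={"batch_size": 1, "max_cache_len": max_len},
    ),
)

prompt_token_ids = tokenizer("Hello, my name is", return_tensors="pt").input_ids

# Wrap the model with its static cache and run torch.export on it.
exported_program = convert_and_export_with_cache(model)

# Greedy-decode from the exported program and print the result.
generated_ids = TorchExportableModuleWithStaticCache.generate(
    exported_program=exported_program,
    prompt_token_ids=prompt_token_ids,
    max_new_tokens=max_len - prompt_token_ids.shape[-1],
)
print(tokenizer.batch_decode(generated_ids, skip_special_tokens=True))
```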
File 2:

```diff
@@ -23,7 +23,6 @@
     is_flaky,
     require_flash_attn,
     require_torch,
-    require_torch_accelerator,
     require_torch_large_gpu,
     require_torch_sdpa,
     slow,
```
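Both files make the same gating change: the blanket hardware decorator (`require_torch_gpu` in the first file, `require_torch_accelerator` in the second) is dropped from the imports, leaving `require_torch_large_gpu` as the hardware gate, presumably because the large-GPU requirement already implies a GPU is present. A hypothetical illustration of how these `transformers.testing_utils` decorators are typically stacked (the test name and body are invented):

```python
# Hypothetical sketch: the decorators skip the test everywhere except the
# scheduled slow CI job on a machine with a large-memory GPU.
from transformers.testing_utils import require_torch_large_gpu, slow


@slow
@require_torch_large_gpu
def test_7b_model_integration_generate():
    ...  # would load the checkpoint and assert on the generated text
```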