
Commit f12a721

Commit message: style
1 parent: f0dee79

File tree

1 file changed: +1 -0 lines changed


src/transformers/models/diffllama/modular_diffllama.py (+1 line)
@@ -433,6 +433,7 @@ def __init__(self, config: DiffLlamaConfig, layer_idx: int):
 class DiffLlamaPreTrainedModel(LlamaPreTrainedModel):
     _supports_flex_attn = False

+
 class DiffLlamaModel(LlamaModel):
     pass
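For reference, a minimal sketch of how this region of modular_diffllama.py likely reads after the commit. The class definitions are taken from the diff context above; the import line is an assumption added only so the snippet is self-contained (the actual modular file resolves these names through its own imports).

# Sketch of the affected region after commit f12a721.
# Assumption: this import is illustrative; the real modular file brings in
# LlamaModel / LlamaPreTrainedModel via its own (relative) imports.
from transformers.models.llama.modeling_llama import LlamaModel, LlamaPreTrainedModel


class DiffLlamaPreTrainedModel(LlamaPreTrainedModel):
    _supports_flex_attn = False


# The added blank line yields the two blank lines PEP 8 / black expect
# between top-level definitions, which appears to be all this "style"
# commit changes.
class DiffLlamaModel(LlamaModel):
    pass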

0 commit comments
