Skip to content

Commit 43fc2e9

Browse files
Commit message: Update modeling_llama.py
(author name lost in page extraction)
1 parent ae0643d · commit 43fc2e9

File tree

1 file changed

+2
-0
lines changed

1 file changed

+2
-0
lines changed

LAVIS/lavis/models/blip2_models/modeling_llama.py

+2
Original file line numberDiff line numberDiff line change
@@ -418,6 +418,8 @@ class LlamaFlashAttention2(LlamaAttention):
418418
untouched. The only required change would be on the forward pass where it needs to correctly call the public API of
419419
flash attention and deal with padding tokens in case the input contains any of them.
420420
"""
421+
def __init__(self, config: LlamaConfig):
422+
super().__init__(config)
421423

422424
def forward(
423425
self,

0 commit comments

Comments (0)