Commit 43fc2e9 (1 parent: ae0643d)
LAVIS/lavis/models/blip2_models/modeling_llama.py
@@ -418,6 +418,8 @@ class LlamaFlashAttention2(LlamaAttention):
     untouched. The only required change would be on the forward pass where it needs to correctly call the public API of
     flash attention and deal with padding tokens in case the input contains any of them.
     """
+    def __init__(self, config: LlamaConfig):
+        super().__init__(config)
 
     def forward(
         self,
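For context, here is a minimal, hypothetical sketch of the pattern this commit applies: `LlamaFlashAttention2` keeps everything from `LlamaAttention` by adding an explicit constructor that only delegates to the parent, leaving the flash-attention-specific behavior to the `forward()` override. The `LlamaConfig` fields and method bodies below are illustrative stubs, not the actual LAVIS/transformers code.

```python
# Hypothetical, self-contained sketch of the subclassing pattern in this
# commit. Class names mirror the diff; config fields and method bodies are
# illustrative assumptions, not the real transformers implementation.
from dataclasses import dataclass


@dataclass
class LlamaConfig:
    hidden_size: int = 4096
    num_attention_heads: int = 32


class LlamaAttention:
    def __init__(self, config: LlamaConfig):
        # Parent sets up config-derived attributes (and, in the real code,
        # the q/k/v/o projection weights).
        self.config = config
        self.head_dim = config.hidden_size // config.num_attention_heads

    def forward(self, hidden_states):
        raise NotImplementedError("standard attention goes here")


class LlamaFlashAttention2(LlamaAttention):
    """Module kept untouched except for forward(), which is expected to
    call the flash-attention public API and handle padding tokens."""

    def __init__(self, config: LlamaConfig):
        # The commit adds this explicit constructor; it only delegates to
        # the parent, so all weights and attributes stay identical.
        super().__init__(config)

    def forward(self, hidden_states):
        # Placeholder: the real override dispatches to flash attention.
        return hidden_states


if __name__ == "__main__":
    attn = LlamaFlashAttention2(LlamaConfig())
    print(attn.head_dim)  # -> 128
```

Delegating with a bare `super().__init__(config)` makes the intent explicit in the diff while guaranteeing the subclass stays weight-compatible with the parent module.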