Update model.py
Browse files
model.py
CHANGED
@@ -95,7 +95,7 @@ from transformers.models.whisper.generation_whisper import WhisperGenerationMixin
|
|
95 |
|
96 |
|
97 |
if is_flash_attn_2_available():
|
98 |
-
from
|
99 |
|
100 |
|
101 |
logger = logging.get_logger(__name__)
|
|
|
95 |
|
96 |
|
97 |
if is_flash_attn_2_available():
|
98 |
+
from transformers.modeling_flash_attention_utils import _flash_attention_forward
|
99 |
|
100 |
|
101 |
logger = logging.get_logger(__name__)
|