Spaces: Running on Zero
Update app.py (#8)
Commit ea8e736e6853df72bf0f777bffbb4ff11260ebae
app.py
CHANGED
@@ -1,15 +1,14 @@
-import subprocess
-# Installing flash_attn
-subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
-
 import gradio as gr
 import spaces
 from transformers import AutoModelForCausalLM, AutoProcessor
 import torch
 from PIL import Image
+import subprocess
+subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
 
 models = {
-    "microsoft/Phi-3.5-vision-instruct": AutoModelForCausalLM.from_pretrained("microsoft/Phi-3.5-vision-instruct", trust_remote_code=True, torch_dtype="auto").cuda().eval()
+    "microsoft/Phi-3.5-vision-instruct": AutoModelForCausalLM.from_pretrained("microsoft/Phi-3.5-vision-instruct", trust_remote_code=True, torch_dtype="auto", _attn_implementation="flash_attention_2").cuda().eval()
+
 }
 
 processors = {
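
The substantive code changes are the added _attn_implementation="flash_attention_2" argument to from_pretrained and the relocation of the runtime flash-attn install after the other imports. For readability, the top of app.py after this commit reads as follows; this is assembled from the context and added lines above, reflowed across lines, and the processors dict (truncated in this diff view) is not reproduced:

# Top of app.py after commit ea8e736e, assembled from the diff above
import gradio as gr
import spaces
from transformers import AutoModelForCausalLM, AutoProcessor
import torch
from PIL import Image
import subprocess

# Install flash-attn at runtime; FLASH_ATTENTION_SKIP_CUDA_BUILD=TRUE skips
# compiling the CUDA extension during the pip install.
subprocess.run(
    'pip install flash-attn --no-build-isolation',
    env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"},
    shell=True,
)

models = {
    # flash_attention_2 is now requested explicitly when the model is loaded
    "microsoft/Phi-3.5-vision-instruct": AutoModelForCausalLM.from_pretrained(
        "microsoft/Phi-3.5-vision-instruct",
        trust_remote_code=True,
        torch_dtype="auto",
        _attn_implementation="flash_attention_2",
    ).cuda().eval()
}

# The file continues with a processors dict (truncated in this diff view).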