Runtime error
Update app.py
app.py CHANGED
@@ -1,12 +1,17 @@
+import os
 import gradio as gr
 from transformers import AutoModelForCausalLM, AutoTokenizer
 import torch
 
-#
+# Retrieve the token from the Space secrets
+hf_token = os.getenv("HF_TOKEN")
+
+# Load the model with the token
 model_name = "mistralai/Mistral-7B-Instruct-v0.2"
-tokenizer = AutoTokenizer.from_pretrained(model_name)
+tokenizer = AutoTokenizer.from_pretrained(model_name, use_auth_token=hf_token)
 model = AutoModelForCausalLM.from_pretrained(
     model_name,
+    use_auth_token=hf_token,
     torch_dtype=torch.float16,
     device_map="auto"
 )
@@ -30,4 +35,4 @@ with gr.Blocks() as demo:
     msg = gr.Textbox(placeholder="Écris un message...")
     msg.submit(chat, [msg, chatbot], [chatbot, chatbot])
 
-demo.launch()
+demo.launch()
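The fix reads a Hugging Face access token from the Space's HF_TOKEN secret and passes it to both from_pretrained calls, which is needed when the model repository is gated. Two hedged side notes: recent transformers releases deprecate use_auth_token in favor of token, and a missing secret otherwise only surfaces as an authorization error from the Hub at startup. A minimal sketch of the same loading step under those assumptions, not the Space's actual code:

import os
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Fail early with a clear message if the Space secret is missing.
hf_token = os.getenv("HF_TOKEN")
if not hf_token:
    raise RuntimeError("HF_TOKEN is not set; add it in the Space settings under Secrets.")

model_name = "mistralai/Mistral-7B-Instruct-v0.2"
# `token` replaces the deprecated `use_auth_token` in recent transformers versions.
tokenizer = AutoTokenizer.from_pretrained(model_name, token=hf_token)
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    token=hf_token,
    torch_dtype=torch.float16,
    device_map="auto"
)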
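The second hunk only shows the tail of the Gradio UI; the chat callback and the chatbot component it is wired to sit outside the diff. Purely as an illustration of the signature that msg.submit(chat, [msg, chatbot], [chatbot, chatbot]) implies (message plus chat history in, two values out), here is a hypothetical sketch of such a callback, assuming the model and tokenizer defined above:

def chat(message, history):
    # `history` is the list of (user, assistant) pairs held by the gr.Chatbot component.
    history = history or []

    # Rebuild the conversation in the format expected by the Mistral Instruct chat template.
    messages = []
    for user_msg, bot_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": message})

    input_ids = tokenizer.apply_chat_template(messages, return_tensors="pt").to(model.device)
    output = model.generate(input_ids, max_new_tokens=256, do_sample=True, temperature=0.7)
    reply = tokenizer.decode(output[0][input_ids.shape[-1]:], skip_special_tokens=True)

    history.append((message, reply))
    # Two return values because the submit call lists the chatbot as both outputs.
    return history, history

A more usual wiring would output to [msg, chatbot] so the textbox is cleared after each message, but the sketch keeps the signature shown in the diff.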