Spaces: Runtime error
Volker Janz committed · Commit 308ee31
1 Parent(s): f99bd60
Initial commit
Browse files:
- app.py +37 -0
- requirements.txt +2 -0
app.py
ADDED
@@ -0,0 +1,37 @@
+from transformers import AutoModelForCausalLM, AutoTokenizer
+import gradio as gr
+import torch
+
+
+title = "Mental Health Chatbot"
+description = "This bot is using a fine-tuned version of meta-llama/Llama-2-7b-chat-hf"
+
+tokenizer = AutoTokenizer.from_pretrained("vojay/Llama-2-7b-chat-hf-mental-health")
+model = AutoModelForCausalLM.from_pretrained("vojay/Llama-2-7b-chat-hf-mental-health")
+
+
+def predict(input, history=[]):
+    new_user_input_ids = tokenizer.encode(f"{input}{tokenizer.eos_token}", return_tensors="pt")
+    bot_input_ids = torch.cat([torch.LongTensor(history), new_user_input_ids], dim=-1)
+
+    history = model.generate(
+        bot_input_ids,
+        max_length=4000,
+        pad_token_id=tokenizer.eos_token_id
+    ).tolist()
+
+    response = tokenizer.decode(history[0]).split("<|endoftext|>")
+    response = [
+        (response[i], response[i + 1]) for i in range(0, len(response) - 1, 2)
+    ]
+
+    return response, history
+
+
+gr.Interface(
+    fn=predict,
+    title=title,
+    description=description,
+    inputs=["text", "state"],
+    outputs=["chatbot", "state"]
+).launch()
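The Space status above reads "Runtime error", and the commit itself does not say why. Two plausible, unconfirmed causes: a 7B model may simply not fit on the default free CPU hardware, and the response parsing splits on the GPT-style "<|endoftext|>" marker even though Llama-2 tokenizers normally use "</s>" as their end-of-sequence token, so the split would never find a separator. The following is a minimal, untested sketch of predict() that keeps the committed structure but splits on tokenizer.eos_token instead; the model and tokenizer ids are the ones from the commit, everything else is an assumption, not the author's verified fix.

from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

# Same checkpoints as in app.py above.
tokenizer = AutoTokenizer.from_pretrained("vojay/Llama-2-7b-chat-hf-mental-health")
model = AutoModelForCausalLM.from_pretrained("vojay/Llama-2-7b-chat-hf-mental-health")

def predict(message, history=[]):
    # Append the new user turn, terminated by the eos token, to the running token history.
    new_user_input_ids = tokenizer.encode(f"{message}{tokenizer.eos_token}", return_tensors="pt")
    bot_input_ids = torch.cat([torch.LongTensor(history), new_user_input_ids], dim=-1)

    # Generate a continuation and keep the full token sequence as the new history.
    history = model.generate(
        bot_input_ids,
        max_length=4000,
        pad_token_id=tokenizer.eos_token_id
    ).tolist()

    # Split the decoded text into turns on the tokenizer's real eos token
    # (normally "</s>" for Llama-2) rather than the hard-coded "<|endoftext|>",
    # then pair the turns as (user, bot) for the Gradio chatbot component.
    turns = tokenizer.decode(history[0]).split(tokenizer.eos_token)
    pairs = [(turns[i], turns[i + 1]) for i in range(0, len(turns) - 1, 2)]
    return pairs, history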
requirements.txt
ADDED
@@ -0,0 +1,2 @@
+transformers
+torch
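Note that app.py imports gradio, which is not listed here. On Hugging Face Spaces the Gradio SDK is normally pre-installed by the Space runtime, so this is usually fine, but running the app outside of Spaces would also need gradio. A hedged sketch of a local requirements file (package names only; the commit pins no versions, so none are invented here):

transformers
torch
gradio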