Update app.py
app.py CHANGED
@@ -10,7 +10,7 @@ import warnings
 warnings.filterwarnings("ignore", category=UserWarning)

 # model_name = "AI-Sweden-Models/gpt-sw3-126m-instruct"
-model_name = "AI-Sweden-Models/gpt-sw3-
+model_name = "AI-Sweden-Models/gpt-sw3-126m-instruct"


 device = "cuda:0" if torch.cuda.is_available() else "cpu"

@@ -78,7 +78,7 @@ class StopOnTokenCriteria(StoppingCriteria):
 stop_on_token_criteria = StopOnTokenCriteria(stop_token_id=tokenizer.bos_token_id)

 st.title("Paralegal Assistant")
-st.
+st.subheader("RAG: föräldrabalken")

 # Initialize chat history
 if "messages" not in st.session_state:
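For context, the changed lines sit inside a Streamlit chat app built around a GPT-SW3 instruct model with a custom stopping criterion. The sketch below shows one plausible way the visible pieces fit together; everything not shown in the diff context (the body of StopOnTokenCriteria, the prompt/generation step, max_new_tokens, the chat-input label) is an assumption for illustration, not code taken from the repository. "Föräldrabalken" is the Swedish Parental Code, the statute the RAG subheader refers to.

# Minimal sketch, assuming the parts not visible in the diff.
import warnings

import streamlit as st
import torch
from transformers import (
    AutoModelForCausalLM,
    AutoTokenizer,
    StoppingCriteria,
    StoppingCriteriaList,
)

warnings.filterwarnings("ignore", category=UserWarning)

# model_name = "AI-Sweden-Models/gpt-sw3-126m-instruct"
model_name = "AI-Sweden-Models/gpt-sw3-126m-instruct"

device = "cuda:0" if torch.cuda.is_available() else "cpu"

tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name).to(device)


class StopOnTokenCriteria(StoppingCriteria):
    """Stop generation once a given token id is emitted (body assumed)."""

    def __init__(self, stop_token_id):
        self.stop_token_id = stop_token_id

    def __call__(self, input_ids, scores, **kwargs):
        return input_ids[0, -1].item() == self.stop_token_id


stop_on_token_criteria = StopOnTokenCriteria(stop_token_id=tokenizer.bos_token_id)

st.title("Paralegal Assistant")
st.subheader("RAG: föräldrabalken")

# Initialize chat history
if "messages" not in st.session_state:
    st.session_state.messages = []

# Replay earlier turns so the conversation persists across reruns
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

if prompt := st.chat_input("Ställ en fråga"):
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # Assumed generation step: the real app presumably builds an
    # instruct-style prompt (plus retrieved statute text) before generate().
    input_ids = tokenizer(prompt, return_tensors="pt").input_ids.to(device)
    output = model.generate(
        input_ids,
        max_new_tokens=256,
        do_sample=True,
        stopping_criteria=StoppingCriteriaList([stop_on_token_criteria]),
    )
    reply = tokenizer.decode(
        output[0, input_ids.shape[1]:], skip_special_tokens=True
    )

    with st.chat_message("assistant"):
        st.markdown(reply)
    st.session_state.messages.append({"role": "assistant", "content": reply})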