Boning c committed
Commit be7cce5 · verified · Parent: acff8f4

Update app.py

Files changed (1): app.py (+5 -5)
app.py CHANGED
@@ -6,8 +6,8 @@ import time
 from html import escape
 
 # Model config
-PRIMARY_MODEL = "Smilyai-labs/Sam-reason-A1"
-FALLBACK_MODEL = "Smilyai-labs/Sam-reason-S2.1"
+PRIMARY_MODEL = "Smilyai-labs/Sam-reason-A3"
+FALLBACK_MODEL = "Smilyai-labs/Sam-reason-A1"
 USAGE_LIMIT = 5
 RESET_AFTER_SECONDS = 20 * 60 # 20 minutes
 device = "cuda" if torch.cuda.is_available() else "cpu"
@@ -85,16 +85,16 @@ def respond(message, history, reasoning_enabled, request: gr.Request):
     for output in generate_stream(prompt, use_fallback=use_fallback):
         formatted = format_thinking(output)
         history[-1][1] = f"{formatted}<br><sub style='color:gray'>({model_used})</sub>"
-        yield history, history, f"🧠 A1 messages left: {remaining}"
+        yield history, history, f"🧠 A3 messages left: {remaining}"
 
 def clear_chat():
-    return [], [], "🧠 A1 messages left: 10"
+    return [], [], "🧠 A3 messages left: 5"
 
 # UI
 with gr.Blocks() as demo:
     gr.Markdown("# 🤖 SamAI – Reasoning Chat")
     model_status = gr.Textbox(interactive=False, label="Model Status")
-    usage_counter = gr.Textbox(value="🧠 A1 messages left: 10", interactive=False, show_label=False)
+    usage_counter = gr.Textbox(value="🧠 A3 messages left: 10", interactive=False, show_label=False)
     chat_box = gr.Chatbot(type="tuples")
     chat_state = gr.State([])
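
The diff itself only swaps the model IDs and the A1/A3 strings shown in the UI; the primary-to-fallback switch is driven elsewhere in app.py by the use_fallback flag passed to generate_stream. A minimal sketch of how the two model IDs and that flag could be wired to a loader, assuming a standard transformers setup; the load_model helper and its signature are illustrative, not code from this commit:

# Minimal sketch, not part of this commit: how PRIMARY_MODEL / FALLBACK_MODEL
# and a use_fallback flag could select which checkpoint serves a request.
# The loader name and the transformers calls are assumptions.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

PRIMARY_MODEL = "Smilyai-labs/Sam-reason-A3"   # new primary after this commit
FALLBACK_MODEL = "Smilyai-labs/Sam-reason-A1"  # previous primary, now the fallback
USAGE_LIMIT = 5                                # primary-model messages per user window

device = "cuda" if torch.cuda.is_available() else "cpu"

def load_model(use_fallback: bool):
    # Serve the fallback model once the user's primary-model quota is spent.
    name = FALLBACK_MODEL if use_fallback else PRIMARY_MODEL
    tokenizer = AutoTokenizer.from_pretrained(name)
    model = AutoModelForCausalLM.from_pretrained(name).to(device)
    return tokenizer, model, name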