azettl committed
Commit 8683501 · verified · 1 Parent(s): 40759f1

Update app.py

Files changed (1)
app.py +23 -23
app.py CHANGED
@@ -245,12 +245,12 @@ class VisualConsensusEngine:
                 'available': bool(mistral_key)
             },
             'sambanova_deepseek': {
-                'name': 'DeepSeek-V3',
+                'name': 'DeepSeek-R1',
                 'api_key': sambanova_key,
                 'available': bool(sambanova_key)
             },
             'sambanova_llama': {
-                'name': 'Meta-Llama-3.1-8B',
+                'name': 'Meta-Llama-3.3-70B-Instruct',
                 'api_key': sambanova_key,
                 'available': bool(sambanova_key)
             },
@@ -480,10 +480,10 @@ class VisualConsensusEngine:
         )
         model_mapping = {
             'sambanova_deepseek': 'DeepSeek-R1',
-            'sambanova_llama': 'Meta-Llama-3.1-8B-Instruct',
+            'sambanova_llama': 'Meta-Llama-3.3-70B-Instruct',
             'sambanova_qwq': 'QwQ-32B'
         }
-        model_name = model_mapping.get(calling_model, 'Meta-Llama-3.1-8B-Instruct')
+        model_name = model_mapping.get(calling_model, 'Meta-Llama-3.3-70B-Instruct')

         final_completion = client.chat.completions.create(
             model=model_name,
@@ -551,20 +551,19 @@ class VisualConsensusEngine:

         model_mapping = {
             'sambanova_deepseek': 'DeepSeek-R1',
-            'sambanova_llama': 'Meta-Llama-3.1-8B-Instruct',
+            'sambanova_llama': 'Meta-Llama-3.3-70B-Instruct',
             'sambanova_qwq': 'QwQ-32B'
         }

-        sambanova_model = model_mapping.get(model, 'Meta-Llama-3.1-8B-Instruct')
+        sambanova_model = model_mapping.get(model, 'Meta-Llama-3.3-70B-Instruct')
         print(f"Calling SambaNova model: {sambanova_model}")

         # Check if model supports function calling
         supports_functions = sambanova_model in [
-            'DeepSeek-V3-0324',
+            'DeepSeek-R1-0324',
             'Meta-Llama-3.1-8B-Instruct',
             'Meta-Llama-3.1-405B-Instruct',
             'Meta-Llama-3.3-70B-Instruct'
-            # QwQ-32B is NOT in this list, so it won't get function calling
         ]

         if supports_functions:
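For context, the two hunks above change the same pattern in both call paths: map the internal model key to a SambaNova model ID, fall back to a default when the key is unknown, and only attach research tools to models on a function-calling allowlist. Below is a minimal sketch of that flow; the dictionary contents mirror the diff, but the helper function itself is hypothetical and not part of app.py.

```python
# Illustrative sketch of the selection logic shown in the hunks above.
# The dict and set contents mirror the diff; the helper is hypothetical.
MODEL_MAPPING = {
    'sambanova_deepseek': 'DeepSeek-R1',
    'sambanova_llama': 'Meta-Llama-3.3-70B-Instruct',
    'sambanova_qwq': 'QwQ-32B',
}

# Per the diff, only these model IDs get function calling; QwQ-32B is excluded.
FUNCTION_CALLING_MODELS = {
    'DeepSeek-R1-0324',
    'Meta-Llama-3.1-8B-Instruct',
    'Meta-Llama-3.1-405B-Instruct',
    'Meta-Llama-3.3-70B-Instruct',
}

def resolve_sambanova_model(model_key: str) -> tuple:
    """Return (model ID, supports_functions), falling back to the Llama 3.3 default."""
    model_id = MODEL_MAPPING.get(model_key, 'Meta-Llama-3.3-70B-Instruct')
    return model_id, model_id in FUNCTION_CALLING_MODELS
```

The fallback matters because an unrecognized key would otherwise produce an invalid model name in the API call; after this commit the default is the 70B Llama instead of the 8B one.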
@@ -1006,7 +1005,7 @@ Your expert response:"""
             moderator_title = "Senior Advisor"
         elif decision_protocol in ['majority_voting', 'ranked_choice']:
             phase_name = "⚖️ Phase 3: Final Decision"
-            moderator_title = "Chief Analyst"
+            moderator_title = "Lead Analyst"
         else:
             phase_name = "📊 Phase 3: Expert Synthesis"
             moderator_title = "Lead Researcher"
@@ -1271,8 +1270,8 @@ def check_model_status_session(session_id_state: str = None, request: gr.Request

     models = {
         'Mistral Large': mistral_key,
-        'DeepSeek-V3': sambanova_key,
-        'Meta-Llama-3.1-8B': sambanova_key,
+        'DeepSeek-R1': sambanova_key,
+        'Meta-Llama-3.3-70B-Instruct': sambanova_key,
         'QwQ-32B': sambanova_key,
         'Research Agent': True
     }
@@ -1293,20 +1292,21 @@ def check_model_status_session(session_id_state: str = None, request: gr.Request
 with gr.Blocks(title="🎭 Consilium: Visual AI Consensus Platform", theme=gr.themes.Soft()) as demo:
     gr.Markdown("""
     # 🎭 Consilium: Multi-AI Expert Consensus Platform
-    
+
     **Watch expert AI models collaborate with live research to solve your most complex decisions**

-    This platform provides **rigorous multi-perspective analysis** with:
-    - 🎨 **Visual Expert Roundtable** - See AI specialists thinking and collaborating
-    - 🤖 **Multi-Model Expertise** - Mistral, DeepSeek, Llama, QwQ specialists
-    - 🔍 **Native Research Integration** - Expert AIs call research functions automatically
-    - 🎓 **Expert Role Assignment** - Advocates, analysts, advisors, researchers
-    - 🌐 **Strategic Communication** - Full mesh collaboration, hierarchical, sequential
-    - ⚖️ **Protocol-Based Decisions** - Consensus building, competitive analysis, expert synthesis
-    - 📊 **Live Data Integration** - Real-time web search and Wikipedia research
-    - 🔒 **Private Sessions** - Each user gets their own secure analysis space
-    
-    **Perfect for:** Strategic planning, technical decisions, research synthesis, policy analysis
+    This MCP server was built for the Gradio Agents and MCP Hackathon 2025. Additionally, I built a custom Gradio component for the roundtable (https://huggingface.co/spaces/azettl/gradio_consilium_roundtable).
+
+    ## Features:
+
+    * Visual roundtable of the AI models, including speech bubbles to see the discussion in real time.
+    * MCP mode enabled to also use it directly in, for example, Claude Desktop (without the visual table).
+    * Includes Mistral (mistral-large-latest) via their API and the Models DeepSeek-R1, Meta-Llama-3.1-8B-Instruct and QwQ-32B via the SambaNova API.
+    * Research Agent to search via DuckDuckGo or Wikipedia, added as a tool for the models from Mistral and Llama.
+    * Assign different roles to the models, the protocol they should follow, and decide the communication strategy.
+    * Pick one model as the lead analyst (had the best results when picking Mistral).
+    * Configure the amount of discussion rounds.
+    * After the discussion, the whole conversation and a final answer will be presented.
     """)

     # Hidden session state component
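The Research Agent mentioned in the new feature list above is exposed to the Mistral and Llama models as a callable tool. The sketch below shows what such a tool definition could look like in the standard chat-completions "tools" format; the function name, schema, and stub body are illustrative assumptions, not code taken from app.py.

```python
# Hedged sketch: a research function exposed as an OpenAI-style tool.
# Name and schema are assumptions; the body is a stub for illustration.
RESEARCH_TOOL = {
    "type": "function",
    "function": {
        "name": "search_web",  # assumed name, not from app.py
        "description": "Search DuckDuckGo or Wikipedia for current information.",
        "parameters": {
            "type": "object",
            "properties": {
                "query": {"type": "string", "description": "Search query"},
                "source": {"type": "string", "enum": ["duckduckgo", "wikipedia"]},
            },
            "required": ["query"],
        },
    },
}

def run_research(query: str, source: str = "duckduckgo") -> str:
    """Stub: the real Research Agent would query DuckDuckGo or Wikipedia here."""
    return f"[{source} results for: {query}]"
```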
@@ -1502,7 +1502,7 @@ with gr.Blocks(title="🎭 Consilium: Visual AI Consensus Platform", theme=gr.th
     ### 🦙 SambaNova Expert Models (with Function Calling)
     The platform includes **3 SambaNova specialists**:
     - **DeepSeek-R1**: Advanced reasoning and strategic analysis
-    - **Meta-Llama-3.1-8B**: Fast, efficient collaborative analysis + research calls
+    - **Meta-Llama-3.3-70B-Instruct**: Fast, efficient collaborative analysis + research calls
     - **QwQ-32B**: Large-scale comprehensive evaluation

     ### 📋 Dependencies
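For reference, the SambaNova models renamed in this commit are reached through an OpenAI-compatible client (the earlier hunks call client.chat.completions.create with the mapped model name). A minimal sketch of such a call follows; the base URL and environment variable name are assumptions, not values taken from app.py.

```python
# Hedged sketch of the SambaNova call path implied by the diff.
# Base URL and env var name are assumptions, not from app.py.
import os
from openai import OpenAI

client = OpenAI(
    api_key=os.environ["SAMBANOVA_API_KEY"],   # assumed env var name
    base_url="https://api.sambanova.ai/v1",    # assumed endpoint
)

completion = client.chat.completions.create(
    model="Meta-Llama-3.3-70B-Instruct",
    messages=[{"role": "user", "content": "Summarize the consensus so far."}],
)
print(completion.choices[0].message.content)
```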
 