Update app.py
app.py CHANGED
@@ -26,6 +26,14 @@ MODERATOR_MODEL = os.getenv("MODERATOR_MODEL", "mistral")
 # Session-based storage for isolated discussions
 user_sessions: Dict[str, Dict] = {}

+# Model Images
+avatar_images = {
+"QwQ-32B": "https://cdn-avatars.huggingface.co/v1/production/uploads/620760a26e3b7210c2ff1943/-s1gyJfvbE1RgO5iBeNOi.png",
+"DeepSeek-R1": "https://logosandtypes.com/wp-content/uploads/2025/02/deepseek.svg",
+"Mistral Large": "https://logosandtypes.com/wp-content/uploads/2025/02/mistral-ai.svg",
+"Meta-Llama-3.3-70B-Instruct": "https://registry.npmmirror.com/@lobehub/icons-static-png/1.46.0/files/dark/meta-color.png",
+}
+
 # NATIVE FUNCTION CALLING: Define search functions for both Mistral and SambaNova
 SEARCH_FUNCTIONS = [
 {
@@ -742,7 +750,8 @@ class VisualConsensusEngine:
 "messages": [],
 "currentSpeaker": None,
 "thinking": [],
-"showBubbles": []
+"showBubbles": [],
+"avatarImages": avatar_images
 })

 all_messages = []
@@ -762,7 +771,8 @@ class VisualConsensusEngine:
 "messages": all_messages,
 "currentSpeaker": None,
 "thinking": [self.models[model]['name']],
-"showBubbles": existing_bubbles
+"showBubbles": existing_bubbles,
+"avatarImages": avatar_images
 })

 time.sleep(1)
@@ -809,7 +819,8 @@ Provide your expert analysis:"""
 "messages": all_messages,
 "currentSpeaker": self.models[model]['name'],
 "thinking": [],
-"showBubbles": existing_bubbles
+"showBubbles": existing_bubbles,
+"avatarImages": avatar_images
 })

 time.sleep(2)
@@ -861,7 +872,8 @@ Provide your expert analysis:"""
 "messages": all_messages,
 "currentSpeaker": None,
 "thinking": [],
-"showBubbles": responded_speakers
+"showBubbles": responded_speakers,
+"avatarImages": avatar_images
 })

 time.sleep(2) # Longer pause to see the response
@@ -884,7 +896,8 @@ Provide your expert analysis:"""
 "messages": all_messages,
 "currentSpeaker": None,
 "thinking": [self.models[model]['name']],
-"showBubbles": existing_bubbles
+"showBubbles": existing_bubbles,
+"avatarImages": avatar_images
 })

 time.sleep(1)
@@ -932,7 +945,8 @@ Your expert response:"""
 "messages": all_messages,
 "currentSpeaker": self.models[model]['name'],
 "thinking": [],
-"showBubbles": existing_bubbles
+"showBubbles": existing_bubbles,
+"avatarImages": avatar_images
 })

 time.sleep(2)
@@ -978,7 +992,8 @@ Your expert response:"""
 "messages": all_messages,
 "currentSpeaker": None,
 "thinking": [],
-"showBubbles": responded_speakers
+"showBubbles": responded_speakers,
+"avatarImages": avatar_images
 })

 time.sleep(1)
@@ -1007,7 +1022,8 @@ Your expert response:"""
 "messages": all_messages,
 "currentSpeaker": None,
 "thinking": expert_names,
-"showBubbles": existing_bubbles
+"showBubbles": existing_bubbles,
+"avatarImages": avatar_images
 })

 time.sleep(2)
@@ -1084,7 +1100,8 @@ Provide your synthesis:"""
 "messages": all_messages,
 "currentSpeaker": "Consilium",
 "thinking": [],
-"showBubbles": existing_bubbles
+"showBubbles": existing_bubbles,
+"avatarImages": avatar_images
 })

 # Call moderator model - may also trigger function calls
@@ -1135,7 +1152,8 @@ Provide your synthesis:"""
 "messages": all_messages,
 "currentSpeaker": None,
 "thinking": [],
-"showBubbles": responded_speakers
+"showBubbles": responded_speakers,
+"avatarImages": avatar_images
 })

 log_event('phase', content="✅ Expert Analysis Complete")
@@ -1358,13 +1376,15 @@ with gr.Blocks(title="Consilium: Visual AI Consensus Platform", theme=gr.th
 with gr.Column(scale=2):
 # The visual roundtable component
 roundtable = consilium_roundtable(
-label="
+label="AI Expert Roundtable",
+label_icon="https://huggingface.co/front/assets/huggingface_logo-noborder.svg",
 value=json.dumps({
 "participants": [],
 "messages": [],
 "currentSpeaker": None,
 "thinking": [],
-"showBubbles": []
+"showBubbles": [],
+"avatarImages": avatar_images
 })
 )

@@ -1417,7 +1437,8 @@ with gr.Blocks(title="Consilium: Visual AI Consensus Platform", theme=gr.th
 "messages": [],
 "currentSpeaker": None,
 "thinking": [],
-"showBubbles": []
+"showBubbles": [],
+"avatarImages": avatar_images
 })

 gr.Timer(1.0).tick(refresh_roundtable, inputs=[session_state], outputs=[roundtable])
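
Every visual update touched by this diff serializes the same state dictionary, now extended with "showBubbles" and the new "avatarImages" mapping. A minimal sketch of that repeated payload follows, using a hypothetical build_roundtable_state() helper for illustration (the app itself builds the dict inline inside each update call; the helper name and signature are assumptions, only the key names and avatar_images values come from the diff):

import json
from typing import Dict, List, Optional

# Avatar mapping as added at the top of app.py in this commit
avatar_images: Dict[str, str] = {
    "QwQ-32B": "https://cdn-avatars.huggingface.co/v1/production/uploads/620760a26e3b7210c2ff1943/-s1gyJfvbE1RgO5iBeNOi.png",
    "DeepSeek-R1": "https://logosandtypes.com/wp-content/uploads/2025/02/deepseek.svg",
    "Mistral Large": "https://logosandtypes.com/wp-content/uploads/2025/02/mistral-ai.svg",
    "Meta-Llama-3.3-70B-Instruct": "https://registry.npmmirror.com/@lobehub/icons-static-png/1.46.0/files/dark/meta-color.png",
}

def build_roundtable_state(
    participants: List[str],
    messages: List[dict],
    current_speaker: Optional[str] = None,
    thinking: Optional[List[str]] = None,
    show_bubbles: Optional[List[str]] = None,
) -> str:
    # Hypothetical helper: returns the JSON string handed to the roundtable
    # component on each refresh, mirroring the keys seen in the diff above.
    return json.dumps({
        "participants": participants,
        "messages": messages,
        "currentSpeaker": current_speaker,
        "thinking": thinking or [],
        "showBubbles": show_bubbles or [],
        # New in this commit: lets the component render per-model avatars.
        "avatarImages": avatar_images,
    })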