nananie143 committed: Upload folder using huggingface_hub
- app.py +30 -24
- requirements.txt +1 -1
app.py
CHANGED
@@ -362,25 +362,10 @@ class VentureUI:
         self.app = app
 
     def create_interface(self):
-
-
-
-
-                    label="Message",
-                    placeholder="Chat with the Agentic System...",
-                    lines=2
-                ),
-                gr.State([])  # For chat history
-            ],
-            outputs=[
-                gr.Textbox(
-                    label="Response",
-                    lines=10
-                ),
-                gr.State([])  # Matching state output
-            ],
-            title="Advanced Agentic System Chat Interface",
-            description="""
+        with gr.Blocks(theme=gr.themes.Soft()) as interface:
+            gr.Markdown("""
+            # Advanced Agentic System Chat Interface
+
             Chat with our autonomous agent teams:
             - Team A: Coders (App/Software Developers)
             - Team B: Business (Entrepreneurs)
@@ -392,12 +377,33 @@ class VentureUI:
             2. Create new objectives
             3. Check status of teams and objectives
             4. Get insights and recommendations
-            """
-
-
-
+            """)
+
+            chatbot = gr.Chatbot(label="Chat History")
+            msg = gr.Textbox(
+                label="Message",
+                placeholder="Chat with the Agentic System...",
+                lines=2
+            )
+            clear = gr.ClearButton([msg, chatbot])
+
+            def respond(message, history):
+                # Convert history to the format expected by process_message
+                history_list = [[x, y] for x, y in history]
+                response, updated_history = asyncio.run(self.app(message, history_list))
+                history.append((message, response))
+                return "", history
+
+            msg.submit(
+                respond,
+                [msg, chatbot],
+                [msg, chatbot],
+                queue=False
+            )
+
+        return interface
 
-def create_chat_interface() -> gr.
+def create_chat_interface() -> gr.Blocks:
     """Create Gradio chat interface."""
     chat = ChatInterface()
     ui = VentureUI(chat.process_message)
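Note on the new wiring: the respond() callback drives the async backend through asyncio.run() on every message and discards the updated_history it gets back, appending (message, response) itself. Gradio also accepts async event handlers directly, which avoids creating a nested event loop in the worker. Below is a minimal standalone sketch of the same Blocks layout under that approach; process_message here is a hypothetical stand-in for the repo's ChatInterface.process_message, assumed to be a coroutine returning (response, updated_history).

import gradio as gr

async def process_message(message, history):
    # Hypothetical stand-in for the real agentic backend:
    # returns (response, updated_history).
    return f"Echo: {message}", history

with gr.Blocks(theme=gr.themes.Soft()) as demo:
    chatbot = gr.Chatbot(label="Chat History")
    msg = gr.Textbox(
        label="Message",
        placeholder="Chat with the Agentic System...",
        lines=2,
    )
    clear = gr.ClearButton([msg, chatbot])

    # Gradio awaits async handlers itself, so no asyncio.run() is needed here.
    async def respond(message, history):
        response, _ = await process_message(message, [list(pair) for pair in history])
        return "", history + [(message, response)]

    msg.submit(respond, [msg, chatbot], [msg, chatbot])

if __name__ == "__main__":
    demo.launch()

asyncio.run() raises RuntimeError if it is called while an event loop is already running, which is worth checking if the Space keeps landing in a runtime-error state.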
requirements.txt
CHANGED
@@ -1,7 +1,7 @@
 fastapi>=0.68.0
 uvicorn>=0.15.0
 pydantic>=2.0.0
-gradio
+gradio==4.44.1
 llama-cpp-python>=0.2.23
 huggingface-hub>=0.19.4
 numpy>=1.24.0
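Pinning gradio to an exact release (instead of an unconstrained gradio) keeps the Blocks/Chatbot/ClearButton API used in app.py from shifting under the Space on rebuilds. An optional startup guard, assuming 4.44.1 from the pin above is the version the Space should run (this check is not part of the repo):

import gradio as gr

# Fail fast if dependency resolution picked a different Gradio
# than the exact pin in requirements.txt.
EXPECTED_GRADIO = "4.44.1"
if gr.__version__ != EXPECTED_GRADIO:
    raise RuntimeError(f"Expected gradio=={EXPECTED_GRADIO}, got {gr.__version__}")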