# NOTE(review): removed non-source artifacts (file-size banner, git-blame
# hashes, and a line-number gutter from a code-hosting UI) that were pasted
# into the file and are not valid Python.
# Import necessary libraries
import os
import gradio as gr
from azure.storage.fileshare import ShareServiceClient
# Import custom modules
from climateqa.engine.embeddings import get_embeddings_function
from climateqa.engine.llm import get_llm
from climateqa.engine.vectorstore import get_pinecone_vectorstore
from climateqa.engine.reranker import get_reranker
from climateqa.engine.graph import make_graph_agent,make_graph_agent_poc
from climateqa.engine.chains.retrieve_papers import find_papers
from climateqa.chat import start_chat, chat_stream, finish_chat
from front.tabs import (create_config_modal, create_examples_tab, create_papers_tab, create_figures_tab, create_chat_interface, create_about_tab)
from front.utils import process_figures
from utils import create_user_id
import logging
# Quiet noisy startup logs: only WARNING and above from Python logging.
logging.basicConfig(level=logging.WARNING)
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # Suppresses INFO and WARNING logs
# Also raise the root logger's level in case basicConfig was a no-op
# (it does nothing when handlers are already configured).
logging.getLogger().setLevel(logging.WARNING)
# Load environment variables in local mode (production deployments provide
# them directly, so python-dotenv may be absent there).
try:
    from dotenv import load_dotenv
    load_dotenv()
except Exception as e:
    # Best-effort: a missing python-dotenv package or .env file must not
    # prevent startup. NOTE(review): broad catch also hides other errors.
    pass
# Set up Gradio Theme (blue/red hues, Poppins with standard fallbacks).
theme = gr.themes.Base(
    primary_hue="blue",
    secondary_hue="red",
    font=[gr.themes.GoogleFont("Poppins"), "ui-sans-serif", "system-ui", "sans-serif"],
)
# Azure Blob Storage credentials (file share used to persist chat logs —
# the share client is handed to chat_stream below).
account_key = os.environ["BLOB_ACCOUNT_KEY"]
if len(account_key) == 86:
    # An 86-char key is a base64 value with its trailing padding stripped
    # (e.g. by an env-var round trip); restore the "==" padding.
    account_key += "=="
credential = {
    "account_key": account_key,
    "account_name": os.environ["BLOB_ACCOUNT_NAME"],
}
account_url = os.environ["BLOB_ACCOUNT_URL"]
file_share_name = "climateqa"
service = ShareServiceClient(account_url=account_url, credential=credential)
share_client = service.get_share_client(file_share_name)
# Anonymous per-session identifier used when logging conversations.
user_id = create_user_id()
# Create vectorstore and retriever
embeddings_function = get_embeddings_function()
# Three Pinecone indexes: IPCC/IPBES documents, OWID graphs (retrieved on
# their "description" field), and a region-specific index.
vectorstore = get_pinecone_vectorstore(embeddings_function, index_name=os.getenv("PINECONE_API_INDEX"))
vectorstore_graphs = get_pinecone_vectorstore(embeddings_function, index_name=os.getenv("PINECONE_API_INDEX_OWID"), text_key="description")
vectorstore_region = get_pinecone_vectorstore(embeddings_function, index_name=os.getenv("PINECONE_API_INDEX_REGION"))
llm = get_llm(provider="openai",max_tokens = 1024,temperature = 0.0)
# Use the cheap reranker when running in the lightweight GRADIO_ENV, the
# large one otherwise.
if os.getenv("ENV")=="GRADIO_ENV":
    reranker = get_reranker("nano")
else:
    reranker = get_reranker("large")
# Main agent keeps a 0.2 relevance threshold; the POC agent currently
# accepts all documents (threshold 0).
agent = make_graph_agent(llm=llm, vectorstore_ipcc=vectorstore, vectorstore_graphs=vectorstore_graphs, vectorstore_region = vectorstore_region, reranker=reranker, threshold_docs=0.2)
agent_poc = make_graph_agent_poc(llm=llm, vectorstore_ipcc=vectorstore, vectorstore_graphs=vectorstore_graphs, vectorstore_region = vectorstore_region, reranker=reranker, threshold_docs=0)#TODO put back default 0.2
async def chat(query, history, audience, sources, reports, relevant_content_sources_selection, search_only):
    """Stream chat events for the main ClimateQ&A tab.

    Thin async wrapper: forwards every argument, together with the
    module-level agent, Azure share client and user id, to chat_stream
    and re-yields each event it produces.
    """
    print("chat cqa - message received")
    stream = chat_stream(agent, query, history, audience, sources, reports, relevant_content_sources_selection, search_only, share_client, user_id)
    async for evt in stream:
        yield evt
async def chat_poc(query, history, audience, sources, reports, relevant_content_sources_selection, search_only):
    """Stream chat events for the POC Adapt'Action tab.

    Identical to chat() except that events come from the POC agent
    (agent_poc) instead of the main agent.
    """
    print("chat poc - message received")
    stream = chat_stream(agent_poc, query, history, audience, sources, reports, relevant_content_sources_selection, search_only, share_client, user_id)
    async for evt in stream:
        yield evt
# --------------------------------------------------------------------
# Gradio
# --------------------------------------------------------------------
# Function to update modal visibility
def update_config_modal_visibility(config_open):
    """Toggle the configuration modal.

    Args:
        config_open: current open/closed flag held in a gr.State.

    Returns:
        A gr.update setting the modal's visibility, and the new flag value
        (written back into the same gr.State).
    """
    toggled = not config_open
    return gr.update(visible=toggled), toggled
def update_sources_number_display(sources_textbox, figures_cards, current_graphs, papers_html):
    """Recompute the item counters shown in the tab labels.

    Counts are derived from the rendered HTML strings: each source, figure
    and paper card opens with an "<h2>" tag, and each graph is embedded as
    an "<iframe".

    Returns:
        gr.update objects relabelling, in order: the recommended-content
        tab, sources tab, figures tab, graphs tab and papers tab.
    """
    n_sources = sources_textbox.count("<h2>")
    n_figures = figures_cards.count("<h2>")
    n_graphs = current_graphs.count("<iframe")
    n_papers = papers_html.count("<h2>")
    labels = (
        f"Recommended content ({n_figures + n_graphs + n_papers})",
        f"Sources ({n_sources})",
        f"Figures ({n_figures})",
        f"Graphs ({n_graphs})",
        f"Papers ({n_papers})",
    )
    return tuple(gr.update(label=text) for text in labels)
# # UI Layout Components
def cqa_tab(tab_name):
    """Build one ClimateQ&A chat tab: chat panel plus right-hand content panels.

    Args:
        tab_name: label of the gr.Tab to create (e.g. "ClimateQ&A").

    Returns:
        dict mapping component names to the Gradio components / State
        objects that event_handling() later wires up.
    """
    # State variables
    current_graphs = gr.State([])
    with gr.Tab(tab_name):
        with gr.Row(elem_id="chatbot-row"):
            # Left column - Chat interface
            with gr.Column(scale=2):
                chatbot, textbox, config_button = create_chat_interface()
            # Right column - Content panels
            with gr.Column(scale=2, variant="panel", elem_id="right-panel"):
                with gr.Tabs(elem_id="right_panel_tab") as tabs:
                    # Examples tab
                    with gr.TabItem("Examples", elem_id="tab-examples", id=0):
                        examples_hidden = create_examples_tab()
                    # Sources tab
                    with gr.Tab("Sources", elem_id="tab-sources", id=1) as tab_sources:
                        sources_textbox = gr.HTML(show_label=False, elem_id="sources-textbox")
                    # Recommended content tab (figures / papers / graphs subtabs)
                    with gr.Tab("Recommended content", elem_id="tab-recommended_content", id=2) as tab_recommended_content:
                        with gr.Tabs(elem_id="group-subtabs") as tabs_recommended_content:
                            # Figures subtab
                            with gr.Tab("Figures", elem_id="tab-figures", id=3) as tab_figures:
                                sources_raw, new_figures, used_figures, gallery_component, figures_cards, figure_modal = create_figures_tab()
                            # Papers subtab
                            with gr.Tab("Papers", elem_id="tab-citations", id=4) as tab_papers:
                                papers_summary, papers_html, citations_network, papers_modal = create_papers_tab()
                            # Graphs subtab
                            with gr.Tab("Graphs", elem_id="tab-graphs", id=5) as tab_graphs:
                                graphs_container = gr.HTML(
                                    "<h2>There are no graphs to be displayed at the moment. Try asking another question.</h2>",
                                    elem_id="graphs-container"
                                )
    return {
        "chatbot": chatbot,
        "textbox": textbox,
        "tabs": tabs,
        "sources_raw": sources_raw,
        "new_figures": new_figures,
        "current_graphs": current_graphs,
        "examples_hidden": examples_hidden,
        "sources_textbox": sources_textbox,
        "figures_cards": figures_cards,
        "gallery_component": gallery_component,
        "config_button": config_button,
        "papers_html": papers_html,
        "citations_network": citations_network,
        "papers_summary": papers_summary,
        "tab_recommended_content": tab_recommended_content,
        "tab_sources": tab_sources,
        "tab_figures": tab_figures,
        "tab_graphs": tab_graphs,
        "tab_papers": tab_papers,
        # NOTE(review): key is singular ("graph_container") while the local
        # variable is plural — event_handling() reads this exact key.
        "graph_container": graphs_container
    }
def event_handling(
    main_tab_components,
    config_components,
    tab_name="ClimateQ&A"
):
    """Attach all Gradio event listeners for one chat tab.

    Args:
        main_tab_components: component dict produced by cqa_tab().
        config_components: component dict produced by create_config_modal();
            the modal is shared, so both tabs receive the same dict.
        tab_name: selects which backend the tab's submit events call —
            "ClimateQ&A" uses chat(), "Beta - POC Adapt'Action" uses
            chat_poc(). Any other value wires no chat events.
    """
    # --- unpack chat-tab components ---------------------------------------
    chatbot = main_tab_components["chatbot"]
    textbox = main_tab_components["textbox"]
    tabs = main_tab_components["tabs"]
    sources_raw = main_tab_components["sources_raw"]
    new_figures = main_tab_components["new_figures"]
    current_graphs = main_tab_components["current_graphs"]
    examples_hidden = main_tab_components["examples_hidden"]
    sources_textbox = main_tab_components["sources_textbox"]
    figures_cards = main_tab_components["figures_cards"]
    gallery_component = main_tab_components["gallery_component"]
    config_button = main_tab_components["config_button"]
    papers_html = main_tab_components["papers_html"]
    citations_network = main_tab_components["citations_network"]
    papers_summary = main_tab_components["papers_summary"]
    tab_recommended_content = main_tab_components["tab_recommended_content"]
    tab_sources = main_tab_components["tab_sources"]
    tab_figures = main_tab_components["tab_figures"]
    tab_graphs = main_tab_components["tab_graphs"]
    tab_papers = main_tab_components["tab_papers"]
    graphs_container = main_tab_components["graph_container"]  # key is singular in cqa_tab()

    # --- unpack shared config-modal components ----------------------------
    config_open = config_components["config_open"]
    config_modal = config_components["config_modal"]
    dropdown_sources = config_components["dropdown_sources"]
    dropdown_reports = config_components["dropdown_reports"]
    dropdown_external_sources = config_components["dropdown_external_sources"]
    search_only = config_components["search_only"]
    dropdown_audience = config_components["dropdown_audience"]
    after = config_components["after"]
    output_query = config_components["output_query"]
    output_language = config_components["output_language"]
    close_config_modal = config_components["close_config_modal_button"]

    # Buffer for freshly streamed sources HTML before it is mirrored into
    # the sources panel ("hmtl" typo kept — the name is wired below).
    new_sources_hmtl = gr.State([])

    print("textbox id : ", textbox.elem_id)

    # Open and close buttons both toggle the config modal's visibility.
    for button in [config_button, close_config_modal]:
        button.click(
            fn=update_config_modal_visibility,
            inputs=[config_open],
            outputs=[config_modal, config_open]
        )

    if tab_name == "ClimateQ&A":
        print("chat cqa - message sent")
        # Event for textbox: prepare the chat, stream the answer, then
        # re-enable the input box.
        (textbox
            .submit(start_chat, [textbox, chatbot, search_only], [textbox, tabs, chatbot, sources_raw], queue=False, api_name=f"start_chat_{textbox.elem_id}")
            .then(chat, [textbox, chatbot, dropdown_audience, dropdown_sources, dropdown_reports, dropdown_external_sources, search_only], [chatbot, new_sources_hmtl, output_query, output_language, new_figures, current_graphs], concurrency_limit=8, api_name=f"chat_{textbox.elem_id}")
            .then(finish_chat, None, [textbox], api_name=f"finish_chat_{textbox.elem_id}")
        )
        # Event for examples_hidden: same pipeline, triggered by example clicks.
        (examples_hidden
            .change(start_chat, [examples_hidden, chatbot, search_only], [examples_hidden, tabs, chatbot, sources_raw], queue=False, api_name=f"start_chat_{examples_hidden.elem_id}")
            .then(chat, [examples_hidden, chatbot, dropdown_audience, dropdown_sources, dropdown_reports, dropdown_external_sources, search_only], [chatbot, new_sources_hmtl, output_query, output_language, new_figures, current_graphs], concurrency_limit=8, api_name=f"chat_{examples_hidden.elem_id}")
            .then(finish_chat, None, [textbox], api_name=f"finish_chat_{examples_hidden.elem_id}")
        )
    elif tab_name == "Beta - POC Adapt'Action":
        print("chat poc - message sent")
        # Event for textbox (POC backend)
        (textbox
            .submit(start_chat, [textbox, chatbot, search_only], [textbox, tabs, chatbot, sources_raw], queue=False, api_name=f"start_chat_{textbox.elem_id}")
            .then(chat_poc, [textbox, chatbot, dropdown_audience, dropdown_sources, dropdown_reports, dropdown_external_sources, search_only], [chatbot, new_sources_hmtl, output_query, output_language, new_figures, current_graphs], concurrency_limit=8, api_name=f"chat_{textbox.elem_id}")
            .then(finish_chat, None, [textbox], api_name=f"finish_chat_{textbox.elem_id}")
        )
        # Event for examples_hidden (POC backend)
        (examples_hidden
            .change(start_chat, [examples_hidden, chatbot, search_only], [examples_hidden, tabs, chatbot, sources_raw], queue=False, api_name=f"start_chat_{examples_hidden.elem_id}")
            .then(chat_poc, [examples_hidden, chatbot, dropdown_audience, dropdown_sources, dropdown_reports, dropdown_external_sources, search_only], [chatbot, new_sources_hmtl, output_query, output_language, new_figures, current_graphs], concurrency_limit=8, api_name=f"chat_{examples_hidden.elem_id}")
            .then(finish_chat, None, [textbox], api_name=f"finish_chat_{examples_hidden.elem_id}")
        )

    # Mirror streamed state into the visible panels.
    new_sources_hmtl.change(lambda x : x, inputs = [new_sources_hmtl], outputs = [sources_textbox])
    current_graphs.change(lambda x: x, inputs=[current_graphs], outputs=[graphs_container])
    new_figures.change(process_figures, inputs=[sources_raw, new_figures], outputs=[sources_raw, figures_cards, gallery_component])

    # Update sources numbers shown in the tab labels whenever any panel changes.
    for component in [sources_textbox, figures_cards, current_graphs, papers_html]:
        component.change(update_sources_number_display, [sources_textbox, figures_cards, current_graphs, papers_html], [tab_recommended_content, tab_sources, tab_figures, tab_graphs, tab_papers])

    # Search for papers related to the submitted question.
    for component in [textbox, examples_hidden]:
        component.submit(find_papers, [component, after, dropdown_external_sources], [papers_html, citations_network, papers_summary])
def main_ui():
    """Assemble the full Gradio Blocks application.

    Creates the shared config modal, the two chat tabs (main ClimateQ&A and
    the POC Adapt'Action beta) plus the About tab, then wires their events.

    Returns:
        The gr.Blocks demo, queued and ready to launch.
    """
    # config_open = gr.State(True)
    with gr.Blocks(title="Climate Q&A", css_paths=os.getcwd()+ "/style.css", theme=theme, elem_id="main-component") as demo:
        config_components = create_config_modal()
        with gr.Tabs():
            cqa_components = cqa_tab(tab_name = "ClimateQ&A")
            local_cqa_components = cqa_tab(tab_name = "Beta - POC Adapt'Action")
            create_about_tab()
        # Event wiring happens inside the Blocks context, after all
        # components exist.
        event_handling(cqa_components, config_components, tab_name = 'ClimateQ&A')
        event_handling(local_cqa_components, config_components, tab_name = 'Beta - POC Adapt\'Action')
        demo.queue()
    return demo
# Build the UI at import time and start the Gradio server.
demo = main_ui()
# ssr_mode=False disables Gradio's server-side rendering for this app.
demo.launch(ssr_mode=False)