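# ChatLLM Streamlit demo: a minimal chat UI built on top of chatllm's ChatBase.
# Launch it with Streamlit (the filename is just whatever this script is saved as, e.g.):
#   streamlit run chatllm_app.py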
from meutils.pipe import *

from chatllm.applications import ChatBase
from chatllm.utils import llm_load4chat

import streamlit as st
from appzoo.streamlit_app.utils import display_pdf, reply4input

st.set_page_config('🔥ChatLLM', layout='centered', initial_sidebar_state='collapsed')

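# Note: st.experimental_singleton (used below) is the resource-caching decorator
# from older Streamlit releases. On Streamlit >= 1.18 the equivalent is
# st.cache_resource, e.g.:
#
#     @st.cache_resource
#     def get_chat_func():
#         return llm_load4chat()
#
# Which decorator applies depends on the installed Streamlit version.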
@st.experimental_singleton
def get_chat_func():
    # Load the underlying chat model once and reuse it across Streamlit reruns.
    chat_func = llm_load4chat()
    return chat_func


chat_func = get_chat_func()

qa = ChatBase(chat_func=chat_func)


def reply_func(query):
    # qa(query=...) is expected to yield (response, _) pairs while the answer is
    # being generated; only the running response text is passed on to the UI.
    for response, _ in qa(query=query):
        yield response

# UI: a container for the conversation, a text area, and a send button.
container = st.container()
text = st.text_area(label="用户输入", height=100, placeholder="请在这儿输入您的问题")  # "User input" / "Please type your question here"

if st.button("发送", key="predict"):  # "Send"
    with st.spinner("AI正在思考,请稍等........"):  # "The AI is thinking, please wait..."
        history = st.session_state.get('state')
        # The updated history returned by reply4input is stored for the next rerun.
        st.session_state["state"] = reply4input(text, history, container=container, reply_func=reply_func)
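# Illustrative fallback (not part of the original app): if appzoo's reply4input
# is unavailable, a minimal stand-in could look roughly like the sketch below.
# The history format (a list of (query, reply) pairs) and the assumption that
# reply_func yields the full response text so far are guesses, not appzoo's
# actual contract.
def _reply4input_sketch(text, history, container, reply_func):
    history = list(history or [])
    with container:
        # Re-render the previous turns.
        for past_query, past_reply in history:
            st.markdown(f'**用户**: {past_query}')
            st.markdown(f'**AI**: {past_reply}')
        st.markdown(f'**用户**: {text}')
        placeholder = st.empty()
        reply = ''
        # Stream partial responses into a single placeholder as they arrive.
        for reply in reply_func(text):
            placeholder.markdown(f'**AI**: {reply}')
    return history + [(text, reply)]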