Create app.py
app.py
ADDED
@@ -0,0 +1,34 @@
+import os
+import gradio as gr
+from langchain.chat_models import ChatOpenAI
+from langchain import LLMChain, PromptTemplate
+from langchain.memory import ConversationBufferMemory
+
+OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')
+
+template = """You are a very helpful assistant in providing users with information and knowledge about statistics. Additionally, you are also an expert in translating specialized documents on statistics, artificial intelligence, and technology from foreign languages into Vietnamese in a complete, coherent manner while ensuring the original knowledge from the foreign documents is accurately conveyed.
+{chat_history}
+User: {user_message}
+Chatbot:"""
+
+prompt = PromptTemplate(
+    input_variables=["chat_history", "user_message"], template=template
+)
+
+memory = ConversationBufferMemory(memory_key="chat_history")
+
+llm_chain = LLMChain(
+    llm=ChatOpenAI(temperature=0.5, model_name="gpt-3.5-turbo"),
+    prompt=prompt,
+    verbose=True,
+    memory=memory,
+)
+
+def get_text_response(user_message, history):
+    response = llm_chain.predict(user_message=user_message)
+    return response
+
+demo = gr.ChatInterface(get_text_response)
+
+if __name__ == "__main__":
+    demo.launch()  # To create a public link, set `share=True` in `launch()`. To enable errors and logs, set `debug=True` in `launch()`.
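
For reference, a minimal local smoke test of this file might look like the sketch below. It is not part of the commit and rests on assumptions: OPENAI_API_KEY is already exported in the environment, the gradio/langchain/openai packages are installed, and the question shown is purely illustrative.

# Hypothetical smoke test for app.py (assumption, not part of this commit).
# Assumes OPENAI_API_KEY is set before import, since ChatOpenAI reads it at construction.
from app import get_text_response

# gr.ChatInterface calls the handler as (message, history); the history argument is
# unused here because ConversationBufferMemory inside llm_chain tracks the conversation.
print(get_text_response("What does a p-value of 0.03 mean?", history=[]))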