law-bot / app.py
# app.py
import os
import gradio as gr
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnablePassthrough, RunnableLambda
from langchain_community.document_transformers import LongContextReorder
from config import LLM_MODEL, STREAMING
from embeddings import get_embeddings
from retrievers import load_retrievers
from llm import get_llm
from prompt import get_prompt
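
# Local project modules: config, embeddings, retrievers, llm, and prompt
# supply the model settings, embedding model, document retrievers, LLM
# factory, and chat prompt used below.
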
def create_rag_chain(chat_history):
    """Build the RAG chain: retrieve -> reorder -> prompt -> LLM -> string output."""
    embeddings = get_embeddings()
    retriever = load_retrievers(embeddings)
    llm = get_llm(streaming=STREAMING)
    prompt = get_prompt(chat_history)
    return (
        {
            # Reorder retrieved documents so the most relevant ones sit at the
            # start and end of the context (counters "lost in the middle").
            "context": retriever
            | RunnableLambda(LongContextReorder().transform_documents),
            "question": RunnablePassthrough(),
        }
        | prompt
        # Route to the model selected by LLM_MODEL via the configurable "llm" field.
        | llm.with_config(configurable={"llm": LLM_MODEL})
        | StrOutputParser()
    )


def respond_stream(message, history):
    """Stream the answer incrementally (used when STREAMING is enabled)."""
    rag_chain = create_rag_chain(history)
    response = ""
    for chunk in rag_chain.stream(message):
        response += chunk
        yield response


def respond(message, history):
    """Return the full answer in a single call (used when STREAMING is disabled)."""
    rag_chain = create_rag_chain(history)
    return rag_chain.invoke(message)


# Gradio chat UI; use the streaming handler when STREAMING is enabled.
demo = gr.ChatInterface(
    respond_stream if STREAMING else respond,
    title="Ask me about court precedents!",
    description="Hello!\nI am an AI QA bot for court precedents. I have in-depth knowledge of case law. Whenever you need help with precedents, feel free to ask!",
)

if __name__ == "__main__":
    demo.launch()
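
# To run locally: `python app.py`, then open the Gradio URL printed in the
# terminal (http://127.0.0.1:7860 by default).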