from langchain.retrievers import WikipediaRetriever
from langchain.chains import ConversationalRetrievalChain
from chatbot.llm import gemini_llm
from chatbot.memory import memory
from chatbot.prompts import chat_prompt

def search_wikipedia(query: str, language: str = "vi"):
    """Search Wikipedia in the specified language (vi or en)."""
    # Pass the language to the retriever itself; wikipedia.set_lang() has no
    # effect on WikipediaRetriever, which configures its own client from `lang`.
    retriever = WikipediaRetriever(lang=language)
    return retriever.get_relevant_documents(query)

def get_retriever(user_input: str) -> WikipediaRetriever:
    """Decide which language retriever to use based on the user input."""
    # Example heuristic: purely ASCII input is assumed to be English; input with
    # Vietnamese diacritics (non-ASCII characters) falls back to "vi".
    if user_input.isascii():
        return WikipediaRetriever(lang="en")
    return WikipediaRetriever(lang="vi")

def build_qa_chain(retriever: WikipediaRetriever) -> ConversationalRetrievalChain:
    """Build a conversational retrieval chain over a language-specific retriever."""
    # ConversationalRetrievalChain expects a BaseRetriever instance, not a function,
    # so the chain is built per request around the retriever chosen for the input.
    return ConversationalRetrievalChain.from_llm(
        llm=gemini_llm,
        retriever=retriever,  # Dynamic Wikipedia search
        memory=memory,
        return_source_documents=False,
        combine_docs_chain_kwargs={"prompt": chat_prompt},
        output_key="result",
    )

def get_chat_response(user_input: str) -> str:
    # Pick the Wikipedia language for this query, then run the conversational chain.
    qa_chain = build_qa_chain(get_retriever(user_input))
    response = qa_chain({"question": user_input})

    # The chain already saves the turn to the shared conversation memory, so an
    # extra memory.save_context() call would store every exchange twice.
    return response["result"]
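

# A minimal usage sketch, assuming the `chatbot.llm`, `chatbot.memory`, and
# `chatbot.prompts` modules are importable and the Gemini credentials are set.
# The first query is all ASCII and routes to English Wikipedia; the second
# contains Vietnamese diacritics and routes to Vietnamese Wikipedia.
if __name__ == "__main__":
    print(get_chat_response("Who wrote The Tale of Kieu?"))
    print(get_chat_response("Truyện Kiều do ai sáng tác?"))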