from .llm import gemini_llm
from .retrieval import load_vectordb
from .memory import memory
from .prompts import chat_prompt, classification_prompt, category_tree_json
from langchain.chains import ConversationalRetrievalChain
from .metadata_selfquery import metadata_field_info
from langchain.retrievers.self_query.base import SelfQueryRetriever
from langchain.retrievers.self_query.qdrant import QdrantTranslator

# Load the Qdrant-backed vector store once at import time so the retriever
# and QA chain below can share a single connection.
vector_store = load_vectordb()


def classify_query(query):
    """Rewrite/classify a raw user query against the product category tree.

    Formats ``classification_prompt`` with the query and the serialized
    category tree, and returns whatever ``gemini_llm.invoke`` produces.

    NOTE(review): if ``gemini_llm`` is a chat model, ``invoke`` returns a
    message object, not ``str`` — downstream code treats this as the chain's
    ``question`` and as the memory key, so confirm the expected type.
    """
    response = gemini_llm.invoke(
        classification_prompt.format(query=query, category_tree=category_tree_json)
    )
    return response


# Self-querying retriever: lets the LLM translate natural-language filters
# (category, price, ...) into Qdrant metadata filters via QdrantTranslator.
retriever = SelfQueryRetriever.from_llm(
    llm=gemini_llm,
    vectorstore=vector_store,
    document_contents="Thông tin sản phẩm gồm mô tả ngắn và danh mục phân cấp, giá mà khách hàng tìm kiếm",
    metadata_field_info=metadata_field_info,
    structured_query_translator=QdrantTranslator(metadata_key="metadata"),
    search_type="similarity",
    # NOTE(review): plain "similarity" search ignores "score_threshold";
    # switching to search_type="similarity_score_threshold" would enforce it.
    # Left unchanged here to preserve current retrieval behavior — confirm intent.
    search_kwargs={"k": 10, "score_threshold": 0.3},
)

# Conversational RAG chain. Because `memory` is attached here, the chain
# itself saves each (question, answer) turn after every call.
qa_chain = ConversationalRetrievalChain.from_llm(
    llm=gemini_llm,
    retriever=retriever,
    memory=memory,
    return_source_documents=False,
    combine_docs_chain_kwargs={"prompt": chat_prompt},
    output_key="result",
)


def get_chat_response(user_input: str) -> str:
    """Answer one user turn: classify the query, then run the RAG chain.

    Parameters
    ----------
    user_input : str
        The raw user message.

    Returns
    -------
    str
        The chain's generated answer (the ``"result"`` output key).
    """
    classified_query = classify_query(user_input)
    response = qa_chain({"question": classified_query})
    # BUG FIX: the previous version also called
    # memory.save_context({"input": ...}, {"output": ...}) here, but the chain
    # already persists the turn through its attached `memory` — the manual
    # call stored every exchange twice, doubling the history fed back into
    # question condensation on subsequent turns.
    return response["result"]