# Hugging Face Space scrape residue (page header "Spaces: Sleeping") — not part of the program.
| import os | |
| import logging | |
| import traceback | |
| from typing import List, Tuple | |
| import gradio as gr | |
| from dotenv import load_dotenv | |
| from langchain.document_loaders import TextLoader | |
| from langchain.text_splitter import RecursiveCharacterTextSplitter | |
| from langchain.embeddings import OpenAIEmbeddings | |
| from langchain.vectorstores import FAISS | |
| from langchain.chat_models import ChatOpenAI | |
| from langchain.chains import RetrievalQA | |
| from langchain.prompts import PromptTemplate | |
# Configure logging: INFO level on the root logger for the whole app.
logging.basicConfig(level=logging.INFO)
# Module-level logger named after this module, per logging convention.
logger = logging.getLogger(__name__)
# Load environment variables from a local .env file (supplies OPENAI_API_KEY).
load_dotenv()
class RAGChatbot:
    """Retrieval-Augmented Generation chatbot over a single text document.

    Builds (or loads a cached) FAISS vector index of the document, then
    answers questions with an OpenAI chat model constrained to the
    retrieved context. Greetings are short-circuited without hitting the
    retriever.
    """

    def __init__(self, document_path):
        """
        Initialize RAG Chatbot with document vectorization.

        :param document_path: Path to the input document
        :raises ValueError: if OPENAI_API_KEY is missing from the environment
        """
        self.openai_api_key = os.getenv('OPENAI_API_KEY')
        if not self.openai_api_key:
            raise ValueError("OpenAI API Key is not set. Please add it to environment variables.")
        self.document_path = document_path
        self.vectorstore = self._load_or_create_vector_store()
        self.qa_system = self._create_qa_system()

    def _load_or_create_vector_store(self):
        """
        Load existing FAISS index or create a new one.

        :return: FAISS vector store
        :raises Exception: re-raises any embedding/indexing failure after logging
        """
        try:
            embeddings = OpenAIEmbeddings(openai_api_key=self.openai_api_key)
            # Reuse the on-disk index if a previous run already built it.
            if os.path.exists('faiss_index'):
                logger.info("Loading existing vector store...")
                # NOTE(review): newer langchain versions require
                # allow_dangerous_deserialization=True here — confirm the
                # pinned langchain version before upgrading.
                return FAISS.load_local('faiss_index', embeddings)
            # No cached index: chunk the document and embed it from scratch.
            logger.info("Creating new vector store...")
            loader = TextLoader(self.document_path, encoding='utf-8')
            documents = loader.load()
            # Large chunks with 20% overlap so tariff tables stay intact
            # across chunk boundaries.
            text_splitter = RecursiveCharacterTextSplitter(
                chunk_size=3000,
                chunk_overlap=600,
                separators=["\n\n\n", "\n\n", "\n", ".", " ", ""]
            )
            texts = text_splitter.split_documents(documents)
            vectorstore = FAISS.from_documents(texts, embeddings)
            # Persist so subsequent startups skip re-embedding.
            os.makedirs('faiss_index', exist_ok=True)
            vectorstore.save_local('faiss_index')
            return vectorstore
        except Exception as e:
            logger.error(f"Vector store creation error: {e}")
            logger.error(traceback.format_exc())
            raise

    def _create_qa_system(self):
        """
        Create the Question-Answering chain with a custom prompt.

        :return: RetrievalQA chain
        """
        custom_prompt = PromptTemplate(
            input_variables=["context", "question"],
            template="""You are an expert AI assistant for Beeline Uzbekistan tariffs.
Provide clear, precise answers based on the context.
Respond in the language of the question.
Context: {context}
Question: {question}
Comprehensive Answer:"""
        )
        # Low temperature: tariff answers should be factual, not creative.
        llm = ChatOpenAI(
            model_name="gpt-3.5-turbo",
            openai_api_key=self.openai_api_key,
            temperature=0.1
        )
        # MMR retrieval: fetch 10 candidates, keep 4 diverse ones.
        return RetrievalQA.from_chain_type(
            llm=llm,
            chain_type="stuff",
            retriever=self.vectorstore.as_retriever(
                search_type="mmr",
                search_kwargs={"k": 4, "fetch_k": 10}
            ),
            chain_type_kwargs={"prompt": custom_prompt}
        )

    def chat(self, message: str, history: List[Tuple[str, str]]) -> str:
        """
        Main chat method with multilingual support.

        :param message: User input message
        :param history: Chat history (unused by the QA chain; kept for the UI callback signature)
        :return: Bot response
        """
        normalized = message.lower()

        # Handle the synthetic greeting the UI sends on startup/reset.
        if normalized in ['init', 'start', 'begin']:
            return "Assalomu alaykum! 📱 Beeline tarifları haqida qanday ma'lumot kerak? (Hello! What Beeline tariff information do you need?)"

        # Multilingual greeting handling.
        # BUG FIX: 'hi'/'hello' were also listed under 'ru', and because the
        # 'ru' entry iterates before 'en', English greetings received a
        # Russian reply and the 'en' entries were unreachable. Each word now
        # appears under exactly one language.
        greetings = {
            'uz': ['salom', 'assalomu alaykum', 'hammaga salom'],
            'ru': ['привет', 'здравствуйте'],
            'en': ['hi', 'hello', 'hey']
        }
        for lang, greeting_list in greetings.items():
            if normalized in greeting_list:
                return {
                    'uz': "Salom! Sizga qanday yordam bera olaman? 🤖",
                    'ru': "Привет! Чем могу помочь? 🤖",
                    'en': "Hello! How can I assist you today about Beeline tariffs? 🤖"
                }[lang]

        try:
            # Query the document.
            # NOTE(review): Chain.run is deprecated in newer langchain in
            # favor of .invoke() — confirm the pinned version before changing.
            response = self.qa_system.run(message)
            # Add conversational touch.
            response += "\n\n📞 Yana bir nima so'rashingizni xohlar edingizmi? (Would you like to ask anything else?)"
            return response
        except Exception as e:
            logger.error(f"Chat processing error: {e}")
            logger.error(traceback.format_exc())
            return "Kechirasiz, so'rovingizni qayta ishlashda xatolik yuz berdi. Iltimos, qaytadan urinib ko'ring. (Sorry, there was an error processing your request. Please try again.)"
def create_demo() -> gr.Interface:
    """
    Build the Gradio interface for the chatbot.

    :return: Gradio demo
    """
    # Back the UI with a chatbot indexed over the bundled tariff document.
    bot = RAGChatbot('12.txt')

    with gr.Blocks() as demo:
        gr.Markdown("# 📱 Beeline Uzbekistan Tariff Assistant")
        chat_window = gr.Chatbot(
            height=600,
            show_copy_button=True,
            # User / bot avatars. NOTE(review): gradio documents
            # avatar_images as image paths or URLs — confirm emoji strings
            # render as intended on the deployed version.
            avatar_images=["🤔", "🤖"]
        )
        with gr.Row():
            msg = gr.Textbox(
                show_label=False,
                placeholder="Beeline tariffları haqida so'rang... (Ask about Beeline tariffs...)",
                container=False
            )
            submit = gr.Button("Yuborish (Send)", variant="primary")
            clear = gr.Button("Yangi suhbat (New Chat)")

        def handle_message(user_text, conversation):
            # Append the (user, bot) turn and clear the input textbox.
            reply = bot.chat(user_text, conversation)
            conversation.append((user_text, reply))
            return "", conversation

        def fresh_conversation():
            # Seed a new conversation with the bot's startup greeting.
            return [("", bot.chat("init", []))]

        # Wire up submit (button + Enter), reset, and initial load.
        submit.click(handle_message, [msg, chat_window], [msg, chat_window])
        msg.submit(handle_message, [msg, chat_window], [msg, chat_window])
        clear.click(fn=fresh_conversation, inputs=None, outputs=chat_window)
        demo.load(fresh_conversation, inputs=None, outputs=chat_window)

    return demo
# Main execution
# NOTE(review): demo is built at import time, outside the __main__ guard —
# importing this module triggers vector-store creation and requires
# OPENAI_API_KEY. Presumably intentional: Hugging Face Spaces-style hosts
# import `demo` directly from the module. Confirm before moving it under
# the guard.
demo = create_demo()
if __name__ == "__main__":
    demo.launch(debug=True)