# app.py
import gradio as gr
import os
from langchain_community.vectorstores import FAISS
from langchain_together import TogetherEmbeddings, Together
from langchain.prompts import ChatPromptTemplate
from langchain.schema.runnable import RunnablePassthrough
from langchain.schema.output_parser import StrOutputParser
from langchain.memory import ConversationBufferMemory
from typing import List, Tuple
import re

# Environment variables for API keys
TOGETHER_API_KEY = os.getenv('TOGETHER_API_KEY')
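# Note: on Hugging Face Spaces this key is typically supplied as a repository secret;
# when running locally, export TOGETHER_API_KEY in your shell before launching the app.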
class ChatBot:
    def __init__(self):
        # Initialize embeddings
        self.embeddings = TogetherEmbeddings(
            model="togethercomputer/m2-bert-80M-32k-retrieval",
            together_api_key=TOGETHER_API_KEY
        )

        # Load the pre-created FAISS index with embeddings
        self.vectorstore = FAISS.load_local(
            ".",
            embeddings=self.embeddings,
            allow_dangerous_deserialization=True  # Only enable this if you trust the source of the index
        )
        self.retriever = self.vectorstore.as_retriever()

        # Initialize the model
        self.model = Together(
            model="meta-llama/Llama-3.3-70B-Instruct-Turbo",
            temperature=0.7,
            max_tokens=150,
            top_k=30,
            together_api_key=TOGETHER_API_KEY
        )

        # Initialize memory
        self.memory = ConversationBufferMemory(
            return_messages=True,
            memory_key="chat_history",
            output_key="answer"
        )

        # Create the prompt template
        self.template = """Quyidagi kontekst va suhbat tarixiga asoslanib, savolga faqat matn ko'rinishida, kod yoki ortiqcha belgilarsiz, faqat o'zbek tilida tabiiy tarzda javob bering:
Kontekst: {context}
Suhbat Tarixi: {chat_history}
Savol: {question}
Javobni faqat matn shaklida bering, kod yoki ortiqcha belgilar kiritmang."""
        self.prompt = ChatPromptTemplate.from_template(self.template)

        # Create the chain
        self.chain = (
            {
                "context": self.retriever,
                "chat_history": lambda x: self.get_chat_history(),
                "question": RunnablePassthrough()
            }
            | self.prompt
            | self.model
            | StrOutputParser()
        )

    def get_chat_history(self) -> str:
        """Format chat history for the prompt"""
        messages = self.memory.load_memory_variables({})["chat_history"]
        return "\n".join([f"{m.type}: {m.content}" for m in messages])
    def process_response(self, response: str) -> str:
        """Clean up the response"""
        unwanted_tags = ["[INST]", "[/INST]", "<s>", "</s>"]
        for tag in unwanted_tags:
            response = response.replace(tag, "")
        # Remove any code snippets the model may have produced
        response = re.sub(r"```.*?```", "", response, flags=re.DOTALL)
        response = re.sub(r"print\(.*?\)", "", response)
        return response.strip()

    def chat(self, message: str, history: List[Tuple[str, str]]) -> str:
        """Process a single chat message"""
        try:
            self.memory.chat_memory.add_user_message(message)
            response = self.chain.invoke(message)
            clean_response = self.process_response(response)
            # If the answer is empty or too short, fall back to asking the user to rephrase
            if not clean_response or len(clean_response.split()) < 3:
                clean_response = "Kechirasiz, savolingizni tushunolmadim. Iltimos, batafsilroq savol bering."
            self.memory.chat_memory.add_ai_message(clean_response)
            return clean_response
        except Exception as e:
            return f"Xatolik yuz berdi: {str(e)}"

    def reset_chat(self) -> List[Tuple[str, str]]:
        """Reset the chat history"""
        self.memory.clear()
        return []
# Create the Gradio interface
def create_demo() -> gr.Blocks:
    chatbot = ChatBot()

    with gr.Blocks() as demo:
        gr.Markdown("""# RAG Chatbot
Beeline Uzbekistanning jismoniy shaxslar uchun tariflari haqida ma'lumotlar beruvchi bot""")

        chatbot_interface = gr.Chatbot(
            height=600,
            show_copy_button=True,
        )

        with gr.Row():
            msg = gr.Textbox(
                show_label=False,
                placeholder="Xabaringizni shu yerda yozing",
                container=False
            )
            submit = gr.Button("Xabarni yuborish", variant="primary")
            clear = gr.Button("Yangi suhbat")

        def respond(message, chat_history):
            # Clean up the user's message
            message = message.strip()
            if not message:
                return "", chat_history
            bot_message = chatbot.chat(message, chat_history)
            chat_history.append((message, bot_message))
            return "", chat_history

        submit.click(respond, [msg, chatbot_interface], [msg, chatbot_interface])
        msg.submit(respond, [msg, chatbot_interface], [msg, chatbot_interface])
        clear.click(lambda: chatbot.reset_chat(), None, chatbot_interface)

    return demo


demo = create_demo()

if __name__ == "__main__":
    demo.launch()
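
# ---------------------------------------------------------------------------
# build_index.py (hypothetical companion script, not part of the original Space)
# ---------------------------------------------------------------------------
# app.py calls FAISS.load_local(".") and therefore expects the index files
# (index.faiss / index.pkl) to already exist in the repository root. The sketch
# below shows one way such an index could be built with the same embedding
# model; the source file name "tariffs.txt" and the chunking parameters are
# assumptions, not taken from the original Space. The imports assume that
# faiss-cpu, langchain, langchain-community and langchain-together are listed
# in the Space's requirements.txt.
import os

from langchain_community.vectorstores import FAISS
from langchain_together import TogetherEmbeddings
from langchain.text_splitter import RecursiveCharacterTextSplitter


def build_index(source_path: str = "tariffs.txt", out_dir: str = ".") -> None:
    # Read the raw tariff descriptions and split them into overlapping chunks
    # so each retrieved chunk stays small enough for the prompt context.
    with open(source_path, encoding="utf-8") as f:
        raw_text = f.read()
    splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
    chunks = splitter.split_text(raw_text)

    # Embed the chunks with the same model app.py uses for queries, then
    # persist the index next to app.py so FAISS.load_local(".") can find it.
    embeddings = TogetherEmbeddings(
        model="togethercomputer/m2-bert-80M-32k-retrieval",
        together_api_key=os.getenv("TOGETHER_API_KEY"),
    )
    vectorstore = FAISS.from_texts(chunks, embeddings)
    vectorstore.save_local(out_dir)


if __name__ == "__main__":
    build_index()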