|
import pandas as pd |
|
from langchain_groq import ChatGroq |
|
from langchain_huggingface import HuggingFaceEmbeddings |
|
from langchain_chroma import Chroma |
|
from langchain_core.prompts import PromptTemplate |
|
from langchain_core.output_parsers import StrOutputParser |
|
from langchain_core.runnables import RunnablePassthrough |
|
import gradio as gr |
|
|
|
|
|
# Load the knowledge base: each CSV row becomes one context string of the
# form "col0: v0 col1: v1 col2: v2 " built from the leading columns.
df = pd.read_csv('./botreformasconstrucciones.csv')

# Number of leading CSV columns folded into each context string.
N_CONTEXT_COLUMNS = 3


def _row_to_context(frame, row_index, n_columns=N_CONTEXT_COLUMNS):
    """Return the context string for one row of *frame*.

    Each of the first *n_columns* cells is rendered as "column: value ".
    Values are passed through str() so numeric cells do not raise
    TypeError on concatenation (the original loop crashed on non-string
    cells such as prices or phone numbers).
    """
    parts = [
        f"{frame.columns[col]}: {str(frame.iloc[row_index, col])} "
        for col in range(n_columns)
    ]
    return "".join(parts)


# One context string per CSV row; indexed into the vector store below.
context_data = [_row_to_context(df, i) for i in range(len(df))]
|
|
|
|
|
import os |
|
from langchain_groq import ChatGroq |
|
from langchain_huggingface import HuggingFaceEmbeddings |
|
from langchain_chroma import Chroma |
|
|
|
|
|
# --- LLM, embeddings, and vector-store setup --------------------------------

# Groq API key read from the environment.
# NOTE(review): the variable name 'groq_api_keys' (plural) is unusual —
# confirm it matches the deployment environment. If unset, groq_key is None
# and the first model call will fail with an authentication error.
groq_key = os.environ.get('groq_api_keys')

# Chat model that answers customer questions.
llm = ChatGroq(model="llama-3.1-70b-versatile", api_key=groq_key)

# Sentence-embedding model used both to index the CSV contexts and to embed
# incoming queries (downloads weights from Hugging Face on first run).
embed_model = HuggingFaceEmbeddings(model_name="mixedbread-ai/mxbai-embed-large-v1")

# Chroma collection with no persist_directory, so the index lives in memory
# and is rebuilt from context_data on every process start.
vectorstore = Chroma(
    collection_name="reformas_construccion_juancarlos_y_yoises",
    embedding_function=embed_model,
)

# Embed and index every context string built from the CSV.
vectorstore.add_texts(context_data)

# Retriever interface consumed by the RAG chain below.
retriever = vectorstore.as_retriever()
|
|
|
|
|
# Prompt for the RAG chain: a fixed Spanish system persona followed by the
# retrieved context and the user's question. {context} and {question} are
# filled by the chain at query time.
# Fix: corrected the user-visible typo "nuestfa" -> "nuestra".
template = ("""Tu eres un experto en reformas y construccion, asistente de reformas y construccion juan carlos y moises, responderas a los clientes sobre sus dudas, y les remitiras si no encuentras una respuesta a contactar con nosotros al telefono 697 945 127 - 622 523 309, whatsapp o enviar un formulario en nuestra pagina de contacto
Context: {context}
Question: {question}
Answer:""")

rag_prompt = PromptTemplate.from_template(template)
|
|
|
|
|
from langchain_core.output_parsers import StrOutputParser |
|
|
|
|
|
from langchain_core.runnables import RunnablePassthrough |
|
|
|
|
|
# LCEL pipeline: the retriever fills {context} with documents relevant to the
# incoming message, RunnablePassthrough forwards the raw message as
# {question}, the formatted prompt goes to the LLM, and StrOutputParser
# reduces the model response to a plain string (so .stream() yields str
# chunks).
rag_chain = (
    {"context": retriever, "question": RunnablePassthrough()}
    | rag_prompt
    | llm
    | StrOutputParser()
)
|
|
|
|
|
import gradio as gr |
|
|
|
|
|
def rag_memory_stream(message, history):
    """Yield the progressively accumulated answer for one chat turn.

    Each chunk streamed by the RAG chain is appended to the text produced
    so far and the growing string is yielded, so the Gradio UI renders the
    answer live. The ``history`` argument is required by gr.ChatInterface
    but is not used here (the chain is stateless per message).
    """
    chunks = []
    for piece in rag_chain.stream(message):
        chunks.append(piece)
        yield "".join(chunks)
|
|
|
|
|
# Example prompts shown in the chat UI.
# Fix: the original literals were double-encoded mojibake (e.g. "驴C贸mo"
# for "¿Cómo", "ba帽o" for "baño") — restored the intended Spanish text.
examples = [
    "¿Como pintar las paredes?",
    "¿Cómo alicatar el suelo del baño?",
]

description = "Aplicación de IA experta en reformas y construccion Juan Carlos y Moises"
title = "Experto en construccion y reformas"

# Streaming chat UI: fn receives (message, history) and yields partial
# answers; type="messages" uses the openai-style message history format.
demo = gr.ChatInterface(
    fn=rag_memory_stream,
    type="messages",
    title=title,
    description=description,
    fill_height=True,
    examples=examples,
    theme="glass",
)
|
|
|
|
|
# Start the Gradio server only when executed as a script (not on import).
if __name__ == "__main__":
    demo.launch()