import os
import time
import tempfile

import joblib
import streamlit as st
from dotenv import load_dotenv
from langchain_community.document_loaders import PyPDFLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.embeddings import SentenceTransformerEmbeddings
from langchain_community.vectorstores import Chroma
from langchain.chains import RetrievalQAWithSourcesChain
from langchain_openai import ChatOpenAI

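# bookie.env is expected to define the OpenAI-compatible credentials read below.
# A minimal sketch of its contents (placeholder values; the base URL is an
# assumption for an OpenRouter-style endpoint serving the Gemma model used here):
#   OPENAI_API_KEY=sk-...
#   OPENAI_API_BASE=https://openrouter.ai/api/v1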
load_dotenv("bookie.env")
api_key = os.getenv("OPENAI_API_KEY")
api_base = os.getenv("OPENAI_API_BASE")

# Chat model behind an OpenAI-compatible endpoint; pass the key and base URL
# explicitly so the values from bookie.env are actually used.
llm = ChatOpenAI(
    model_name="google/gemma-3n-e2b-it:free",
    temperature=0.2,
    openai_api_key=api_key,
    openai_api_base=api_base,
)

# Pre-built embedding model, loaded from disk so it is not rebuilt on every rerun.
em = joblib.load("bai.joblib")
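# Note: the object loaded above is assumed to expose the embed_documents /
# embed_query interface (e.g. SentenceTransformerEmbeddings). A sketch of how
# such a file could be produced (the model name is an assumption, not taken
# from the original code):
#   joblib.dump(
#       SentenceTransformerEmbeddings(model_name="all-MiniLM-L6-v2"),
#       "bai.joblib",
#   )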

mp = st.empty()  # placeholder reused for status messages and, later, the question box

st.title("Welcome to Bookie 📚")
st.sidebar.title("Upload your book as a digitally generated PDF under 5 MB for faster answers 📚")

uploaded_file = st.sidebar.file_uploader("Upload a PDF file", type=["pdf"])
upl = st.sidebar.button("Upload")

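# When "Upload" is clicked: load the PDF, split it into chunks, embed them into
# a Chroma vector store, and build a retrieval QA chain kept in session_state.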
if upl and uploaded_file:
    if uploaded_file.size > 5 * 1024 * 1024:
        st.sidebar.error("❌ File too large. Please upload files under 5 MB.")
        st.stop()

    # Write the upload to a temporary file so PyPDFLoader can read it from disk.
    with tempfile.NamedTemporaryFile(delete=False, suffix=".pdf") as tmp_file:
        tmp_file.write(uploaded_file.read())
        tmp_path = tmp_file.name

    mp.text("Loading document...")
    loader = PyPDFLoader(tmp_path)
    docs = loader.load()
    st.write(f"Loaded {len(docs)} pages")

    mp.text("Splitting text...")
    tct = RecursiveCharacterTextSplitter.from_tiktoken_encoder(
        encoding_name="cl100k_base", chunk_size=512, chunk_overlap=16
    )
    doc = tct.split_documents(docs)
    st.write(f"Created {len(doc)} chunks")

    mp.text("Building vector store...")
    vb = Chroma.from_documents(doc, em)
    r1 = vb.as_retriever(search_type="similarity", search_kwargs={"k": 4})

    mp.text("Building QA chain...")
    chain = RetrievalQAWithSourcesChain.from_chain_type(
        llm=llm, chain_type="map_reduce", retriever=r1
    )
    # Keep the chain in session_state so it survives Streamlit reruns.
    st.session_state.chain = chain

    mp.text("Loading done")
    time.sleep(3)

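# Question box and answer rendering; the status placeholder is reused for the input.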
q = mp.text_input("Ask a question about the document:")
qb = st.button("Submit")

if qb:
    if "chain" not in st.session_state:
        st.warning("⚠️ Please upload a document first.")
        st.stop()
    with st.spinner("Waiting for it..."):
        result = st.session_state.chain({"question": q}, return_only_outputs=True)
    st.header("Answer")
    st.subheader(result["answer"])

    # A "show sources" button here would never register: clicking it reruns the
    # script and the Submit button resets, so this block is skipped. An expander
    # keeps the sources collapsed by default without needing extra state.
    with st.expander("Sources"):
        sources = result.get("sources", "")
        for line in sources.split("\n"):
            st.write(line)