vectorstore
app.py
CHANGED
@@ -34,13 +34,7 @@ login(token=HF_KEY)
 
 class BSIChatbot:
     def __init__(self, model_paths: Dict[str, str], docs_path: str):
-
-        self.embedding_model = HuggingFaceEmbeddings(
-            model_name=self.word_and_embed_model_path,
-            multi_process=True,
-            model_kwargs={"device": "cuda"},
-            encode_kwargs={"normalize_embeddings": True},
-        )
+        self.embedding_model = None
         self.llmpipeline = None
         self.llmtokenizer = None
         self.vectorstore = None
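This hunk leaves only a None placeholder in the constructor, so the CUDA-backed embedding model now has to be created later, inside GPU-decorated code. A minimal sketch of that deferred-initialization shape, assuming HuggingFaceEmbeddings comes from langchain_community.embeddings; the _ensure_embedding_model() helper and the embed_model_path argument are hypothetical and do not appear in app.py:

import spaces
from langchain_community.embeddings import HuggingFaceEmbeddings

class LazyEmbeddings:
    def __init__(self, embed_model_path: str):
        self.embed_model_path = embed_model_path
        self.embedding_model = None  # no GPU work at construction time

    @spaces.GPU
    def _ensure_embedding_model(self):
        # Build the model once, on first use, inside a GPU-decorated call.
        if self.embedding_model is None:
            self.embedding_model = HuggingFaceEmbeddings(
                model_name=self.embed_model_path,
                model_kwargs={"device": "cuda"},
                encode_kwargs={"normalize_embeddings": True},
            )
        return self.embedding_model

Keeping construction out of __init__ means startup does no GPU work, which matters when CUDA is only reachable from inside @spaces.GPU calls.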
@@ -58,7 +52,12 @@ class BSIChatbot:
         raw_knowledge_base = []
 
         # Initialize embedding model
-
+        self.embedding_model = HuggingFaceEmbeddings(
+            model_name=self.word_and_embed_model_path,
+            multi_process=True,
+            model_kwargs={"device": "cuda"},
+            encode_kwargs={"normalize_embeddings": True},
+        )
 
         if rebuild_embeddings:
             # Load documents
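The rebuild branch that follows is not part of this diff; presumably it chunks the loaded documents, embeds them with the model created above, and persists the index in the _embeddings folder that the retriever reads back. A rough sketch under those assumptions (the function name, splitter settings, and docs_path argument are illustrative, not from app.py):

import os
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.vectorstores import FAISS

def build_vectorstore(raw_knowledge_base, embedding_model, docs_path):
    # Chunk the raw documents; the sizes here are placeholders.
    splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
    chunks = splitter.split_documents(raw_knowledge_base)

    # Embed the chunks and persist the FAISS index on disk so it can be
    # reloaded later without rebuilding the embeddings.
    vectorstore = FAISS.from_documents(chunks, embedding_model)
    vectorstore.save_local(os.path.join(docs_path, "_embeddings"))
    return vectorstore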
@@ -98,9 +97,9 @@ class BSIChatbot:
     @spaces.GPU
     def retrieve_similar_embedding(self, query: str):
         #lazy load
-        if (self.vectorstore == None):
-            self.vectorstore = FAISS.load_local(os.path.join(self.docs, "_embeddings"), self.embedding_model,
-                                                allow_dangerous_deserialization=True)
+        #if (self.vectorstore == None):
+        #    self.vectorstore = FAISS.load_local(os.path.join(self.docs, "_embeddings"), self.embedding_model,
+        #                                        allow_dangerous_deserialization=True)
         print("DBG: Vectorstore Status retriever:", self.vectorstore)
         query = f"Instruct: Given a search query, retrieve the relevant passages that answer the query\nQuery:{query}"
         return self.vectorstore.similarity_search(query=query, k=20)
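The guard commented out in the last hunk is the usual lazy-load pattern for a FAISS index persisted with save_local; a standalone sketch of that pattern, with the folder layout and the embedding_model argument taken as assumptions:

import os
from langchain_community.vectorstores import FAISS

def load_vectorstore_if_needed(vectorstore, docs_dir, embedding_model):
    # Load the persisted index only if it is not already in memory.
    if vectorstore is None:
        vectorstore = FAISS.load_local(
            os.path.join(docs_dir, "_embeddings"),
            embedding_model,
            # Required because the docstore is pickled on disk; only load
            # indexes you created yourself.
            allow_dangerous_deserialization=True,
        )
    return vectorstore

With the guard removed, retrieve_similar_embedding relies on self.vectorstore having been populated earlier in the same process.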