Update latest app
app.py
CHANGED
@@ -1,10 +1,15 @@
 from datasets import load_dataset
-from langchain_community.document_loaders.csv_loader import CSVLoader
-from langchain.text_splitter import RecursiveCharacterTextSplitter
 from langchain.embeddings import CacheBackedEmbeddings
 from langchain.storage import LocalFileStore
-from
+from langchain.text_splitter import RecursiveCharacterTextSplitter
+from langchain_core.runnables.base import RunnableSequence
+from langchain_core.runnables.passthrough import RunnablePassthrough
+from langchain_core.output_parsers import StrOutputParser
+from langchain_core.prompts import ChatPromptTemplate
+from langchain_community.document_loaders.csv_loader import CSVLoader
 from langchain_community.vectorstores import FAISS
+from langchain_openai import OpenAIEmbeddings
+from langchain_openai import ChatOpenAI
 
 dataset = load_dataset('ShubhamChoksi/IMDB_Movies')
 dataset_dict = dataset
@@ -31,12 +36,19 @@ vector_file = "local_vector"
 vector_store = FAISS.from_documents(chunked_documents, cached_embedder)
 vector_store.save_local(vector_file)
 
-
-
-
-
-
-
-
-
+prompt_template = ChatPromptTemplate.from_template(
+    "You are a movie recommendation system, for a given {query} find recommendations from {content}."
+)
+retriever = vector_store.as_retriever()
+chat_model = ChatOpenAI(model="gpt-4o", temperature=0.2, openai_api_key=openai_api_key)
+parser = StrOutputParser()
+
+runnable_chain = (
+    {"query": RunnablePassthrough(), "content": retriever}
+    | prompt_template
+    | chat_model
+    | StrOutputParser()
+)
 
+output_chunks = runnable_chain.invoke(query)
+print(''.join(output_chunks))
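
The hunk context references chunked_documents and cached_embedder, which are built in the unchanged part of app.py that this commit does not show. A minimal sketch of what that section presumably looks like, given the imports at the top of the file (the CSV path, chunk sizes, and cache directory below are assumptions, not taken from the commit):

# Hedged reconstruction of the unseen middle of app.py; any name not present in
# the diff (csv path, chunk sizes, cache directory) is an illustrative guess.
documents = CSVLoader(file_path="imdb_movies.csv").load()          # assumed CSV exported from dataset_dict
splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
chunked_documents = splitter.split_documents(documents)

store = LocalFileStore("./embedding_cache/")                        # assumed on-disk cache location
embeddings = OpenAIEmbeddings(openai_api_key=openai_api_key)
cached_embedder = CacheBackedEmbeddings.from_bytes_store(
    embeddings, store, namespace=embeddings.model                   # cache keys namespaced by model name
)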
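
Likewise, openai_api_key and query are referenced by the new lines but defined outside the shown hunks. A hedged usage sketch, with both values assumed for illustration:

import os

openai_api_key = os.environ["OPENAI_API_KEY"]            # assumed to come from the environment
query = "Recommend a feel-good comedy from the 1990s"    # example query, not from the commit

# The dict at the head of runnable_chain fans the input out: RunnablePassthrough()
# forwards the raw query string into {query}, while the retriever fetches matching
# documents for {content}; StrOutputParser() then reduces the model reply to a string.
answer = runnable_chain.invoke(query)
print(answer)   # invoke() already returns one string; ''.join() only matters for stream() chunks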
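
Two small follow-ups on the added block: parser is created but the chain instantiates a second StrOutputParser(), so one of the two could be dropped; and because the index is already persisted with save_local(vector_file), a later run could skip re-embedding by reloading it, roughly:

# Hedged sketch: reload the persisted FAISS index instead of rebuilding it.
# allow_dangerous_deserialization is required by recent langchain_community
# releases when loading pickled index metadata from disk.
vector_store = FAISS.load_local(vector_file, cached_embedder, allow_dangerous_deserialization=True)
retriever = vector_store.as_retriever()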
|