santhoshs committed
Commit d9bb31c · 1 Parent(s): 0ddf0c4

Move chain within on chat start

Files changed (1): app.py (+16 -14)
app.py CHANGED
@@ -44,34 +44,36 @@ if not os.path.exists(vector_file):
 else:
     vector_store = FAISS.load_local(vector_file, cached_embedder, allow_dangerous_deserialization=True)
 
-prompt_template = ChatPromptTemplate.from_template(
-    "You are a movie recommendation system, for a given {query} find recommendations from {content}."
-)
-retriever = vector_store.as_retriever()
-chat_model = ChatOpenAI(model="gpt-4o", temperature=0.2, openai_api_key=openai_api_key)
-parser = StrOutputParser()
-
-runnable_chain = (
-    {"query": RunnablePassthrough(), "content": retriever}
-    | prompt_template
-    | chat_model
-    | StrOutputParser()
-)
-
 @cl.on_chat_start
 async def on_chat_start():
+    print("On Chat Start")
     await cl.Message(content="Hello, i am your movie recommender, how can i help you today?").send()
+    prompt_template = ChatPromptTemplate.from_template(
+        "You are a movie recommendation system, for a given {query} find recommendations from {content}."
+    )
+    retriever = vector_store.as_retriever()
+    chat_model = ChatOpenAI(model="gpt-4o", temperature=0.2, openai_api_key=openai_api_key)
+    parser = StrOutputParser()
+
+    runnable_chain = (
+        {"query": RunnablePassthrough(), "content": retriever}
+        | prompt_template
+        | chat_model
+        | StrOutputParser()
+    )
     cl.user_session.set("chain", runnable_chain)
 
 
 @cl.on_message
 async def main(message):
+    print("On Message Start")
     chain = cl.user_session.get("chain")
     cb = cl.AsyncLangchainCallbackHandler()
     user_input = cl.Message(content="")
     await user_input.send()
 
     async for stream in chain.astream(user_input, cb):
+        print(stream)
         await user_input.stream_token(stream)
 
     await user_input.update()
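
For reference, a minimal sketch (not the committed code) of how the session chain built in on_chat_start is typically consumed in a Chainlit on_message handler: it assumes the LCEL runnable is stored under the "chain" session key as in the diff, feeds the incoming message text (message.content) to astream, and passes the LangChain callback handler through the config mapping rather than positionally, while streaming tokens into a fresh outgoing message.

import chainlit as cl


@cl.on_message
async def main(message: cl.Message):
    # LCEL runnable stored in the session during on_chat_start (assumed key "chain")
    chain = cl.user_session.get("chain")
    # Callback handler that surfaces intermediate LangChain steps in the Chainlit UI
    cb = cl.AsyncLangchainCallbackHandler()
    # Empty outgoing message used as the streaming target for generated tokens
    reply = cl.Message(content="")
    await reply.send()

    # astream takes the chain input first; callbacks go inside the config mapping
    async for token in chain.astream(message.content, {"callbacks": [cb]}):
        await reply.stream_token(token)

    await reply.update()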