Victor Hom committed
Commit e87d89a · Parent: c867e5d

try making the output stream go to screen

Files changed (1):
  1. app.py +13 -7
app.py CHANGED
@@ -183,19 +183,25 @@ async def main(message: cl.Message):
     # )
 
     runnable_chain = prompt | model | parser
-    output_chunks = runnable_chain.invoke({})
-    print(''.join(output_chunks))
-    print("output chunks")
+    # output_chunks = runnable_chain.invoke({})
+    # print(''.join(output_chunks))
+    # print("output chunks")
 
 
-    print([m.to_openai() for m in prompt.messages])
+    # print([m.to_openai() for m in prompt.messages])
 
     msg = cl.Message(content="")
 
+    output_stream = runnable_chain.astream({})
+
+    async for chunk in output_stream:
+        print(chunk, sep='', flush=True)
+
     # Call OpenAI
-    async for stream_resp in await client.chat.completions.create(
-        messages=[m.to_openai() for m in prompt.messages], stream=True, **settings
-    ):
+    # async for stream_resp in await client.chat.completions.create(
+    #     messages=[m.to_openai() for m in prompt.messages], stream=True, **settings
+    # ):
+    async for stream_resp in output_stream:
         token = stream_resp.choices[0].delta.content
         if not token:
             token = ""
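As committed, the first loop prints each chunk to the terminal but never forwards it to the Chainlit message. A minimal sketch of one way to stream to both the terminal and the UI, assuming parser yields plain string chunks (e.g. LangChain's StrOutputParser, which this hunk does not show); stream_to_screen is a hypothetical helper name:

import chainlit as cl

# Sketch only: runnable_chain is the `prompt | model | parser` chain
# from the diff above; the string-chunk assumption is not confirmed
# by this commit.
async def stream_to_screen(runnable_chain) -> None:
    msg = cl.Message(content="")

    async for chunk in runnable_chain.astream({}):
        # end='' (not sep='') suppresses the newline between tokens;
        # sep only separates multiple positional arguments to print().
        print(chunk, end="", flush=True)
        # Forward the same token to the Chainlit UI as it arrives.
        await msg.stream_token(chunk)

    await msg.send()

Two caveats about the hunk as committed: an async generator can only be iterated once, so after the first "async for chunk in output_stream:" drains it, the later "async for stream_resp in output_stream:" yields nothing; and if the parser emits strings, those chunks have no .choices attribute, so stream_resp.choices[0].delta.content would raise AttributeError.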