Victor Hom committed on
Commit
ef7a628
·
1 Parent(s): e87d89a

test the streamed response

Browse files
Files changed (1) hide show
  1. app.py +10 -8
app.py CHANGED
@@ -194,22 +194,24 @@ async def main(message: cl.Message):
194
 
195
  output_stream = runnable_chain.astream({})
196
 
197
- async for chunk in output_stream:
198
- print(chunk, sep='', flush=True)
199
 
200
  # Call OpenAI
201
  # async for stream_resp in await client.chat.completions.create(
202
  # messages=[m.to_openai() for m in prompt.messages], stream=True, **settings
203
  # ):
204
  async for stream_resp in output_stream:
205
- token = stream_resp.choices[0].delta.content
206
- if not token:
207
- token = ""
208
- await msg.stream_token(token)
 
 
209
 
210
  # Update the prompt object with the completion
211
- prompt.completion = msg.content
212
- msg.prompt = prompt
213
 
214
  # Send and close the message stream
215
  await msg.send()
 
194
 
195
  output_stream = runnable_chain.astream({})
196
 
197
+ # async for chunk in output_stream:
198
+ # print(chunk, sep='', flush=True)
199
 
200
  # Call OpenAI
201
  # async for stream_resp in await client.chat.completions.create(
202
  # messages=[m.to_openai() for m in prompt.messages], stream=True, **settings
203
  # ):
204
  async for stream_resp in output_stream:
205
+ await msg.stream_token(stream_resp)
206
+
207
+ # token = stream_resp.choices[0].delta.content
208
+ # if not token:
209
+ # token = ""
210
+ # await msg.stream_token(token)
211
 
212
  # Update the prompt object with the completion
213
+ # prompt.completion = msg.content
214
+ # msg.prompt = prompt
215
 
216
  # Send and close the message stream
217
  await msg.send()