Update app.py
app.py CHANGED

@@ -248,7 +248,7 @@ def load_html_OG(inp,title):
 
 
 
-def generate(prompt, history, post_check, agent_name=agents[0], sys_prompt="", temperature=0.9, max_new_tokens=1048, top_p=0.95, repetition_penalty=1.0):
+def generate(prompt, history, post_check,full_conv, agent_name=agents[0], sys_prompt="", temperature=0.9, max_new_tokens=1048, top_p=0.95, repetition_penalty=1.0):
     html_out=""
     #main_point[0]=prompt
     #print(datetime.datetime.now())
@@ -272,6 +272,12 @@ def generate(prompt, history, post_check, agent_name=agents[0], sys_prompt="", t
     json_obj={}
     full_conv=[]
     post_cnt=1
+    if not post_check:
+        post_check={}
+    if not full_conv:
+        full_conv={}
+
+
     seed = random.randint(1,1111111111111111)
     if not post_check:
         print("writing blog")
@@ -299,7 +305,7 @@ def generate(prompt, history, post_check, agent_name=agents[0], sys_prompt="", t
 
     for response in stream:
         output += response.token.text
-        yield '', [(prompt,output)],
+        yield '', [(prompt,output)],post_check,full_conv,summary[0],json_obj, json_hist,html_out
 
     if not title:
         for line in output.split("\n"):
@@ -524,7 +530,7 @@ with gr.Blocks() as app:
     m_choice.change(load_models,m_choice,[chatbot])
     app.load(load_models,m_choice,[chatbot]).then(load_html,None,html)
 
-    sub_b = submit_b.click(generate, [msg,chatbot,post_handler,chat_handler,tokens],[msg,chatbot,post_handler,chat_handler,sumbox,sum_out_box,hist_out_box,html])
+    sub_b = submit_b.click(generate, [msg,chatbot,post_handler,chat_handler,chat_handler,tokens],[msg,chatbot,post_handler,chat_handler,sumbox,sum_out_box,hist_out_box,html])
     sub_c = submit_c.click(comment_generate, [msg,chatbot,post_handler,chat_handler],[msg,chatbot,sumbox,sum_out_box,hist_out_box,html])
     sub_r = submit_r.click(reply_generate, [msg,chatbot,tokens],[msg,chatbot,sumbox,sum_out_box,hist_out_box,html])
     sub_e = msg.submit(generate, [msg, chatbot,tokens], [msg, chatbot,sumbox,sum_out_box,hist_out_box,html])
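Note on the wiring above: in Gradio, a generator function attached to .click() or .submit() must yield one value for every component listed in outputs, in order, on every yield; the extra values added to the yield in this commit line up with the extra components passed to submit_b.click(...). Below is a minimal, self-contained sketch of that pattern, not this app's actual code; the function and component names (stream_reply, box, chat, state_a, state_b, send) are hypothetical, and it uses tuple-style chat history to match how the diff builds [(prompt, output)].

import gradio as gr

# Minimal sketch (hypothetical names): a generator wired through .click()
# has to yield one value per component in `outputs`, in the same order.
def stream_reply(prompt, history, state_a, state_b):
    history = history or []
    # Default the state inputs when they arrive empty, mirroring the
    # `if not post_check: post_check={}` pattern added in the diff above.
    state_a = state_a or {}
    state_b = state_b or {}
    output = ""
    for word in ("streamed", "one", "token", "at", "a", "time"):
        output += word + " "
        # Four values yielded <-> four components in `outputs` below.
        yield "", history + [(prompt, output)], state_a, state_b

with gr.Blocks() as demo:
    chat = gr.Chatbot()
    box = gr.Textbox()
    state_a = gr.State({})
    state_b = gr.State({})
    send = gr.Button("Send")
    # `inputs` feeds the function's parameters; `outputs` receives each yield.
    send.click(stream_reply, [box, chat, state_a, state_b],
               [box, chat, state_a, state_b])

demo.launch()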