JiakaiDu committed on
Commit
7e72e7c
1 Parent(s): f498d2a

Upload folder using huggingface_hub

Browse files
Files changed (1) hide show
  1. Test_RAG.py +9 -9
Test_RAG.py CHANGED
@@ -613,17 +613,17 @@ def bot(history, temperature, top_p, top_k, repetition_penalty, hide_full_prompt
613
  llm.pipeline._forward_params["stopping_criteria"] = StoppingCriteriaList(stop_tokens)
614
 
615
  if do_rag:
616
- # t1 = Thread(target=rag_chain.invoke, args=({"input": history[-1][0]},))
617
- input_text = history[-1][0]
618
- response = llm.invoke(input_text)
619
- print(response)
620
  else:
621
  input_text = rag_prompt_template.format(input=history[-1][0], context="")
622
- # t1 = Thread(target=llm.invoke, args=(input_text,))
623
- # input_text = history[-1][0]
624
- response = llm.invoke(input_text)
625
- print(response)
626
- # t1.start()
627
 
628
  # Initialize an empty string to store the generated text
629
  partial_text = ""
 
613
  llm.pipeline._forward_params["stopping_criteria"] = StoppingCriteriaList(stop_tokens)
614
 
615
  if do_rag:
616
+ t1 = Thread(target=rag_chain.invoke, args=({"input": history[-1][0]},))
617
+ # input_text = history[-1][0]
618
+ # response = llm.invoke(input_text)
619
+ # print(response)
620
  else:
621
  input_text = rag_prompt_template.format(input=history[-1][0], context="")
622
+ t1 = Thread(target=llm.invoke, args=(input_text,))
623
+ # # input_text = history[-1][0]
624
+ # response = llm.invoke(input_text)
625
+ # print(response)
626
+ t1.start()
627
 
628
  # Initialize an empty string to store the generated text
629
  partial_text = ""