MikeMann committed on
Commit
42b4e9e
·
1 Parent(s): b38b149

added EvalDataset Generation

Browse files
Files changed (1) hide show
  1. app.py +18 -19
app.py CHANGED
@@ -397,20 +397,19 @@ class BSIChatbot:
397
  return query, context
398
 
399
  def queryRemoteLLM(self, systemPrompt, query, summary):
400
- #newif summary != True:
401
- #newchat_completion = self.llm_client.chat.completions.create(
402
- #newmessages=[{"role": "system", "content": systemPrompt},
403
- #new{"role": "user", "content": "Step-Back Frage, die neu gestellt werden soll: " + query}],
404
- #newmodel=self.llm_remote_model,
405
- #new)
406
- #newif summary == True:
407
- #newchat_completion = self.llm_client.chat.completions.create(
408
- #newmessages=[{"role": "system", "content": systemPrompt},
409
- #new{"role": "user", "content": query}],
410
- #newmodel=self.llm_remote_model,
411
- #new)
412
- #newreturn chat_completion.choices[0].message.content
413
- return "AnswerqueryRemoteLLM" #new
414
 
415
  def stepBackPrompt(self, query):
416
  systemPrompt = """
@@ -510,12 +509,12 @@ class BSIChatbot:
510
  return stream
511
 
512
  else:
513
- #newanswer = self.llm_client.chat.completions.create(
514
- #newmessages=final_prompt,
515
- #newmodel=self.llm_remote_model,
516
- #newstream=False)
 
517
  self.cleanResources()
518
- answer = "Answer" #new
519
  return answer, context
520
 
521
  def returnImages(self):
 
397
  return query, context
398
 
399
  def queryRemoteLLM(self, systemPrompt, query, summary):
400
+ if summary != True:
401
+ chat_completion = self.llm_client.chat.completions.create(
402
+ messages=[{"role": "system", "content": systemPrompt},
403
+ {"role": "user", "content": "Step-Back Frage, die neu gestellt werden soll: " + query}],
404
+ model=self.llm_remote_model,
405
+ )
406
+ if summary == True:
407
+ chat_completion = self.llm_client.chat.completions.create(
408
+ messages=[{"role": "system", "content": systemPrompt},
409
+ {"role": "user", "content": query}],
410
+ model=self.llm_remote_model,
411
+ )
412
+ return chat_completion.choices[0].message.content
 
413
 
414
  def stepBackPrompt(self, query):
415
  systemPrompt = """
 
509
  return stream
510
 
511
  else:
512
+ answer = self.llm_client.chat.completions.create(
513
+ messages=final_prompt,
514
+ model=self.llm_remote_model,
515
+ stream=False
516
+ )
517
  self.cleanResources()
 
518
  return answer, context
519
 
520
  def returnImages(self):