added stepback-Prompting
app.py
CHANGED
@@ -360,7 +360,9 @@ class BSIChatbot:
         query, context = retrieval(query, True)
         if stepBackPrompt == True:
             stepBackQuery = stepBackPrompt(query)
+            print("DBG stepBackQuery:" + stepBackQuery)
             stepBackQuery, stepBackContext = retrieval(stepBackQuery, True)
+            print("DBG stepBackContext:" + stepBackContext)
             sysPrompt = """
             You are an helpful Chatbot for the BSI IT-Grundschutz. Using the information contained in the context,
             give a comprehensive answer to the question.
@@ -369,12 +371,11 @@ class BSIChatbot:
             If the answer cannot be deduced from the context, do not give an answer.
             """
             stepBackAnswer = queryRemoteLLM(sysPrompt, stepBackQuery, True)
+            print("DBG stepBackAnswer:" + stepBackAnswer)
             context += "Übergreifende Frage:" + stepBackQuery + "Übergreifender Context:" + stepBackAnswer

     #def queryRemoteLLM(self, systemPrompt, query, summary):

-
-
         prompt_in_chat_format[-1]["content"] = prompt_in_chat_format[-1]["content"].format(
             question=query, context=context, history=history[:-1]
         )
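The commit calls stepBackPrompt(query) to derive a broader "step-back" question, but the helper itself lies outside this hunk. Below is a minimal, self-contained sketch of what such a step-back question generator could look like. The function name step_back_prompt, the prompt wording, and the injected query_llm callable are assumptions for illustration, not code from the repository (the real code likely routes through queryRemoteLLM, whose signature is only visible at its other call site in the diff).

# Sketch of a step-back question generator; names and prompt text are assumptions.
from typing import Callable

STEP_BACK_SYSTEM_PROMPT = """
You are an expert at reformulating questions about the BSI IT-Grundschutz.
Given a specific user question, ask a more generic "step-back" question about
the underlying concept or principle, so that broader context can be retrieved.
Return only the reformulated question.
"""

def step_back_prompt(query: str, query_llm: Callable[[str, str], str]) -> str:
    """Generate a more generic (step-back) version of `query` via the LLM."""
    step_back_query = query_llm(STEP_BACK_SYSTEM_PROMPT, query)
    return step_back_query.strip()

if __name__ == "__main__":
    # Stand-in for the remote LLM call, so the sketch runs without a backend.
    def fake_llm(system_prompt: str, user_query: str) -> str:
        return "Welche grundlegenden Anforderungen stellt der IT-Grundschutz an Passwoerter?"

    print(step_back_prompt("Wie setze ich die Passwortrichtlinien aus ORP.4 um?", fake_llm))

As in the diff, the returned step-back question would then be sent through retrieval(...) and queryRemoteLLM(...) and its answer appended to the original context.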
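The last lines of the hunk fill the final message of prompt_in_chat_format in place. The sketch below shows one plausible shape for that structure, purely as an assumption: only the placeholder names question, context, and history appear in the commit; the actual template text lives elsewhere in app.py and is not part of this diff.

# Hypothetical chat-format prompt template; only the placeholder names are taken from the diff.
prompt_in_chat_format = [
    {"role": "system", "content": "You are a helpful chatbot for the BSI IT-Grundschutz."},
    {
        "role": "user",
        "content": "History:\n{history}\n\nContext:\n{context}\n\nQuestion:\n{question}",
    },
]

history = ["Earlier turns would go here.", "Latest user message."]
query = "Wie setze ich die Anforderungen aus ORP.4 um?"
context = "Retrieved IT-Grundschutz passages plus the step-back question and answer."

# Mirrors the end of the hunk: fill the last (user) message, excluding the newest history entry.
prompt_in_chat_format[-1]["content"] = prompt_in_chat_format[-1]["content"].format(
    question=query, context=context, history=history[:-1]
)
print(prompt_in_chat_format[-1]["content"])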