Spaces: Runtime error

Commit 1cdc167 · Parent(s): b0a9bdd
Update app.py
app.py CHANGED

@@ -6,7 +6,7 @@ import os
 import langchain
 import chromadb
 import glob
-
+
 
 from langchain.embeddings.openai import OpenAIEmbeddings
 from langchain.vectorstores import Chroma

@@ -26,7 +26,7 @@ from langchain.chains.question_answering import load_qa_chain
 # persist_directory="./embeddings" # Optional, defaults to .chromadb/ in the current directory
 #))
 
-
+
 
 def get_empty_state():
     return {"total_tokens": 0, "messages": []}

@@ -117,8 +117,7 @@ def submit_message(prompt, prompt_template, temperature, max_tokens, context_len
     query = str(system_prompt + history[-context_length*2:] + [prompt_msg])
     #completion = completion({"query": query})
     completion = completion.run(query)
-
-    logger.info(completion)
+
     # completion = completion({"question": query, "chat_history": history[-context_length*2:]})
 
 
@@ -126,8 +125,8 @@ def submit_message(prompt, prompt_template, temperature, max_tokens, context_len
     # https://colab.research.google.com/drive/1dzdNDZyofRB0f2KIB4gHXmIza7ehMX30?usp=sharing#scrollTo=b-ejDn_JfpWW
 
     history.append(prompt_msg)
-
-    history.append(completion["result"].choices[0].message.to_dict())
+    history.append(completion.choices[0].message.to_dict())
+    #history.append(completion["result"].choices[0].message.to_dict())
 
     state['total_tokens'] += completion['usage']['total_tokens']
 
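A note on the `completion = completion.run(query)` call in the diff above: given this file's LangChain and Chroma imports, `completion` is presumably a LangChain retrieval chain. Below is a minimal sketch of how such a chain is typically constructed; the persist directory mirrors the commented-out line above, while the chain type, model, and question are assumptions for illustration, not taken from this commit:

from langchain.chat_models import ChatOpenAI
from langchain.chains import RetrievalQA
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import Chroma

# Sketch only: all concrete values here are assumptions,
# not taken from this commit.
vectordb = Chroma(
    persist_directory="./embeddings",
    embedding_function=OpenAIEmbeddings(),
)
completion = RetrievalQA.from_chain_type(
    llm=ChatOpenAI(temperature=0),
    chain_type="stuff",
    retriever=vectordb.as_retriever(),
)

# run() returns a plain string answer, not an OpenAI
# ChatCompletion-style object with .choices or ['usage'].
answer = completion.run("example question")

Because `run()` on such a chain returns a plain string, the later `completion.choices[0].message.to_dict()` and `completion['usage']['total_tokens']` accesses would raise AttributeError/TypeError on that string, which may be what the Space's Runtime error status reflects.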
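Those two accesses do match the response shape of the pre-1.0 openai-python client, which the original line kept as a comment suggests. A short sketch of that shape, with the model name as an assumption:

import openai  # pre-1.0 openai-python client (assumed)

completion = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",  # assumed model name
    messages=[{"role": "user", "content": "Hello"}],
)

# The response object allows both attribute and dict-style access:
assistant_msg = completion.choices[0].message.to_dict()
# e.g. {"role": "assistant", "content": "..."}
total_tokens = completion["usage"]["total_tokens"]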