Update app.py
app.py CHANGED
@@ -53,10 +53,11 @@ def generate_text(message, history):
     for out in output:
         stream = copy.deepcopy(out)
         temp += stream["choices"][0]["text"]
-        yield temp
 
     history.append(("USER:", message))
     history.append(("ASSISTANT:", temp))
+
+    return temp
 
 # Define the predict function for the FastAI model
 def predict_with_llama_and_generate_text(img):
@@ -66,8 +67,9 @@ def predict_with_llama_and_generate_text(img):
 
     response = f"The system has detected {detected_object}. Do you want to know about {detected_object}?"
 
-
-
+    llama_response = generate_text(response, history)
+
+    return llama_response
 
 # Define the Gradio interface
 gr.Interface(
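Taken together, the two hunks turn generate_text from a streaming generator (yield temp inside the loop) into a plain function that returns the finished string, and make predict_with_llama_and_generate_text pass its detection message through the LLM and return the reply. Below is a rough sketch of how the touched parts of app.py could look after this commit; the Llama and fastai setup, the prompt format, the module-level history, and the gr.Interface arguments are not visible in the diff and are assumptions here, not the Space's actual code.

```python
import copy

import gradio as gr
from llama_cpp import Llama                  # assumption: the Space uses llama-cpp-python
from fastai.vision.all import load_learner   # assumption: the Space uses a fastai image classifier

# Hypothetical setup; the real model paths and options are not shown in this diff.
llm = Llama(model_path="model.gguf")
learn = load_learner("export.pkl")
history = []  # assumed module-level list, since the predict function passes `history` along


def generate_text(message, history):
    # Prompt construction and sampling options are assumptions; the hunk only starts at line 53.
    prompt = f"USER: {message}\nASSISTANT:"
    output = llm(prompt, max_tokens=256, stream=True)

    temp = ""
    for out in output:
        stream = copy.deepcopy(out)
        temp += stream["choices"][0]["text"]

    history.append(("USER:", message))
    history.append(("ASSISTANT:", temp))

    # Changed in this commit: return the accumulated text instead of yielding partial chunks.
    return temp


# Define the predict function for the FastAI model
def predict_with_llama_and_generate_text(img):
    # `img` arrives as a PIL image when gr.Image(type="pil") is the input component.
    detected_object, _, _ = learn.predict(img)

    response = f"The system has detected {detected_object}. Do you want to know about {detected_object}?"

    # Added in this commit: feed the detection message to the LLM and return its reply.
    llama_response = generate_text(response, history)
    return llama_response


# The hunk ends at `gr.Interface(`; a typical wiring would look roughly like this.
gr.Interface(
    fn=predict_with_llama_and_generate_text,
    inputs=gr.Image(type="pil"),
    outputs="text",
).launch()
```

The switch from yield to return matters for the new call added in the second hunk: if generate_text were still a generator, llama_response would be a generator object rather than text, so return llama_response would not hand a usable string back to the Gradio interface.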