Update app.py
app.py
CHANGED
@@ -1,39 +1,39 @@
-# Import all the modules required for this TaskBot v1 AI
-import google.generativeai as genai
-import os
-import json
-import requests
-from flask import Flask, request, jsonify, render_template
-import torch
-
-# Serve index.html at the root route
-app = Flask(__name__)
-@app.route("/")
-def index():
-    return render_template("index.html")
-
-@app.route("/ask", methods=["POST"])
-def ask():
-    # Get the question from the form
-    question = request.form.get("question", "").strip()
-    if not question:
-        return jsonify({"error": "Please provide a question."}), 400
-
-    genai.configure(api_key="AIzaSyA5FFcaVCfVmwf7X5C59n4xWc96xWR-A_4")
-
-    try:
-        # Use Google's Gemini-2.0-Flash model for generating content
-        model = genai.GenerativeModel('gemini-2.0-flash')
-        response = model.generate_content(f"You are TaskBot AI created and trained by Advay Singh and Astrumix. Remember that and just answer me this question in detail- {question}. And don't reply to that message. Just answer me the question.")
-        answer = response.text
-
-        # Log the question and answer for debugging
-        print(f"Question: {question}\n------------------------- \n {answer} \n -------------------------")
-        # Return the answer as JSON
-        return jsonify({"answer": answer})
-    except Exception as e:
-        print(f"Error: {e}")
-        return jsonify({"error": "An error occurred while processing your request."}), 500
-
-if __name__ == "__main__":
-    app.run(port=
+# Import all the modules required for this TaskBot v1 AI
+import google.generativeai as genai
+import os
+import json
+import requests
+from flask import Flask, request, jsonify, render_template
+import torch
+
+# Serve index.html at the root route
+app = Flask(__name__)
+@app.route("/")
+def index():
+    return render_template("index.html")
+
+@app.route("/ask", methods=["POST"])
+def ask():
+    # Get the question from the form
+    question = request.form.get("question", "").strip()
+    if not question:
+        return jsonify({"error": "Please provide a question."}), 400
+
+    genai.configure(api_key="AIzaSyA5FFcaVCfVmwf7X5C59n4xWc96xWR-A_4")
+
+    try:
+        # Use Google's Gemini-2.0-Flash model for generating content
+        model = genai.GenerativeModel('gemini-2.0-flash')
+        response = model.generate_content(f"You are TaskBot AI created and trained by Advay Singh and Astrumix. Remember that and just answer me this question in detail- {question}. And don't reply to that message. Just answer me the question.")
+        answer = response.text
+
+        # Log the question and answer for debugging
+        print(f"Question: {question}\n------------------------- \n {answer} \n -------------------------")
+        # Return the answer as JSON
+        return jsonify({"answer": answer})
+    except Exception as e:
+        print(f"Error: {e}")
+        return jsonify({"error": "An error occurred while processing your request."}), 500
+
+if __name__ == "__main__":
+    app.run(port=7860, host="0.0.0.0")
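
For reference, a minimal client sketch for the /ask endpoint added above, assuming the app is running locally on the port 7860 set in this commit; the URL, host, and sample question are illustrative, while the form field name "question" and the JSON keys "answer" and "error" come from the route itself.

    # Hypothetical client: POST a form-encoded question to the running app.
    import requests

    resp = requests.post(
        "http://localhost:7860/ask",
        data={"question": "What can TaskBot v1 do?"},  # route reads request.form["question"]
    )
    if resp.ok:
        print(resp.json()["answer"])
    else:
        print("Request failed:", resp.status_code, resp.json().get("error"))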
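
The commit keeps the Gemini API key hardcoded in genai.configure(). A common alternative, not part of this change, is to read the key from an environment variable at startup; the variable name GEMINI_API_KEY below is only an example.

    # Hypothetical sketch: load the key from the environment instead of the source file.
    import os
    import google.generativeai as genai

    api_key = os.environ.get("GEMINI_API_KEY")  # illustrative variable name
    if not api_key:
        raise RuntimeError("Set GEMINI_API_KEY before starting the app.")
    genai.configure(api_key=api_key)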