Spaces:
Running
Running
Upload 4 files
Browse files- .gitattributes +1 -0
- Dockerfile +14 -0
- app.py +39 -0
- templates/TaskBot logo.png +3 -0
- templates/index.html +0 -0
.gitattributes
CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
|
33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
36 |
+
templates/TaskBot[[:space:]]logo.png filter=lfs diff=lfs merge=lfs -text
|
Dockerfile
ADDED
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Container image for the TaskBot Flask app, served by Gunicorn on port 7860
# (the port Hugging Face Spaces expects).
FROM python:3.9

WORKDIR /app

# Copy only the dependency manifest first so the pip layer stays cached:
# previously `COPY . /app` ran before `pip install`, which re-installed every
# dependency on any source-code change.
COPY requirements.txt /app/
RUN pip install --no-cache-dir -r requirements.txt

# Now copy the rest of the application source.
COPY . /app

# Point the Hugging Face caches at a writable location; Spaces runs the
# container as a non-root user, hence the permissive chmod.
ENV HF_HOME=/app/hf_cache
ENV TRANSFORMERS_CACHE=/app/hf_cache
RUN mkdir -p /app/hf_cache && chmod -R 777 /app/hf_cache

EXPOSE 7860

# Run Gunicorn with 4 worker processes on port 7860
CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "app:app"]
|
app.py
ADDED
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#Downloading and importing all the modules required for this TaskBot v1 AI
|
2 |
+
import google.generativeai as genai
|
3 |
+
import os
|
4 |
+
import json
|
5 |
+
import requests
|
6 |
+
from flask import Flask, request, jsonify, render_template
|
7 |
+
import torch
|
8 |
+
|
9 |
+
#Connect to index.html
|
# Connect to index.html: create the Flask application and serve the chat UI
# from templates/index.html at the site root.
app = Flask(__name__)


@app.route("/")
def index():
    """Render the single-page chat interface (templates/index.html)."""
    return render_template("index.html")
14 |
+
|
@app.route("/ask", methods=["POST"])
def ask():
    """Answer a question submitted from the chat form.

    Expects a form field ``question``; returns JSON ``{"answer": ...}`` on
    success, or ``{"error": ...}`` with status 400 (empty question) / 500
    (missing key or upstream failure).
    """
    # Getting the question from the form.
    question = request.form.get("question", "").strip()
    if not question:
        return jsonify({"error": "Please provide a question."}), 400

    # SECURITY FIX: the Gemini API key was previously hardcoded here and
    # committed to the repository (a leaked credential). Read it from the
    # environment instead; on Spaces, set GOOGLE_API_KEY as a repo secret.
    api_key = os.environ.get("GOOGLE_API_KEY")
    if not api_key:
        print("Error: GOOGLE_API_KEY environment variable is not set.")
        return jsonify({"error": "An error occurred while processing your request."}), 500
    genai.configure(api_key=api_key)

    try:
        # Use Google's Gemini-2.0-Flash model for generating content.
        model = genai.GenerativeModel('gemini-2.0-flash')
        response = model.generate_content(f"You are TaskBot AI created and trained by Advay Singh and Astrumix. Remember that and just anser me this question in detail- {question}. And don't reply on that message. Just answer me the question.")
        answer = response.text

        # Log the question and answer for debugging
        print(f"Question: {question}\n------------------------- \n {answer} \n -------------------------")
        # Return the answer as JSON
        return jsonify({"answer": answer})
    except Exception as e:
        # Broad catch is deliberate here: this is the request boundary, and
        # any upstream failure must surface as a clean JSON 500, not a crash.
        print(f"Error: {e}")
        return jsonify({"error": "An error occurred while processing your request."}), 500
37 |
+
|
# Local development entry point only — in the container the app is served
# by Gunicorn instead (see the CMD in the Dockerfile above).
if __name__ == "__main__":
    app.run(host="0.0.0.0", port=5000)
|
templates/TaskBot logo.png
ADDED
![]() |
Git LFS Details
|
templates/index.html
ADDED
File without changes
|