# sivapriya14's picture
# Update app.py
# 64b6a3a verified
import json
import os
from flask import Flask, request, jsonify
from transformers import pipeline
from huggingface_hub import login
# Retrieve the Hugging Face token from the environment.
# NOTE(review): the secret is read from an env var literally named "chatbot";
# the previous error message told the user to set HF_TOKEN, which did not
# match the variable actually read — the message now names the real variable.
hf_token = os.getenv("chatbot")
if hf_token is None:
    raise ValueError(
        "Hugging Face token is missing! Set the 'chatbot' secret in your "
        "environment variables or Secrets."
    )
# Authenticate with the Hugging Face Hub so gated/private models can be loaded.
login(token=hf_token)
# Load the text-generation model once at startup so requests don't pay the
# model-load cost.
# NOTE(review): verify that "tiiuae/falcon-1b-instruct" is a valid model id on
# the Hub — confirm before deploying.
try:
    llm = pipeline("text-generation", model="tiiuae/falcon-1b-instruct", token=hf_token)
except Exception as e:
    # Chain the original exception so the underlying cause (auth failure,
    # missing model, OOM, ...) stays visible in the traceback.
    raise RuntimeError(f"Error loading model: {str(e)}") from e
# Load the schemes data from a JSON file sitting next to the app.
SCHEMES_FILE = "schemes.json"
try:
    # EAFP: attempting the open directly avoids the exists()/open() race
    # and one extra filesystem stat.
    with open(SCHEMES_FILE, "r", encoding="utf-8") as f:
        schemes_data = json.load(f)
except FileNotFoundError:
    schemes_data = {"schemes": []}  # Default to empty if file is missing
# Flask application instance; the /chat route is registered on it below.
app = Flask(__name__)
# Function to search for relevant scheme information
def find_scheme_info(query, schemes=None):
    """Return the description of the first scheme whose name or description
    contains *query* (case-insensitive substring match), or None.

    Args:
        query: User search text.
        schemes: Optional mapping with a "schemes" list of dicts; defaults to
            the module-level ``schemes_data`` loaded at startup.

    Returns:
        The matching scheme's description string, or None if no scheme matches.
    """
    data = schemes_data if schemes is None else schemes
    needle = query.lower()  # lowercase once, not once per scheme
    for scheme in data.get("schemes", []):
        # .get(..., "") guards against entries missing "name"/"description",
        # which previously raised KeyError.
        if needle in scheme.get("name", "").lower() or needle in scheme.get("description", "").lower():
            return scheme.get("description")
    return None
# Function to generate chatbot response
def chatbot_response(query):
    """Answer *query*: prefer a matching scheme description from the local
    data; otherwise fall back to the text-generation model."""
    matched = find_scheme_info(query)
    if matched:
        # A local scheme answered the question — no model call needed.
        return matched
    try:
        generated = llm(query, max_length=200, do_sample=True)
        return generated[0]['generated_text']
    except Exception as e:
        # Surface model failures to the caller as a plain message rather
        # than crashing the request.
        return f"Error generating response: {str(e)}"
# API route for chatbot
@app.route("/chat", methods=["POST"])
def chat():
    """POST /chat with JSON body {"query": "..."}; returns {"response": "..."}.

    Returns HTTP 400 with an error payload when the body is missing, is not
    valid JSON, or contains no usable query.
    """
    # silent=True makes get_json return None (instead of aborting with a
    # framework-generated 400) on a non-JSON body, so we can always emit
    # this route's own consistent error payload.
    payload = request.get_json(silent=True) or {}
    user_input = str(payload.get("query", "")).strip()
    if not user_input:
        return jsonify({"error": "No query provided"}), 400
    return jsonify({"response": chatbot_response(user_input)})
if __name__ == "__main__":
    # NOTE(review): debug=True enables Flask's interactive debugger and code
    # reloader — do not leave this on in a deployed environment.
    # Port 7860 is presumably the Hugging Face Spaces default — confirm.
    app.run(host="0.0.0.0", port=7860, debug=True)