llm / app.py
KunalThakare279's picture
Create app.py
0c05a2a verified
raw
history blame
1.08 kB
from flask import Flask, render_template, request
from llm import get_prompt, get_responce
app = Flask(__name__)
# Assume your question generation logic is already defined
def generate_questions(question_type):
    """Return a canned list of question strings for the given question type.

    Known types are 'mcq', 'fill_blank', and 'short_answer'; any other
    value yields a single placeholder message.
    """
    canned = {
        'mcq': ["What is the capital of France?", "Choose the correct answer from four options."],
        'fill_blank': ["The sun rises in the ____.", "Fill in the correct word."],
        'short_answer': ["Explain the process of photosynthesis."],
    }
    return canned.get(question_type, ["No questions available for the selected type."])
@app.route('/')
def home():
    """Serve the landing page (the question-type selection form)."""
    return render_template('index.html')
@app.route('/get_questions', methods=['POST'])
def get_questions():
    """Generate questions for the posted question type and render them.

    Reads `question_type` from the POST form (Flask responds 400 Bad
    Request automatically if the field is missing), builds a prompt via
    the `llm` helpers, and renders `questions.html` with the results.
    """
    question_type = request.form['question_type']
    prompt, seed = get_prompt(question_type)
    # NOTE(review): helper is spelled `get_responce` in llm.py; keep the
    # imported name as-is.
    questions = get_responce(prompt)
    # Guard against an empty LLM response: indexing questions[0] on an
    # empty list would raise IndexError and return a 500. Fall back to
    # the type the user submitted.
    rendered_type = questions[0]["question_type"] if questions else question_type
    return render_template(
        'questions.html',
        questions=questions,
        question_type=rendered_type,
        seed=seed,
    )
# Run the Flask development server when executed as a script.
# NOTE(review): debug=True enables the interactive debugger and code
# reloader — convenient locally, but it must not be enabled in a
# deployed/production environment.
if __name__ == '__main__':
    app.run(debug=True)