from transformers import AutoTokenizer, AutoModelForQuestionAnswering
from flask import Flask, request, jsonify
import torch

# Load the tokenizer and model
tokenizer = AutoTokenizer.from_pretrained("nlpaueb/legal-bert-base-uncased")
model = AutoModelForQuestionAnswering.from_pretrained("nlpaueb/legal-bert-base-uncased")

app = Flask(__name__)

# The original code did not register a route; the "/answer" path is an assumed name
@app.route("/answer", methods=["POST"])
def answer():
    data = request.json
    context = data.get("context")
    question = data.get("question")

    # Tokenize the question/context pair and run the model
    inputs = tokenizer.encode_plus(question, context, return_tensors="pt")
    with torch.no_grad():
        outputs = model(**inputs)
    answer_start_scores = outputs.start_logits
    answer_end_scores = outputs.end_logits

    # Select the best answer span
    answer_start = torch.argmax(answer_start_scores)
    answer_end = torch.argmax(answer_end_scores) + 1
    answer = tokenizer.convert_tokens_to_string(
        tokenizer.convert_ids_to_tokens(inputs["input_ids"][0][answer_start:answer_end])
    )
    return jsonify({"answer": answer})

if __name__ == "__main__":
    app.run()
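
# A minimal client sketch for the endpoint above (assumes the server is running
# locally on Flask's default port 5000 and uses the "/answer" route added above):
#
#   import requests
#   payload = {
#       "context": "The lease term is five years, beginning on 1 January 2024.",
#       "question": "How long is the lease term?",
#   }
#   resp = requests.post("http://127.0.0.1:5000/answer", json=payload)
#   print(resp.json()["answer"])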