Update app.py
app.py (changed)
@@ -15,11 +15,8 @@ torch_device = 'cuda' if torch.cuda.is_available() else 'cpu'
 tokenizer3 = PegasusTokenizer.from_pretrained(model_name)
 model3 = PegasusForConditionalGeneration.from_pretrained(model_name).to(torch_device)
 
-prev_context = "" # to store the previous context
 
 def qa_paraphrase(text_input, question):
-    global prev_context
-    text_input = prev_context + " " + text_input # combine with previous context
     prediction = classifier(
         context=text_input,
         question=question,
@@ -38,8 +35,7 @@ def qa_paraphrase(text_input, question):
     batch = tokenizer3([sentence],truncation=True,padding='longest',max_length=60, return_tensors="pt").to(torch_device)
     translated = model3.generate(**batch,max_length=60,num_beams=10, num_return_sequences=1, temperature=1.5)
     paraphrase = tokenizer3.batch_decode(translated, skip_special_tokens=True)[0]
-
-    return f"Q: {question}\nA: {answer}\nParaphrased Sentence: {paraphrase}"
+    return f"Answer: {answer}\nLong Form Answer: {paraphrase}"
 
 
 iface = gr.Interface(
@@ -49,8 +45,8 @@ iface = gr.Interface(
         gr.inputs.Textbox(label="Question")
     ],
     outputs=gr.outputs.Textbox(label="Output"),
-    title="Question Answering
-    description="
+    title="Long Form Question Answering",
+    description="mimics long form question answering by extracting the sentence containing the answer and paraphrasing it"
 )
 
-iface.launch()
+iface.launch()
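For reference, here is a rough sketch of what app.py could look like after this commit. Only the fragments above appear in the diff, so everything else is an assumption: the extractive QA pipeline and its checkpoint (deepset/roberta-base-squad2), the Pegasus paraphrase checkpoint behind model_name (tuner007/pegasus_paraphrase), the sentence-extraction step in the middle of qa_paraphrase, and the label of the first Interface input are all guesses, not the author's exact code.

# Sketch only: parts not visible in the diff are marked as assumptions.
import torch
import gradio as gr
from transformers import pipeline, PegasusTokenizer, PegasusForConditionalGeneration

torch_device = 'cuda' if torch.cuda.is_available() else 'cpu'

# Assumption: an extractive question-answering pipeline; the checkpoint is a guess.
classifier = pipeline("question-answering", model="deepset/roberta-base-squad2")

# Assumption: the diff only shows `model_name`; a common Pegasus paraphrase checkpoint is used here.
model_name = "tuner007/pegasus_paraphrase"
tokenizer3 = PegasusTokenizer.from_pretrained(model_name)
model3 = PegasusForConditionalGeneration.from_pretrained(model_name).to(torch_device)


def qa_paraphrase(text_input, question):
    # Extractive QA: find the short answer span inside the supplied context.
    prediction = classifier(context=text_input, question=question)
    answer = prediction["answer"]

    # Assumption (not visible in the diff): pick the sentence that contains the answer.
    sentences = [s.strip() for s in text_input.split(".") if s.strip()]
    sentence = next((s for s in sentences if answer in s), answer)

    # Paraphrase that sentence with Pegasus to produce the "long form" answer.
    batch = tokenizer3([sentence], truncation=True, padding='longest',
                       max_length=60, return_tensors="pt").to(torch_device)
    translated = model3.generate(**batch, max_length=60, num_beams=10,
                                 num_return_sequences=1, temperature=1.5)
    paraphrase = tokenizer3.batch_decode(translated, skip_special_tokens=True)[0]
    return f"Answer: {answer}\nLong Form Answer: {paraphrase}"


# Legacy Gradio input/output API, as used in the diff.
iface = gr.Interface(
    fn=qa_paraphrase,
    inputs=[
        gr.inputs.Textbox(label="Context"),  # label assumed; only the "Question" box is visible in the diff
        gr.inputs.Textbox(label="Question")
    ],
    outputs=gr.outputs.Textbox(label="Output"),
    title="Long Form Question Answering",
    description="mimics long form question answering by extracting the sentence containing the answer and paraphrasing it"
)

iface.launch()

Note that dropping prev_context means each call now answers against only the context supplied in that request, so the app no longer accumulates state between submissions.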