# SymptomChecker / app.py
import torch
import gradio as gr
from transformers import AutoTokenizer, AutoModelForQuestionAnswering

# Load the PubMedBERT tokenizer and extractive question-answering model
model_name = "microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForQuestionAnswering.from_pretrained(model_name)

# Move the model to the GPU when available so it matches the device of the inputs
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model.to(device).eval()
def biomedical_chatbot(user_message):
    # Tokenize the user's message
    inputs = tokenizer(user_message, add_special_tokens=True, return_tensors="pt").to(device)

    # Score candidate answer spans with the pre-trained model
    with torch.no_grad():
        outputs = model(**inputs)

    # Pick the highest-scoring start and end positions and decode that span.
    # Note: an extractive QA model selects a span from the text it is given, so the
    # "answer" here is drawn from the user's own message.
    answer_start = torch.argmax(outputs.start_logits)
    answer_end = torch.argmax(outputs.end_logits) + 1
    answer = tokenizer.convert_tokens_to_string(
        tokenizer.convert_ids_to_tokens(inputs["input_ids"][0][answer_start:answer_end])
    )

    # Return the response
    return answer
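
# Optional local sanity check with a hypothetical input, kept commented out so that
# running the Space only starts the Gradio app defined below:
# print(biomedical_chatbot("I have had a persistent cough and a mild fever for three days."))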
# Expose the function through a simple Gradio text-in / text-out interface
gradio_interface = gr.Interface(
    fn=biomedical_chatbot,
    inputs=gr.Textbox(placeholder="Enter your message here..."),
    outputs=gr.Textbox(),
)
gradio_interface.launch()