import gradio as gr
from transformers import AutoTokenizer, pipeline
from optimum.onnxruntime import ORTModelForSequenceClassification
# Load the quantized ONNX export of the go_emotions model and its tokenizer
model_id = "SamLowe/roberta-base-go_emotions-onnx"
file_name = "onnx/model_quantized.onnx"
model = ORTModelForSequenceClassification.from_pretrained(model_id, file_name=file_name)
tokenizer = AutoTokenizer.from_pretrained(model_id)
# Multi-label emotion classifier: sigmoid activation, scores for every label
classifier = pipeline(
    task="text-classification",
    model=model,
    tokenizer=tokenizer,
    top_k=None,                   # return all labels, not just the top one
    function_to_apply="sigmoid",  # independent per-label probabilities
    binary_output=True,
)
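# Note: with top_k=None the pipeline returns one list per input, each holding a
# {"label", "score"} dict for every emotion, sorted by score in descending order.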
def predict(text):
    results = classifier(text)
    response = {
        "label": results[0][0]["label"],  # highest-scoring emotion
        "confidences": results[0],        # full list of {label, score} dicts
    }
    return response
demo = gr.Interface(
    fn=predict,
    inputs="text",
    outputs="json",
)

demo.launch()
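# Optional usage sketch (an assumption, not part of the app above): once the
# interface is running, it can be queried programmatically with gradio_client,
# assuming the default local URL and the auto-generated "/predict" endpoint:
#
#   from gradio_client import Client
#   client = Client("http://127.0.0.1:7860/")
#   print(client.predict("I am so happy about this!", api_name="/predict"))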