import gradio as gr
import torch

from moderation import *

# Load the trained moderation model and switch it to inference mode.
moderation = ModerationModel()
moderation.load_state_dict(torch.load('moderation_model.pth', map_location=torch.device('cpu')))  # Remove map_location if running on a GPU
moderation.eval()

def predict_moderation(text):
    # Embed the input text and run it through the moderation model.
    embeddings_for_prediction = getEmb(text)
    prediction = predict(moderation, embeddings_for_prediction)
    category_scores = prediction.get('category_scores', {})
    detected = prediction.get('detected', False)
    return category_scores, str(detected)

# Simple Gradio UI: text in, per-category scores and a detected flag out.
iface = gr.Interface(fn=predict_moderation,
                     inputs="text",
                     outputs=[gr.Label(label="Category Scores"), gr.Label(label="Detected")],
                     title="Moderation Model",
                     description="Enter text to check for moderation flags.")

iface.launch()
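
The app depends on a separate moderation module that is not shown on this page. Below is a minimal sketch of the interface it appears to expect: the names ModerationModel, getEmb, and predict come from the code above, while the embedding dimension, category list, network layout, and threshold are placeholder assumptions, not the Space's actual implementation.

# moderation.py -- hypothetical sketch of the interface app.py assumes.
import torch
import torch.nn as nn

CATEGORIES = ["hate", "violence", "sexual", "self-harm"]  # assumed label set
EMB_DIM = 1536  # assumed embedding dimension

class ModerationModel(nn.Module):
    def __init__(self, emb_dim: int = EMB_DIM, num_categories: int = len(CATEGORIES)):
        super().__init__()
        # Small feed-forward head over precomputed text embeddings (assumed layout).
        self.net = nn.Sequential(
            nn.Linear(emb_dim, 256),
            nn.ReLU(),
            nn.Linear(256, num_categories),
        )

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.net(x)

def getEmb(text: str) -> torch.Tensor:
    # Placeholder embedding step; a real implementation would call an
    # embedding model or API here instead of returning random vectors.
    torch.manual_seed(abs(hash(text)) % (2**31))
    return torch.randn(1, EMB_DIM)

def predict(model: ModerationModel, embeddings: torch.Tensor, threshold: float = 0.5) -> dict:
    # Run the head, squash logits to per-category scores, and flag the text
    # if any category crosses the threshold.
    with torch.no_grad():
        scores = torch.sigmoid(model(embeddings)).squeeze(0)
    category_scores = {c: float(s) for c, s in zip(CATEGORIES, scores)}
    return {
        "category_scores": category_scores,
        "detected": bool(max(category_scores.values()) > threshold),
    }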