# Sentiment-analysis Gradio app: loads a trained Keras model with a custom
# attention layer and serves tweet-sentiment predictions.
import tensorflow as tf
from tensorflow.keras.layers import Layer, Dense
import gradio as gr
import joblib
from tensorflow.keras.preprocessing.sequence import pad_sequences
# Define the custom attention layer again (must match the training-time definition)
class BetterAttention(Layer):
    """Additive (Bahdanau-style) attention pooling over a sequence.

    Scores each timestep with ``V(tanh(W(h_t)))``, softmaxes the scores over
    the time axis, and returns the attention-weighted sum of the inputs.

    Args:
        units: Width of the intermediate scoring projection ``W``.
        return_attention: If True, ``call`` also returns the attention weights.
    """

    def __init__(self, units=64, return_attention=False, **kwargs):
        super(BetterAttention, self).__init__(**kwargs)
        # Store constructor args so get_config() can serialize them.
        self.units = units
        self.return_attention = return_attention
        self.W = Dense(units)
        self.V = Dense(1)

    def call(self, inputs):
        # inputs: assumed (batch, timesteps, features) — TODO confirm with model.
        # score: (batch, timesteps, 1); softmax over axis=1 (the time axis).
        score = self.V(tf.nn.tanh(self.W(inputs)))
        attention_weights = tf.nn.softmax(score, axis=1)
        # Weighted sum over timesteps -> (batch, features).
        context_vector = attention_weights * inputs
        context_vector = tf.reduce_sum(context_vector, axis=1)
        return (context_vector, attention_weights) if self.return_attention else context_vector

    def get_config(self):
        # Without this, re-saving the loaded model would silently drop
        # units/return_attention (Keras custom-layer serialization contract).
        config = super().get_config()
        config.update({"units": self.units, "return_attention": self.return_attention})
        return config
# Load the trained model and fitted tokenizer from disk.
# custom_objects maps the layer name to its class so Keras can deserialize
# the BetterAttention layer baked into the saved model file.
model = tf.keras.models.load_model("sentiment_model.keras", custom_objects={"BetterAttention": BetterAttention})
# Tokenizer was fitted at training time; vocabulary must match the model's embedding.
tokenizer = joblib.load("tokenizer.joblib")
# Prediction
max_len = 40  # must match the padded sequence length used at training time

def predict_sentiment(text):
    """Classify the sentiment of a tweet.

    Args:
        text: Raw tweet text.

    Returns:
        Dict mapping both class labels ("Positive", "Negative") to their
        confidence scores. Returning both classes lets the Gradio
        ``Label(num_top_classes=2)`` output actually display two bars —
        the original single-entry dict could only ever show one.
    """
    seq = tokenizer.texts_to_sequences([text])
    padded = pad_sequences(seq, maxlen=max_len, padding='post')
    # Model emits a single sigmoid probability for the "Positive" class.
    pred = float(model.predict(padded)[0][0])
    return {"Positive": pred, "Negative": 1.0 - pred}
# Gradio interface wiring
tweet_box = gr.Textbox(lines=2, placeholder="Enter a tweet...")
label_out = gr.Label(num_top_classes=2)

demo = gr.Interface(
    fn=predict_sentiment,
    inputs=tweet_box,
    outputs=label_out,
    title="Sentiment Analysis on Tweets",
    description="Enter a tweet and get predicted sentiment with confidence score.",
)

# Start the local web server for the demo.
demo.launch()