# BERT-LSTM-based-ABSA / configuration.py
from transformers import PretrainedConfig


class BertABSAConfig(PretrainedConfig):
    """Configuration for a BERT + LSTM aspect-based sentiment analysis (ABSA) classifier."""

    model_type = "BertABSAForSequenceClassification"

    def __init__(self,
                 num_classes=3,        # number of sentiment classes (see id2label below)
                 embed_dim=768,        # embedding dimension (matches BERT-base hidden size)
                 num_layers=12,        # number of encoder layers
                 dropout_rate=0.1,     # dropout probability
                 fc_hidden=256,        # hidden units of the fully connected head
                 hidden_dim_lstm=128,  # hidden units of the LSTM
                 **kwargs):
        super().__init__(**kwargs)
        self.num_classes = num_classes
        self.embed_dim = embed_dim
        self.num_layers = num_layers
        self.dropout_rate = dropout_rate
        self.fc_hidden = fc_hidden
        self.hidden_dim_lstm = hidden_dim_lstm
        # Fixed three-way sentiment label mapping.
        self.id2label = {
            0: "negative",
            1: "positive",
            2: "neutral",
        }
        self.label2id = {
            "negative": 0,
            "positive": 1,
            "neutral": 2,
        }