{
"_name_or_path": "distilbert-base-uncased",
"activation": "gelu",
"architectures": [
"DistilBertForSequenceClassification"
],
"attention_dropout": 0.1,
"dim": 768,
"dropout": 0.1,
"hidden_dim": 3072,
"id2label": {
"0": "animation",
"1": "contentment",
"2": "ecstasy",
"3": "enchantment",
"4": "enthusiasm",
"5": "euphoria",
"6": "excitement",
"7": "generosity",
"8": "gratitude",
"9": "happiness",
"10": "internal-harmony",
"11": "joy",
"12": "satisfaction",
"13": "serenity",
"14": "solidarity",
"15": "tranquility"
},
"initializer_range": 0.02,
"label2id": {
"animation": 0,
"contentment": 1,
"internal-harmony": 10,
"joy": 11,
"satisfaction": 12,
"serenity": 13,
"solidarity": 14,
"tranquility": 15,
"ecstasy": 2,
"enchantment": 3,
"enthusiasm": 4,
"euphoria": 5,
"excitement": 6,
"generosity": 7,
"gratitude": 8,
"happiness": 9
},
"max_position_embeddings": 512,
"model_type": "distilbert",
"n_heads": 12,
"n_layers": 6,
"pad_token_id": 0,
"problem_type": "multi_label_classification",
"qa_dropout": 0.1,
"seq_classif_dropout": 0.2,
"sinusoidal_pos_embds": false,
"tie_weights_": true,
"torch_dtype": "float32",
"transformers_version": "4.31.0",
"vocab_size": 30522
}
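
For reference, a minimal sketch of how this config is consumed at inference time. The repo id below is a placeholder (the actual repository path is not shown on this page), so substitute the real fine-tuned checkpoint. Because `problem_type` is `"multi_label_classification"`, the 16 emotion scores come from independent sigmoids rather than a softmax over the labels:

```python
# Minimal usage sketch, not an official snippet for this model.
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Placeholder: replace with the actual fine-tuned repo id. Loading the base
# "distilbert-base-uncased" checkpoint alone would not include the 16-label head.
MODEL_ID = "gsl22/<model-repo>"

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForSequenceClassification.from_pretrained(MODEL_ID)
model.eval()

# max_position_embeddings is 512, so truncate inputs to that length.
inputs = tokenizer(
    "Waking up to sunshine made my whole week.",
    return_tensors="pt",
    truncation=True,
    max_length=512,
)

with torch.no_grad():
    logits = model(**inputs).logits  # shape: (batch, num_labels) = (1, 16) here

# Multi-label classification: score each label independently with a sigmoid.
probs = torch.sigmoid(logits)[0]
for idx, p in enumerate(probs.tolist()):
    if p > 0.5:  # 0.5 is a common default threshold (assumption; tune per task)
        print(model.config.id2label[idx], round(p, 3))
```

Note that with a sigmoid per label, several emotions (e.g. "joy" and "happiness") can exceed the threshold for the same input, which is the point of the multi-label setup.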