Anwaarma committed
Commit d3c9e96
Parent: a7cc661

Upload config

Files changed (1): config.json (+9 / -2)
config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "_name_or_path": "aubmindlab/bert-base-arabertv02-twitter",
   "architectures": [
-    "BertForSequenceClassification"
+    "BertForMaskedLM"
   ],
   "attention_probs_dropout_prob": 0.1,
   "classifier_dropout": null,
@@ -9,8 +9,16 @@
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
+  "id2label": {
+    "0": "Negative",
+    "1": "Positive"
+  },
   "initializer_range": 0.02,
   "intermediate_size": 3072,
+  "label2id": {
+    "Negative": 0,
+    "Positive": 1
+  },
   "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
   "model_type": "bert",
@@ -18,7 +26,6 @@
   "num_hidden_layers": 12,
   "pad_token_id": 0,
   "position_embedding_type": "absolute",
-  "problem_type": "single_label_classification",
   "torch_dtype": "float32",
   "transformers_version": "4.34.1",
   "type_vocab_size": 2,