Commit c572043 by lamaabdulaziz
Parent(s): 2f2877d

End of training

special_tokens_map.json CHANGED
@@ -1 +1,7 @@
- {"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
+ {
+   "cls_token": "[CLS]",
+   "mask_token": "[MASK]",
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "unk_token": "[UNK]"
+ }
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1 +1,20 @@
- {"do_lower_case": false, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "max_len": 512, "do_basic_tokenize": true, "never_split": ["[بريد]", "[مستخدم]", "[رابط]"], "special_tokens_map_file": null, "name_or_path": "aubmindlab/aragpt2-mega-detector-long", "tokenizer_class": "ElectraTokenizer"}
+ {
+   "cls_token": "[CLS]",
+   "do_basic_tokenize": true,
+   "do_lower_case": false,
+   "mask_token": "[MASK]",
+   "max_len": 512,
+   "name_or_path": "aubmindlab/aragpt2-mega-detector-long",
+   "never_split": [
+     "[بريد]",
+     "[مستخدم]",
+     "[رابط]"
+   ],
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "special_tokens_map_file": null,
+   "strip_accents": null,
+   "tokenize_chinese_chars": true,
+   "tokenizer_class": "ElectraTokenizer",
+   "unk_token": "[UNK]"
+ }
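
The updated tokenizer_config.json keeps the ElectraTokenizer settings, including the never_split placeholder tokens. A minimal sketch of loading and checking the committed tokenizer, assuming the files are available in a local directory (the "./checkpoint" path below is hypothetical, not part of this commit):

from transformers import ElectraTokenizer

# Minimal sketch, assuming the committed files sit in a local directory
# named "./checkpoint" (hypothetical path, not part of this commit).
tokenizer = ElectraTokenizer.from_pretrained("./checkpoint")

# Tokens listed under never_split are kept whole by the basic tokenizer
# instead of being broken into WordPiece subwords.
print(tokenizer.tokenize("تواصل معنا عبر [بريد] أو [رابط]"))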