la-min committed on
Commit
cb3ef48
1 Parent(s): 69e143f

Upload tokenizer

Files changed (2)
  1. special_tokens_map.json +14 -0
  2. tokenizer_config.json +2 -0
special_tokens_map.json CHANGED
@@ -1,4 +1,11 @@
 {
+  "bos_token": {
+    "content": "[CLS]",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
   "cls_token": {
     "content": "[CLS]",
     "lstrip": false,
@@ -6,6 +13,13 @@
     "rstrip": false,
     "single_word": false
   },
+  "eos_token": {
+    "content": "[SEP]",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
   "mask_token": {
     "content": "[MASK]",
     "lstrip": false,
tokenizer_config.json CHANGED
@@ -41,8 +41,10 @@
     "special": true
   }
 },
+  "bos_token": "[CLS]",
   "clean_up_tokenization_spaces": true,
   "cls_token": "[CLS]",
+  "eos_token": "[SEP]",
   "mask_token": "[MASK]",
   "max_length": 500,
   "model_max_length": 1000000000000000019884624838656,