kiyono committed on
Commit
52c0a6f
1 Parent(s): 8e0dd0e

fix tokenization config

Browse files
Files changed (1) hide show
  1. tokenizer_config.json +28 -6
tokenizer_config.json CHANGED
@@ -1,8 +1,30 @@
1
  {
2
- "do_lower_case": false,
3
- "word_tokenizer_type": "mecab",
4
- "mecab_kwargs": {
5
- "mecab_dic": "unidic_lite"
6
- },
7
- "do_zenkaku": true
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8
  }
 
 
1
  {
2
+ "bos_token": "<s>",
3
+ "eos_token": "</s>",
4
+ "unk_token": "<unk>",
5
+ "sep_token": "</s>",
6
+ "cls_token": "<s>",
7
+ "pad_token": "<pad>",
8
+ "mask_token": {
9
+ "content": "<mask>",
10
+ "single_word": false,
11
+ "lstrip": true,
12
+ "rstrip": false,
13
+ "normalized": true,
14
+ "__type": "AddedToken"
15
+ },
16
+ "sp_model_kwargs": {},
17
+ "do_lower_case": false,
18
+ "do_word_tokenize": true,
19
+ "do_subword_tokenize": true,
20
+ "word_tokenizer_type": "mecab",
21
+ "subword_tokenizer_type": "bpe",
22
+ "never_split": null,
23
+ "mecab_kwargs": {
24
+ "mecab_dic": "unidic_lite"
25
+ },
26
+ "special_tokens_map_file": null,
27
+ "tokenizer_file": null,
28
+ "tokenizer_class": "RobertaJapaneseTokenizer"
29
  }
30
+