pe4enov committed
Commit 33c6b1b · 1 Parent(s): 2e70cb8

Upload tokenizer

Files changed (2)
  1. special_tokens_map.json +5 -21
  2. tokenizer_config.json +6 -5
special_tokens_map.json CHANGED

@@ -1,23 +1,7 @@
 {
-  "bos_token": {
-    "content": "",
-    "lstrip": false,
-    "normalized": true,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "",
-    "lstrip": false,
-    "normalized": true,
-    "rstrip": false,
-    "single_word": false
-  },
-  "unk_token": {
-    "content": "",
-    "lstrip": false,
-    "normalized": true,
-    "rstrip": false,
-    "single_word": false
-  }
+  "bos_token": "<s>",
+  "eos_token": "</s>",
+  "pad_token": "</s>",
+  "sep_token": "<s>",
+  "unk_token": "<unk>"
 }
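This change collapses the three AddedToken objects, whose "content" fields were empty, into plain-string entries and adds pad_token and sep_token. A minimal sketch of what a tokenizer loaded against the new map should report; the repo id below is a placeholder, not taken from this commit:

from transformers import AutoTokenizer

# "pe4enov/model" is a hypothetical repo id standing in for this repository.
tok = AutoTokenizer.from_pretrained("pe4enov/model")
print(tok.bos_token, tok.eos_token, tok.unk_token)  # expected: <s> </s> <unk>
print(tok.pad_token, tok.sep_token)                 # expected: </s> <s>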
tokenizer_config.json CHANGED

@@ -1,9 +1,9 @@
 {
-  "add_bos_token": true,
+  "add_bos_token": false,
   "add_eos_token": false,
   "bos_token": {
     "__type": "AddedToken",
-    "content": "",
+    "content": "<s>",
     "lstrip": false,
     "normalized": true,
     "rstrip": false,
@@ -15,20 +15,21 @@
   },
   "eos_token": {
     "__type": "AddedToken",
-    "content": "",
+    "content": "</s>",
     "lstrip": false,
     "normalized": true,
     "rstrip": false,
     "single_word": false
   },
   "legacy": true,
-  "model_max_length": 1000000000000000019884624838656,
+  "model_max_length": 2048,
   "pad_token": null,
+  "padding_side": "left",
   "sp_model_kwargs": {},
   "tokenizer_class": "LlamaTokenizer",
   "unk_token": {
     "__type": "AddedToken",
-    "content": "",
+    "content": "<unk>",
     "lstrip": false,
     "normalized": true,
     "rstrip": false,