ajyl committed
Commit 6d34ae6 · verified · 1 Parent(s): eb57559

Upload tokenizer

Files changed (3)
  1. special_tokens_map.json +23 -0
  2. tokenizer.json +44 -0
  3. tokenizer_config.json +19 -0
special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
+ {
+   "bos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
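A minimal sketch (not part of the commit) of what this map implies: bos_token, eos_token, and pad_token all point at the same <|endoftext|> token, so a tokenizer built from these files exposes one shared special token. The relative path assumes a local checkout of this repo.

from transformers import PreTrainedTokenizerFast

# Wrap the serialized tokenizer and attach the special tokens declared
# in special_tokens_map.json (all three are the same token here).
tok = PreTrainedTokenizerFast(
    tokenizer_file="tokenizer.json",
    bos_token="<|endoftext|>",
    eos_token="<|endoftext|>",
    pad_token="<|endoftext|>",
)
assert tok.bos_token == tok.eos_token == tok.pad_token == "<|endoftext|>"
assert tok.convert_tokens_to_ids("<|endoftext|>") == 15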
tokenizer.json ADDED
@@ -0,0 +1,44 @@
+ {
+   "version": "1.0",
+   "truncation": null,
+   "padding": null,
+   "added_tokens": [
+     {
+       "id": 15,
+       "content": "<|endoftext|>",
+       "single_word": false,
+       "lstrip": false,
+       "rstrip": false,
+       "normalized": false,
+       "special": true
+     }
+   ],
+   "normalizer": null,
+   "pre_tokenizer": {
+     "type": "Whitespace"
+   },
+   "post_processor": null,
+   "decoder": null,
+   "model": {
+     "type": "WordLevel",
+     "vocab": {
+       "r": 0,
+       "u": 1,
+       "f": 2,
+       "l": 3,
+       "d": 4,
+       "b": 5,
+       "R": 6,
+       "RR": 7,
+       "RRR": 8,
+       "U": 9,
+       "UU": 10,
+       "UUU": 11,
+       "F": 12,
+       "FF": 13,
+       "FFF": 14,
+       "<|endoftext|>": 15
+     },
+     "unk_token": "<|endoftext|>"
+   }
+ }
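This file fully defines the pipeline: a Whitespace pre-tokenizer feeding a 16-entry WordLevel vocabulary, with <|endoftext|> doubling as the unknown token. A hedged round-trip sketch with the tokenizers library; the input string is an arbitrary example, not taken from the repo:

from tokenizers import Tokenizer

# Assumes a local copy of the file uploaded in this commit.
tok = Tokenizer.from_file("tokenizer.json")

enc = tok.encode("R U RR x")  # "x" is not in the vocab
print(enc.tokens)  # ['R', 'U', 'RR', '<|endoftext|>'] -- unknowns fall back to unk_token
print(enc.ids)     # [6, 9, 7, 15]

Note that post_processor is null, so encode() never inserts bos/eos markers on its own.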
tokenizer_config.json ADDED
@@ -0,0 +1,19 @@
+ {
+   "added_tokens_decoder": {
+     "15": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<|endoftext|>",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|endoftext|>",
+   "extra_special_tokens": {},
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "<|endoftext|>",
+   "tokenizer_class": "PreTrainedTokenizer"
+ }
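Two details worth noting: model_max_length is the transformers "effectively unbounded" sentinel (int(1e30)), and since tokenizer.json defines no post-processor, the bos/eos entries are never inserted automatically, so pad_token is the one that sees real use. A short padding sketch under the same local-checkout assumption (the move string and length 8 are arbitrary):

from transformers import PreTrainedTokenizerFast

tok = PreTrainedTokenizerFast(
    tokenizer_file="tokenizer.json",
    pad_token="<|endoftext|>",
)
batch = tok("U FF r", padding="max_length", max_length=8)
print(batch["input_ids"])  # [9, 13, 0, 15, 15, 15, 15, 15] -- pad id 15 fills the tail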