Sercan committed on
Commit 2b9fc97 · 1 Parent(s): f70f0db

Upload tokenizer

added_tokens.json ADDED
@@ -0,0 +1,4 @@
+{
+  "</s>": 46,
+  "<s>": 45
+}
special_tokens_map.json ADDED
@@ -0,0 +1,22 @@
+{
+  "additional_special_tokens": [
+    {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false
+    }
+  ],
+  "bos_token": "<s>",
+  "eos_token": "</s>",
+  "pad_token": "[PAD]",
+  "unk_token": "[UNK]"
+}
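
This map only declares which strings act as the sentence-boundary, padding, and unknown tokens; their IDs come from vocab.json and added_tokens.json rather than from this file. A minimal sanity check, assuming a `tokenizer` loaded as in the sketch after tokenizer_config.json below:

```python
# Hedged check: `tokenizer` is the hypothetical instance loaded in the
# sketch after tokenizer_config.json below.
assert tokenizer.bos_token_id == 45  # "<s>",  from added_tokens.json
assert tokenizer.eos_token_id == 46  # "</s>", from added_tokens.json
assert tokenizer.pad_token_id == 44  # "[PAD]", from vocab.json
assert tokenizer.unk_token_id == 43  # "[UNK]", from vocab.json
```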
tokenizer_config.json ADDED
@@ -0,0 +1,13 @@
+{
+  "bos_token": "<s>",
+  "do_lower_case": false,
+  "eos_token": "</s>",
+  "model_max_length": 1000000000000000019884624838656,
+  "name_or_path": "./",
+  "pad_token": "[PAD]",
+  "replace_word_delimiter_char": " ",
+  "special_tokens_map_file": null,
+  "tokenizer_class": "Wav2Vec2CTCTokenizer",
+  "unk_token": "[UNK]",
+  "word_delimiter_token": "|"
+}
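
With these files in place, the tokenizer can be reloaded through the class named in tokenizer_class. A minimal sketch, assuming the four files in this commit are saved to a local directory (the path "./tokenizer" is illustrative); the model_max_length value is the library's int(1e30) "effectively unlimited" sentinel:

```python
from transformers import Wav2Vec2CTCTokenizer

# Load from a local directory containing vocab.json, tokenizer_config.json,
# special_tokens_map.json, and added_tokens.json ("./tokenizer" is illustrative).
tokenizer = Wav2Vec2CTCTokenizer.from_pretrained("./tokenizer")

print(tokenizer.word_delimiter_token)  # "|" -- replaces spaces in CTC labels
print(tokenizer.pad_token_id)          # 44 -- Wav2Vec2 conventionally uses [PAD] as the CTC blank
```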
vocab.json ADDED
@@ -0,0 +1,47 @@
+{
+  "0": 14,
+  "1": 23,
+  "2": 25,
+  "3": 34,
+  "4": 33,
+  "5": 40,
+  "6": 11,
+  "7": 13,
+  "8": 12,
+  "9": 24,
+  "[PAD]": 44,
+  "[UNK]": 43,
+  "a": 2,
+  "b": 41,
+  "c": 19,
+  "d": 29,
+  "e": 30,
+  "f": 6,
+  "g": 17,
+  "h": 16,
+  "i": 4,
+  "j": 20,
+  "k": 10,
+  "l": 36,
+  "m": 42,
+  "n": 1,
+  "o": 3,
+  "p": 27,
+  "q": 18,
+  "r": 7,
+  "s": 8,
+  "t": 28,
+  "u": 22,
+  "v": 5,
+  "w": 38,
+  "x": 15,
+  "y": 21,
+  "z": 35,
+  "|": 26,
+  "ç": 37,
+  "ö": 32,
+  "ü": 39,
+  "ğ": 9,
+  "ı": 0,
+  "ş": 31
+}
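
The vocabulary is character-level Turkish: digits 0-9, a-z, the Turkish letters ç, ö, ü, ğ, ı, and ş, the word delimiter |, and [UNK]/[PAD] at IDs 43 and 44. A round-trip sketch, continuing the hypothetical `tokenizer` from the loading example above (the input word is illustrative):

```python
# Characters map one-to-one to IDs via vocab.json.
ids = tokenizer("merhaba").input_ids
print(ids)  # expected [42, 30, 7, 16, 2, 41, 2]: m e r h a b a

# decode() maps IDs back to text, restoring spaces from the "|" delimiter.
print(tokenizer.decode(ids))  # expected "merhaba"
```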