Upload tokenizer
- special_tokens_map.json  +1 -4
- tokenizer.json  +6 -28
- tokenizer_config.json  +1 -28
special_tokens_map.json
CHANGED

@@ -3,10 +3,7 @@
     "▁<PRE>",
     "▁<MID>",
     "▁<SUF>",
-    "▁<EOT>",
-    ">>>>>>>",
-    "=======",
-    "<<<<<<<"
+    "▁<EOT>"
   ],
   "bos_token": "<s>",
   "eos_token": "</s>",
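Net effect, checked from Python: the fill-in-the-middle tokens stay special while the merge-conflict markers are demoted to plain text. A minimal sketch assuming the transformers library; the repo id below is a placeholder, not the actual repository name.

from transformers import AutoTokenizer

# NOTE: "user/model" is a placeholder repo id, not this repository's real name.
tok = AutoTokenizer.from_pretrained("user/model")

# After this commit the FIM tokens are the only extra special tokens...
print(tok.additional_special_tokens)
# expected: ['▁<PRE>', '▁<MID>', '▁<SUF>', '▁<EOT>']

# ...and the merge-conflict markers are gone from the list.
for marker in (">>>>>>>", "=======", "<<<<<<<"):
    assert marker not in tok.additional_special_tokens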
tokenizer.json
CHANGED

@@ -1,6 +1,11 @@
 {
   "version": "1.0",
-  "truncation": null,
+  "truncation": {
+    "direction": "Right",
+    "max_length": 512,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
   "padding": null,
   "added_tokens": [
     {
@@ -65,33 +70,6 @@
       "rstrip": true,
       "normalized": false,
       "special": true
-    },
-    {
-      "id": 32016,
-      "content": ">>>>>>>",
-      "single_word": false,
-      "lstrip": true,
-      "rstrip": true,
-      "normalized": false,
-      "special": true
-    },
-    {
-      "id": 32017,
-      "content": "=======",
-      "single_word": false,
-      "lstrip": true,
-      "rstrip": true,
-      "normalized": false,
-      "special": true
-    },
-    {
-      "id": 32018,
-      "content": "<<<<<<<",
-      "single_word": false,
-      "lstrip": true,
-      "rstrip": true,
-      "normalized": false,
-      "special": true
     }
   ],
   "normalizer": {
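The new "truncation" block is what the tokenizers library serializes when truncation is enabled at runtime. A sketch of the equivalent call, assuming the standalone tokenizers package; the file path is illustrative.

from tokenizers import Tokenizer

tokenizer = Tokenizer.from_file("tokenizer.json")  # illustrative path

# Equivalent to the committed block: cut inputs on the right at 512 tokens,
# no stride, longest-first strategy when truncating sequence pairs.
tokenizer.enable_truncation(
    max_length=512,
    stride=0,
    strategy="longest_first",
    direction="right",
)
tokenizer.save("tokenizer.json")  # writes the same "truncation" object back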
tokenizer_config.json
CHANGED

@@ -55,40 +55,13 @@
       "rstrip": true,
       "single_word": false,
       "special": true
-    },
-    "32016": {
-      "content": ">>>>>>>",
-      "lstrip": true,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32017": {
-      "content": "=======",
-      "lstrip": true,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32018": {
-      "content": "<<<<<<<",
-      "lstrip": true,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
     }
   },
   "additional_special_tokens": [
     "▁<PRE>",
     "▁<MID>",
     "▁<SUF>",
-    "▁<EOT>",
-    ">>>>>>>",
-    "=======",
-    "<<<<<<<"
+    "▁<EOT>"
   ],
   "bos_token": "<s>",
   "clean_up_tokenization_spaces": false,
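One observable consequence of dropping ids 32016-32018 from both token tables: the conflict markers should now tokenize as ordinary subword pieces instead of matching as single added tokens. A hedged sketch, with the same placeholder repo id as above.

from transformers import AutoTokenizer

# NOTE: "user/model" is a placeholder repo id, not this repository's real name.
tok = AutoTokenizer.from_pretrained("user/model")

# Before this commit ">>>>>>>" matched the added token with id 32016;
# afterwards it should split into ordinary subword pieces instead.
ids = tok.encode(">>>>>>>", add_special_tokens=False)
print(ids, tok.convert_ids_to_tokens(ids))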