Upload tokenizer files
- tokenizer.json +1 -6
- tokenizer_config.json +0 -4
tokenizer.json
CHANGED
@@ -1,11 +1,6 @@
 {
   "version": "1.0",
-  "truncation": {
-    "direction": "Right",
-    "max_length": 4094,
-    "strategy": "LongestFirst",
-    "stride": 0
-  },
+  "truncation": null,
   "padding": null,
   "added_tokens": [
     {
tokenizer_config.json
CHANGED
@@ -933,7 +933,6 @@
   "cls_token": "[CLS]",
   "extra_special_tokens": {},
   "mask_token": "[MASK]",
-  "max_length": 4094,
   "model_input_names": [
     "input_ids",
     "attention_mask"
@@ -941,9 +940,6 @@
   "model_max_length": 8192,
   "pad_token": "[PAD]",
   "sep_token": "[SEP]",
-  "stride": 0,
   "tokenizer_class": "PreTrainedTokenizer",
-  "truncation_side": "right",
-  "truncation_strategy": "longest_first",
   "unk_token": "[UNK]"
 }
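Of the keys dropped from tokenizer_config.json, max_length, stride, and truncation_strategy map onto call-time arguments in transformers, and truncation_side simply falls back to its default of "right", so the old behavior can still be requested explicitly rather than being baked into the saved config. A rough sketch, assuming the repository loads as a regular transformers tokenizer; the repo id is a placeholder:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("your-org/your-model")  # placeholder repo id
print(tok.model_max_length)  # 8192, still set in tokenizer_config.json

# Reproduce the removed defaults explicitly for a single call.
enc = tok(
    "a long input document ...",
    truncation="longest_first",  # was truncation_strategy
    max_length=4094,             # was max_length
    stride=0,                    # was stride
)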