{
  "added_tokens_decoder": {},
  "additional_special_tokens": [
    "<|startoftext|>",
    "<|extra_0|>",
    "<|extra_4|>",
    "<|extra_5|>",
    "<|eos|>"
  ],
  "architectures": [
    "GPT2LMHeadModel"
  ],
  "auto_map": {
    "AutoTokenizer": [
      "tokenization_hy.HYTokenizer",
      null
    ]
  },
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|eos|>",
  "extra_special_tokens": {},
  "model_max_length": 1048576,
  "model_type": "gpt2",
  "pad_token": "<|pad|>",
  "tokenizer_class": "HYTokenizer"
}
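
A minimal sketch of how a config like this is typically consumed, assuming it sits in a local model directory alongside the custom tokenization_hy.py module; the directory path is a placeholder, not part of the config above. Because auto_map points AutoTokenizer at tokenization_hy.HYTokenizer, loading requires trust_remote_code=True.

from transformers import AutoTokenizer

# Path is hypothetical: point it at the directory containing this
# tokenizer_config.json and tokenization_hy.py.
tokenizer = AutoTokenizer.from_pretrained(
    "path/to/model",
    trust_remote_code=True,  # needed so auto_map can resolve HYTokenizer
)

ids = tokenizer("Hello, world!").input_ids
print(tokenizer.decode(ids))
print(tokenizer.eos_token)   # "<|eos|>" per the config
print(tokenizer.pad_token)   # "<|pad|>" per the config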