{ "tokenizer_class": "SentencePieceTokenizerWrapper", "vocab_size": 16000, "bos_token_id": 1, "eos_token_id": 2, "pad_token_id": 0, "unk_token_id": 3, "model_max_length": 512 }