custom_efficient_vision/tokenizer.json
{
  "version": "1.0",
  "truncation": null,
  "padding": null,
  "added_tokens": [],
  "normalizer": null,
  "pre_tokenizer": null,
  "post_processor": null,
  "decoder": null,
  "model": {
    "type": "Unigram",
    "unk_id": null,
    "vocab": []
  }
}
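
With an empty vocab and every pipeline stage set to null, this config is effectively a placeholder. Below is a minimal sketch of inspecting and loading it with the Hugging Face tokenizers library, assuming the file has been downloaded to the working directory as tokenizer.json; since the library may reject a Unigram model with no vocabulary, the full load is wrapped in a try/except:

import json

from tokenizers import Tokenizer

# Read the raw JSON config to see what the file actually declares.
with open("tokenizer.json", encoding="utf-8") as f:
    config = json.load(f)

print(config["model"]["type"])        # Unigram
print(len(config["model"]["vocab"]))  # 0 -- the vocabulary is empty

# A full load may fail: tokenizers can reject a Unigram model whose
# vocabulary is empty, which would be expected for a stub file like this.
try:
    tokenizer = Tokenizer.from_file("tokenizer.json")
    print("loaded; vocab size:", tokenizer.get_vocab_size())
except Exception as exc:
    print("could not load tokenizer:", exc)

Even if the load succeeds, the tokenizer cannot encode anything useful until the model's "vocab" list is populated with [token, score] pairs, which is the format the tokenizers library serializes for Unigram models.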