la_en_morphology / tokenizer_config.json
{
  "eos_token": "</s>",
  "model_max_length": 512,
  "pad_token": "<pad>",
  "separate_vocabs": false,
  "source_lang": "fr+fr_BE+fr_CA+fr_FR+wa+frp+oc+ca+rm+lld+fur+lij+lmo+es+es_AR+es_CL+es_CO+es_CR+es_DO+es_EC+es_ES+es_GT+es_HN+es_MX+es_NI+es_PA+es_PE+es_PR+es_SV+es_UY+es_VE+pt+pt_br+pt_BR+pt_PT+gl+lad+an+mwl+it+it_IT+co+nap+scn+vec+sc+ro+la",
  "sp_model_kwargs": {},
  "special_tokens_map_file": null,
  "target_lang": "en",
  "tokenizer_class": "MarianTokenizer",
  "unk_token": "<unk>"
}
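
For reference, a minimal sketch of loading this tokenizer with the transformers library. The repo id grosenthal/la_en_morphology is assumed from the page path, the sketch assumes the repository also contains the SentencePiece and vocabulary files that MarianTokenizer expects, and the Latin input is purely illustrative.

from transformers import MarianTokenizer

# Assumed repo id, inferred from the file path shown above.
tokenizer = MarianTokenizer.from_pretrained("grosenthal/la_en_morphology")

# Values set by this tokenizer_config.json.
print(tokenizer.model_max_length)  # 512
print(tokenizer.eos_token, tokenizer.pad_token, tokenizer.unk_token)  # </s> <pad> <unk>

# Encode a short Latin sentence (illustrative input only).
enc = tokenizer("arma virumque cano")
print(enc["input_ids"])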