nanoVLM-222M / config.json
{
"vit_hidden_dim": 768,
"vit_inter_dim": 3072,
"vit_patch_size": 16,
"vit_img_size": 224,
"vit_n_heads": 12,
"vit_dropout": 0.0,
"vit_n_blocks": 12,
"vit_ln_eps": 1e-06,
"vit_cls_flag": false,
"vit_model_type": "google/siglip-base-patch16-224",
"lm_hidden_dim": 576,
"lm_inter_dim": 1536,
"lm_rms_eps": 1e-05,
"lm_re_base": 100000,
"lm_max_position_embeddings": 8192,
"lm_vocab_size": 49152,
"lm_n_heads": 9,
"lm_n_kv_heads": 3,
"lm_dropout": 0.0,
"lm_n_blocks": 30,
"lm_attn_scaling": 1.0,
"lm_max_length": 79,
"lm_use_tokens": false,
"lm_tie_weights": true,
"lm_model_type": "HuggingFaceTB/SmolLM2-135M",
"lm_tokenizer": "HuggingFaceTB/cosmo2-tokenizer",
"lm_eos_token_id": 0,
"mp_pixel_shuffle_factor": 2,
"vlm_load_backbone_weights": true,
"vlm_checkpoint_path": "nanoVLM.pth"
}
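
The values above describe the two backbones (a SigLIP-base vision encoder and a SmolLM2-135M language model) plus the modality projector. The snippet below is a minimal, illustrative sketch, not nanoVLM's actual config loader: the VLMConfigSketch dataclass name is hypothetical, and it only shows how the file might be parsed and how the per-image token count follows from vit_img_size, vit_patch_size, and mp_pixel_shuffle_factor.

    # Illustrative sketch only; VLMConfigSketch is a hypothetical name.
    import json
    from dataclasses import dataclass

    @dataclass
    class VLMConfigSketch:
        vit_img_size: int
        vit_patch_size: int
        mp_pixel_shuffle_factor: int

    with open("config.json") as f:
        raw = json.load(f)

    cfg = VLMConfigSketch(
        vit_img_size=raw["vit_img_size"],
        vit_patch_size=raw["vit_patch_size"],
        mp_pixel_shuffle_factor=raw["mp_pixel_shuffle_factor"],
    )

    # 224 / 16 = 14 patches per side -> 14 * 14 = 196 ViT patch tokens.
    patches_per_side = cfg.vit_img_size // cfg.vit_patch_size
    n_patch_tokens = patches_per_side ** 2

    # A pixel-shuffle factor of 2 merges each 2x2 group of patch tokens,
    # so the language model sees 196 / 4 = 49 image tokens.
    n_image_tokens = n_patch_tokens // cfg.mp_pixel_shuffle_factor ** 2

    print(n_patch_tokens, n_image_tokens)  # 196 49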