File size: 852 Bytes
Commit: 3dba822
{
  "architectures": [
    "Sarashina2VisionForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "configuration_sarashina2_vision.Sarashina2VisionConfig",
    "AutoModelForCausalLM": "modeling_sarashina2_vision.Sarashina2VisionForCausalLM"
  },
  "end_image_token_index": 102398,
  "image_token_index": 14,
  "model_type": "sarashina2_vision",
  "start_image_token_index": 102397,
  "text_config": {
    "_name_or_path": "sbintuitions/sarashina2-7b",
    "architectures": [
      "LlamaForCausalLM"
    ],
    "max_position_embeddings": 4096,
    "model_type": "llama",
    "rms_norm_eps": 1e-05,
    "torch_dtype": "bfloat16",
    "vocab_size": 102400
  },
  "torch_dtype": "bfloat16",
  "transformers_version": "4.47.0",
  "vision_config": {
    "hidden_size": 4096,
    "in_chans": 3,
    "model_type": "qwen2_vl",
    "spatial_patch_size": 14
  }
}
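
The auto_map block registers a custom configuration and model class (configuration_sarashina2_vision.py and modeling_sarashina2_vision.py, shipped alongside this config), so the checkpoint must be loaded with trust_remote_code=True for AutoModelForCausalLM to resolve Sarashina2VisionForCausalLM. A minimal loading sketch, assuming this config.json lives in a Hugging Face model repo; the repo id below is a placeholder, not something stated in the config:

import torch
from transformers import AutoModelForCausalLM

repo_id = "sbintuitions/sarashina2-vision-8b"  # placeholder repo id; substitute the actual repo

# auto_map resolves AutoModelForCausalLM to the repo's custom
# Sarashina2VisionForCausalLM class, which requires trust_remote_code=True.
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype=torch.bfloat16,  # matches the config's "torch_dtype": "bfloat16"
    trust_remote_code=True,
)

Note that start_image_token_index (102397) and end_image_token_index (102398) sit near the top of the 102400-entry vocabulary, presumably reserved as sentinels bracketing the image embedding span, while image_token_index (14) marks the per-image placeholder position in the text sequence.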