{
  "architectures": ["MultiModalityCausalLM"],
  "model_type": "multi_modality",
  "torch_dtype": "bfloat16",
  "llama_config": {
    "architectures": ["LlamaForCausalLM"],
    "hidden_size": 2048,
    "intermediate_size": 5632,
    "max_position_embeddings": 16384,
    "num_attention_heads": 16,
    "num_hidden_layers": 24,
    "num_key_value_heads": 16,
    "rms_norm_eps": 1e-6,
    "vocab_size": 102400,
    "torch_dtype": "bfloat16"
  },
  "clip_vision_config": {
    "architectures": ["CLIPVisionModel"],
    "hidden_size": 1024,
    "image_size": 384,
    "patch_size": 16,
    "projection_dim": 2048,
    "num_hidden_layers": 24,
    "num_attention_heads": 16,
    "intermediate_size": 4096
  },
  "alignment_config": {
    "projector_type": "mlp_gelu",
    "depth": 2,
    "input_dim": 1024,
    "n_embed": 2048
  },
  "generation_config": {
    "image_token_size": 16384,
    "vq_codebook_size": 8,
    "image_token_embed": 2048
  }
}
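
A minimal sketch of how the sub-configs fit together, assuming the file is saved locally as config.json (the path and script are illustrative, not part of the repo): the mlp_gelu aligner takes the CLIP vision tower's 1024-d features (input_dim) and projects them into the LLaMA model's 2048-d embedding space (n_embed), so those two dimensions must agree with the neighboring blocks.

import json

# Load the config above and sanity-check the cross-module dimensions.
# The file path is an assumption for illustration.
with open("config.json") as f:
    cfg = json.load(f)

llama = cfg["llama_config"]
vision = cfg["clip_vision_config"]
aligner = cfg["alignment_config"]

# The aligner bridges vision features into the LLM embedding space,
# so its input must match the vision hidden size and its output the
# LLaMA hidden size: 1024 -> 2048 here.
assert aligner["input_dim"] == vision["hidden_size"]
assert aligner["n_embed"] == llama["hidden_size"]

# Per-head dimension of the language model: 2048 / 16 = 128.
head_dim = llama["hidden_size"] // llama["num_attention_heads"]

# Vision tokens per image: (384 / 16) ** 2 = 576 patches.
patches = (vision["image_size"] // vision["patch_size"]) ** 2

print(f"head_dim={head_dim}, vision tokens per image={patches}")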