{ "activation_type": "silu", "add_faster_video": false, "add_time_instruction": false, "alibi": false, "alibi_bias_max": 8.0, "architectures": [ "LlavaLladaForMaskedDiffusion" ], "attention_dropout": 0.0, "attention_layer_norm": false, "attention_layer_norm_with_affine": true, "auto_map": { "AutoConfig": "configuration_llada.LLaDAConfig", "AutoModel": "modeling_llada.LLaDAModelLM", "AutoModelForCausalLM": "modeling_llada.LLaDAModelLM" }, "bias_for_layer_norm": false, "block_group_size": 1, "block_type": "llama", "d_model": 4096, "embedding_dropout": 0.0, "embedding_size": 126464, "eos_token_id": 126081, "faster_token_stride": 10, "flash_attention": false, "force_sample": false, "image_aspect_ratio": "square", "image_crop_resolution": null, "image_grid_pinpoints": null, "image_split_resolution": null, "include_bias": false, "include_qkv_bias": false, "init_cutoff_factor": null, "init_device": "meta", "init_fn": "mitchell", "init_std": 0.02, "input_emb_norm": false, "layer_norm_type": "rms", "layer_norm_with_affine": true, "mask_token_id": 126336, "max_sequence_length": 4096, "mlp_hidden_size": 12288, "mlp_ratio": 4, "mm_hidden_size": 1152, "mm_newline_position": "grid", "mm_patch_merge_type": "spatial_unpad", "mm_pooler_ratio": 2, "mm_projector_lr": null, "mm_projector_type": "mlp2x_gelu", "mm_spatial_pool_mode": "bilinear", "mm_spatial_pool_stride": null, "mm_tunable_parts": "mm_vision_tower,mm_mlp_adapter,mm_language_model", "mm_use_im_patch_token": false, "mm_use_im_start_end": false, "mm_vision_select_feature": "patch", "mm_vision_select_layer": -2, "mm_vision_tower": "/data/siglip-so400m-patch14-384", "mm_vision_tower_lr": 2e-06, "model_type": "llada", "multi_query_attention": null, "n_heads": 32, "n_kv_heads": 32, "n_layers": 32, "pad_token_id": 126081, "pos_skipping_range": 4096, "precision": "amp_bf16", "resampler_type": null, "residual_dropout": 0.0, "rms_norm_eps": 1e-05, "rope": true, "rope_full_precision": true, "rope_theta": 500000.0, "scale_logits": false, "tokenizer_model_max_length": 2048, "tokenizer_padding_side": "right", "torch_dtype": "bfloat16", "transformers_version": "4.50.3", "use_cache": false, "use_mm_proj": true, "use_pos_skipping": false, "vision_tower_pretrained": null, "vocab_size": 126464, "weight_tying": false }