neilmehta24 committed
Commit b590650 · verified · 1 Parent(s): e0c8869

Update config.json

Files changed (1)
  1. config.json +1 -25
config.json CHANGED
@@ -60,29 +60,5 @@
     "vocab_size": 202048
   },
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.51.0.dev0",
-  "vision_config": {
-    "_attn_implementation_autoset": true,
-    "attention_dropout": 0.0,
-    "hidden_act": "gelu",
-    "hidden_size": 1408,
-    "image_size": 336,
-    "initializer_range": 0.02,
-    "intermediate_size": 5632,
-    "model_type": "llama4_vision_model",
-    "multi_modal_projector_bias": false,
-    "norm_eps": 1e-05,
-    "num_attention_heads": 16,
-    "num_channels": 3,
-    "num_hidden_layers": 34,
-    "patch_size": 14,
-    "pixel_shuffle_ratio": 0.5,
-    "projector_dropout": 0.0,
-    "projector_input_dim": 4096,
-    "projector_output_dim": 4096,
-    "rope_theta": 10000,
-    "vision_feature_layer": -1,
-    "vision_feature_select_strategy": "default",
-    "vision_output_dim": 4096
-  }
+  "transformers_version": "4.51.0.dev0"
 }
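
As a quick sanity check (not part of this commit), the sketch below shows one way to confirm that the updated config.json drops the vision_config block while keeping the surrounding keys; the local file path is an assumption for illustration.

import json

# Load the updated config.json (path is assumed; adjust to your checkout).
with open("config.json") as f:
    config = json.load(f)

# After this commit, vision_config should be gone and transformers_version
# should be the final top-level key, now without a trailing comma.
assert "vision_config" not in config
assert config["transformers_version"] == "4.51.0.dev0"
assert config["torch_dtype"] == "bfloat16"
print("config.json matches the post-commit state")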