danielhanchen committed
Commit 4f0f5b6 · verified · 1 Parent(s): 6b4cade

Upload folder using huggingface_hub

config.json CHANGED
@@ -23,6 +23,7 @@
       32
     ],
     "sscp_conv_eps": 0.001,
+    "sscp_conv_group_norm_eps": 0.001,
     "sscp_conv_kernel_size": [
       [
         3,
@@ -110,7 +111,43 @@
     "hidden_size": 2048,
     "hidden_size_per_layer_input": 256,
     "initializer_range": 0.02,
-    "intermediate_size": 16384,
+    "intermediate_size": [
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384,
+      16384
+    ],
     "laurel_rank": 64,
     "layer_types": [
       "sliding_attention",
@@ -167,7 +204,7 @@
     "vocab_size_per_layer_input": 262144
   },
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.53.0.dev0",
+  "transformers_version": "4.53.0",
   "unsloth_fixed": true,
   "vision_config": {
     "architecture": "mobilenetv5_300m_enc",
@@ -178,6 +215,7 @@
       "LABEL_0",
       "LABEL_1"
     ],
+    "model_args": null,
     "model_type": "gemma3n_vision",
     "num_classes": 2,
     "rms_norm_eps": 1e-06,
generation_config.json CHANGED
@@ -9,5 +9,5 @@
   "pad_token_id": 0,
   "top_k": 64,
   "top_p": 0.95,
-  "transformers_version": "4.53.0.dev0"
+  "transformers_version": "4.53.0"
 }
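
Both version stamps move from the 4.53.0.dev0 pre-release to the released 4.53.0, the version that added Gemma 3n support. A hedged environment check before loading this checkpoint:

from packaging import version
import transformers

# Gemma 3n (and new fields like "sscp_conv_group_norm_eps" above)
# needs transformers 4.53.0 or newer.
assert version.parse(transformers.__version__) >= version.parse("4.53.0")
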
preprocessor_config.json CHANGED
@@ -3,6 +3,7 @@
   "data_format": "channels_first",
   "default_to_square": false,
   "device": null,
+  "disable_grouping": null,
   "dither": 0.0,
   "do_center_crop": null,
   "do_convert_rgb": null,