apepkuss79 committed
Commit b5e7392 (verified) · Parent: 833e3b3

Update models

.gitattributes CHANGED
@@ -33,3 +33,17 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+MiniCPM-V-4-Q2_K.gguf filter=lfs diff=lfs merge=lfs -text
+MiniCPM-V-4-Q3_K_L.gguf filter=lfs diff=lfs merge=lfs -text
+MiniCPM-V-4-Q3_K_M.gguf filter=lfs diff=lfs merge=lfs -text
+MiniCPM-V-4-Q3_K_S.gguf filter=lfs diff=lfs merge=lfs -text
+MiniCPM-V-4-Q4_0.gguf filter=lfs diff=lfs merge=lfs -text
+MiniCPM-V-4-Q4_K_M.gguf filter=lfs diff=lfs merge=lfs -text
+MiniCPM-V-4-Q4_K_S.gguf filter=lfs diff=lfs merge=lfs -text
+MiniCPM-V-4-Q5_0.gguf filter=lfs diff=lfs merge=lfs -text
+MiniCPM-V-4-Q5_K_M.gguf filter=lfs diff=lfs merge=lfs -text
+MiniCPM-V-4-Q5_K_S.gguf filter=lfs diff=lfs merge=lfs -text
+MiniCPM-V-4-Q6_K.gguf filter=lfs diff=lfs merge=lfs -text
+MiniCPM-V-4-Q8_0.gguf filter=lfs diff=lfs merge=lfs -text
+MiniCPM-V-4-f16.gguf filter=lfs diff=lfs merge=lfs -text
+MiniCPM-V-4-mmproj-f16.gguf filter=lfs diff=lfs merge=lfs -text
MiniCPM-V-4-Q2_K.gguf ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:86ef17481e5df577c8e2eb2c659cfe34402cd124772c688d2d8575f44dfd79c7
+size 1402204576
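
Each *.gguf entry added in this commit is a Git LFS pointer file of the three-line form shown above (version, oid, size), not the GGUF binary itself. As a minimal illustration of what those three lines carry, the following Python sketch parses such a pointer; the file path is a hypothetical example and it only works on the pointer text, not on the resolved weights.

from pathlib import Path

def parse_lfs_pointer(path):
    # Split each "key value" line of the pointer into a dict, e.g.
    # {"version": "https://git-lfs.github.com/spec/v1",
    #  "oid": "sha256:86ef...", "size": "1402204576"}
    fields = {}
    for line in Path(path).read_text(encoding="utf-8").splitlines():
        key, _, value = line.partition(" ")
        if key:
            fields[key] = value
    return fields

if __name__ == "__main__":
    # Hypothetical local pointer file (as checked out without LFS smudging).
    info = parse_lfs_pointer("MiniCPM-V-4-Q2_K.gguf")
    print(info["oid"], int(info["size"]))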
MiniCPM-V-4-Q3_K_L.gguf ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dd7a9871de37208f7b2d03922b696bc7085cbcc20519e25fbfef085cac7861eb
+size 1933301056
MiniCPM-V-4-Q3_K_M.gguf ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2ec7bc3a54a5c37fbac3e6609c2c35b5ab0369f1099022f3717d8cfd295abdbc
+size 1790596416
MiniCPM-V-4-Q3_K_S.gguf ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4b85a833033243e460844a6285e4f3903290e38dc36bd51d14ff2d178c2a30ea
+size 1625118016
MiniCPM-V-4-Q4_0.gguf ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b55582c110395bdcab055d9bf7b0d5aaa755655e2305a91e021a0866579b172d
+size 2079023456
MiniCPM-V-4-Q4_K_M.gguf ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:48f384d199d0015e9d562577bc334c2e2444efa7e7951c92bb04cf70d2ebb185
+size 2189861216
MiniCPM-V-4-Q4_K_S.gguf ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9a2625539e64a8a39cd137d2d31384b0818e21afdc8aeac1adf47cb73c9659b0
+size 2092458336
MiniCPM-V-4-Q5_0.gguf ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:94fbcbca815e9516480bb54ecf9fe4d2c008a9f5964e6c945e89191c4948979e
+size 2506228576
MiniCPM-V-4-Q5_K_M.gguf ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ab0e676bedd84621df7643042f4a3ea74484458254755d25cbf8c42579c5ecfb
+size 2563326816
MiniCPM-V-4-Q5_K_S.gguf ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4bdcbff9587481628c6bd0a8826133046faf3cd30e718cfbc4dbc9069f4efbe4
+size 2506228576
MiniCPM-V-4-Q6_K.gguf ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aad9eee9eb1604b7a258ea63c20ef6754ea25625ab46308f58a33d84102d197b
+size 2960134016
MiniCPM-V-4-Q8_0.gguf ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4618297c8a2ec285c84dd219d6daaaecd4359a8c92a1fc9bb0d629928be44bad
+size 3833381696
MiniCPM-V-4-f16.gguf ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:037334923a63203f4a47dad1d76f6811f61912a487bc74c5de1f910ddbf537f6
+size 7213695296
MiniCPM-V-4-mmproj-f16.gguf ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:032ba56656c13d4e34828f82c937311aa3f4d53abde07b3213567cb6fe7211af
+size 958777824
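
All of the pointer files above resolve to multi-gigabyte GGUF artifacts, so the actual weights have to be fetched through Git LFS or the Hugging Face Hub rather than a plain git checkout. Below is a minimal sketch using huggingface_hub; the repo_id is a placeholder assumption and should be replaced with the repository this commit belongs to.

from huggingface_hub import hf_hub_download

# Placeholder repo_id (assumption); substitute the real GGUF repository path.
REPO_ID = "your-namespace/MiniCPM-V-4-GGUF"

# Download one quantized language model plus the multimodal projector file.
model_path = hf_hub_download(repo_id=REPO_ID, filename="MiniCPM-V-4-Q4_K_M.gguf")
mmproj_path = hf_hub_download(repo_id=REPO_ID, filename="MiniCPM-V-4-mmproj-f16.gguf")
print(model_path, mmproj_path)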
config.json ADDED
@@ -0,0 +1,203 @@
+{
+  "architectures": [
+    "MiniCPMV"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "auto_map": {
+    "AutoConfig": "configuration_minicpm.MiniCPMVConfig",
+    "AutoModel": "modeling_minicpmv.MiniCPMV",
+    "AutoModelForCausalLM": "modeling_minicpmv.MiniCPMV"
+  },
+  "batch_vision_input": true,
+  "bos_token_id": 1,
+  "drop_vision_last_layer": false,
+  "eos_token_id": [
+    2,
+    73440
+  ],
+  "head_dim": 128,
+  "hidden_act": "silu",
+  "hidden_size": 2560,
+  "image_size": 448,
+  "initializer_range": 0.1,
+  "intermediate_size": 10240,
+  "max_position_embeddings": 32768,
+  "mlp_bias": false,
+  "model_type": "minicpmv",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 32,
+  "num_key_value_heads": 2,
+  "pad_token_id": 2,
+  "patch_size": 14,
+  "pretraining_tp": 1,
+  "query_num": 64,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": {
+    "factor": 1.0,
+    "long_factor": [
+      0.9977997200264581,
+      1.014658295992452,
+      1.0349680404997148,
+      1.059429246056193,
+      1.0888815016813513,
+      1.1243301355211495,
+      1.166977103606075,
+      1.2182568066927284,
+      1.2798772354275727,
+      1.3538666751582975,
+      1.4426259039919596,
+      1.5489853358570191,
+      1.6762658237220625,
+      1.8283407612492941,
+      2.0096956085876183,
+      2.225478927469756,
+      2.481536379650452,
+      2.784415934557119,
+      3.1413289096347365,
+      3.560047844772632,
+      4.048719380066383,
+      4.615569542115128,
+      5.2684819496549835,
+      6.014438591970396,
+      6.858830049237097,
+      7.804668263503327,
+      8.851768731513417,
+      9.99600492938444,
+      11.228766118181639,
+      12.536757560834843,
+      13.902257701387796,
+      15.303885189125953,
+      16.717837610115794,
+      18.119465097853947,
+      19.484965238406907,
+      20.792956681060105,
+      22.02571786985731,
+      23.16995406772833,
+      24.217054535738416,
+      25.16289275000465,
+      26.007284207271347,
+      26.753240849586767,
+      27.40615325712662,
+      27.973003419175363,
+      28.461674954469114,
+      28.880393889607006,
+      29.237306864684626,
+      29.540186419591297,
+      29.79624387177199,
+      30.01202719065413,
+      30.193382037992453,
+      30.34545697551969,
+      30.47273746338473,
+      30.579096895249787,
+      30.66785612408345,
+      30.741845563814174,
+      30.80346599254902,
+      30.85474569563567,
+      30.897392663720595,
+      30.932841297560394,
+      30.962293553185553,
+      30.986754758742034,
+      31.007064503249293,
+      31.02392307921529
+    ],
+    "original_max_position_embeddings": 32786,
+    "rope_type": "longrope",
+    "short_factor": [
+      0.9977997200264581,
+      1.014658295992452,
+      1.0349680404997148,
+      1.059429246056193,
+      1.0888815016813513,
+      1.1243301355211495,
+      1.166977103606075,
+      1.2182568066927284,
+      1.2798772354275727,
+      1.3538666751582975,
+      1.4426259039919596,
+      1.5489853358570191,
+      1.6762658237220625,
+      1.8283407612492941,
+      2.0096956085876183,
+      2.225478927469756,
+      2.481536379650452,
+      2.784415934557119,
+      3.1413289096347365,
+      3.560047844772632,
+      4.048719380066383,
+      4.615569542115128,
+      5.2684819496549835,
+      6.014438591970396,
+      6.858830049237097,
+      7.804668263503327,
+      8.851768731513417,
+      9.99600492938444,
+      11.228766118181639,
+      12.536757560834843,
+      13.902257701387796,
+      15.303885189125953,
+      16.717837610115794,
+      18.119465097853947,
+      19.484965238406907,
+      20.792956681060105,
+      22.02571786985731,
+      23.16995406772833,
+      24.217054535738416,
+      25.16289275000465,
+      26.007284207271347,
+      26.753240849586767,
+      27.40615325712662,
+      27.973003419175363,
+      28.461674954469114,
+      28.880393889607006,
+      29.237306864684626,
+      29.540186419591297,
+      29.79624387177199,
+      30.01202719065413,
+      30.193382037992453,
+      30.34545697551969,
+      30.47273746338473,
+      30.579096895249787,
+      30.66785612408345,
+      30.741845563814174,
+      30.80346599254902,
+      30.85474569563567,
+      30.897392663720595,
+      30.932841297560394,
+      30.962293553185553,
+      30.986754758742034,
+      31.007064503249293,
+      31.02392307921529
+    ]
+  },
+  "rope_theta": 10000.0,
+  "slice_config": {
+    "max_slice_nums": 9,
+    "model_type": "minicpmv",
+    "patch_size": 14,
+    "scale_resolution": 448
+  },
+  "slice_mode": true,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.51.0",
+  "use_cache": true,
+  "use_image_id": true,
+  "version": 4.0,
+  "vision_batch_size": 16,
+  "vision_config": {
+    "_attn_implementation_autoset": true,
+    "attention_dropout": 0.0,
+    "hidden_act": "gelu_pytorch_tanh",
+    "hidden_size": 1152,
+    "image_size": 980,
+    "intermediate_size": 4304,
+    "layer_norm_eps": 1e-06,
+    "model_type": "siglip_vision_model",
+    "num_attention_heads": 16,
+    "num_channels": 3,
+    "num_hidden_layers": 27,
+    "patch_size": 14
+  },
+  "vocab_size": 73448
+}
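
The config.json above registers custom classes through auto_map (configuration_minicpm.MiniCPMVConfig, modeling_minicpmv.MiniCPMV), so loading it with transformers requires trust_remote_code=True. A minimal sketch follows, assuming the repository has been checked out locally next to those custom modules; the local path is a placeholder.

from transformers import AutoConfig

# Placeholder local checkout path (assumption); the directory must also contain
# the configuration_minicpm.py module referenced by auto_map for this to resolve.
config = AutoConfig.from_pretrained("./MiniCPM-V-4", trust_remote_code=True)
print(config.model_type, config.hidden_size, config.num_hidden_layers)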