Guilherme34 committed
Commit f025b33 · verified · 1 Parent(s): 1d7d24b

Update adapter_config.json

Files changed (1)
adapter_config.json  +192 -25
adapter_config.json CHANGED
@@ -1,26 +1,193 @@
 {
-  "alpha_pattern": {},
-  "auto_mapping": null,
-  "base_model_name_or_path": "openbmb/MiniCPM-o-2_6",
-  "bias": "none",
-  "fan_in_fan_out": false,
-  "inference_mode": true,
-  "init_lora_weights": true,
-  "layer_replication": null,
-  "layers_pattern": null,
-  "layers_to_transform": null,
-  "loftq_config": {},
-  "lora_alpha": 16,
-  "lora_dropout": 0,
-  "megatron_config": null,
-  "megatron_core": "megatron.core",
-  "modules_to_save": null,
-  "peft_type": "LORA",
-  "r": 8,
-  "rank_pattern": {},
-  "revision": null,
-  "target_modules": "o_proj|gate_proj|v_proj|k_proj|down_proj|up_proj|q_proj",
-  "task_type": "CAUSAL_LM",
-  "use_dora": false,
-  "use_rslora": false
-}
+  "_name_or_path": "openbmb/MiniCPM-o-2_6",
+  "architectures": [
+    "MiniCPMO"
+  ],
+
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "eos_token_id": 151645,
+  "hidden_act": "silu",
+  "hidden_size": 3584,
+  "initializer_range": 0.02,
+  "intermediate_size": 18944,
+  "max_position_embeddings": 32768,
+  "max_window_layers": 28,
+  "num_attention_heads": 28,
+  "num_hidden_layers": 28,
+  "num_key_value_heads": 4,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 1000000.0,
+  "sliding_window": 131072,
+  "tie_word_embeddings": false,
+  "use_sliding_window": false,
+  "vocab_size": 151700,
+  "batch_vision_input": true,
+  "drop_vision_last_layer": false,
+  "image_size": 448,
+
+  "audio_chunk_length": 1.0,
+  "audio_config": {
+    "_name_or_path": "openai/whisper-medium",
+    "architectures": [
+      "MiniCPMWhisperEncoder"
+    ],
+    "begin_suppress_tokens": [
+      220,
+      50257
+    ],
+    "bos_token_id": 50257,
+    "d_model": 1024,
+    "decoder_attention_heads": 16,
+    "decoder_ffn_dim": 4096,
+    "decoder_layers": 24,
+    "decoder_start_token_id": 50258,
+    "encoder_attention_heads": 16,
+    "encoder_ffn_dim": 4096,
+    "encoder_layers": 24,
+    "eos_token_id": 50257,
+    "forced_decoder_ids": [
+      [
+        1,
+        50259
+      ],
+      [
+        2,
+        50359
+      ],
+      [
+        3,
+        50363
+      ]
+    ],
+    "max_length": 448,
+    "model_type": "whisper",
+    "num_hidden_layers": 24,
+    "pad_token_id": 50257,
+    "suppress_tokens": [
+      1,
+      2,
+      7,
+      8,
+      9,
+      10,
+      14,
+      25,
+      26,
+      27,
+      28,
+      29,
+      31,
+      58,
+      59,
+      60,
+      61,
+      62,
+      63,
+      90,
+      91,
+      92,
+      93,
+      359,
+      503,
+      522,
+      542,
+      873,
+      893,
+      902,
+      918,
+      922,
+      931,
+      1350,
+      1853,
+      1982,
+      2460,
+      2627,
+      3246,
+      3253,
+      3268,
+      3536,
+      3846,
+      3961,
+      4183,
+      4667,
+      6585,
+      6647,
+      7273,
+      9061,
+      9383,
+      10428,
+      10929,
+      11938,
+      12033,
+      12331,
+      12562,
+      13793,
+      14157,
+      14635,
+      15265,
+      15618,
+      16553,
+      16604,
+      18362,
+      18956,
+      20075,
+      21675,
+      22520,
+      26130,
+      26161,
+      26435,
+      28279,
+      29464,
+      31650,
+      32302,
+      32470,
+      36865,
+      42863,
+      47425,
+      49870,
+      50254,
+      50258,
+      50358,
+      50359,
+      50360,
+      50361,
+      50362
+    ],
+    "torch_dtype": "float32"
+  },
+  "audio_pool_step": 2,
+  "auto_map": {
+    "AutoConfig": "configuration_minicpm.MiniCPMOConfig",
+    "AutoModel": "modeling_minicpmo.MiniCPMO",
+    "AutoModelForCausalLM": "modeling_minicpmo.MiniCPMO"
+  },
+  "chunk_input": true,
+  "listen_speak_type": "asr",
+  "model_type": "minicpmo",
+  "patch_size": 14,
+  "query_num": 64,
+  "slice_config": {
+    "max_slice_nums": 9,
+    "model_type": "minicpmv"
+  },
+  "slice_mode": true,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.44.2",
+  "tts_config": {
+    "model_type": "conditional_chattts",
+    "llm_dim": 3584
+  },
+  "use_cache": true,
+  "use_image_id": true,
+  "version": 2.6,
+  "vision_batch_size": 16,
+  "vision_config": {
+    "hidden_size": 1152,
+    "image_size": 980,
+    "intermediate_size": 4304,
+    "model_type": "siglip_vision_model",
+    "num_attention_heads": 16,
+    "num_hidden_layers": 27,
+    "patch_size": 14
+  }
+}
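For reference, the removed file is a standard PEFT LoRA adapter config. A sketch like the following would reproduce it; all values come from the `-` lines above, while `adapter_dir` is a placeholder output path:

```python
# Sketch: recreating the removed LoRA adapter config with PEFT.
# Values mirror the deleted JSON; "adapter_dir" is an illustrative path.
from peft import LoraConfig

config = LoraConfig(
    r=8,                    # LoRA rank
    lora_alpha=16,          # scaling numerator (effective scale alpha/r = 2)
    lora_dropout=0.0,
    bias="none",
    task_type="CAUSAL_LM",
    base_model_name_or_path="openbmb/MiniCPM-o-2_6",
    # PEFT accepts a list of module names or a regex string; the removed
    # config stored the regex form shown in the diff
    target_modules="o_proj|gate_proj|v_proj|k_proj|down_proj|up_proj|q_proj",
)
config.save_pretrained("adapter_dir")  # writes adapter_config.json
```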
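Note that the `+` version no longer contains PEFT fields such as `peft_type` or `r`; the file now holds the base model's own config (with nested audio, vision, and TTS sub-configs). A minimal sanity check, assuming the file sits in the current directory, is to confirm it parses and spot-check a few of the fields shown in the diff:

```python
# Sketch: sanity-check the updated adapter_config.json from this commit.
import json

with open("adapter_config.json") as f:
    cfg = json.load(f)  # fails fast if a brace or comma is off

# Top-level identity and the nested sub-configs shown in the diff
assert cfg["model_type"] == "minicpmo"
assert cfg["audio_config"]["model_type"] == "whisper"
assert cfg["vision_config"]["model_type"] == "siglip_vision_model"
assert cfg["tts_config"]["llm_dim"] == cfg["hidden_size"] == 3584

# GQA layout: 28 query heads share 4 KV heads (7 queries per KV head)
assert cfg["num_attention_heads"] % cfg["num_key_value_heads"] == 0
print("config OK:", cfg["_name_or_path"])
```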