sid819 committed on
Commit
b2d9b96
1 Parent(s): 4f6b332

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +21 -162
config.json CHANGED
@@ -1,187 +1,46 @@
1
  {
2
- "_name_or_path": "checkpoints/llavaPhi-v0-3b-finetune/checkpoint-4000",
3
  "architectures": [
4
  "LlavaPhiForCausalLM"
5
  ],
6
  "attention_dropout": 0.0,
7
- "bos_token_id": 1,
 
 
 
 
8
  "embd_pdrop": 0.0,
9
- "eos_token_id": 2,
10
- "freeze_vision_tower": false,
11
  "hidden_act": "gelu_new",
12
  "hidden_size": 2560,
13
  "image_aspect_ratio": "pad",
 
14
  "initializer_range": 0.02,
15
  "intermediate_size": 10240,
16
  "layer_norm_eps": 1e-05,
17
  "max_position_embeddings": 2048,
18
- "mm_projector_lr": null,
 
19
  "mm_use_im_patch_token": false,
20
  "mm_use_im_start_end": false,
21
- "model_type": "llava_phi",
 
 
 
22
  "num_attention_heads": 32,
23
  "num_hidden_layers": 32,
 
24
  "partial_rotary_factor": 0.4,
25
- "pretraining_tp": 1,
26
  "qk_layernorm": false,
27
  "resid_pdrop": 0.1,
28
  "rope_scaling": null,
29
  "rope_theta": 10000.0,
30
  "tie_word_embeddings": false,
31
- "tokenizer_model_max_length": 2048,
32
- "tokenizer_padding_side": "right",
33
  "torch_dtype": "float32",
34
- "transformers_version": "4.36.2",
35
- "tune_mm_mlp_adapter": true,
36
- "use_cache": false,
37
- "vision_config": {
38
- "mm_projector": {
39
- "_name_or_path": "",
40
- "add_cross_attention": false,
41
- "architectures": null,
42
- "bad_words_ids": null,
43
- "begin_suppress_tokens": null,
44
- "bos_token_id": null,
45
- "chunk_size_feed_forward": 0,
46
- "cross_attention_hidden_size": null,
47
- "decoder_start_token_id": null,
48
- "diversity_penalty": 0.0,
49
- "do_sample": false,
50
- "early_stopping": false,
51
- "encoder_no_repeat_ngram_size": 0,
52
- "eos_token_id": null,
53
- "exponential_decay_length_penalty": null,
54
- "finetuning_task": null,
55
- "forced_bos_token_id": null,
56
- "forced_eos_token_id": null,
57
- "hidden_size": 2560,
58
- "id2label": {
59
- "0": "LABEL_0",
60
- "1": "LABEL_1"
61
- },
62
- "is_decoder": false,
63
- "is_encoder_decoder": false,
64
- "label2id": {
65
- "LABEL_0": 0,
66
- "LABEL_1": 1
67
- },
68
- "length_penalty": 1.0,
69
- "max_length": 20,
70
- "min_length": 0,
71
- "mm_hidden_size": 1024,
72
- "mm_projector_type": "mlp2x_gelu",
73
- "model_type": "llava_phi_projector",
74
- "no_repeat_ngram_size": 0,
75
- "num_beam_groups": 1,
76
- "num_beams": 1,
77
- "num_return_sequences": 1,
78
- "output_attentions": false,
79
- "output_hidden_states": false,
80
- "output_scores": false,
81
- "pad_token_id": null,
82
- "prefix": null,
83
- "problem_type": null,
84
- "pruned_heads": {},
85
- "remove_invalid_values": false,
86
- "repetition_penalty": 1.0,
87
- "return_dict": true,
88
- "return_dict_in_generate": false,
89
- "sep_token_id": null,
90
- "suppress_tokens": null,
91
- "task_specific_params": null,
92
- "temperature": 1.0,
93
- "tf_legacy_loss": false,
94
- "tie_encoder_decoder": false,
95
- "tie_word_embeddings": true,
96
- "tokenizer_class": null,
97
- "top_k": 50,
98
- "top_p": 1.0,
99
- "torch_dtype": null,
100
- "torchscript": false,
101
- "transformers_version": "4.36.2",
102
- "typical_p": 1.0,
103
- "use_bfloat16": false
104
- },
105
- "vision_tower": {
106
- "_name_or_path": "",
107
- "add_cross_attention": false,
108
- "architectures": null,
109
- "attention_dropout": 0.0,
110
- "bad_words_ids": null,
111
- "begin_suppress_tokens": null,
112
- "bos_token_id": null,
113
- "chunk_size_feed_forward": 0,
114
- "cross_attention_hidden_size": null,
115
- "decoder_start_token_id": null,
116
- "diversity_penalty": 0.0,
117
- "do_sample": false,
118
- "dropout": 0.0,
119
- "early_stopping": false,
120
- "encoder_no_repeat_ngram_size": 0,
121
- "eos_token_id": null,
122
- "exponential_decay_length_penalty": null,
123
- "finetuning_task": null,
124
- "forced_bos_token_id": null,
125
- "forced_eos_token_id": null,
126
- "hidden_act": "quick_gelu",
127
- "hidden_size": 1024,
128
- "id2label": {
129
- "0": "LABEL_0",
130
- "1": "LABEL_1"
131
- },
132
- "image_size": 336,
133
- "initializer_factor": 1.0,
134
- "initializer_range": 0.02,
135
- "intermediate_size": 4096,
136
- "is_decoder": false,
137
- "is_encoder_decoder": false,
138
- "label2id": {
139
- "LABEL_0": 0,
140
- "LABEL_1": 1
141
- },
142
- "layer_norm_eps": 1e-05,
143
- "length_penalty": 1.0,
144
- "max_length": 20,
145
- "min_length": 0,
146
- "mm_vision_select_feature": "patch",
147
- "mm_vision_select_layer": -2,
148
- "model_type": "llava_phi_clip_vision_model",
149
- "no_repeat_ngram_size": 0,
150
- "num_attention_heads": 16,
151
- "num_beam_groups": 1,
152
- "num_beams": 1,
153
- "num_channels": 3,
154
- "num_hidden_layers": 24,
155
- "num_return_sequences": 1,
156
- "output_attentions": false,
157
- "output_hidden_states": false,
158
- "output_scores": false,
159
- "pad_token_id": null,
160
- "patch_size": 14,
161
- "prefix": null,
162
- "problem_type": null,
163
- "projection_dim": 768,
164
- "pruned_heads": {},
165
- "remove_invalid_values": false,
166
- "repetition_penalty": 1.0,
167
- "return_dict": true,
168
- "return_dict_in_generate": false,
169
- "sep_token_id": null,
170
- "suppress_tokens": null,
171
- "task_specific_params": null,
172
- "temperature": 1.0,
173
- "tf_legacy_loss": false,
174
- "tie_encoder_decoder": false,
175
- "tie_word_embeddings": true,
176
- "tokenizer_class": null,
177
- "top_k": 50,
178
- "top_p": 1.0,
179
- "torch_dtype": null,
180
- "torchscript": false,
181
- "transformers_version": "4.36.2",
182
- "typical_p": 1.0,
183
- "use_bfloat16": false
184
- }
185
- },
186
  "vocab_size": 51200
187
- }
 
1
  {
2
+ "_name_or_path": "/p/project/laionize/marianna/bakllava_original/llava-phi-2-3b",
3
  "architectures": [
4
  "LlavaPhiForCausalLM"
5
  ],
6
  "attention_dropout": 0.0,
7
+ "auto_map": {
8
+ "AutoConfig": "configuration_phi.PhiConfig",
9
+ "AutoModelForCausalLM": "modeling_phi.PhiForCausalLM"
10
+ },
11
+ "bos_token_id": null,
12
  "embd_pdrop": 0.0,
13
+ "eos_token_id": null,
14
+ "freeze_mm_mlp_adapter": false,
15
  "hidden_act": "gelu_new",
16
  "hidden_size": 2560,
17
  "image_aspect_ratio": "pad",
18
+ "image_grid_pinpoints": null,
19
  "initializer_range": 0.02,
20
  "intermediate_size": 10240,
21
  "layer_norm_eps": 1e-05,
22
  "max_position_embeddings": 2048,
23
+ "mm_hidden_size": 1024,
24
+ "mm_projector_type": "mlp2x_gelu",
25
  "mm_use_im_patch_token": false,
26
  "mm_use_im_start_end": false,
27
+ "mm_vision_select_feature": "patch",
28
+ "mm_vision_select_layer": -2,
29
+ "mm_vision_tower": "openai/clip-vit-large-patch14-336",
30
+ "model_type": "phi-llava",
31
  "num_attention_heads": 32,
32
  "num_hidden_layers": 32,
33
+ "num_key_value_heads": 32,
34
  "partial_rotary_factor": 0.4,
 
35
  "qk_layernorm": false,
36
  "resid_pdrop": 0.1,
37
  "rope_scaling": null,
38
  "rope_theta": 10000.0,
39
  "tie_word_embeddings": false,
 
 
40
  "torch_dtype": "float32",
41
+ "transformers_version": "4.37.0",
42
+ "tune_mm_mlp_adapter": false,
43
+ "use_cache": true,
44
+ "use_mm_proj": true,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
45
  "vocab_size": 51200
46
+ }