RaushanA committed (verified)
Commit e99402e · 1 Parent(s): 7789c93

Update config.json

Files changed (1): config.json (+19 -19)
config.json CHANGED
@@ -7,12 +7,12 @@
     "AutoConfig": "microsoft/Florence-2-base-ft--configuration_florence2.Florence2Config",
     "AutoModelForCausalLM": "microsoft/Florence-2-base-ft--modeling_florence2.Florence2ForConditionalGeneration"
   },
-  "bos_token_id": 0,
-  "eos_token_id": 2,
+  "bos_token_id": 2,
+  "eos_token_id": 1,
   "ignore_index": -100,
   "is_encoder_decoder": true,
   "model_type": "florence2",
-  "pad_token_id": 1,
+  "pad_token_id": 0,
   "projection_dim": 1024,
   "text_config": {
     "_name_or_path": "",
@@ -30,20 +30,20 @@
     "classif_dropout": 0.1,
     "classifier_dropout": 0.0,
     "cross_attention_hidden_size": null,
-    "d_model": 1024,
-    "decoder_attention_heads": 16,
-    "decoder_ffn_dim": 4096,
+    "d_model": 768,
+    "decoder_attention_heads": 12,
+    "decoder_ffn_dim": 3072,
     "decoder_layerdrop": 0.0,
-    "decoder_layers": 12,
+    "decoder_layers": 6,
     "decoder_start_token_id": 2,
     "diversity_penalty": 0.0,
     "do_sample": false,
     "dropout": 0.1,
     "early_stopping": true,
-    "encoder_attention_heads": 16,
-    "encoder_ffn_dim": 4096,
+    "encoder_attention_heads": 12,
+    "encoder_ffn_dim": 3072,
     "encoder_layerdrop": 0.0,
-    "encoder_layers": 12,
+    "encoder_layers": 6,
     "encoder_no_repeat_ngram_size": 0,
     "eos_token_id": 2,
     "exponential_decay_length_penalty": null,
@@ -73,7 +73,7 @@
     "normalize_before": false,
     "num_beam_groups": 1,
     "num_beams": 3,
-    "num_hidden_layers": 12,
+    "num_hidden_layers": 6,
     "num_return_sequences": 1,
     "output_attentions": false,
     "output_hidden_states": false,
@@ -105,7 +105,7 @@
     "vocab_size": 51289
   },
   "torch_dtype": "float32",
-  "transformers_version": "4.41.2",
+  "transformers_version": "4.42.3",
   "vision_config": {
     "_name_or_path": "",
     "add_cross_attention": false,
@@ -123,10 +123,10 @@
       1
     ],
     "dim_embed": [
+      128,
       256,
       512,
-      1024,
-      2048
+      1024
     ],
     "diversity_penalty": 0.0,
     "do_sample": false,
@@ -165,16 +165,16 @@
     "num_beam_groups": 1,
     "num_beams": 1,
     "num_groups": [
+      4,
       8,
       16,
-      32,
-      64
+      32
     ],
     "num_heads": [
+      4,
       8,
       16,
-      32,
-      64
+      32
     ],
     "num_return_sequences": 1,
     "output_attentions": false,
@@ -207,7 +207,7 @@
     ],
     "prefix": null,
     "problem_type": null,
-    "projection_dim": 1024,
+    "projection_dim": 768,
     "pruned_heads": {},
     "remove_invalid_values": false,
     "repetition_penalty": 1.0,