Commit dacab49 (verified) · Xenova (HF staff) committed · 1 parent: 7bb223c

Update config.json

Files changed (1): config.json (+13, -18)
config.json CHANGED
@@ -1,39 +1,34 @@
 {
-  "_attn_implementation_autoset": true,
-  "_name_or_path": "eustlb/moonshine",
-  "apply_spec_augment": false,
   "architectures": [
-    "MoonshineModel"
+    "MoonshineForConditionalGeneration"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 1,
   "decoder_hidden_act": "silu",
+  "decoder_num_attention_heads": 8,
+  "decoder_num_hidden_layers": 6,
+  "decoder_num_key_value_heads": 8,
   "decoder_start_token_id": 1,
   "encoder_hidden_act": "gelu",
+  "encoder_num_attention_heads": 8,
+  "encoder_num_hidden_layers": 6,
+  "encoder_num_key_value_heads": 8,
   "eos_token_id": 2,
-  "ff_mult": 4,
   "hidden_size": 288,
   "initializer_range": 0.02,
-  "intermediate_size": null,
+  "intermediate_size": 1152,
   "is_encoder_decoder": true,
-  "layer_norm_eps": 1e-05,
-  "mask_feature_length": 10,
-  "mask_feature_min_masks": 0,
-  "mask_feature_prob": 0.0,
-  "mask_time_length": 10,
-  "mask_time_min_masks": 2,
-  "mask_time_prob": 0.05,
-  "max_position_embeddings": 2048,
+  "max_position_embeddings": 512,
   "model_type": "moonshine",
   "num_attention_heads": 8,
   "num_hidden_layers": 6,
   "num_key_value_heads": 8,
-  "partial_rotary_factor": 0.5,
-  "qk_layernorm": false,
+  "partial_rotary_factor": 0.9,
   "rope_scaling": null,
   "rope_theta": 10000.0,
-  "transformers_version": "4.47.0.dev0",
+  "torch_dtype": "float32",
+  "transformers_version": "4.48.0.dev0",
   "use_cache": true,
   "vocab_size": 32768
-}
+}
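
For reference, a minimal sketch of checking the updated configuration with transformers. It assumes transformers >= 4.48 (the version recorded in the new config, where the Moonshine classes are available) and uses "<this-repo-id>" as a placeholder for this repository's id; it only loads and prints fields touched by this commit.

# Minimal sketch: load the updated config.json and inspect the fields changed in this commit.
# "<this-repo-id>" is a placeholder for this model repository; requires transformers >= 4.48.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("<this-repo-id>")

print(config.architectures)            # ["MoonshineForConditionalGeneration"]
print(config.intermediate_size)        # 1152
print(config.max_position_embeddings)  # 512
print(config.partial_rotary_factor)    # 0.9
print(config.encoder_num_attention_heads, config.decoder_num_attention_heads)  # 8 8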