bigdefence committed on
Commit
d2961f4
·
verified ·
1 Parent(s): 6cb2945

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +1 -1
config.json CHANGED
@@ -50,7 +50,7 @@
50
  "max_position_embeddings": 65536,
51
  "mlp_bias": false,
52
  "mm_tunable_parts": "speech_projector,backbone",
53
- "model_type": "omni_speech_llama",
54
  "num_attention_heads": 32,
55
  "num_hidden_layers": 30,
56
  "num_key_value_heads": 8,
 
50
  "max_position_embeddings": 65536,
51
  "mlp_bias": false,
52
  "mm_tunable_parts": "speech_projector,backbone",
53
+ "model_type": "omni_speech_exaone",
54
  "num_attention_heads": 32,
55
  "num_hidden_layers": 30,
56
  "num_key_value_heads": 8,