UniVoice / univoice_all / config.json
{
  "_name_or_path": "hf_ckpts/SmolLM2-360M-Instruct",
  "add_pos_embed_each_layer": false,
  "architectures": [
    "LlamaForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "audio_encoder_path": "hf_ckpts/whisper-large-v3",
  "bos_token_id": 1,
  "decoder_t_embed": "add_before_speech_tokens",
  "eos_token_id": 2,
  "hidden_act": "silu",
  "hidden_size": 960,
  "initializer_range": 0.02,
  "intermediate_size": 2560,
  "is_llama_config": true,
  "learn_sigma": true,
  "max_position_embeddings": 8192,
  "mlp_bias": false,
  "model_type": "llama",
  "num_attention_heads": 15,
  "num_hidden_layers": 32,
  "num_key_value_heads": 5,
  "pad_token_id": 2,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-05,
  "rope_interleaved": false,
  "rope_scaling": null,
  "rope_theta": 100000,
  "speaker_encoder_path": "hf_ckpts/wav2vec2-large-xlsr-53",
  "tie_word_embeddings": true,
  "tokenizer_max_length": 1024,
  "tokenizer_padding_side": "right",
  "torch_dtype": "bfloat16",
  "transformers.js_config": {
    "kv_cache_dtype": {
      "fp16": "float16",
      "q4f16": "float16"
    }
  },
  "transformers_version": "4.39.0",
  "use_adaln_final_layer": true,
  "use_bi_attn_speech_tokens": true,
  "use_cache": true,
  "use_flash_attn": false,
  "use_hybrid_attn_mask": false,
  "use_pos_embed": true,
  "vocab_size": 49152
}
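
The backbone fields match the SmolLM2-360M geometry: a 32-layer Llama model with `hidden_size` 960 split across 15 attention heads (head_dim 64), using grouped-query attention with 5 key/value heads (3 query heads per KV head). The remaining keys (`audio_encoder_path`, `decoder_t_embed`, `use_adaln_final_layer`, and so on) are UniVoice-specific extensions that the standard `LlamaConfig` schema does not define; `transformers` keeps such unknown keys as plain attributes on the loaded config object. A minimal loading sketch, assuming the file above is saved in a local `univoice_all/` directory (the path is illustrative):

```python
from transformers import AutoConfig

# model_type is "llama", so AutoConfig resolves this file to LlamaConfig.
# The directory path is an assumption; point it at wherever config.json lives.
config = AutoConfig.from_pretrained("univoice_all")

# Standard Llama geometry declared in the config.
head_dim = config.hidden_size // config.num_attention_heads           # 960 // 15 = 64
kv_groups = config.num_attention_heads // config.num_key_value_heads  # 15 // 5 = 3

# Keys LlamaConfig does not know about (audio_encoder_path, decoder_t_embed,
# learn_sigma, ...) survive as plain attributes, so UniVoice's own modeling
# code can read them back from the same object.
print(config.audio_encoder_path)  # hf_ckpts/whisper-large-v3
print(config.decoder_t_embed)     # add_before_speech_tokens
```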
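
Since `use_cache` is true and the `transformers.js_config` block pins the KV cache to `float16` for the `fp16` and `q4f16` ONNX variants, the same fields also fix the per-token cache footprint. A back-of-the-envelope sketch using the generic 2 × layers × kv_heads × head_dim × bytes accounting (nothing UniVoice-specific):

```python
def kv_cache_bytes_per_token(num_layers: int, num_kv_heads: int,
                             head_dim: int, bytes_per_elem: int) -> int:
    # Keys and values are both cached, hence the factor of 2.
    return 2 * num_layers * num_kv_heads * head_dim * bytes_per_elem

# Values from the config above; float16/bfloat16 take 2 bytes per element.
per_token = kv_cache_bytes_per_token(num_layers=32, num_kv_heads=5,
                                     head_dim=64, bytes_per_elem=2)
print(per_token)                 # 40960 bytes = 40 KiB per token
print(per_token * 8192 / 2**20)  # 320.0 MiB at the 8192-token position limit
```

The small KV-head count is what keeps this affordable: with full multi-head attention (15 KV heads) the cache would be three times larger.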