{
  "model_id": "llama3.1_8b_8gen4",
  "model_arch": "llama",
  "version": 20241202,
  "llm_config": {
    "embed_dim": 4096,
    "ffn_dim": 14336,
    "head_size": 128,
    "kv_dim": 1024,
    "n_attn_heads": 32,
    "n_attn_kv_heads": 8,
    "n_ctx": 131072,
    "n_layers": 32,
    "norm_eps": 9.999999747378752e-06,
    "vocab_size": 128256,
    "rope_config": {
      "n_rope_ctx_orig": 131072,
      "rope_attn_factor": 1.0,
      "rope_dim": 128,
      "rope_freq_base": 500000.0,
      "rope_freq_scale": 1.0,
      "rope_scale_type": "linear",
      "rope_type": 0
    }
  },
  "vision": {}
}
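The fields describe a standard grouped-query-attention Llama 3.1 8B layout, so the values are internally consistent: `embed_dim = n_attn_heads × head_size` (4096 = 32 × 128), `kv_dim = n_attn_kv_heads × head_size` (1024 = 8 × 128), and `rope_dim` equals the per-head size. A minimal Python sketch of that sanity check, assuming the JSON above is saved as `config.json` (the filename is illustrative, not prescribed by the format):

```python
import json

# Load the model config shown above (path is an assumption for this example).
with open("config.json") as f:
    cfg = json.load(f)

llm = cfg["llm_config"]
rope = llm["rope_config"]

# Grouped-query attention geometry: 32 query heads share 8 KV heads,
# so each KV head serves 32 / 8 = 4 query heads.
assert llm["embed_dim"] == llm["n_attn_heads"] * llm["head_size"]    # 4096 = 32 * 128
assert llm["kv_dim"] == llm["n_attn_kv_heads"] * llm["head_size"]    # 1024 = 8 * 128

# RoPE covers the full per-head dimension, with the 500000.0 base frequency
# and freq_scale = 1.0 given in rope_config.
assert rope["rope_dim"] == llm["head_size"]          # 128
assert rope["n_rope_ctx_orig"] == llm["n_ctx"]       # 131072

print(
    f"layers={llm['n_layers']}, q_heads={llm['n_attn_heads']}, "
    f"kv_heads={llm['n_attn_kv_heads']}, "
    f"gqa_group={llm['n_attn_heads'] // llm['n_attn_kv_heads']}"
)
```

The empty `"vision": {}` object simply indicates this configuration carries no vision tower; only the text-model parameters are populated.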