{
  "model_type": "llama",
  "vocab_size": 33244,
  "hidden_size": 1024,
  "num_hidden_layers": 16,
  "num_attention_heads": 16,
  "num_key_value_heads": 4,
  "intermediate_size": 2816,
  "max_position_embeddings": 2048,
  "rms_norm_eps": 1e-06,
  "initializer_range": 0.02,
  "use_cache": true,
  "pad_token_id": 0,
  "bos_token_id": 2,
  "eos_token_id": 3,
  "tie_word_embeddings": false
}
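
The config describes a small LLaMA-architecture model: 16 layers with a 1024-dim hidden state and 16 attention heads gives 64-dim heads, and the 4 key/value heads imply grouped-query attention where every 4 query heads share one KV head. Below is a minimal sketch of instantiating this architecture, assuming the JSON above is saved as `config.json` (the file name is illustrative) and the Hugging Face `transformers` library is installed.

```python
# Minimal sketch: build a randomly initialized model from the config above.
# Assumes the JSON is saved at "config.json" (hypothetical path) and that
# the `transformers` library is available.
from transformers import LlamaConfig, LlamaForCausalLM

# Parse the hyperparameters into a LlamaConfig object.
config = LlamaConfig.from_json_file("config.json")

# Derived shapes from the config: 1024 / 16 heads = 64-dim heads;
# 16 query heads over 4 KV heads = grouped-query attention, group size 4.
head_dim = config.hidden_size // config.num_attention_heads          # 64
gqa_group = config.num_attention_heads // config.num_key_value_heads  # 4

# Instantiate the model with fresh weights (drawn with
# std = initializer_range = 0.02); no pretrained checkpoint is loaded.
model = LlamaForCausalLM(config)
print(f"head_dim={head_dim}, gqa_group={gqa_group}, "
      f"parameters={model.num_parameters():,}")
```

Because `tie_word_embeddings` is false, the input embedding matrix and the output LM head are separate parameters, so both contribute `vocab_size * hidden_size` weights to the total count.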