Lake-nano / config.json
{
  "architectures": [
    "Lake1NanoModel"
  ],
  "expert_hidden_size": 64,
  "fp16": true,
  "hidden_size": 128,
  "intermediate_size": 256,
  "max_position_embeddings": 512,
  "model_type": "lake1",
  "num_attention_heads": 4,
  "num_experts": 4,
  "num_hidden_layers": 2,
  "torch_dtype": "float16",
  "transformers_version": "4.53.3",
  "vocab_size": 30522
}
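
A minimal sketch of inspecting this configuration programmatically. It assumes the file above is saved locally as config.json; because the model_type "lake1" is a custom architecture rather than a stock transformers one, loading it through AutoConfig would likely require the repo's own code, so this sketch simply parses the JSON directly.

```python
import json

# Load the configuration shown above (assumes it is saved as ./config.json).
with open("config.json") as f:
    cfg = json.load(f)

# Quantities derived from the config values.
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]  # 128 / 4 = 32

print(f"architecture:    {cfg['architectures'][0]}")         # Lake1NanoModel
print(f"hidden layers:   {cfg['num_hidden_layers']}")         # 2
print(f"attention heads: {cfg['num_attention_heads']} (head_dim={head_dim})")
print(f"experts:         {cfg['num_experts']} (expert_hidden_size={cfg['expert_hidden_size']})")
print(f"context length:  {cfg['max_position_embeddings']}")   # 512
print(f"vocab size:      {cfg['vocab_size']}")                # 30522
print(f"dtype:           {cfg['torch_dtype']}")               # float16
```

The printed head dimension (hidden_size / num_attention_heads = 32) and the expert fields follow directly from the values in the file; how the experts are wired into the layers is defined by the Lake1NanoModel implementation itself, not by this config alone.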