{
  "architectures": ["BVVAbsForCausalLM"],
  "auto_map": {
    "AutoConfig": "modeling_bvv_abs.BVVAbsConfig",
    "AutoModel": "modeling_bvv_abs.BVVAbsForCausalLM",
    "AutoModelForCausalLM": "modeling_bvv_abs.BVVAbsForCausalLM"
  },
  "model_type": "bvv_abs",
  "vocab_size": 131072,
  "block_size ": 1024,
  "n_embd": 4096,
  "n_layer": 5,
  "n_head": 32,
  "pad_id": 57344,
  "bos_token": "<s>",
  "eos_token": "</s>",
  "unk_token": "<unk>",
  "pad_token": "<pad>",
  "torch_dtype": "float32"
}
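
For reference, a minimal sketch of how a config with an `auto_map` like this is typically consumed; the path `./bvv_abs` is an assumption, standing in for a local directory (or Hub repo id) that holds this `config.json` alongside `modeling_bvv_abs.py`:

```python
# Minimal loading sketch. "./bvv_abs" is a hypothetical path: a directory (or
# Hub repo id) containing this config.json next to modeling_bvv_abs.py.
from transformers import AutoModelForCausalLM

# trust_remote_code=True lets transformers follow the auto_map entries above
# to the custom BVVAbsConfig / BVVAbsForCausalLM classes defined in
# modeling_bvv_abs.py instead of a built-in architecture.
model = AutoModelForCausalLM.from_pretrained(
    "./bvv_abs",
    trust_remote_code=True,
)
```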