skeskinen committed on
Commit
f2a1597
·
1 Parent(s): eb506d5

Upload LlamaForCausalLM

Browse files
Files changed (2) hide show
  1. config.json +1 -1
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -10,7 +10,7 @@
10
  "intermediate_size": 2048,
11
  "max_position_embeddings": 2048,
12
  "model_type": "llama",
13
- "num_attention_heads": 12,
14
  "num_hidden_layers": 12,
15
  "pad_token_id": 0,
16
  "rms_norm_eps": 1e-12,
 
10
  "intermediate_size": 2048,
11
  "max_position_embeddings": 2048,
12
  "model_type": "llama",
13
+ "num_attention_heads": 6,
14
  "num_hidden_layers": 12,
15
  "pad_token_id": 0,
16
  "rms_norm_eps": 1e-12,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e8845dbef1079d80d6d89e6e09197ffba5324b0e1f1a8564a065824eeb7de35c
3
- size 536464249
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5fdaf22d518f60055b56450531e7d0b52e0df644efcd0919f619bd85c7f54235
3
+ size 536465785