WesleySantos committed on
Commit
08ee86b
1 Parent(s): a22b18f

Upload LlamaForCausalLM (#4)


- Upload LlamaForCausalLM (310216432808ac106581d9a0a66aac34f043525e)

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "alanrios2001/Llama-2-7b-ptbr",
+  "_name_or_path": "daryl149/llama-2-7b-chat-hf",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -9,7 +9,7 @@
   "hidden_size": 4096,
   "initializer_range": 0.02,
   "intermediate_size": 11008,
-  "max_position_embeddings": 4096,
+  "max_position_embeddings": 2048,
   "model_type": "llama",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
@@ -18,7 +18,6 @@
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,
-  "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
   "transformers_version": "4.31.0",
pytorch_model-00001-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6a3a0e9c19b1ec09b89a75951c3a6cbf1e3796ea441a3a05403e7875d5e047e6
+oid sha256:e5b9108916566f9bfbbc5832c04ff140fe0873cbc596b49c001fb7a624aff383
 size 9976634558
pytorch_model-00002-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ca04ad528b8f3756951720b0d8b805fcb8608fbc6a05719ccb0b468cb9eaa390
+oid sha256:626b1ae30cb7806b5b9f6c087adaa7c5addb4774db539ed610ec7e55aece486b
 size 3500315539
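Both shards were re-uploaded, so only the LFS pointer oids changed while the file sizes stayed the same. A minimal sketch for checking a local download against the new pointers; the shard filename assumes it sits in the current directory:

```python
# Minimal sketch: verify a downloaded shard against the sha256 oid in its
# LFS pointer above. The local path is an assumption about where it was saved.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Hash the file in 1 MiB chunks so multi-GB shards fit in memory."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "e5b9108916566f9bfbbc5832c04ff140fe0873cbc596b49c001fb7a624aff383"
actual = sha256_of("pytorch_model-00001-of-00002.bin")
assert actual == expected, f"hash mismatch: {actual}"
```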