SergeMiro committed on
Commit
d8283a8
1 Parent(s): 72116f9

Update config.json

Files changed (1)
  1. config.json +10 -37
config.json CHANGED
@@ -1,41 +1,14 @@
  {
- "architectures": [
- "GPTForCausalLM"
- ],
- "model_type": "gpt",
- "vocab_size": 50257,
- "n_positions": 1024,
- "n_ctx": 1024,
- "n_embd": 768,
- "n_layer": 12,
- "n_head": 12,
- "activation_function": "gelu_new",
- "resid_pdrop": 0.1,
- "embd_pdrop": 0.1,
- "attn_pdrop": 0.1,
- "layer_norm_epsilon": 1e-5,
+ "model_type": "mistral",
+ "architectures": ["MistralForCausalLM"],
+ "hidden_size": 4096,
+ "num_attention_heads": 16,
+ "num_hidden_layers": 32,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 2048,
+ "layer_norm_eps": 1e-5,
  "initializer_range": 0.02,
- "summary_type": "cls_index",
- "summary_use_proj": true,
- "summary_activation": "gelu",
- "summary_proj_to_labels": true,
- "summary_first_dropout": 0.1,
- "scale_attn_weights": true,
  "use_cache": true,
- "bos_token_id": 50256,
- "eos_token_id": 50256,
- "pad_token_id": 50256,
- "max_length": 1024,
- "min_length": 0,
- "do_sample": false,
- "early_stopping": false,
- "num_beams": 1,
- "temperature": 1.0,
- "top_k": 50,
- "top_p": 1.0,
- "repetition_penalty": 1.0,
- "length_penalty": 1.0,
- "no_repeat_ngram_size": 0,
- "num_return_sequences": 1
  }
-
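
For reference, a minimal sketch, not part of this commit, of loading the updated config.json with the transformers library and inspecting the fields changed above. The local path "config.json" is an assumption; a model directory or the Hub repo id would also work.

# Minimal sketch; assumes transformers is installed and config.json is available locally.
from transformers import AutoConfig

# AutoConfig dispatches on "model_type": "mistral" and returns a MistralConfig.
cfg = AutoConfig.from_pretrained("config.json")

# Fields set by this commit:
print(cfg.model_type)           # mistral
print(cfg.hidden_size)          # 4096
print(cfg.num_attention_heads)  # 16
print(cfg.num_hidden_layers)    # 32
print(cfg.vocab_size)           # 50257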