Commit 369137d (verified) · MCES10 committed · 1 parent: aafc555

Upload config.json with huggingface_hub

Files changed (1)
  1. config.json  +138 -0
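
The commit message indicates the file was pushed with huggingface_hub. For reference, a minimal sketch of that kind of upload; the repo id below is a placeholder for illustration, not taken from this page:

    from huggingface_hub import HfApi

    api = HfApi()

    # Push a local config.json to the root of the model repo.
    # "MCES10/<model-name>" is a placeholder; the actual repo id is not shown here.
    api.upload_file(
        path_or_fileobj="config.json",
        path_in_repo="config.json",
        repo_id="MCES10/<model-name>",
        repo_type="model",
        commit_message="Upload config.json with huggingface_hub",
    )
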
config.json ADDED
@@ -0,0 +1,138 @@
+{
+  "architectures": [
+    "Phi3ForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 199999,
+  "embd_pdrop": 0.0,
+  "eos_token_id": 199999,
+  "full_attn_mod": 1,
+  "hidden_act": "silu",
+  "hidden_size": 3072,
+  "initializer_range": 0.02,
+  "intermediate_size": 8192,
+  "interpolate_factor": 1,
+  "lm_head_bias": false,
+  "max_position_embeddings": 131072,
+  "mlp_bias": false,
+  "model_type": "phi3",
+  "num_attention_heads": 24,
+  "num_hidden_layers": 32,
+  "num_key_value_heads": 8,
+  "original_max_position_embeddings": 4096,
+  "pad_token_id": 199999,
+  "partial_rotary_factor": 0.75,
+  "resid_pdrop": 0.0,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": {
+    "long_factor": [
+      1,
+      1.118320672,
+      1.250641126,
+      1.398617824,
+      1.564103225,
+      1.74916897,
+      1.956131817,
+      2.187582649,
+      2.446418898,
+      2.735880826,
+      3.059592084,
+      3.421605075,
+      3.826451687,
+      4.279200023,
+      4.785517845,
+      5.351743533,
+      5.984965424,
+      6.693110555,
+      7.485043894,
+      8.370679318,
+      9.36110372,
+      10.4687158,
+      11.70738129,
+      13.09260651,
+      14.64173252,
+      16.37415215,
+      18.31155283,
+      20.47818807,
+      22.90118105,
+      25.61086418,
+      28.64115884,
+      32.03,
+      32.1,
+      32.13,
+      32.23,
+      32.6,
+      32.61,
+      32.64,
+      32.66,
+      32.7,
+      32.71,
+      32.93,
+      32.97,
+      33.28,
+      33.49,
+      33.5,
+      44.16,
+      47.77
+    ],
+    "short_factor": [
+      1,
+      1.118320672,
+      1.250641126,
+      1.398617824,
+      1.564103225,
+      1.74916897,
+      1.956131817,
+      2.187582649,
+      2.446418898,
+      2.735880826,
+      3.059592084,
+      3.421605075,
+      3.826451687,
+      4.279200023,
+      4.785517845,
+      5.351743533,
+      5.984965424,
+      6.693110555,
+      7.485043894,
+      8.370679318,
+      9.36110372,
+      10.4687158,
+      11.70738129,
+      13.09260651,
+      14.64173252,
+      16.37415215,
+      18.31155283,
+      20.47818807,
+      22.90118105,
+      25.61086418,
+      28.64115884,
+      32.03,
+      32.1,
+      32.13,
+      32.23,
+      32.6,
+      32.61,
+      32.64,
+      32.66,
+      32.7,
+      32.71,
+      32.93,
+      32.97,
+      33.28,
+      33.49,
+      33.5,
+      44.16,
+      47.77
+    ],
+    "type": "longrope"
+  },
+  "rope_theta": 10000.0,
+  "sliding_window": 262144,
+  "tie_word_embeddings": true,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.50.0",
+  "use_cache": true,
+  "vocab_size": 200064
+}
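
The config declares a Phi-3-style decoder with grouped-query attention (24 query heads sharing 8 key/value heads) and LongRoPE scaling that extends the original 4096-token context to 131072 positions. A minimal sketch of loading and inspecting it with transformers (4.50.0 or later, per transformers_version); the repo id is again a placeholder:

    from transformers import AutoConfig

    # Placeholder repo id; substitute the actual model repo.
    config = AutoConfig.from_pretrained("MCES10/<model-name>")

    print(config.model_type)                        # phi3
    print(config.num_attention_heads,               # 24 query heads ...
          config.num_key_value_heads)               # ... sharing 8 KV heads (GQA)
    print(config.original_max_position_embeddings,  # 4096 pre-training context
          config.max_position_embeddings)           # 131072 after LongRoPE scaling
    print(config.rope_scaling["type"])              # longrope

With partial_rotary_factor 0.75 and a head dimension of 3072 / 24 = 128, rotary embeddings cover the first 96 dimensions of each head, which is why each factor list has exactly 48 entries: one scaling factor per rotary frequency pair.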