wyy-code committed on
Commit 6b3bcc2 · verified · 1 Parent(s): 3eabd40

Delete models

models/Readme.md DELETED
@@ -1 +0,0 @@
- Uploading the ckpt...
 
 
models/config.json DELETED
@@ -1,34 +0,0 @@
- {
-   "architectures": [
-     "Phi3ForCausalLM"
-   ],
-   "attention_bias": false,
-   "attention_dropout": 0.0,
-   "auto_map": {},
-   "bos_token_id": 100257,
-   "embd_pdrop": 0.0,
-   "eos_token_id": 100257,
-   "hidden_act": "silu",
-   "hidden_size": 5120,
-   "initializer_range": 0.02,
-   "intermediate_size": 17920,
-   "is_model_parallel": false,
-   "max_position_embeddings": 16384,
-   "model_type": "phi3",
-   "num_attention_heads": 40,
-   "num_hidden_layers": 40,
-   "num_key_value_heads": 10,
-   "original_max_position_embeddings": 16384,
-   "pad_token_id": 100257,
-   "partial_rotary_factor": 1.0,
-   "resid_pdrop": 0.0,
-   "rms_norm_eps": 1e-05,
-   "rope_scaling": null,
-   "rope_theta": 250000,
-   "sliding_window": null,
-   "tie_word_embeddings": false,
-   "torch_dtype": "bfloat16",
-   "transformers_version": "4.50.0.dev0",
-   "use_cache": true,
-   "vocab_size": 100352
- }
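
The deleted config describes a 14.7B-parameter Phi-3-style model. As a sanity check, the shapes implied by config.json reproduce exactly the "total_size" recorded in pytorch_model.bin.index.json further down; the sketch below is my own illustration of that arithmetic (the fused qkv_proj/gate_up_proj layout is taken from the weight names in the index), not part of the commit.

# Parameter count implied by the deleted config.json above.
hidden, inter, layers = 5120, 17920, 40
heads, kv_heads, vocab = 40, 10, 100352
head_dim = hidden // heads                         # 128

qkv = hidden * (hidden + 2 * kv_heads * head_dim)  # fused q/k/v projection
o_proj = hidden * hidden
gate_up = hidden * 2 * inter                       # fused gate + up projection
down = inter * hidden
norms = 2 * hidden                                 # input + post-attention RMSNorm
per_layer = qkv + o_proj + gate_up + down + norms

embed = vocab * hidden                             # tie_word_embeddings is false,
lm_head = vocab * hidden                           # so lm_head is a separate matrix
total = layers * per_layer + embed + lm_head + hidden  # + final model.norm

print(total)       # 14659507200 parameters
print(total * 2)   # 29319014400 bytes in bfloat16 == index.json "total_size"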
 
 
models/generation_config.json DELETED
@@ -1,9 +0,0 @@
- {
-   "_from_model_config": true,
-   "bos_token_id": 100257,
-   "eos_token_id": [
-     100257,
-     100265
-   ],
-   "transformers_version": "4.50.0.dev0"
- }
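
Note the two stop ids: generation ends at either 100257 (<|endoftext|>) or 100265 (<|im_end|>, per tokenizer_config.json below). A minimal sketch of rebuilding the same object in transformers, purely for illustration:

from transformers import GenerationConfig

# Equivalent of the deleted generation_config.json: stop on either token id.
gen_config = GenerationConfig(bos_token_id=100257, eos_token_id=[100257, 100265])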
 
 
models/merges.txt DELETED
The diff for this file is too large to render. See raw diff
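
Per tokenizer_config.json below ("tokenizer_class": "GPT2Tokenizer"), merges.txt held the BPE merge rules that pair with vocab.json; the ids above 100255 come from added_tokens_decoder rather than these files. A hedged sketch of loading the pair directly, assuming the files were still present:

from transformers import GPT2Tokenizer

# Build the BPE tokenizer from its raw vocab/merges files (paths as in this repo).
tok = GPT2Tokenizer(vocab_file="models/vocab.json", merges_file="models/merges.txt")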
 
models/pytorch_model-00001-of-00006.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:b9d1cb301eca129b77fe3d4caf3c13d23c8066e76720a2732eec07f8f2d15be0
- size 4933664874
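
These .bin entries are Git LFS pointer files, not the weights themselves: each records the spec version, the SHA-256 of the stored object, and its size in bytes (~4.9 GB here). The five shards below follow the same three-line format. A minimal parser, my own illustration:

def parse_lfs_pointer(text: str) -> dict:
    """Split a Git LFS pointer file into its key/value fields."""
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {"version": fields["version"],
            "oid": fields["oid"],          # e.g. "sha256:b9d1cb30..."
            "size": int(fields["size"])}   # object size in bytes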
 
 
models/pytorch_model-00002-of-00006.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:f1a40920e88bcaeb352a0617d59f3e0a2e91c40e2bdd4a35223cba5c8868df28
- size 4954701704
 
 
models/pytorch_model-00003-of-00006.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:096c2de7d76eba0d8cbb57c95f91681a358a8b7565c7daeae1d8d3e1fe84dd5d
- size 4902252026
 
 
models/pytorch_model-00004-of-00006.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:518bdd688ad80330d70d4cef20e7c51173f29cc57f42fba9486b37ecc92bdbd2
- size 4771179362
 
 
models/pytorch_model-00005-of-00006.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:f608b4d19f3dafbde60bcf50950b49548ac628a282a2bb163d26b7fdb4fe32b2
- size 4771179362
 
 
models/pytorch_model-00006-of-00006.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:62f0773e583dc908285f5acc9d290dc6d315e35ebdcbf950833a4404fc76b83d
- size 4986125082
 
 
models/pytorch_model.bin.index.json DELETED
@@ -1,250 +0,0 @@
- {
-   "metadata": {
-     "total_size": 29319014400
-   },
-   "weight_map": {
-     "lm_head.weight": "pytorch_model-00006-of-00006.bin",
-     "model.embed_tokens.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.0.input_layernorm.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.0.mlp.down_proj.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.0.mlp.gate_up_proj.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.0.post_attention_layernorm.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.0.self_attn.o_proj.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.0.self_attn.qkv_proj.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.1.input_layernorm.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.1.mlp.down_proj.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.1.mlp.gate_up_proj.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.1.post_attention_layernorm.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.1.self_attn.o_proj.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.1.self_attn.qkv_proj.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.10.input_layernorm.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.10.mlp.down_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.10.mlp.gate_up_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.10.post_attention_layernorm.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.10.self_attn.o_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.10.self_attn.qkv_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.11.input_layernorm.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.11.mlp.down_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.11.mlp.gate_up_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.11.post_attention_layernorm.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.11.self_attn.o_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.11.self_attn.qkv_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.12.input_layernorm.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.12.mlp.down_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.12.mlp.gate_up_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.12.post_attention_layernorm.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.12.self_attn.o_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.12.self_attn.qkv_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.13.input_layernorm.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.13.mlp.down_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.13.mlp.gate_up_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.13.post_attention_layernorm.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.13.self_attn.o_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.13.self_attn.qkv_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.14.input_layernorm.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.14.mlp.down_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.14.mlp.gate_up_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.14.post_attention_layernorm.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.14.self_attn.o_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.14.self_attn.qkv_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.15.input_layernorm.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.15.mlp.down_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.15.mlp.gate_up_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.15.post_attention_layernorm.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.15.self_attn.o_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.15.self_attn.qkv_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.16.input_layernorm.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.16.mlp.down_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.16.mlp.gate_up_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.16.post_attention_layernorm.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.16.self_attn.o_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.16.self_attn.qkv_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.17.input_layernorm.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.17.mlp.down_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.17.mlp.gate_up_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.17.post_attention_layernorm.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.17.self_attn.o_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.17.self_attn.qkv_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.18.input_layernorm.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.18.mlp.down_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.18.mlp.gate_up_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.18.post_attention_layernorm.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.18.self_attn.o_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.18.self_attn.qkv_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.19.input_layernorm.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.19.mlp.down_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.19.mlp.gate_up_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.19.post_attention_layernorm.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.19.self_attn.o_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.19.self_attn.qkv_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.2.input_layernorm.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.2.mlp.down_proj.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.2.mlp.gate_up_proj.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.2.post_attention_layernorm.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.2.self_attn.o_proj.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.2.self_attn.qkv_proj.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.20.input_layernorm.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.20.mlp.down_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.20.mlp.gate_up_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.20.post_attention_layernorm.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.20.self_attn.o_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.20.self_attn.qkv_proj.weight": "pytorch_model-00003-of-00006.bin",
-     "model.layers.21.input_layernorm.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.21.mlp.down_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.21.mlp.gate_up_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.21.post_attention_layernorm.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.21.self_attn.o_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.21.self_attn.qkv_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.22.input_layernorm.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.22.mlp.down_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.22.mlp.gate_up_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.22.post_attention_layernorm.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.22.self_attn.o_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.22.self_attn.qkv_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.23.input_layernorm.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.23.mlp.down_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.23.mlp.gate_up_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.23.post_attention_layernorm.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.23.self_attn.o_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.23.self_attn.qkv_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.24.input_layernorm.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.24.mlp.down_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.24.mlp.gate_up_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.24.post_attention_layernorm.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.24.self_attn.o_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.24.self_attn.qkv_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.25.input_layernorm.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.25.mlp.down_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.25.mlp.gate_up_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.25.post_attention_layernorm.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.25.self_attn.o_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.25.self_attn.qkv_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.26.input_layernorm.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.26.mlp.down_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.26.mlp.gate_up_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.26.post_attention_layernorm.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.26.self_attn.o_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.26.self_attn.qkv_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.27.input_layernorm.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.27.mlp.down_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.27.mlp.gate_up_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.27.post_attention_layernorm.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.27.self_attn.o_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.27.self_attn.qkv_proj.weight": "pytorch_model-00004-of-00006.bin",
-     "model.layers.28.input_layernorm.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.28.mlp.down_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.28.mlp.gate_up_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.28.post_attention_layernorm.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.28.self_attn.o_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.28.self_attn.qkv_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.29.input_layernorm.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.29.mlp.down_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.29.mlp.gate_up_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.29.post_attention_layernorm.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.29.self_attn.o_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.29.self_attn.qkv_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.3.input_layernorm.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.3.mlp.down_proj.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.3.mlp.gate_up_proj.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.3.post_attention_layernorm.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.3.self_attn.o_proj.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.3.self_attn.qkv_proj.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.30.input_layernorm.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.30.mlp.down_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.30.mlp.gate_up_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.30.post_attention_layernorm.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.30.self_attn.o_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.30.self_attn.qkv_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.31.input_layernorm.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.31.mlp.down_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.31.mlp.gate_up_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.31.post_attention_layernorm.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.31.self_attn.o_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.31.self_attn.qkv_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.32.input_layernorm.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.32.mlp.down_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.32.mlp.gate_up_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.32.post_attention_layernorm.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.32.self_attn.o_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.32.self_attn.qkv_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.33.input_layernorm.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.33.mlp.down_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.33.mlp.gate_up_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.33.post_attention_layernorm.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.33.self_attn.o_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.33.self_attn.qkv_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.34.input_layernorm.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.34.mlp.down_proj.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.34.mlp.gate_up_proj.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.34.post_attention_layernorm.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.34.self_attn.o_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.34.self_attn.qkv_proj.weight": "pytorch_model-00005-of-00006.bin",
-     "model.layers.35.input_layernorm.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.35.mlp.down_proj.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.35.mlp.gate_up_proj.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.35.post_attention_layernorm.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.35.self_attn.o_proj.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.35.self_attn.qkv_proj.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.36.input_layernorm.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.36.mlp.down_proj.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.36.mlp.gate_up_proj.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.36.post_attention_layernorm.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.36.self_attn.o_proj.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.36.self_attn.qkv_proj.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.37.input_layernorm.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.37.mlp.down_proj.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.37.mlp.gate_up_proj.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.37.post_attention_layernorm.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.37.self_attn.o_proj.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.37.self_attn.qkv_proj.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.38.input_layernorm.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.38.mlp.down_proj.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.38.mlp.gate_up_proj.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.38.post_attention_layernorm.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.38.self_attn.o_proj.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.38.self_attn.qkv_proj.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.39.input_layernorm.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.39.mlp.down_proj.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.39.mlp.gate_up_proj.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.39.post_attention_layernorm.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.39.self_attn.o_proj.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.39.self_attn.qkv_proj.weight": "pytorch_model-00006-of-00006.bin",
-     "model.layers.4.input_layernorm.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.4.mlp.down_proj.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.4.mlp.gate_up_proj.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.4.post_attention_layernorm.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.4.self_attn.o_proj.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.4.self_attn.qkv_proj.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.5.input_layernorm.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.5.mlp.down_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.5.mlp.gate_up_proj.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.5.post_attention_layernorm.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.5.self_attn.o_proj.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.5.self_attn.qkv_proj.weight": "pytorch_model-00001-of-00006.bin",
-     "model.layers.6.input_layernorm.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.6.mlp.down_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.6.mlp.gate_up_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.6.post_attention_layernorm.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.6.self_attn.o_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.6.self_attn.qkv_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.7.input_layernorm.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.7.mlp.down_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.7.mlp.gate_up_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.7.post_attention_layernorm.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.7.self_attn.o_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.7.self_attn.qkv_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.8.input_layernorm.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.8.mlp.down_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.8.mlp.gate_up_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.8.post_attention_layernorm.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.8.self_attn.o_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.8.self_attn.qkv_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.9.input_layernorm.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.9.mlp.down_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.9.mlp.gate_up_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.9.post_attention_layernorm.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.9.self_attn.o_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.layers.9.self_attn.qkv_proj.weight": "pytorch_model-00002-of-00006.bin",
-     "model.norm.weight": "pytorch_model-00006-of-00006.bin"
-   }
- }
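
The index maps every tensor name to the shard that stores it; note that layers 5, 20, 27, and 34 straddle shard boundaries (their attention projections live in the earlier shard). A simplified sketch of how a loader would reassemble the checkpoint from this index, assuming the six shards sat next to it, roughly mirroring what transformers does internally:

import json
import torch

with open("models/pytorch_model.bin.index.json") as f:
    index = json.load(f)

# Each shard holds a disjoint subset of the weights; merge them all.
state_dict = {}
for shard_name in sorted(set(index["weight_map"].values())):
    state_dict.update(torch.load(f"models/{shard_name}", map_location="cpu"))

assert state_dict.keys() == index["weight_map"].keys()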
 
 
models/special_tokens_map.json DELETED
@@ -1,18 +0,0 @@
- {
-   "bos_token": {
-     "content": "<|endoftext|>",
-     "lstrip": true,
-     "normalized": false,
-     "rstrip": true,
-     "single_word": false
-   },
-   "eos_token": {
-     "content": "<|endoftext|>",
-     "lstrip": true,
-     "normalized": false,
-     "rstrip": true,
-     "single_word": false
-   },
-   "pad_token": "<|endoftext|>",
-   "unk_token": "<|endoftext|>"
- }
 
 
models/tokenizer.json DELETED
The diff for this file is too large to render. See raw diff
 
models/tokenizer_config.json DELETED
@@ -1,782 +0,0 @@
- {
-   "add_prefix_space": false,
-   "added_tokens_decoder": {
-     "100256": {
-       "content": "<|dummy_0|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100257": {
-       "content": "<|endoftext|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100258": {
-       "content": "<|fim_prefix|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100259": {
-       "content": "<|fim_middle|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100260": {
-       "content": "<|fim_suffix|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100261": {
-       "content": "<|dummy_1|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100262": {
-       "content": "<|dummy_2|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100263": {
-       "content": "<|dummy_3|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100264": {
-       "content": "<|im_start|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100265": {
-       "content": "<|im_end|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100266": {
-       "content": "<|im_sep|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100267": {
-       "content": "<|dummy_4|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100268": {
-       "content": "<|dummy_5|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100269": {
-       "content": "<|dummy_6|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100270": {
-       "content": "<|dummy_7|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100271": {
-       "content": "<|dummy_8|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100272": {
-       "content": "<|dummy_9|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100273": {
-       "content": "<|dummy_10|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100274": {
-       "content": "<|dummy_11|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100275": {
-       "content": "<|dummy_12|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100276": {
-       "content": "<|endofprompt|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100277": {
-       "content": "<|dummy_13|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100278": {
-       "content": "<|dummy_14|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100279": {
-       "content": "<|dummy_15|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100280": {
-       "content": "<|dummy_16|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100281": {
-       "content": "<|dummy_17|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100282": {
-       "content": "<|dummy_18|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100283": {
-       "content": "<|dummy_19|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100284": {
-       "content": "<|dummy_20|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100285": {
-       "content": "<|dummy_21|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100286": {
-       "content": "<|dummy_22|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100287": {
-       "content": "<|dummy_23|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100288": {
-       "content": "<|dummy_24|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100289": {
-       "content": "<|dummy_25|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100290": {
-       "content": "<|dummy_26|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100291": {
-       "content": "<|dummy_27|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100292": {
-       "content": "<|dummy_28|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100293": {
-       "content": "<|dummy_29|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100294": {
-       "content": "<|dummy_30|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100295": {
-       "content": "<|dummy_31|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100296": {
-       "content": "<|dummy_32|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100297": {
-       "content": "<|dummy_33|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100298": {
-       "content": "<|dummy_34|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100299": {
-       "content": "<|dummy_35|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100300": {
-       "content": "<|dummy_36|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100301": {
-       "content": "<|dummy_37|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100302": {
-       "content": "<|dummy_38|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100303": {
-       "content": "<|dummy_39|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100304": {
-       "content": "<|dummy_40|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100305": {
-       "content": "<|dummy_41|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100306": {
-       "content": "<|dummy_42|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100307": {
-       "content": "<|dummy_43|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100308": {
-       "content": "<|dummy_44|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100309": {
-       "content": "<|dummy_45|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100310": {
-       "content": "<|dummy_46|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100311": {
-       "content": "<|dummy_47|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100312": {
-       "content": "<|dummy_48|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100313": {
-       "content": "<|dummy_49|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100314": {
-       "content": "<|dummy_50|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100315": {
-       "content": "<|dummy_51|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100316": {
-       "content": "<|dummy_52|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100317": {
-       "content": "<|dummy_53|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100318": {
-       "content": "<|dummy_54|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100319": {
-       "content": "<|dummy_55|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100320": {
-       "content": "<|dummy_56|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100321": {
-       "content": "<|dummy_57|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100322": {
-       "content": "<|dummy_58|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100323": {
-       "content": "<|dummy_59|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100324": {
-       "content": "<|dummy_60|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100325": {
-       "content": "<|dummy_61|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100326": {
-       "content": "<|dummy_62|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100327": {
-       "content": "<|dummy_63|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100328": {
-       "content": "<|dummy_64|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100329": {
-       "content": "<|dummy_65|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100330": {
-       "content": "<|dummy_66|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100331": {
-       "content": "<|dummy_67|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100332": {
-       "content": "<|dummy_68|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100333": {
-       "content": "<|dummy_69|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100334": {
-       "content": "<|dummy_70|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100335": {
-       "content": "<|dummy_71|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100336": {
-       "content": "<|dummy_72|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100337": {
-       "content": "<|dummy_73|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100338": {
-       "content": "<|dummy_74|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100339": {
-       "content": "<|dummy_75|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100340": {
-       "content": "<|dummy_76|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100341": {
-       "content": "<|dummy_77|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100342": {
-       "content": "<|dummy_78|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100343": {
-       "content": "<|dummy_79|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100344": {
-       "content": "<|dummy_80|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100345": {
-       "content": "<|dummy_81|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100346": {
-       "content": "<|dummy_82|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100347": {
-       "content": "<|dummy_83|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100348": {
-       "content": "<|dummy_84|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100349": {
-       "content": "<|dummy_85|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100350": {
-       "content": "<|dummy_86|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     },
-     "100351": {
-       "content": "<|dummy_87|>",
-       "lstrip": true,
-       "normalized": false,
-       "rstrip": true,
-       "single_word": false,
-       "special": true
-     }
-   },
-   "bos_token": "<|endoftext|>",
-   "chat_template": "{% for message in messages %}{% if (message['role'] == 'system') %}{{'<|im_start|>system<|im_sep|>' + message['content'] + '<|im_end|>'}}{% elif (message['role'] == 'user') %}{{'<|im_start|>user<|im_sep|>' + message['content'] + '<|im_end|><|im_start|>assistant<|im_sep|>'}}{% elif (message['role'] == 'assistant') %}{{message['content'] + '<|im_end|>'}}{% endif %}{% endfor %}",
-   "clean_up_tokenization_spaces": false,
-   "eos_token": "<|endoftext|>",
-   "extra_special_tokens": {},
-   "model_max_length": 16384,
-   "pad_token": "<|endoftext|>",
-   "tokenizer_class": "GPT2Tokenizer",
-   "unk_token": "<|endoftext|>"
- }
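
The chat_template wraps conversations in <|im_start|>/<|im_sep|>/<|im_end|> markers, and its user branch already appends the assistant prefix. A short sketch of what rendering it would produce, assuming the tokenizer files were still in models/ (the output string follows directly from the template above):

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("models")
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hi!"},
]
prompt = tok.apply_chat_template(messages, tokenize=False)
# -> '<|im_start|>system<|im_sep|>You are a helpful assistant.<|im_end|>'
#    '<|im_start|>user<|im_sep|>Hi!<|im_end|><|im_start|>assistant<|im_sep|>'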
 
 
models/vocab.json DELETED
The diff for this file is too large to render. See raw diff