Add files using upload-large-folder tool
- config.json +51 -0
- generation_config.json +7 -0
- merges.txt +0 -0
- model.safetensors +3 -0
- quant_log.csv +145 -0
- quantize_config.json +21 -0
- special_tokens_map.json +24 -0
- tokenizer.json +0 -0
- tokenizer_config.json +32 -0
- vocab.json +0 -0
config.json
ADDED
@@ -0,0 +1,51 @@
+{
+  "_remove_final_layer_norm": false,
+  "activation_dropout": 0.0,
+  "activation_function": "relu",
+  "architectures": [
+    "OPTForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 2,
+  "do_layer_norm_before": true,
+  "dropout": 0.1,
+  "enable_bias": true,
+  "eos_token_id": 2,
+  "ffn_dim": 8192,
+  "hidden_size": 2048,
+  "init_std": 0.02,
+  "layer_norm_elementwise_affine": true,
+  "layerdrop": 0.0,
+  "max_position_embeddings": 2048,
+  "model_type": "opt",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 24,
+  "pad_token_id": 1,
+  "prefix": "</s>",
+  "quantization_config": {
+    "bits": 4,
+    "checkpoint_format": "gptq",
+    "desc_act": false,
+    "group_size": 128,
+    "lm_head": false,
+    "meta": {
+      "damp_auto_increment": 0.0025,
+      "damp_percent": 0.01,
+      "mse": 0.0,
+      "quantizer": [
+        "gptqmodel:2.2.0"
+      ],
+      "static_groups": false,
+      "true_sequential": true,
+      "uri": "https://github.com/modelcloud/gptqmodel"
+    },
+    "pack_dtype": "int32",
+    "quant_method": "gptq",
+    "sym": true
+  },
+  "torch_dtype": "float16",
+  "transformers_version": "4.51.3",
+  "use_cache": true,
+  "vocab_size": 50272,
+  "word_embed_proj_dim": 2048
+}
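
Because `quantization_config` is embedded in config.json, the checkpoint loads through plain transformers calls, provided a GPTQ backend (optimum with gptqmodel or auto-gptq) is installed. A minimal loading sketch; the repo id below is a placeholder, since this commit does not name it:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "<user>/opt-1.3b-gptq-4bit"  # hypothetical repo id

# transformers picks up "quantization_config" from config.json automatically,
# so no quantization arguments are passed at load time.
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")
tokenizer = AutoTokenizer.from_pretrained(model_id)

inputs = tokenizer("The capital of France is", return_tensors="pt").to(model.device)
output = model.generate(**inputs, max_new_tokens=16)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```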
generation_config.json
ADDED
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 2,
+  "eos_token_id": 2,
+  "pad_token_id": 1,
+  "transformers_version": "4.51.3"
+}
merges.txt
ADDED
The diff for this file is too large to render.
model.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fdf50790f50b393e413a8fe3b3270a47e8c3d5f253131bcf1d9087ac4b8deb16
+size 845033800
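
This is a Git LFS pointer, not the weights themselves: it records the spec version, the SHA-256 of the real payload, and its size in bytes (~845 MB). A sketch for verifying a downloaded copy against the pointer:

```python
import hashlib
import os

path = "model.safetensors"  # local copy of the actual payload
expected_oid = "fdf50790f50b393e413a8fe3b3270a47e8c3d5f253131bcf1d9087ac4b8deb16"
expected_size = 845033800

assert os.path.getsize(path) == expected_size, "size mismatch"

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)
assert sha.hexdigest() == expected_oid, "sha256 mismatch"
print("payload matches the LFS pointer")
```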
quant_log.csv
ADDED
@@ -0,0 +1,145 @@
+layer,module,loss,samples,damp,time
+0,self_attn.k_proj,0.00353451,0.01000,0.985
+0,self_attn.v_proj,0.00058977,0.01000,0.967
+0,self_attn.q_proj,0.00354618,0.01000,0.977
+0,self_attn.out_proj,0.00000845,0.01000,0.986
+0,fc1,0.00918082,0.01000,0.991
+0,fc2,0.00012097,0.01000,4.133
+1,self_attn.k_proj,0.00341375,0.01000,0.985
+1,self_attn.v_proj,0.00041851,0.01000,0.983
+1,self_attn.q_proj,0.00368894,0.01000,0.978
+1,self_attn.out_proj,0.00000219,0.01000,0.975
+1,fc1,0.01199323,0.01000,0.972
+1,fc2,0.00015632,0.01000,4.114
+2,self_attn.k_proj,0.00492218,0.01000,0.974
+2,self_attn.v_proj,0.00078813,0.01000,0.981
+2,self_attn.q_proj,0.00490565,0.01000,0.996
+2,self_attn.out_proj,0.00000534,0.01000,0.986
+2,fc1,0.01365813,0.01000,0.983
+2,fc2,0.00011579,0.01000,4.153
+3,self_attn.k_proj,0.00530104,0.01000,0.969
+3,self_attn.v_proj,0.00144508,0.01000,0.974
+3,self_attn.q_proj,0.00609475,0.01000,0.968
+3,self_attn.out_proj,0.00000768,0.01000,0.976
+3,fc1,0.01403668,0.01000,0.986
+3,fc2,0.00010727,0.01000,4.213
+4,self_attn.k_proj,0.00831477,0.01000,0.975
+4,self_attn.v_proj,0.00187180,0.01000,0.980
+4,self_attn.q_proj,0.00878986,0.01000,0.979
+4,self_attn.out_proj,0.00001110,0.01000,0.986
+4,fc1,0.01618612,0.01000,0.988
+4,fc2,0.00011802,0.01000,4.157
+5,self_attn.k_proj,0.01036577,0.01000,0.995
+5,self_attn.v_proj,0.00241232,0.01000,0.988
+5,self_attn.q_proj,0.01065748,0.01000,0.976
+5,self_attn.out_proj,0.00001253,0.01000,0.970
+5,fc1,0.01835590,0.01000,0.993
+5,fc2,0.00012848,0.01000,4.150
+6,self_attn.k_proj,0.01596541,0.01000,0.974
+6,self_attn.v_proj,0.00293466,0.01000,0.969
+6,self_attn.q_proj,0.01442928,0.01000,0.987
+6,self_attn.out_proj,0.00002641,0.01000,1.002
+6,fc1,0.02287802,0.01000,0.974
+6,fc2,0.00016784,0.01000,4.180
+7,self_attn.k_proj,0.01730689,0.01000,0.999
+7,self_attn.v_proj,0.00342599,0.01000,0.992
+7,self_attn.q_proj,0.01509144,0.01000,0.976
+7,self_attn.out_proj,0.00003264,0.01000,0.980
+7,fc1,0.02844299,0.01000,0.982
+7,fc2,0.00022774,0.01000,4.197
+8,self_attn.k_proj,0.01908275,0.01000,0.973
+8,self_attn.v_proj,0.00382682,0.01000,0.997
+8,self_attn.q_proj,0.01574800,0.01000,0.982
+8,self_attn.out_proj,0.00003932,0.01000,0.976
+8,fc1,0.03425221,0.01000,0.998
+8,fc2,0.00029699,0.01000,4.165
+9,self_attn.k_proj,0.02025209,0.01000,1.003
+9,self_attn.v_proj,0.00431333,0.01000,0.971
+9,self_attn.q_proj,0.01737337,0.01000,0.976
+9,self_attn.out_proj,0.00005814,0.01000,0.988
+9,fc1,0.03883628,0.01000,0.978
+9,fc2,0.00039988,0.01000,4.134
+10,self_attn.k_proj,0.02029791,0.01000,0.977
+10,self_attn.v_proj,0.00536017,0.01000,0.977
+10,self_attn.q_proj,0.01724341,0.01000,1.056
+10,self_attn.out_proj,0.00007856,0.01000,0.978
+10,fc1,0.04395340,0.01000,1.004
+10,fc2,0.00062550,0.01000,4.132
+11,self_attn.k_proj,0.02109379,0.01000,0.987
+11,self_attn.v_proj,0.00646726,0.01000,0.978
+11,self_attn.q_proj,0.01758398,0.01000,0.974
+11,self_attn.out_proj,0.00010376,0.01000,0.978
+11,fc1,0.04890034,0.01000,1.010
+11,fc2,0.00082190,0.01000,4.149
+12,self_attn.k_proj,0.02365618,0.01000,0.981
+12,self_attn.v_proj,0.00702407,0.01000,0.981
+12,self_attn.q_proj,0.01871167,0.01000,0.999
+12,self_attn.out_proj,0.00017479,0.01000,0.971
+12,fc1,0.05111321,0.01000,0.977
+12,fc2,0.00105926,0.01000,4.121
+13,self_attn.k_proj,0.02559538,0.01000,0.974
+13,self_attn.v_proj,0.00761801,0.01000,0.987
+13,self_attn.q_proj,0.01913078,0.01000,0.972
+13,self_attn.out_proj,0.00017100,0.01000,0.978
+13,fc1,0.05634150,0.01000,0.994
+13,fc2,0.00141263,0.01000,4.137
+14,self_attn.k_proj,0.02446801,0.01000,1.026
+14,self_attn.v_proj,0.00951312,0.01000,0.971
+14,self_attn.q_proj,0.01863108,0.01000,0.990
+14,self_attn.out_proj,0.00018884,0.01000,0.975
+14,fc1,0.06176755,0.01000,0.981
+14,fc2,0.00190128,0.01000,4.100
+15,self_attn.k_proj,0.02212356,0.01000,0.988
+15,self_attn.v_proj,0.01194986,0.01000,0.973
+15,self_attn.q_proj,0.01872810,0.01000,1.001
+15,self_attn.out_proj,0.00019521,0.01000,0.980
+15,fc1,0.06548359,0.01000,1.006
+15,fc2,0.00242290,0.01000,4.177
+16,self_attn.k_proj,0.02222240,0.01000,0.980
+16,self_attn.v_proj,0.01270989,0.01000,0.977
+16,self_attn.q_proj,0.01654256,0.01000,0.981
+16,self_attn.out_proj,0.00027436,0.01000,0.980
+16,fc1,0.07119486,0.01000,0.983
+16,fc2,0.00302200,0.01000,4.142
+17,self_attn.k_proj,0.02143056,0.01000,0.980
+17,self_attn.v_proj,0.01383860,0.01000,0.985
+17,self_attn.q_proj,0.01645198,0.01000,0.980
+17,self_attn.out_proj,0.00032416,0.01000,0.986
+17,fc1,0.07789719,0.01000,0.999
+17,fc2,0.00390024,0.01000,4.210
+18,self_attn.k_proj,0.01963866,0.01000,0.987
+18,self_attn.v_proj,0.01575041,0.01000,0.970
+18,self_attn.q_proj,0.01586412,0.01000,0.998
+18,self_attn.out_proj,0.00036916,0.01000,0.981
+18,fc1,0.08195859,0.01000,0.987
+18,fc2,0.00470324,0.01000,4.138
+19,self_attn.k_proj,0.02000524,0.01000,1.005
+19,self_attn.v_proj,0.02052886,0.01000,0.984
+19,self_attn.q_proj,0.01599333,0.01000,0.982
+19,self_attn.out_proj,0.00053542,0.01000,1.004
+19,fc1,0.08814689,0.01000,0.996
+19,fc2,0.00577823,0.01000,4.178
+20,self_attn.k_proj,0.02025632,0.01000,0.984
+20,self_attn.v_proj,0.02278810,0.01000,0.991
+20,self_attn.q_proj,0.01540255,0.01000,0.992
+20,self_attn.out_proj,0.00058181,0.01000,0.985
+20,fc1,0.09119438,0.01000,0.986
+20,fc2,0.00718106,0.01000,4.163
+21,self_attn.k_proj,0.01847481,0.01000,0.998
+21,self_attn.v_proj,0.02326399,0.01000,0.994
+21,self_attn.q_proj,0.01673961,0.01000,0.987
+21,self_attn.out_proj,0.00048029,0.01000,0.973
+21,fc1,0.09254406,0.01000,0.976
+21,fc2,0.00840400,0.01000,4.179
+22,self_attn.k_proj,0.01888512,0.01000,0.975
+22,self_attn.v_proj,0.02509475,0.01000,0.958
+22,self_attn.q_proj,0.02004961,0.01000,1.003
+22,self_attn.out_proj,0.00058037,0.01000,0.983
+22,fc1,0.09147193,0.01000,0.987
+22,fc2,0.00895930,0.01000,4.147
+23,self_attn.k_proj,0.02566331,0.01000,0.992
+23,self_attn.v_proj,0.01643573,0.01000,0.980
+23,self_attn.q_proj,0.06567268,0.01000,1.002
+23,self_attn.out_proj,0.00118536,0.01000,0.979
+23,fc1,0.08527403,0.01000,0.988
+23,fc2,0.00677233,0.01000,4.161
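
The log records one row per quantized module: reconstruction loss, damping factor, and wall time in seconds. Two patterns stand out: fc2 consistently takes ~4 s versus ~1 s for every other module, and losses grow with layer depth. Note the rows carry five values against the six-column header (the `samples` column is absent), so the sketch below parses fields positionally:

```python
import csv
from collections import defaultdict

losses, times = defaultdict(list), defaultdict(list)
with open("quant_log.csv") as f:
    rows = csv.reader(f)
    next(rows)  # skip header; data rows have 5 fields, not 6
    for layer, module, loss, damp, seconds in rows:
        losses[module].append(float(loss))
        times[module].append(float(seconds))

for module, vals in losses.items():
    n = len(vals)
    print(f"{module:20s} mean loss {sum(vals) / n:.6f}  "
          f"mean time {sum(times[module]) / n:.3f}s  ({n} layers)")
```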
quantize_config.json
ADDED
@@ -0,0 +1,21 @@
+{
+  "bits": 4,
+  "group_size": 128,
+  "desc_act": false,
+  "sym": true,
+  "lm_head": false,
+  "quant_method": "gptq",
+  "checkpoint_format": "gptq",
+  "pack_dtype": "int32",
+  "meta": {
+    "quantizer": [
+      "gptqmodel:2.2.0"
+    ],
+    "uri": "https://github.com/modelcloud/gptqmodel",
+    "damp_percent": 0.01,
+    "damp_auto_increment": 0.0025,
+    "static_groups": false,
+    "true_sequential": true,
+    "mse": 0.0
+  }
+}
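
These are the settings the checkpoint was produced with (gptqmodel 2.2.0, per `meta`). A sketch of reproducing the run with the gptqmodel API from the linked repo; the base model and calibration corpus are assumptions: the dimensions in config.json match facebook/opt-1.3b, and any modest text sample can serve for calibration.

```python
from datasets import load_dataset
from gptqmodel import GPTQModel, QuantizeConfig

# Values taken from quantize_config.json above.
quant_config = QuantizeConfig(bits=4, group_size=128, desc_act=False, sym=True)

# Assumed calibration data: ~1k short C4 documents.
calib = load_dataset(
    "allenai/c4", data_files="en/c4-train.00000-of-01024.json.gz", split="train"
).select(range(1024))["text"]

model = GPTQModel.load("facebook/opt-1.3b", quant_config)  # assumed base model
model.quantize(calib)          # logs per-module stats as in quant_log.csv
model.save("opt-1.3b-gptq")    # writes model.safetensors + quantize_config.json
```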
special_tokens_map.json
ADDED
@@ -0,0 +1,24 @@
+{
+  "bos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<pad>",
+  "unk_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  }
+}
tokenizer.json
ADDED
The diff for this file is too large to render.
tokenizer_config.json
ADDED
@@ -0,0 +1,32 @@
+{
+  "add_bos_token": true,
+  "add_prefix_space": false,
+  "added_tokens_decoder": {
+    "1": {
+      "content": "<pad>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "</s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "errors": "replace",
+  "extra_special_tokens": {},
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<pad>",
+  "tokenizer_class": "GPT2TokenizerFast",
+  "unk_token": "</s>",
+  "_commit_hash": null
+}
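
OPT's tokenizer setup is unusual: `</s>` serves as bos, eos, and unk, while `<pad>` sits at id 1, matching `pad_token_id` and `bos_token_id`/`eos_token_id` in config.json. A quick consistency check (sketch, again with a placeholder repo id):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("<user>/opt-1.3b-gptq-4bit")  # hypothetical id

assert tok.pad_token == "<pad>" and tok.pad_token_id == 1
assert tok.bos_token == tok.eos_token == "</s>" and tok.eos_token_id == 2
# add_bos_token=true: every encoding starts with </s> (id 2), OPT's convention.
print(tok("hello").input_ids[0])  # -> 2
```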
vocab.json
ADDED
The diff for this file is too large to render.