End of training

Files changed:
- README.md: +14 -17
- added_tokens.json: +413 -245
- config.json: +7 -8
- generation_config.json: +1 -1
- model.safetensors: +2 -2
- tokenizer.json: +2 -2
- tokenizer_config.json: +2 -2
- training_args.bin: +2 -2
README.md
CHANGED
@@ -14,7 +14,7 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model was trained from scratch on the None dataset.
 It achieves the following results on the evaluation set:
-- Loss: 0.
+- Loss: 0.1153
 
 ## Model description
 
@@ -33,30 +33,27 @@
 ### Training hyperparameters
 
 The following hyperparameters were used during training:
-- learning_rate:
-- train_batch_size:
-- eval_batch_size:
+- learning_rate: 1e-05
+- train_batch_size: 128
+- eval_batch_size: 128
 - seed: 42
-- optimizer: Use OptimizerNames.
+- optimizer: Use OptimizerNames.ADAMW_APEX_FUSED with betas=(0.826646043090655,0.991636944120939) and epsilon=3.4341677539323e-07 and optimizer_args=No additional optimizer arguments
 - lr_scheduler_type: linear
-- lr_scheduler_warmup_steps:
-- num_epochs:
+- lr_scheduler_warmup_steps: 5000
+- num_epochs: 200
 
 ### Training results
 
 | Training Loss | Epoch | Step | Validation Loss |
 |:-------------:|:-----:|:-----:|:---------------:|
-| 0.
-| 0.
-| 0.
-| 0.2733 | 4.0 | 25404 | 0.1528 |
-| 0.3071 | 5.0 | 31755 | 0.1805 |
-| 0.297 | 6.0 | 38106 | 0.1577 |
+| 0.0237 | 1.0 | 18731 | 0.1124 |
+| 0.0216 | 2.0 | 37462 | 0.1128 |
+| 0.0201 | 3.0 | 56193 | 0.1153 |
 
 
 ### Framework versions
 
-- Transformers 4.
-- Pytorch 2.6.0
-- Datasets
-- Tokenizers 0.21.
+- Transformers 4.50.3
+- Pytorch 2.6.0+cu126
+- Datasets 3.3.0
+- Tokenizers 0.21.1
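The updated hyperparameters translate almost one-to-one into `TrainingArguments`. A minimal sketch, assuming single-GPU training with NVIDIA Apex installed; the output path, eval strategy, and dataset wiring are placeholders, not part of the commit:

```python
from transformers import TrainingArguments

# Sketch of the recorded hyperparameters; output_dir and eval_strategy are assumptions.
args = TrainingArguments(
    output_dir="gmy-t5small-train",          # hypothetical path
    learning_rate=1e-5,
    per_device_train_batch_size=128,
    per_device_eval_batch_size=128,
    seed=42,
    optim="adamw_apex_fused",                # OptimizerNames.ADAMW_APEX_FUSED; needs NVIDIA Apex
    adam_beta1=0.826646043090655,
    adam_beta2=0.991636944120939,
    adam_epsilon=3.4341677539323e-07,
    lr_scheduler_type="linear",
    warmup_steps=5000,
    num_train_epochs=200,
    eval_strategy="epoch",                   # assumed: the results table logs one eval per epoch
)
```

Note that although `num_epochs` is 200, the results table stops at epoch 3, whose validation loss (0.1153) is the value reported at the top of the card, so the run appears to have ended early.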
added_tokens.json
CHANGED
@@ -1,247 +1,415 @@
[Complete rewrite of the added-token map: the previous 245 entries were replaced by 413 new entries assigning token IDs 32100-32512. The token strings on both sides of this diff are mis-encoded in this capture (mojibake rendered as Thai characters) and are not recoverable.]
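added_tokens.json is the file `save_pretrained` writes for tokens added on top of the base vocabulary, and the ID range fits exactly: the stock T5 tokenizer ends at ID 32099, and 32100 + 413 = 32513, the new `vocab_size` in config.json below. A minimal sketch of how such a file comes about; the token strings are stand-ins, since the real ones are unrecoverable here:

```python
from transformers import AutoTokenizer, T5ForConditionalGeneration

tokenizer = AutoTokenizer.from_pretrained("t5-small")   # assumption: stock T5 tokenizer as the base
model = T5ForConditionalGeneration.from_pretrained("t5-small")

# Stand-in strings; the actual 413 tokens are mis-encoded in the diff above.
new_tokens = ["<sym_0>", "<sym_1>", "<sym_2>"]
tokenizer.add_tokens(new_tokens)                        # new IDs are assigned starting at 32100

# Grow the embedding matrix so the new IDs have rows.
model.resize_token_embeddings(len(tokenizer))

tokenizer.save_pretrained("out")                        # writes added_tokens.json among other files
```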
config.json
CHANGED
@@ -1,12 +1,11 @@
 {
-  "_name_or_path": "/Users/lee/GitHub/results/GMY-T5Small/train_1/checkpoint-65840",
   "architectures": [
     "T5ForConditionalGeneration"
   ],
   "classifier_dropout": 0.0,
-  "d_ff":
+  "d_ff": 3072,
   "d_kv": 64,
-  "d_model":
+  "d_model": 768,
   "decoder_start_token_id": 0,
   "dense_act_fn": "relu",
   "dropout_rate": 0.1,
@@ -18,9 +17,9 @@
   "layer_norm_epsilon": 1e-06,
   "model_type": "t5",
   "n_positions": 512,
-  "num_decoder_layers":
-  "num_heads":
-  "num_layers":
+  "num_decoder_layers": 12,
+  "num_heads": 12,
+  "num_layers": 12,
   "output_past": true,
   "pad_token_id": 0,
   "relative_attention_max_distance": 128,
@@ -55,7 +54,7 @@
     }
   },
   "torch_dtype": "float32",
-  "transformers_version": "4.
+  "transformers_version": "4.50.3",
   "use_cache": true,
-  "vocab_size":
+  "vocab_size": 32513
 }
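The filled-in values are exactly the t5-base geometry (d_model 768, d_ff 3072, 12 layers and 12 heads) with the vocabulary widened to 32513. A sketch that rebuilds the same shape from scratch, as a sanity check against the checkpoint size:

```python
from transformers import T5Config, T5ForConditionalGeneration

config = T5Config(
    vocab_size=32513,         # 32100 stock T5 IDs + 413 added tokens
    d_model=768,
    d_kv=64,
    d_ff=3072,
    num_layers=12,
    num_decoder_layers=12,
    num_heads=12,
    dropout_rate=0.1,
    layer_norm_epsilon=1e-6,
)
model = T5ForConditionalGeneration(config)
# ~223M float32 parameters -> ~893 MB, matching the safetensors size below.
print(f"{model.num_parameters():,}")
```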
generation_config.json
CHANGED
@@ -2,5 +2,5 @@
   "decoder_start_token_id": 0,
   "eos_token_id": 1,
   "pad_token_id": 0,
-  "transformers_version": "4.
+  "transformers_version": "4.50.3"
 }
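Only the library version stamp changed here; the three token IDs are the standard T5 generation defaults. For reference, a sketch of producing an equivalent file:

```python
from transformers import GenerationConfig

gen = GenerationConfig(
    decoder_start_token_id=0,  # T5 decoding starts from the pad token (ID 0)
    eos_token_id=1,
    pad_token_id=0,
)
gen.save_pretrained("out")     # writes out/generation_config.json
```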
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:1b355c8952aa6b646e2ceaedda7950541d09bfbea54f24486af95e5935373c5a
+size 892827432
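The remaining binary files are Git LFS pointers: each records only the blob's SHA-256 digest and byte size, and the diff simply swaps in the new digest. A small sketch for checking a downloaded weight file against the pointer above:

```python
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file so large checkpoints don't need to fit in memory."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        while chunk := f.read(chunk_size):
            digest.update(chunk)
    return digest.hexdigest()

# Expected digest copied from the LFS pointer above.
expected = "1b355c8952aa6b646e2ceaedda7950541d09bfbea54f24486af95e5935373c5a"
assert sha256_of("model.safetensors") == expected
```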
tokenizer.json
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:8f1a24a97d5195455dd1fbf567d54e18c575a09f3b208ca957c2d319909b36be
+size 2496502
tokenizer_config.json
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:3757494a511ac5c6706bf61d0b94c86e25b0c88c477932122634597858deb533
+size 91055
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:9643441c6cbb1024a83de6334eb0c291be2906f1780a037691fe0da36f334e10
+size 5496
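training_args.bin is the pickled `TrainingArguments` object that the `Trainer` saves alongside the model, so it can be inspected to confirm the hyperparameters recorded in the README. A sketch, assuming the file has been downloaded locally:

```python
import torch

# Not a tensor checkpoint, so the pickled object must be allowed explicitly
# (PyTorch 2.6 defaults torch.load to weights_only=True).
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs)  # expect 1e-05 and 200
```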