{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.42360452474033633,
  "eval_steps": 500,
  "global_step": 27468,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0004780741323340042,
      "grad_norm": 3.1866345405578613,
      "learning_rate": 1.0157273918741808e-06,
      "loss": 1.194,
      "step": 31
    },
    {
      "epoch": 0.0009561482646680084,
      "grad_norm": 1.7708723545074463,
      "learning_rate": 2.0314547837483616e-06,
      "loss": 1.1124,
      "step": 62
    },
    {
      "epoch": 0.0014342223970020126,
      "grad_norm": 1.6835601329803467,
      "learning_rate": 3.0471821756225426e-06,
      "loss": 1.0585,
      "step": 93
    },
    {
      "epoch": 0.0019122965293360168,
      "grad_norm": 1.642537236213684,
      "learning_rate": 4.062909567496723e-06,
      "loss": 1.0213,
      "step": 124
    },
    {
      "epoch": 0.002390370661670021,
      "grad_norm": 1.8476150035858154,
      "learning_rate": 5.078636959370905e-06,
      "loss": 1.0024,
      "step": 155
    },
    {
      "epoch": 0.0028684447940040252,
      "grad_norm": 1.96336829662323,
      "learning_rate": 6.094364351245085e-06,
      "loss": 0.9938,
      "step": 186
    },
    {
      "epoch": 0.0033465189263380294,
      "grad_norm": 1.8158528804779053,
      "learning_rate": 7.110091743119267e-06,
      "loss": 0.969,
      "step": 217
    },
    {
      "epoch": 0.0038245930586720336,
      "grad_norm": 1.9796783924102783,
      "learning_rate": 8.125819134993446e-06,
      "loss": 0.9573,
      "step": 248
    },
    {
      "epoch": 0.004302667191006038,
      "grad_norm": 1.8192514181137085,
      "learning_rate": 9.141546526867629e-06,
      "loss": 0.9498,
      "step": 279
    },
    {
      "epoch": 0.004780741323340042,
      "grad_norm": 2.1396358013153076,
      "learning_rate": 1.015727391874181e-05,
      "loss": 0.9294,
      "step": 310
    },
    {
      "epoch": 0.005258815455674046,
      "grad_norm": 2.0807156562805176,
      "learning_rate": 1.117300131061599e-05,
      "loss": 0.9186,
      "step": 341
    },
    {
      "epoch": 0.0057368895880080505,
      "grad_norm": 2.002887010574341,
      "learning_rate": 1.218872870249017e-05,
      "loss": 0.9149,
      "step": 372
    },
    {
      "epoch": 0.006214963720342055,
      "grad_norm": 2.240675687789917,
      "learning_rate": 1.3204456094364351e-05,
      "loss": 0.9139,
      "step": 403
    },
    {
      "epoch": 0.006693037852676059,
      "grad_norm": 1.9016591310501099,
      "learning_rate": 1.4220183486238533e-05,
      "loss": 0.8946,
      "step": 434
    },
    {
      "epoch": 0.007171111985010063,
      "grad_norm": 1.6729310750961304,
      "learning_rate": 1.5235910878112714e-05,
      "loss": 0.8959,
      "step": 465
    },
    {
      "epoch": 0.007649186117344067,
      "grad_norm": 2.2607779502868652,
      "learning_rate": 1.6251638269986893e-05,
      "loss": 0.8847,
      "step": 496
    },
    {
      "epoch": 0.00812726024967807,
      "grad_norm": 1.699507236480713,
      "learning_rate": 1.7267365661861077e-05,
      "loss": 0.8773,
      "step": 527
    },
    {
      "epoch": 0.008605334382012076,
      "grad_norm": 1.93502938747406,
      "learning_rate": 1.8283093053735257e-05,
      "loss": 0.8773,
      "step": 558
    },
    {
      "epoch": 0.009083408514346079,
      "grad_norm": 2.1234281063079834,
      "learning_rate": 1.9298820445609438e-05,
      "loss": 0.8669,
      "step": 589
    },
    {
      "epoch": 0.009561482646680084,
      "grad_norm": 1.7465944290161133,
      "learning_rate": 2.031454783748362e-05,
      "loss": 0.8536,
      "step": 620
    },
    {
      "epoch": 0.010039556779014087,
      "grad_norm": 1.7039932012557983,
      "learning_rate": 2.13302752293578e-05,
      "loss": 0.8624,
      "step": 651
    },
    {
      "epoch": 0.010517630911348093,
      "grad_norm": 1.551837682723999,
      "learning_rate": 2.234600262123198e-05,
      "loss": 0.8555,
      "step": 682
    },
    {
      "epoch": 0.010995705043682096,
      "grad_norm": 1.7622497081756592,
      "learning_rate": 2.336173001310616e-05,
      "loss": 0.8475,
      "step": 713
    },
    {
      "epoch": 0.011473779176016101,
      "grad_norm": 2.000936269760132,
      "learning_rate": 2.437745740498034e-05,
      "loss": 0.852,
      "step": 744
    },
    {
      "epoch": 0.011951853308350104,
      "grad_norm": 1.6770379543304443,
      "learning_rate": 2.5393184796854525e-05,
      "loss": 0.8455,
      "step": 775
    },
    {
      "epoch": 0.01242992744068411,
      "grad_norm": 1.746506929397583,
      "learning_rate": 2.6408912188728702e-05,
      "loss": 0.8406,
      "step": 806
    },
    {
      "epoch": 0.012908001573018113,
      "grad_norm": 1.507759690284729,
      "learning_rate": 2.7424639580602886e-05,
      "loss": 0.8556,
      "step": 837
    },
    {
      "epoch": 0.013386075705352118,
      "grad_norm": 1.6984492540359497,
      "learning_rate": 2.8440366972477066e-05,
      "loss": 0.8281,
      "step": 868
    },
    {
      "epoch": 0.013864149837686121,
      "grad_norm": 1.5145736932754517,
      "learning_rate": 2.9456094364351244e-05,
      "loss": 0.8359,
      "step": 899
    },
    {
      "epoch": 0.014342223970020126,
      "grad_norm": 1.5700000524520874,
      "learning_rate": 3.0471821756225428e-05,
      "loss": 0.8305,
      "step": 930
    },
    {
      "epoch": 0.01482029810235413,
      "grad_norm": 1.6657596826553345,
      "learning_rate": 3.148754914809961e-05,
      "loss": 0.8263,
      "step": 961
    },
    {
      "epoch": 0.015298372234688135,
      "grad_norm": 1.527628779411316,
      "learning_rate": 3.2503276539973785e-05,
      "loss": 0.8318,
      "step": 992
    },
    {
      "epoch": 0.015776446367022138,
      "grad_norm": 1.8353967666625977,
      "learning_rate": 3.351900393184797e-05,
      "loss": 0.8192,
      "step": 1023
    },
    {
      "epoch": 0.01625452049935614,
      "grad_norm": 1.2118042707443237,
      "learning_rate": 3.453473132372215e-05,
      "loss": 0.819,
      "step": 1054
    },
    {
      "epoch": 0.016732594631690145,
      "grad_norm": 1.3235970735549927,
      "learning_rate": 3.555045871559633e-05,
      "loss": 0.8176,
      "step": 1085
    },
    {
      "epoch": 0.01721066876402415,
      "grad_norm": 1.6729888916015625,
      "learning_rate": 3.6566186107470514e-05,
      "loss": 0.8202,
      "step": 1116
    },
    {
      "epoch": 0.017688742896358155,
      "grad_norm": 1.3251298666000366,
      "learning_rate": 3.7581913499344695e-05,
      "loss": 0.8104,
      "step": 1147
    },
    {
      "epoch": 0.018166817028692158,
      "grad_norm": 1.5231342315673828,
      "learning_rate": 3.8597640891218876e-05,
      "loss": 0.8113,
      "step": 1178
    },
    {
      "epoch": 0.01864489116102616,
      "grad_norm": 1.3263883590698242,
      "learning_rate": 3.9613368283093056e-05,
      "loss": 0.7981,
      "step": 1209
    },
    {
      "epoch": 0.019122965293360168,
      "grad_norm": 1.317791223526001,
      "learning_rate": 4.062909567496724e-05,
      "loss": 0.7962,
      "step": 1240
    },
    {
      "epoch": 0.01960103942569417,
      "grad_norm": 1.566698670387268,
      "learning_rate": 4.164482306684142e-05,
      "loss": 0.8072,
      "step": 1271
    },
    {
      "epoch": 0.020079113558028175,
      "grad_norm": 1.2935110330581665,
      "learning_rate": 4.26605504587156e-05,
      "loss": 0.7818,
      "step": 1302
    },
    {
      "epoch": 0.020557187690362178,
      "grad_norm": 1.336227536201477,
      "learning_rate": 4.367627785058978e-05,
      "loss": 0.7917,
      "step": 1333
    },
    {
      "epoch": 0.021035261822696185,
      "grad_norm": 1.5656548738479614,
      "learning_rate": 4.469200524246396e-05,
      "loss": 0.7785,
      "step": 1364
    },
    {
      "epoch": 0.02151333595503019,
      "grad_norm": 1.5208159685134888,
      "learning_rate": 4.570773263433814e-05,
      "loss": 0.7839,
      "step": 1395
    },
    {
      "epoch": 0.02199141008736419,
      "grad_norm": 1.4816375970840454,
      "learning_rate": 4.672346002621232e-05,
      "loss": 0.7789,
      "step": 1426
    },
    {
      "epoch": 0.022469484219698195,
      "grad_norm": 1.402166485786438,
      "learning_rate": 4.77391874180865e-05,
      "loss": 0.7803,
      "step": 1457
    },
    {
      "epoch": 0.022947558352032202,
      "grad_norm": 1.2986265420913696,
      "learning_rate": 4.875491480996068e-05,
      "loss": 0.7739,
      "step": 1488
    },
    {
      "epoch": 0.023425632484366205,
      "grad_norm": 1.3171271085739136,
      "learning_rate": 4.977064220183487e-05,
      "loss": 0.7694,
      "step": 1519
    },
    {
      "epoch": 0.02390370661670021,
      "grad_norm": 1.4870030879974365,
      "learning_rate": 4.9999915451558777e-05,
      "loss": 0.7663,
      "step": 1550
    },
    {
      "epoch": 0.024381780749034212,
      "grad_norm": 1.7419476509094238,
      "learning_rate": 4.999955597496219e-05,
      "loss": 0.7731,
      "step": 1581
    },
    {
      "epoch": 0.02485985488136822,
      "grad_norm": 1.109238624572754,
      "learning_rate": 4.9998914381774255e-05,
      "loss": 0.7704,
      "step": 1612
    },
    {
      "epoch": 0.025337929013702222,
      "grad_norm": 1.3478920459747314,
      "learning_rate": 4.999799067923527e-05,
      "loss": 0.7723,
      "step": 1643
    },
    {
      "epoch": 0.025816003146036225,
      "grad_norm": 1.2739650011062622,
      "learning_rate": 4.999678487776908e-05,
      "loss": 0.7699,
      "step": 1674
    },
    {
      "epoch": 0.02629407727837023,
      "grad_norm": 1.3892935514450073,
      "learning_rate": 4.9995296990983006e-05,
      "loss": 0.7709,
      "step": 1705
    },
    {
      "epoch": 0.026772151410704235,
      "grad_norm": 1.2031512260437012,
      "learning_rate": 4.999352703566763e-05,
      "loss": 0.7557,
      "step": 1736
    },
    {
      "epoch": 0.02725022554303824,
      "grad_norm": 1.185471773147583,
      "learning_rate": 4.999147503179668e-05,
      "loss": 0.7645,
      "step": 1767
    },
    {
      "epoch": 0.027728299675372242,
      "grad_norm": 1.2331740856170654,
      "learning_rate": 4.998914100252672e-05,
      "loss": 0.7655,
      "step": 1798
    },
    {
      "epoch": 0.028206373807706245,
      "grad_norm": 1.1817049980163574,
      "learning_rate": 4.998652497419696e-05,
      "loss": 0.7617,
      "step": 1829
    },
    {
      "epoch": 0.028684447940040252,
      "grad_norm": 1.2225860357284546,
      "learning_rate": 4.9983626976328927e-05,
      "loss": 0.7558,
      "step": 1860
    },
    {
      "epoch": 0.029162522072374256,
      "grad_norm": 1.6452528238296509,
      "learning_rate": 4.998044704162613e-05,
      "loss": 0.7566,
      "step": 1891
    },
    {
      "epoch": 0.02964059620470826,
      "grad_norm": 1.3764828443527222,
      "learning_rate": 4.9976985205973705e-05,
      "loss": 0.7545,
      "step": 1922
    },
    {
      "epoch": 0.030118670337042262,
      "grad_norm": 1.6581465005874634,
      "learning_rate": 4.997324150843799e-05,
      "loss": 0.7519,
      "step": 1953
    },
    {
      "epoch": 0.03059674446937627,
      "grad_norm": 1.1065036058425903,
      "learning_rate": 4.99692159912661e-05,
      "loss": 0.7579,
      "step": 1984
    },
    {
      "epoch": 0.031074818601710272,
      "grad_norm": 2.6465444564819336,
      "learning_rate": 4.996490869988546e-05,
      "loss": 0.7538,
      "step": 2015
    },
    {
      "epoch": 0.031552892734044276,
      "grad_norm": 1.3028968572616577,
      "learning_rate": 4.996031968290326e-05,
      "loss": 0.7522,
      "step": 2046
    },
    {
      "epoch": 0.03203096686637828,
      "grad_norm": 1.0450382232666016,
      "learning_rate": 4.995544899210594e-05,
      "loss": 0.742,
      "step": 2077
    },
    {
      "epoch": 0.03250904099871228,
      "grad_norm": 0.9411994814872742,
      "learning_rate": 4.9950296682458583e-05,
      "loss": 0.7401,
      "step": 2108
    },
    {
      "epoch": 0.03298711513104629,
      "grad_norm": 1.0501608848571777,
      "learning_rate": 4.994486281210429e-05,
      "loss": 0.7455,
      "step": 2139
    },
    {
      "epoch": 0.03346518926338029,
      "grad_norm": 1.266400694847107,
      "learning_rate": 4.9939147442363566e-05,
      "loss": 0.7391,
      "step": 2170
    },
    {
      "epoch": 0.033943263395714296,
      "grad_norm": 1.2862213850021362,
      "learning_rate": 4.9933150637733574e-05,
      "loss": 0.7397,
      "step": 2201
    },
    {
      "epoch": 0.0344213375280483,
      "grad_norm": 1.2715409994125366,
      "learning_rate": 4.992687246588743e-05,
      "loss": 0.7467,
      "step": 2232
    },
    {
      "epoch": 0.0348994116603823,
      "grad_norm": 1.3290003538131714,
      "learning_rate": 4.992031299767347e-05,
      "loss": 0.7432,
      "step": 2263
    },
    {
      "epoch": 0.03537748579271631,
      "grad_norm": 1.0561761856079102,
      "learning_rate": 4.9913472307114386e-05,
      "loss": 0.7336,
      "step": 2294
    },
    {
      "epoch": 0.035855559925050316,
      "grad_norm": 1.3272422552108765,
      "learning_rate": 4.9906350471406446e-05,
      "loss": 0.7251,
      "step": 2325
    },
    {
      "epoch": 0.036333634057384316,
      "grad_norm": 1.0749491453170776,
      "learning_rate": 4.989894757091861e-05,
      "loss": 0.7205,
      "step": 2356
    },
    {
      "epoch": 0.03681170818971832,
      "grad_norm": 1.1581461429595947,
      "learning_rate": 4.989126368919158e-05,
      "loss": 0.7311,
      "step": 2387
    },
    {
      "epoch": 0.03728978232205232,
      "grad_norm": 1.0796961784362793,
      "learning_rate": 4.988329891293693e-05,
      "loss": 0.7259,
      "step": 2418
    },
    {
      "epoch": 0.03776785645438633,
      "grad_norm": 1.1916818618774414,
      "learning_rate": 4.987505333203608e-05,
      "loss": 0.7258,
      "step": 2449
    },
    {
      "epoch": 0.038245930586720336,
      "grad_norm": 0.9515872001647949,
      "learning_rate": 4.9866527039539276e-05,
      "loss": 0.7273,
      "step": 2480
    },
    {
      "epoch": 0.038724004719054336,
      "grad_norm": 1.1217246055603027,
      "learning_rate": 4.9857720131664594e-05,
      "loss": 0.7269,
      "step": 2511
    },
    {
      "epoch": 0.03920207885138834,
      "grad_norm": 0.9570199847221375,
      "learning_rate": 4.9848632707796773e-05,
      "loss": 0.7289,
      "step": 2542
    },
    {
      "epoch": 0.03968015298372235,
      "grad_norm": 1.2980682849884033,
      "learning_rate": 4.9839264870486155e-05,
      "loss": 0.7382,
      "step": 2573
    },
    {
      "epoch": 0.04015822711605635,
      "grad_norm": 2.1257143020629883,
      "learning_rate": 4.9829616725447526e-05,
      "loss": 0.8112,
      "step": 2604
    },
    {
      "epoch": 0.04063630124839036,
      "grad_norm": 1.5091110467910767,
      "learning_rate": 4.981968838155888e-05,
      "loss": 0.7451,
      "step": 2635
    },
    {
      "epoch": 0.041114375380724356,
      "grad_norm": 2.4548749923706055,
      "learning_rate": 4.980947995086024e-05,
      "loss": 0.7358,
      "step": 2666
    },
    {
      "epoch": 0.04159244951305836,
      "grad_norm": 1.176115870475769,
      "learning_rate": 4.979899154855234e-05,
      "loss": 0.7368,
      "step": 2697
    },
    {
      "epoch": 0.04207052364539237,
      "grad_norm": 1.063672661781311,
      "learning_rate": 4.9788223292995386e-05,
      "loss": 0.7465,
      "step": 2728
    },
    {
      "epoch": 0.04254859777772637,
      "grad_norm": 1.3062779903411865,
      "learning_rate": 4.977717530570768e-05,
      "loss": 0.7413,
      "step": 2759
    },
    {
      "epoch": 0.04302667191006038,
      "grad_norm": 1.5550280809402466,
      "learning_rate": 4.976584771136425e-05,
      "loss": 0.724,
      "step": 2790
    },
    {
      "epoch": 0.043504746042394384,
      "grad_norm": 1.2525728940963745,
      "learning_rate": 4.975424063779547e-05,
      "loss": 0.7216,
      "step": 2821
    },
    {
      "epoch": 0.04398282017472838,
      "grad_norm": 1.158134937286377,
      "learning_rate": 4.974235421598557e-05,
      "loss": 0.728,
      "step": 2852
    },
    {
      "epoch": 0.04446089430706239,
      "grad_norm": 1.262291669845581,
      "learning_rate": 4.973018858007122e-05,
      "loss": 0.7191,
      "step": 2883
    },
    {
      "epoch": 0.04493896843939639,
      "grad_norm": 2.826028347015381,
      "learning_rate": 4.9717743867339963e-05,
      "loss": 0.7211,
      "step": 2914
    },
    {
      "epoch": 0.0454170425717304,
      "grad_norm": 1.0346958637237549,
      "learning_rate": 4.9705020218228695e-05,
      "loss": 0.7268,
      "step": 2945
    },
    {
      "epoch": 0.045895116704064404,
      "grad_norm": 1.4338330030441284,
      "learning_rate": 4.969201777632205e-05,
      "loss": 0.7154,
      "step": 2976
    },
    {
      "epoch": 0.046373190836398404,
      "grad_norm": 0.9223676919937134,
      "learning_rate": 4.9678736688350846e-05,
      "loss": 0.7195,
      "step": 3007
    },
    {
      "epoch": 0.04685126496873241,
      "grad_norm": 0.9936623573303223,
      "learning_rate": 4.966517710419033e-05,
      "loss": 0.7194,
      "step": 3038
    },
    {
      "epoch": 0.04732933910106642,
      "grad_norm": 1.017962098121643,
      "learning_rate": 4.965133917685858e-05,
      "loss": 0.713,
      "step": 3069
    },
    {
      "epoch": 0.04780741323340042,
      "grad_norm": 0.9654473662376404,
      "learning_rate": 4.9637223062514714e-05,
      "loss": 0.7096,
      "step": 3100
    },
    {
      "epoch": 0.048285487365734424,
      "grad_norm": 0.9372695684432983,
      "learning_rate": 4.962282892045718e-05,
      "loss": 0.7116,
      "step": 3131
    },
    {
      "epoch": 0.048763561498068424,
      "grad_norm": 0.9450846910476685,
      "learning_rate": 4.9608156913121904e-05,
      "loss": 0.7129,
      "step": 3162
    },
    {
      "epoch": 0.04924163563040243,
      "grad_norm": 1.0803054571151733,
      "learning_rate": 4.959320720608049e-05,
      "loss": 0.706,
      "step": 3193
    },
    {
      "epoch": 0.04971970976273644,
      "grad_norm": 1.2218348979949951,
      "learning_rate": 4.9577979968038354e-05,
      "loss": 0.7076,
      "step": 3224
    },
    {
      "epoch": 0.05019778389507044,
      "grad_norm": 1.0429767370224,
      "learning_rate": 4.956247537083282e-05,
      "loss": 0.7089,
      "step": 3255
    },
    {
      "epoch": 0.050675858027404444,
      "grad_norm": 0.9912049770355225,
      "learning_rate": 4.9546693589431145e-05,
      "loss": 0.7016,
      "step": 3286
    },
    {
      "epoch": 0.051153932159738444,
      "grad_norm": 0.9687103033065796,
      "learning_rate": 4.9530634801928595e-05,
      "loss": 0.7071,
      "step": 3317
    },
    {
      "epoch": 0.05163200629207245,
      "grad_norm": 0.8178670406341553,
      "learning_rate": 4.9514299189546395e-05,
      "loss": 0.6991,
      "step": 3348
    },
    {
      "epoch": 0.05211008042440646,
      "grad_norm": 0.8601382374763489,
      "learning_rate": 4.949768693662973e-05,
      "loss": 0.6978,
      "step": 3379
    },
    {
      "epoch": 0.05258815455674046,
      "grad_norm": 1.076370120048523,
      "learning_rate": 4.948079823064559e-05,
      "loss": 0.7044,
      "step": 3410
    },
    {
      "epoch": 0.053066228689074464,
      "grad_norm": 3.9457356929779053,
      "learning_rate": 4.946363326218074e-05,
      "loss": 0.6978,
      "step": 3441
    },
    {
      "epoch": 0.05354430282140847,
      "grad_norm": 0.8186474442481995,
      "learning_rate": 4.9446192224939525e-05,
      "loss": 0.6974,
      "step": 3472
    },
    {
      "epoch": 0.05402237695374247,
      "grad_norm": 0.9643816947937012,
      "learning_rate": 4.942847531574167e-05,
      "loss": 0.7025,
      "step": 3503
    },
    {
      "epoch": 0.05450045108607648,
      "grad_norm": 1.024248480796814,
      "learning_rate": 4.941048273452008e-05,
      "loss": 0.7006,
      "step": 3534
    },
    {
      "epoch": 0.05497852521841048,
      "grad_norm": 0.7718949317932129,
      "learning_rate": 4.9392214684318605e-05,
      "loss": 0.7024,
      "step": 3565
    },
    {
      "epoch": 0.055456599350744484,
      "grad_norm": 1.1313899755477905,
      "learning_rate": 4.93736713712897e-05,
      "loss": 0.701,
      "step": 3596
    },
    {
      "epoch": 0.05593467348307849,
      "grad_norm": 1.0118827819824219,
      "learning_rate": 4.9354853004692124e-05,
      "loss": 0.7036,
      "step": 3627
    },
    {
      "epoch": 0.05641274761541249,
      "grad_norm": 0.9465724229812622,
      "learning_rate": 4.93357597968886e-05,
      "loss": 0.6869,
      "step": 3658
    },
    {
      "epoch": 0.0568908217477465,
      "grad_norm": 1.0233882665634155,
      "learning_rate": 4.931639196334338e-05,
      "loss": 0.6944,
      "step": 3689
    },
    {
      "epoch": 0.057368895880080505,
      "grad_norm": 1.008609652519226,
      "learning_rate": 4.9296749722619826e-05,
      "loss": 0.6916,
      "step": 3720
    },
    {
      "epoch": 0.057846970012414504,
      "grad_norm": 1.0083181858062744,
      "learning_rate": 4.9276833296377966e-05,
      "loss": 0.6886,
      "step": 3751
    },
    {
      "epoch": 0.05832504414474851,
      "grad_norm": 0.9374220967292786,
      "learning_rate": 4.925664290937196e-05,
      "loss": 0.6976,
      "step": 3782
    },
    {
      "epoch": 0.05880311827708251,
      "grad_norm": 0.9066904783248901,
      "learning_rate": 4.9236178789447576e-05,
      "loss": 0.6911,
      "step": 3813
    },
    {
      "epoch": 0.05928119240941652,
      "grad_norm": 0.9702699780464172,
      "learning_rate": 4.921544116753962e-05,
      "loss": 0.6959,
      "step": 3844
    },
    {
      "epoch": 0.059759266541750525,
      "grad_norm": 0.8405037522315979,
      "learning_rate": 4.919443027766935e-05,
      "loss": 0.6896,
      "step": 3875
    },
    {
      "epoch": 0.060237340674084525,
      "grad_norm": 0.9460383653640747,
      "learning_rate": 4.91731463569418e-05,
      "loss": 0.6871,
      "step": 3906
    },
    {
      "epoch": 0.06071541480641853,
      "grad_norm": 0.9355078339576721,
      "learning_rate": 4.915158964554312e-05,
      "loss": 0.6843,
      "step": 3937
    },
    {
      "epoch": 0.06119348893875254,
      "grad_norm": 0.7211058735847473,
      "learning_rate": 4.912976038673786e-05,
      "loss": 0.6861,
      "step": 3968
    },
    {
      "epoch": 0.06167156307108654,
      "grad_norm": 0.8674766421318054,
      "learning_rate": 4.9107658826866254e-05,
      "loss": 0.6939,
      "step": 3999
    },
    {
      "epoch": 0.062149637203420545,
      "grad_norm": 0.8166181445121765,
      "learning_rate": 4.908528521534139e-05,
      "loss": 0.692,
      "step": 4030
    },
    {
      "epoch": 0.06262771133575455,
      "grad_norm": 0.841305136680603,
      "learning_rate": 4.906263980464644e-05,
      "loss": 0.6855,
      "step": 4061
    },
    {
      "epoch": 0.06310578546808855,
      "grad_norm": 0.8942857384681702,
      "learning_rate": 4.903972285033178e-05,
      "loss": 0.6946,
      "step": 4092
    },
    {
      "epoch": 0.06358385960042255,
      "grad_norm": 0.8595120310783386,
      "learning_rate": 4.901653461101213e-05,
      "loss": 0.6825,
      "step": 4123
    },
    {
      "epoch": 0.06406193373275657,
      "grad_norm": 0.8155812621116638,
      "learning_rate": 4.8993075348363626e-05,
      "loss": 0.6821,
      "step": 4154
    },
    {
      "epoch": 0.06454000786509057,
      "grad_norm": 0.8901113271713257,
      "learning_rate": 4.896934532712084e-05,
      "loss": 0.6898,
      "step": 4185
    },
    {
      "epoch": 0.06501808199742456,
      "grad_norm": 0.9637976288795471,
      "learning_rate": 4.8945344815073846e-05,
      "loss": 0.6829,
      "step": 4216
    },
    {
      "epoch": 0.06549615612975858,
      "grad_norm": 0.7048139572143555,
      "learning_rate": 4.892107408306516e-05,
      "loss": 0.6834,
      "step": 4247
    },
    {
      "epoch": 0.06597423026209258,
      "grad_norm": 0.8655612468719482,
      "learning_rate": 4.889653340498669e-05,
      "loss": 0.6778,
      "step": 4278
    },
    {
      "epoch": 0.06645230439442658,
      "grad_norm": 0.8204261064529419,
      "learning_rate": 4.8871723057776664e-05,
      "loss": 0.6672,
      "step": 4309
    },
    {
      "epoch": 0.06693037852676058,
      "grad_norm": 0.9259466528892517,
      "learning_rate": 4.8846643321416476e-05,
      "loss": 0.6778,
      "step": 4340
    },
    {
      "epoch": 0.06740845265909459,
      "grad_norm": 0.9826278686523438,
      "learning_rate": 4.882129447892753e-05,
      "loss": 0.6882,
      "step": 4371
    },
    {
      "epoch": 0.06788652679142859,
      "grad_norm": 0.7376586198806763,
      "learning_rate": 4.8795676816368076e-05,
      "loss": 0.6802,
      "step": 4402
    },
    {
      "epoch": 0.06836460092376259,
      "grad_norm": 0.6778755784034729,
      "learning_rate": 4.876979062282995e-05,
      "loss": 0.6814,
      "step": 4433
    },
    {
      "epoch": 0.0688426750560966,
      "grad_norm": 0.8117042183876038,
      "learning_rate": 4.8743636190435325e-05,
      "loss": 0.6759,
      "step": 4464
    },
    {
      "epoch": 0.0693207491884306,
      "grad_norm": 0.9493227005004883,
      "learning_rate": 4.871721381433344e-05,
      "loss": 0.6744,
      "step": 4495
    },
    {
      "epoch": 0.0697988233207646,
      "grad_norm": 1.3356900215148926,
      "learning_rate": 4.869052379269719e-05,
      "loss": 0.6763,
      "step": 4526
    },
    {
      "epoch": 0.07027689745309862,
      "grad_norm": 1.1542543172836304,
      "learning_rate": 4.866356642671985e-05,
      "loss": 0.6788,
      "step": 4557
    },
    {
      "epoch": 0.07075497158543262,
      "grad_norm": 0.785176157951355,
      "learning_rate": 4.8636342020611634e-05,
      "loss": 0.6853,
      "step": 4588
    },
    {
      "epoch": 0.07123304571776662,
      "grad_norm": 0.8102776408195496,
      "learning_rate": 4.860885088159626e-05,
      "loss": 0.6794,
      "step": 4619
    },
    {
      "epoch": 0.07171111985010063,
      "grad_norm": 0.9685975313186646,
      "learning_rate": 4.858109331990751e-05,
      "loss": 0.6823,
      "step": 4650
    },
    {
      "epoch": 0.07218919398243463,
      "grad_norm": 0.7929072976112366,
      "learning_rate": 4.855306964878567e-05,
      "loss": 0.6781,
      "step": 4681
    },
    {
      "epoch": 0.07266726811476863,
      "grad_norm": 0.8171564340591431,
      "learning_rate": 4.8524780184474084e-05,
      "loss": 0.6685,
      "step": 4712
    },
    {
      "epoch": 0.07314534224710265,
      "grad_norm": 0.9218119382858276,
      "learning_rate": 4.8496225246215496e-05,
      "loss": 0.6796,
      "step": 4743
    },
    {
      "epoch": 0.07362341637943665,
      "grad_norm": 3.1681158542633057,
      "learning_rate": 4.8467405156248505e-05,
      "loss": 0.676,
      "step": 4774
    },
    {
      "epoch": 0.07410149051177065,
      "grad_norm": 0.9336240887641907,
      "learning_rate": 4.843832023980392e-05,
      "loss": 0.6734,
      "step": 4805
    },
    {
      "epoch": 0.07457956464410465,
      "grad_norm": 0.8264138102531433,
      "learning_rate": 4.840897082510106e-05,
      "loss": 0.6905,
      "step": 4836
    },
    {
      "epoch": 0.07505763877643866,
      "grad_norm": 0.819663405418396,
      "learning_rate": 4.8379357243344084e-05,
      "loss": 0.6814,
      "step": 4867
    },
    {
      "epoch": 0.07553571290877266,
      "grad_norm": 3.124502420425415,
      "learning_rate": 4.8349479828718236e-05,
      "loss": 0.6704,
      "step": 4898
    },
    {
      "epoch": 0.07601378704110666,
      "grad_norm": 0.7769860625267029,
      "learning_rate": 4.8319338918386075e-05,
      "loss": 0.6728,
      "step": 4929
    },
    {
      "epoch": 0.07649186117344067,
      "grad_norm": 0.8232171535491943,
      "learning_rate": 4.828893485248369e-05,
      "loss": 0.6798,
      "step": 4960
    },
    {
      "epoch": 0.07696993530577467,
      "grad_norm": 0.8771420121192932,
      "learning_rate": 4.825826797411682e-05,
      "loss": 0.6722,
      "step": 4991
    },
    {
      "epoch": 0.07744800943810867,
      "grad_norm": 0.8321033716201782,
      "learning_rate": 4.822733862935702e-05,
      "loss": 0.6724,
      "step": 5022
    },
    {
      "epoch": 0.07792608357044269,
      "grad_norm": 0.9346029162406921,
      "learning_rate": 4.819614716723775e-05,
      "loss": 0.6764,
      "step": 5053
    },
    {
      "epoch": 0.07840415770277669,
      "grad_norm": 0.9193580150604248,
      "learning_rate": 4.8164693939750425e-05,
      "loss": 0.6789,
      "step": 5084
    },
    {
      "epoch": 0.07888223183511069,
      "grad_norm": 1.0850661993026733,
      "learning_rate": 4.813297930184042e-05,
      "loss": 0.6685,
      "step": 5115
    },
    {
      "epoch": 0.0793603059674447,
      "grad_norm": 0.6851856708526611,
      "learning_rate": 4.810100361140314e-05,
      "loss": 0.6621,
      "step": 5146
    },
    {
      "epoch": 0.0798383800997787,
      "grad_norm": 0.7950114607810974,
      "learning_rate": 4.8068767229279885e-05,
      "loss": 0.6642,
      "step": 5177
    },
    {
      "epoch": 0.0803164542321127,
      "grad_norm": 0.9632556438446045,
      "learning_rate": 4.8036270519253854e-05,
      "loss": 0.6628,
      "step": 5208
    },
    {
      "epoch": 0.0807945283644467,
      "grad_norm": 0.7066652178764343,
      "learning_rate": 4.8003513848046e-05,
      "loss": 0.6642,
      "step": 5239
    },
    {
      "epoch": 0.08127260249678071,
      "grad_norm": 0.7749651670455933,
      "learning_rate": 4.79704975853109e-05,
      "loss": 0.6633,
      "step": 5270
    },
    {
      "epoch": 0.08175067662911471,
      "grad_norm": 1.702022671699524,
      "learning_rate": 4.793722210363262e-05,
      "loss": 0.6745,
      "step": 5301
    },
    {
      "epoch": 0.08222875076144871,
      "grad_norm": 0.8134759068489075,
      "learning_rate": 4.7903687778520414e-05,
      "loss": 0.659,
      "step": 5332
    },
    {
      "epoch": 0.08270682489378273,
      "grad_norm": 0.8805097341537476,
      "learning_rate": 4.7869894988404593e-05,
      "loss": 0.6779,
      "step": 5363
    },
    {
      "epoch": 0.08318489902611673,
      "grad_norm": 0.9312698841094971,
      "learning_rate": 4.783584411463221e-05,
      "loss": 0.6634,
      "step": 5394
    },
    {
      "epoch": 0.08366297315845073,
      "grad_norm": 0.7013948559761047,
      "learning_rate": 4.780153554146274e-05,
      "loss": 0.6589,
      "step": 5425
    },
    {
      "epoch": 0.08414104729078474,
      "grad_norm": 0.8027428388595581,
      "learning_rate": 4.7766969656063766e-05,
      "loss": 0.6603,
      "step": 5456
    },
    {
      "epoch": 0.08461912142311874,
      "grad_norm": 1.0179306268692017,
      "learning_rate": 4.773214684850662e-05,
      "loss": 0.6595,
      "step": 5487
    },
    {
      "epoch": 0.08509719555545274,
      "grad_norm": 0.8512592911720276,
      "learning_rate": 4.769706751176193e-05,
      "loss": 0.6659,
      "step": 5518
    },
    {
      "epoch": 0.08557526968778675,
      "grad_norm": 0.8476304411888123,
      "learning_rate": 4.7661732041695264e-05,
      "loss": 0.6658,
      "step": 5549
    },
    {
      "epoch": 0.08605334382012075,
      "grad_norm": 0.771584153175354,
      "learning_rate": 4.762614083706258e-05,
      "loss": 0.6622,
      "step": 5580
    },
    {
      "epoch": 0.08653141795245475,
      "grad_norm": 0.8719794154167175,
      "learning_rate": 4.759029429950581e-05,
      "loss": 0.661,
      "step": 5611
    },
    {
      "epoch": 0.08700949208478877,
      "grad_norm": 0.9247443675994873,
      "learning_rate": 4.7554192833548235e-05,
      "loss": 0.6475,
      "step": 5642
    },
    {
      "epoch": 0.08748756621712277,
      "grad_norm": 0.9540857672691345,
      "learning_rate": 4.751783684659e-05,
      "loss": 0.662,
      "step": 5673
    },
    {
      "epoch": 0.08796564034945677,
      "grad_norm": 0.7939008474349976,
      "learning_rate": 4.748122674890348e-05,
      "loss": 0.6514,
      "step": 5704
    },
    {
      "epoch": 0.08844371448179077,
      "grad_norm": 0.613531768321991,
      "learning_rate": 4.7444362953628654e-05,
      "loss": 0.6513,
      "step": 5735
    },
    {
      "epoch": 0.08892178861412478,
      "grad_norm": 0.8084924817085266,
      "learning_rate": 4.7407245876768424e-05,
      "loss": 0.6465,
      "step": 5766
    },
    {
      "epoch": 0.08939986274645878,
      "grad_norm": 0.7709007263183594,
      "learning_rate": 4.736987593718397e-05,
      "loss": 0.6618,
      "step": 5797
    },
    {
      "epoch": 0.08987793687879278,
      "grad_norm": 0.6461811065673828,
      "learning_rate": 4.733225355658999e-05,
      "loss": 0.6516,
      "step": 5828
    },
    {
      "epoch": 0.0903560110111268,
      "grad_norm": 0.6879326105117798,
      "learning_rate": 4.7294379159549926e-05,
      "loss": 0.655,
      "step": 5859
    },
    {
      "epoch": 0.0908340851434608,
      "grad_norm": 0.7594075798988342,
      "learning_rate": 4.725625317347119e-05,
      "loss": 0.655,
      "step": 5890
    },
    {
      "epoch": 0.0913121592757948,
      "grad_norm": 0.774758517742157,
      "learning_rate": 4.7217876028600374e-05,
      "loss": 0.6697,
      "step": 5921
    },
    {
      "epoch": 0.09179023340812881,
      "grad_norm": 0.9164844155311584,
      "learning_rate": 4.717924815801832e-05,
      "loss": 0.6483,
      "step": 5952
    },
    {
      "epoch": 0.09226830754046281,
      "grad_norm": 0.792631208896637,
      "learning_rate": 4.714036999763532e-05,
      "loss": 0.6644,
      "step": 5983
    },
    {
      "epoch": 0.09274638167279681,
      "grad_norm": 0.7735174298286438,
      "learning_rate": 4.7101241986186116e-05,
      "loss": 0.6545,
      "step": 6014
    },
    {
      "epoch": 0.09322445580513082,
      "grad_norm": 0.725825309753418,
      "learning_rate": 4.7061864565225e-05,
      "loss": 0.6579,
      "step": 6045
    },
    {
      "epoch": 0.09370252993746482,
      "grad_norm": 0.7191294431686401,
      "learning_rate": 4.702223817912081e-05,
      "loss": 0.6509,
      "step": 6076
    },
    {
      "epoch": 0.09418060406979882,
      "grad_norm": 0.8470766544342041,
      "learning_rate": 4.698236327505195e-05,
      "loss": 0.6515,
      "step": 6107
    },
    {
      "epoch": 0.09465867820213283,
      "grad_norm": 0.7250267863273621,
      "learning_rate": 4.694224030300127e-05,
      "loss": 0.6484,
      "step": 6138
    },
    {
      "epoch": 0.09513675233446683,
      "grad_norm": 0.7370029091835022,
      "learning_rate": 4.690186971575107e-05,
      "loss": 0.6639,
      "step": 6169
    },
    {
      "epoch": 0.09561482646680083,
      "grad_norm": 0.7768808007240295,
      "learning_rate": 4.6861251968877916e-05,
      "loss": 0.657,
      "step": 6200
    },
    {
      "epoch": 0.09609290059913483,
      "grad_norm": 0.770039439201355,
      "learning_rate": 4.68203875207476e-05,
      "loss": 0.6588,
      "step": 6231
    },
    {
      "epoch": 0.09657097473146885,
      "grad_norm": 0.637853741645813,
      "learning_rate": 4.677927683250983e-05,
      "loss": 0.6438,
      "step": 6262
    },
    {
      "epoch": 0.09704904886380285,
      "grad_norm": 0.6896365284919739,
      "learning_rate": 4.6737920368093156e-05,
      "loss": 0.6493,
      "step": 6293
    },
    {
      "epoch": 0.09752712299613685,
      "grad_norm": 0.721318781375885,
      "learning_rate": 4.669631859419965e-05,
      "loss": 0.6427,
      "step": 6324
    },
    {
      "epoch": 0.09800519712847086,
      "grad_norm": 0.7492154836654663,
      "learning_rate": 4.6654471980299676e-05,
      "loss": 0.6474,
      "step": 6355
    },
    {
      "epoch": 0.09848327126080486,
      "grad_norm": 0.8512872457504272,
      "learning_rate": 4.661238099862658e-05,
      "loss": 0.6423,
      "step": 6386
    },
    {
      "epoch": 0.09896134539313886,
      "grad_norm": 0.6349690556526184,
      "learning_rate": 4.657004612417138e-05,
      "loss": 0.6499,
      "step": 6417
    },
    {
      "epoch": 0.09943941952547287,
      "grad_norm": 0.8040255904197693,
      "learning_rate": 4.6527467834677374e-05,
      "loss": 0.6495,
      "step": 6448
    },
    {
      "epoch": 0.09991749365780687,
      "grad_norm": 0.6428426504135132,
      "learning_rate": 4.648464661063478e-05,
      "loss": 0.6565,
      "step": 6479
    },
    {
      "epoch": 0.10039556779014087,
      "grad_norm": 0.8141711950302124,
      "learning_rate": 4.6441582935275264e-05,
      "loss": 0.6592,
      "step": 6510
    },
    {
      "epoch": 0.10087364192247489,
      "grad_norm": 0.8665099740028381,
      "learning_rate": 4.6398277294566586e-05,
      "loss": 0.6497,
      "step": 6541
    },
    {
      "epoch": 0.10135171605480889,
      "grad_norm": 0.709786057472229,
      "learning_rate": 4.6354730177207e-05,
      "loss": 0.6546,
      "step": 6572
    },
    {
      "epoch": 0.10182979018714289,
      "grad_norm": 0.8345597982406616,
      "learning_rate": 4.6310942074619787e-05,
      "loss": 0.6547,
      "step": 6603
    },
    {
      "epoch": 0.10230786431947689,
      "grad_norm": 0.746769905090332,
      "learning_rate": 4.626691348094777e-05,
      "loss": 0.6526,
      "step": 6634
    },
    {
      "epoch": 0.1027859384518109,
      "grad_norm": 0.7869583368301392,
      "learning_rate": 4.622264489304762e-05,
      "loss": 0.6458,
      "step": 6665
    },
    {
      "epoch": 0.1032640125841449,
      "grad_norm": 0.5438331365585327,
      "learning_rate": 4.617813681048434e-05,
      "loss": 0.6468,
      "step": 6696
    },
    {
      "epoch": 0.1037420867164789,
      "grad_norm": 0.7230411171913147,
      "learning_rate": 4.61333897355256e-05,
      "loss": 0.6435,
      "step": 6727
    },
    {
      "epoch": 0.10422016084881291,
      "grad_norm": 0.9861361384391785,
      "learning_rate": 4.608840417313604e-05,
      "loss": 0.6488,
      "step": 6758
    },
    {
      "epoch": 0.10469823498114691,
      "grad_norm": 0.7873183488845825,
      "learning_rate": 4.6043180630971646e-05,
      "loss": 0.6485,
      "step": 6789
    },
    {
      "epoch": 0.10517630911348091,
      "grad_norm": 0.7746345400810242,
      "learning_rate": 4.599771961937391e-05,
      "loss": 0.6384,
      "step": 6820
    },
    {
      "epoch": 0.10565438324581493,
      "grad_norm": 0.8218130469322205,
      "learning_rate": 4.5952021651364204e-05,
      "loss": 0.6434,
      "step": 6851
    },
    {
      "epoch": 0.10613245737814893,
      "grad_norm": 0.7047727108001709,
      "learning_rate": 4.590608724263786e-05,
      "loss": 0.6562,
      "step": 6882
    },
    {
      "epoch": 0.10661053151048293,
      "grad_norm": 0.9560827612876892,
      "learning_rate": 4.585991691155845e-05,
      "loss": 0.6422,
      "step": 6913
    },
    {
      "epoch": 0.10708860564281694,
      "grad_norm": 1.1834834814071655,
      "learning_rate": 4.581351117915188e-05,
      "loss": 0.6525,
      "step": 6944
    },
    {
      "epoch": 0.10756667977515094,
      "grad_norm": 0.8007238507270813,
      "learning_rate": 4.5766870569100534e-05,
      "loss": 0.6575,
      "step": 6975
    },
    {
      "epoch": 0.10804475390748494,
      "grad_norm": 0.6816396713256836,
      "learning_rate": 4.571999560773736e-05,
      "loss": 0.6453,
      "step": 7006
    },
    {
      "epoch": 0.10852282803981896,
      "grad_norm": 0.7240257859230042,
      "learning_rate": 4.5672886824039915e-05,
      "loss": 0.6542,
      "step": 7037
    },
    {
      "epoch": 0.10900090217215296,
      "grad_norm": 0.6335741877555847,
      "learning_rate": 4.5625544749624435e-05,
      "loss": 0.6449,
      "step": 7068
    },
    {
      "epoch": 0.10947897630448696,
      "grad_norm": 0.6071763038635254,
      "learning_rate": 4.5577969918739794e-05,
      "loss": 0.6572,
      "step": 7099
    },
    {
      "epoch": 0.10995705043682096,
      "grad_norm": 0.8242653608322144,
      "learning_rate": 4.5530162868261486e-05,
      "loss": 0.6483,
      "step": 7130
    },
    {
      "epoch": 0.11043512456915497,
      "grad_norm": 0.7106221318244934,
      "learning_rate": 4.548212413768558e-05,
      "loss": 0.6376,
      "step": 7161
    },
    {
      "epoch": 0.11091319870148897,
      "grad_norm": 0.6587203145027161,
      "learning_rate": 4.543385426912261e-05,
      "loss": 0.6426,
      "step": 7192
    },
    {
      "epoch": 0.11139127283382297,
      "grad_norm": 0.8035815954208374,
      "learning_rate": 4.53853538072915e-05,
      "loss": 0.6428,
      "step": 7223
    },
    {
      "epoch": 0.11186934696615698,
      "grad_norm": 0.6661431193351746,
      "learning_rate": 4.533662329951336e-05,
      "loss": 0.6431,
      "step": 7254
    },
    {
      "epoch": 0.11234742109849098,
      "grad_norm": 0.5891786813735962,
      "learning_rate": 4.528766329570536e-05,
      "loss": 0.6356,
      "step": 7285
    },
    {
      "epoch": 0.11282549523082498,
      "grad_norm": 0.7475966811180115,
      "learning_rate": 4.523847434837447e-05,
      "loss": 0.6348,
      "step": 7316
    },
    {
      "epoch": 0.113303569363159,
      "grad_norm": 0.7461502552032471,
      "learning_rate": 4.518905701261128e-05,
      "loss": 0.639,
      "step": 7347
    },
    {
      "epoch": 0.113781643495493,
      "grad_norm": 0.7351141571998596,
      "learning_rate": 4.5139411846083715e-05,
      "loss": 0.6473,
      "step": 7378
    },
    {
      "epoch": 0.114259717627827,
      "grad_norm": 0.7713771462440491,
      "learning_rate": 4.508953940903073e-05,
      "loss": 0.6433,
      "step": 7409
    },
    {
      "epoch": 0.11473779176016101,
      "grad_norm": 0.639986515045166,
      "learning_rate": 4.5039440264255994e-05,
      "loss": 0.6411,
      "step": 7440
    },
    {
      "epoch": 0.11521586589249501,
      "grad_norm": 0.8399244546890259,
      "learning_rate": 4.498911497712155e-05,
      "loss": 0.6421,
      "step": 7471
    },
    {
      "epoch": 0.11569394002482901,
      "grad_norm": 0.7681542634963989,
      "learning_rate": 4.493856411554142e-05,
      "loss": 0.6371,
      "step": 7502
    },
    {
      "epoch": 0.11617201415716301,
      "grad_norm": 0.6907945275306702,
      "learning_rate": 4.4887788249975206e-05,
      "loss": 0.6446,
      "step": 7533
    },
    {
      "epoch": 0.11665008828949702,
      "grad_norm": 0.7653492093086243,
      "learning_rate": 4.4836787953421656e-05,
      "loss": 0.6407,
      "step": 7564
    },
    {
      "epoch": 0.11712816242183102,
      "grad_norm": 0.6535690426826477,
      "learning_rate": 4.478556380141218e-05,
      "loss": 0.6388,
      "step": 7595
    },
    {
      "epoch": 0.11760623655416502,
      "grad_norm": 2.6184701919555664,
      "learning_rate": 4.4734116372004375e-05,
      "loss": 0.6382,
      "step": 7626
    },
    {
      "epoch": 0.11808431068649904,
      "grad_norm": 0.7846981287002563,
      "learning_rate": 4.4682446245775477e-05,
      "loss": 0.6364,
      "step": 7657
    },
    {
      "epoch": 0.11856238481883304,
      "grad_norm": 0.8151688575744629,
      "learning_rate": 4.463055400581586e-05,
      "loss": 0.6427,
      "step": 7688
    },
    {
      "epoch": 0.11904045895116704,
      "grad_norm": 0.7663447856903076,
      "learning_rate": 4.4578440237722374e-05,
      "loss": 0.643,
      "step": 7719
    },
    {
      "epoch": 0.11951853308350105,
      "grad_norm": 0.7225657105445862,
      "learning_rate": 4.452610552959183e-05,
      "loss": 0.6395,
      "step": 7750
    },
    {
      "epoch": 0.11999660721583505,
      "grad_norm": 0.7033439874649048,
      "learning_rate": 4.447355047201428e-05,
      "loss": 0.6354,
      "step": 7781
    },
    {
      "epoch": 0.12047468134816905,
      "grad_norm": 0.7693649530410767,
      "learning_rate": 4.4420775658066414e-05,
      "loss": 0.6436,
      "step": 7812
    },
    {
      "epoch": 0.12095275548050306,
      "grad_norm": 0.7597894668579102,
      "learning_rate": 4.436778168330484e-05,
      "loss": 0.6411,
      "step": 7843
    },
    {
      "epoch": 0.12143082961283706,
      "grad_norm": 0.7899183630943298,
      "learning_rate": 4.4314569145759353e-05,
      "loss": 0.6381,
      "step": 7874
    },
    {
      "epoch": 0.12190890374517106,
      "grad_norm": 0.7656906247138977,
      "learning_rate": 4.42611386459262e-05,
      "loss": 0.6386,
      "step": 7905
    },
    {
      "epoch": 0.12238697787750508,
      "grad_norm": 0.8353867530822754,
      "learning_rate": 4.420749078676133e-05,
      "loss": 0.6386,
      "step": 7936
    },
    {
      "epoch": 0.12286505200983908,
      "grad_norm": 0.6712770462036133,
      "learning_rate": 4.4153626173673516e-05,
      "loss": 0.6438,
      "step": 7967
    },
    {
      "epoch": 0.12334312614217308,
      "grad_norm": 0.8008986115455627,
      "learning_rate": 4.409954541451762e-05,
      "loss": 0.6384,
      "step": 7998
    },
    {
      "epoch": 0.12382120027450708,
      "grad_norm": 0.6632928252220154,
      "learning_rate": 4.404524911958764e-05,
      "loss": 0.629,
      "step": 8029
    },
    {
      "epoch": 0.12429927440684109,
      "grad_norm": 0.6943359971046448,
      "learning_rate": 4.399073790160989e-05,
      "loss": 0.6456,
      "step": 8060
    },
    {
      "epoch": 0.12477734853917509,
      "grad_norm": 0.6177495718002319,
      "learning_rate": 4.393601237573607e-05,
      "loss": 0.6426,
      "step": 8091
    },
    {
      "epoch": 0.1252554226715091,
      "grad_norm": 0.6089790463447571,
      "learning_rate": 4.388107315953628e-05,
      "loss": 0.6396,
      "step": 8122
    },
    {
      "epoch": 0.1257334968038431,
      "grad_norm": 0.6960969567298889,
      "learning_rate": 4.382592087299212e-05,
      "loss": 0.6293,
      "step": 8153
    },
    {
      "epoch": 0.1262115709361771,
      "grad_norm": 0.6519069075584412,
      "learning_rate": 4.377055613848964e-05,
      "loss": 0.6284,
      "step": 8184
    },
    {
      "epoch": 0.1266896450685111,
      "grad_norm": 0.6861445903778076,
      "learning_rate": 4.3714979580812355e-05,
      "loss": 0.6386,
      "step": 8215
    },
    {
      "epoch": 0.1271677192008451,
      "grad_norm": 0.7522263526916504,
      "learning_rate": 4.365919182713416e-05,
      "loss": 0.6424,
      "step": 8246
    },
    {
      "epoch": 0.1276457933331791,
      "grad_norm": 0.6868363618850708,
      "learning_rate": 4.360319350701226e-05,
      "loss": 0.6367,
      "step": 8277
    },
    {
      "epoch": 0.12812386746551313,
      "grad_norm": 0.5939040780067444,
      "learning_rate": 4.3546985252380115e-05,
      "loss": 0.6406,
      "step": 8308
    },
    {
      "epoch": 0.12860194159784713,
      "grad_norm": 0.7341668605804443,
      "learning_rate": 4.349056769754021e-05,
      "loss": 0.6346,
      "step": 8339
    },
    {
      "epoch": 0.12908001573018113,
      "grad_norm": 0.7468376755714417,
      "learning_rate": 4.3433941479156994e-05,
      "loss": 0.6398,
      "step": 8370
    },
    {
      "epoch": 0.12955808986251513,
      "grad_norm": 0.6253494024276733,
      "learning_rate": 4.3377107236249647e-05,
      "loss": 0.6375,
      "step": 8401
    },
    {
      "epoch": 0.13003616399484913,
      "grad_norm": 0.6635178923606873,
      "learning_rate": 4.332006561018488e-05,
      "loss": 0.6299,
      "step": 8432
    },
    {
      "epoch": 0.13051423812718313,
      "grad_norm": 0.584648609161377,
      "learning_rate": 4.3262817244669683e-05,
      "loss": 0.6277,
      "step": 8463
    },
    {
      "epoch": 0.13099231225951716,
      "grad_norm": 0.5907359719276428,
      "learning_rate": 4.3205362785744083e-05,
      "loss": 0.6328,
      "step": 8494
    },
    {
      "epoch": 0.13147038639185116,
      "grad_norm": 0.7864153385162354,
      "learning_rate": 4.314770288177384e-05,
      "loss": 0.6365,
      "step": 8525
    },
    {
      "epoch": 0.13194846052418516,
      "grad_norm": 0.6572133302688599,
      "learning_rate": 4.308983818344313e-05,
      "loss": 0.6312,
      "step": 8556
    },
    {
      "epoch": 0.13242653465651916,
      "grad_norm": 0.7108786702156067,
      "learning_rate": 4.3031769343747206e-05,
      "loss": 0.6355,
      "step": 8587
    },
    {
      "epoch": 0.13290460878885316,
      "grad_norm": 0.6737267374992371,
      "learning_rate": 4.297349701798505e-05,
      "loss": 0.6256,
      "step": 8618
    },
    {
      "epoch": 0.13338268292118716,
      "grad_norm": 0.6710911989212036,
      "learning_rate": 4.2915021863751916e-05,
      "loss": 0.632,
      "step": 8649
    },
    {
      "epoch": 0.13386075705352116,
      "grad_norm": 0.6295626759529114,
      "learning_rate": 4.285634454093198e-05,
      "loss": 0.6372,
      "step": 8680
    },
    {
      "epoch": 0.13433883118585518,
      "grad_norm": 0.6405526399612427,
      "learning_rate": 4.279746571169086e-05,
      "loss": 0.6364,
      "step": 8711
    },
    {
      "epoch": 0.13481690531818918,
      "grad_norm": 0.6847979426383972,
      "learning_rate": 4.2738386040468136e-05,
      "loss": 0.6246,
      "step": 8742
    },
    {
      "epoch": 0.13529497945052318,
      "grad_norm": 0.7338352799415588,
      "learning_rate": 4.2679106193969866e-05,
      "loss": 0.6373,
      "step": 8773
    },
    {
      "epoch": 0.13577305358285718,
      "grad_norm": 0.6530189514160156,
      "learning_rate": 4.261962684116106e-05,
      "loss": 0.6217,
      "step": 8804
    },
    {
      "epoch": 0.13625112771519118,
      "grad_norm": 0.6450487375259399,
      "learning_rate": 4.2559948653258145e-05,
      "loss": 0.6365,
      "step": 8835
    },
    {
      "epoch": 0.13672920184752518,
      "grad_norm": 0.706732988357544,
      "learning_rate": 4.250007230372134e-05,
      "loss": 0.6294,
      "step": 8866
    },
    {
      "epoch": 0.1372072759798592,
      "grad_norm": 0.687240481376648,
      "learning_rate": 4.2439998468247126e-05,
      "loss": 0.6332,
      "step": 8897
    },
    {
      "epoch": 0.1376853501121932,
      "grad_norm": 0.6686804890632629,
      "learning_rate": 4.2379727824760566e-05,
      "loss": 0.6334,
      "step": 8928
    },
    {
      "epoch": 0.1381634242445272,
      "grad_norm": 0.8371458649635315,
      "learning_rate": 4.231926105340768e-05,
      "loss": 0.6249,
      "step": 8959
    },
    {
      "epoch": 0.1386414983768612,
      "grad_norm": 0.7204101085662842,
      "learning_rate": 4.225859883654776e-05,
      "loss": 0.6309,
      "step": 8990
    },
    {
      "epoch": 0.1391195725091952,
      "grad_norm": 0.8539360165596008,
      "learning_rate": 4.219774185874569e-05,
      "loss": 0.6326,
      "step": 9021
    },
    {
      "epoch": 0.1395976466415292,
      "grad_norm": 0.7264754176139832,
      "learning_rate": 4.213669080676418e-05,
      "loss": 0.6342,
      "step": 9052
    },
    {
      "epoch": 0.14007572077386324,
      "grad_norm": 0.6681591868400574,
      "learning_rate": 4.2075446369556056e-05,
      "loss": 0.6295,
      "step": 9083
    },
    {
      "epoch": 0.14055379490619724,
      "grad_norm": 0.6293045282363892,
      "learning_rate": 4.201400923825648e-05,
      "loss": 0.6304,
      "step": 9114
    },
    {
      "epoch": 0.14103186903853124,
      "grad_norm": 0.6432914137840271,
      "learning_rate": 4.195238010617511e-05,
      "loss": 0.6215,
      "step": 9145
    },
    {
      "epoch": 0.14150994317086524,
      "grad_norm": 0.7524629235267639,
      "learning_rate": 4.1890559668788344e-05,
      "loss": 0.625,
      "step": 9176
    },
    {
      "epoch": 0.14198801730319924,
      "grad_norm": 0.7128170728683472,
      "learning_rate": 4.1828548623731405e-05,
      "loss": 0.6342,
      "step": 9207
    },
    {
      "epoch": 0.14246609143553324,
      "grad_norm": 0.651841938495636,
      "learning_rate": 4.1766347670790506e-05,
      "loss": 0.6303,
      "step": 9238
    },
    {
      "epoch": 0.14294416556786724,
      "grad_norm": 0.6290706396102905,
      "learning_rate": 4.170395751189495e-05,
      "loss": 0.6297,
      "step": 9269
    },
    {
      "epoch": 0.14342223970020127,
      "grad_norm": 0.6372231841087341,
      "learning_rate": 4.164137885110921e-05,
      "loss": 0.6244,
      "step": 9300
    },
    {
      "epoch": 0.14390031383253526,
      "grad_norm": 0.8424429297447205,
      "learning_rate": 4.157861239462495e-05,
      "loss": 0.6363,
      "step": 9331
    },
    {
      "epoch": 0.14437838796486926,
      "grad_norm": 0.7121688723564148,
      "learning_rate": 4.1515658850753114e-05,
      "loss": 0.63,
      "step": 9362
    },
    {
      "epoch": 0.14485646209720326,
      "grad_norm": 0.7196950316429138,
      "learning_rate": 4.145251892991588e-05,
      "loss": 0.6254,
      "step": 9393
    },
    {
      "epoch": 0.14533453622953726,
      "grad_norm": 0.6595038175582886,
      "learning_rate": 4.138919334463868e-05,
      "loss": 0.6207,
      "step": 9424
    },
    {
      "epoch": 0.14581261036187126,
      "grad_norm": 0.6118171811103821,
      "learning_rate": 4.1325682809542124e-05,
      "loss": 0.6233,
      "step": 9455
    },
    {
      "epoch": 0.1462906844942053,
      "grad_norm": 0.7232321500778198,
      "learning_rate": 4.126198804133398e-05,
      "loss": 0.6328,
      "step": 9486
    },
    {
      "epoch": 0.1467687586265393,
      "grad_norm": 0.691681981086731,
      "learning_rate": 4.1198109758801055e-05,
      "loss": 0.634,
      "step": 9517
    },
    {
      "epoch": 0.1472468327588733,
      "grad_norm": 0.6887443661689758,
      "learning_rate": 4.113404868280107e-05,
      "loss": 0.6275,
      "step": 9548
    },
    {
      "epoch": 0.1477249068912073,
      "grad_norm": 0.6932326555252075,
      "learning_rate": 4.106980553625457e-05,
      "loss": 0.6233,
      "step": 9579
    },
    {
      "epoch": 0.1482029810235413,
      "grad_norm": 0.7189227342605591,
      "learning_rate": 4.100538104413674e-05,
      "loss": 0.6229,
      "step": 9610
    },
    {
      "epoch": 0.1486810551558753,
      "grad_norm": 0.6898639798164368,
      "learning_rate": 4.09407759334692e-05,
      "loss": 0.6251,
      "step": 9641
    },
    {
      "epoch": 0.1491591292882093,
      "grad_norm": 0.6311922669410706,
      "learning_rate": 4.087599093331186e-05,
      "loss": 0.6249,
      "step": 9672
    },
    {
      "epoch": 0.14963720342054332,
      "grad_norm": 0.5781577825546265,
      "learning_rate": 4.081102677475462e-05,
      "loss": 0.6337,
      "step": 9703
    },
    {
      "epoch": 0.15011527755287732,
      "grad_norm": 0.6525286436080933,
      "learning_rate": 4.0745884190909194e-05,
      "loss": 0.6347,
      "step": 9734
    },
    {
      "epoch": 0.15059335168521132,
      "grad_norm": 0.6761188507080078,
      "learning_rate": 4.0680563916900796e-05,
      "loss": 0.6267,
      "step": 9765
    },
    {
      "epoch": 0.15107142581754532,
      "grad_norm": 0.674532413482666,
      "learning_rate": 4.0615066689859815e-05,
      "loss": 0.6303,
      "step": 9796
    },
    {
      "epoch": 0.15154949994987932,
      "grad_norm": 0.5923493504524231,
      "learning_rate": 4.0549393248913584e-05,
      "loss": 0.6241,
      "step": 9827
    },
    {
      "epoch": 0.15202757408221332,
      "grad_norm": 0.6309007406234741,
      "learning_rate": 4.048354433517794e-05,
      "loss": 0.6231,
      "step": 9858
    },
    {
      "epoch": 0.15250564821454735,
      "grad_norm": 0.7521491050720215,
      "learning_rate": 4.0417520691748916e-05,
      "loss": 0.6214,
      "step": 9889
    },
    {
      "epoch": 0.15298372234688135,
      "grad_norm": 0.8196119666099548,
      "learning_rate": 4.035132306369438e-05,
      "loss": 0.6254,
      "step": 9920
    },
    {
      "epoch": 0.15346179647921535,
      "grad_norm": 0.6895152926445007,
      "learning_rate": 4.028495219804555e-05,
      "loss": 0.6227,
      "step": 9951
    },
    {
      "epoch": 0.15393987061154935,
      "grad_norm": 0.6823534369468689,
      "learning_rate": 4.021840884378864e-05,
      "loss": 0.6281,
      "step": 9982
    },
    {
      "epoch": 0.15441794474388335,
      "grad_norm": 0.6531606912612915,
      "learning_rate": 4.015169375185633e-05,
      "loss": 0.6259,
      "step": 10013
    },
    {
      "epoch": 0.15489601887621735,
      "grad_norm": 0.5898056626319885,
      "learning_rate": 4.0084807675119396e-05,
      "loss": 0.6184,
      "step": 10044
    },
    {
      "epoch": 0.15537409300855134,
      "grad_norm": 0.6368978023529053,
      "learning_rate": 4.0017751368378106e-05,
      "loss": 0.6196,
      "step": 10075
    },
    {
      "epoch": 0.15585216714088537,
      "grad_norm": 0.8086898922920227,
      "learning_rate": 3.995052558835377e-05,
      "loss": 0.6226,
      "step": 10106
    },
    {
      "epoch": 0.15633024127321937,
      "grad_norm": 0.6620404124259949,
      "learning_rate": 3.988313109368017e-05,
      "loss": 0.6146,
      "step": 10137
    },
    {
      "epoch": 0.15680831540555337,
      "grad_norm": 0.6264041662216187,
      "learning_rate": 3.981556864489504e-05,
      "loss": 0.6254,
      "step": 10168
    },
    {
      "epoch": 0.15728638953788737,
      "grad_norm": 0.6619133353233337,
      "learning_rate": 3.974783900443142e-05,
      "loss": 0.6233,
      "step": 10199
    },
    {
      "epoch": 0.15776446367022137,
      "grad_norm": 0.6223868131637573,
      "learning_rate": 3.9679942936609095e-05,
      "loss": 0.6232,
      "step": 10230
    },
    {
      "epoch": 0.15824253780255537,
      "grad_norm": 0.7436427474021912,
      "learning_rate": 3.961188120762596e-05,
      "loss": 0.6194,
      "step": 10261
    },
    {
      "epoch": 0.1587206119348894,
      "grad_norm": 0.7166286110877991,
      "learning_rate": 3.954365458554938e-05,
      "loss": 0.6267,
      "step": 10292
    },
    {
      "epoch": 0.1591986860672234,
      "grad_norm": 0.6377813220024109,
      "learning_rate": 3.947526384030751e-05,
      "loss": 0.6145,
      "step": 10323
    },
    {
      "epoch": 0.1596767601995574,
      "grad_norm": 1.5197011232376099,
      "learning_rate": 3.9406709743680624e-05,
      "loss": 0.6261,
      "step": 10354
    },
    {
      "epoch": 0.1601548343318914,
      "grad_norm": 0.6208730340003967,
      "learning_rate": 3.9337993069292366e-05,
      "loss": 0.6201,
      "step": 10385
    },
    {
      "epoch": 0.1606329084642254,
      "grad_norm": 0.6887170076370239,
      "learning_rate": 3.926911459260109e-05,
      "loss": 0.6235,
      "step": 10416
    },
    {
      "epoch": 0.1611109825965594,
      "grad_norm": 0.6381047368049622,
      "learning_rate": 3.920007509089102e-05,
      "loss": 0.6189,
      "step": 10447
    },
    {
      "epoch": 0.1615890567288934,
      "grad_norm": 0.6249068379402161,
      "learning_rate": 3.913087534326357e-05,
      "loss": 0.6222,
      "step": 10478
    },
    {
      "epoch": 0.16206713086122743,
      "grad_norm": 0.659757137298584,
      "learning_rate": 3.9061516130628475e-05,
      "loss": 0.6345,
      "step": 10509
    },
    {
      "epoch": 0.16254520499356143,
      "grad_norm": 0.6268470883369446,
      "learning_rate": 3.8991998235695025e-05,
      "loss": 0.6197,
      "step": 10540
    },
    {
      "epoch": 0.16302327912589543,
      "grad_norm": 0.6520307660102844,
      "learning_rate": 3.8922322442963224e-05,
      "loss": 0.6122,
      "step": 10571
    },
    {
      "epoch": 0.16350135325822943,
      "grad_norm": 0.5919711589813232,
      "learning_rate": 3.885248953871491e-05,
      "loss": 0.6237,
      "step": 10602
    },
    {
      "epoch": 0.16397942739056343,
      "grad_norm": 0.6761168837547302,
      "learning_rate": 3.8782500311004915e-05,
      "loss": 0.615,
      "step": 10633
    },
    {
      "epoch": 0.16445750152289743,
      "grad_norm": 0.7123913168907166,
      "learning_rate": 3.871235554965218e-05,
      "loss": 0.6178,
      "step": 10664
    },
    {
      "epoch": 0.16493557565523145,
      "grad_norm": 0.5414535403251648,
      "learning_rate": 3.864205604623078e-05,
      "loss": 0.6319,
      "step": 10695
    },
    {
      "epoch": 0.16541364978756545,
      "grad_norm": 0.741256594657898,
      "learning_rate": 3.857160259406107e-05,
      "loss": 0.6102,
      "step": 10726
    },
    {
      "epoch": 0.16589172391989945,
      "grad_norm": 0.6938264966011047,
      "learning_rate": 3.8500995988200674e-05,
      "loss": 0.6127,
      "step": 10757
    },
    {
      "epoch": 0.16636979805223345,
      "grad_norm": 0.7152143716812134,
      "learning_rate": 3.843023702543556e-05,
      "loss": 0.6119,
      "step": 10788
    },
    {
      "epoch": 0.16684787218456745,
      "grad_norm": 0.6516321897506714,
      "learning_rate": 3.8359326504270984e-05,
      "loss": 0.6224,
      "step": 10819
    },
    {
      "epoch": 0.16732594631690145,
      "grad_norm": 0.6300469040870667,
      "learning_rate": 3.828826522492255e-05,
      "loss": 0.6229,
      "step": 10850
    },
    {
      "epoch": 0.16780402044923548,
      "grad_norm": 0.645636260509491,
      "learning_rate": 3.821705398930713e-05,
      "loss": 0.6284,
      "step": 10881
    },
    {
      "epoch": 0.16828209458156948,
      "grad_norm": 0.5895135402679443,
      "learning_rate": 3.814569360103385e-05,
      "loss": 0.6142,
      "step": 10912
    },
    {
      "epoch": 0.16876016871390348,
      "grad_norm": 0.6440445184707642,
      "learning_rate": 3.807418486539499e-05,
      "loss": 0.6158,
      "step": 10943
    },
    {
      "epoch": 0.16923824284623748,
      "grad_norm": 0.6739510893821716,
      "learning_rate": 3.80025285893569e-05,
      "loss": 0.6149,
      "step": 10974
    },
    {
      "epoch": 0.16971631697857148,
      "grad_norm": 0.627185046672821,
      "learning_rate": 3.793072558155093e-05,
      "loss": 0.614,
      "step": 11005
    },
    {
      "epoch": 0.17019439111090548,
      "grad_norm": 0.710350513458252,
      "learning_rate": 3.785877665226426e-05,
      "loss": 0.6214,
      "step": 11036
    },
    {
      "epoch": 0.17067246524323948,
      "grad_norm": 0.6784375309944153,
      "learning_rate": 3.778668261343079e-05,
      "loss": 0.6123,
      "step": 11067
    },
    {
      "epoch": 0.1711505393755735,
      "grad_norm": 0.6646184325218201,
      "learning_rate": 3.771444427862192e-05,
      "loss": 0.6178,
      "step": 11098
    },
    {
      "epoch": 0.1716286135079075,
      "grad_norm": 0.7227056622505188,
      "learning_rate": 3.7642062463037465e-05,
      "loss": 0.6158,
      "step": 11129
    },
    {
      "epoch": 0.1721066876402415,
      "grad_norm": 0.5937723517417908,
      "learning_rate": 3.7569537983496373e-05,
      "loss": 0.6194,
      "step": 11160
    },
    {
      "epoch": 0.1725847617725755,
      "grad_norm": 0.6352291703224182,
      "learning_rate": 3.749687165842753e-05,
      "loss": 0.6286,
      "step": 11191
    },
    {
      "epoch": 0.1730628359049095,
      "grad_norm": 0.5864464044570923,
      "learning_rate": 3.7424064307860536e-05,
      "loss": 0.6186,
      "step": 11222
    },
    {
      "epoch": 0.1735409100372435,
      "grad_norm": 0.5402609705924988,
      "learning_rate": 3.735111675341645e-05,
      "loss": 0.6072,
      "step": 11253
    },
    {
      "epoch": 0.17401898416957753,
      "grad_norm": 0.6429933905601501,
      "learning_rate": 3.7278029818298524e-05,
      "loss": 0.608,
      "step": 11284
    },
    {
      "epoch": 0.17449705830191153,
      "grad_norm": 0.5420451164245605,
|
"learning_rate": 3.720480432728287e-05, |
|
"loss": 0.6174, |
|
"step": 11315 |
|
}, |
|
{ |
|
"epoch": 0.17497513243424553, |
|
"grad_norm": 0.5691730380058289, |
|
"learning_rate": 3.71314411067092e-05, |
|
"loss": 0.6138, |
|
"step": 11346 |
|
}, |
|
{ |
|
"epoch": 0.17545320656657953, |
|
"grad_norm": 0.6198572516441345, |
|
"learning_rate": 3.70579409844715e-05, |
|
"loss": 0.6166, |
|
"step": 11377 |
|
}, |
|
{ |
|
"epoch": 0.17593128069891353, |
|
"grad_norm": 0.6781229972839355, |
|
"learning_rate": 3.698430479000865e-05, |
|
"loss": 0.6083, |
|
"step": 11408 |
|
}, |
|
{ |
|
"epoch": 0.17640935483124753, |
|
"grad_norm": 0.6572268605232239, |
|
"learning_rate": 3.691053335429509e-05, |
|
"loss": 0.6179, |
|
"step": 11439 |
|
}, |
|
{ |
|
"epoch": 0.17688742896358153, |
|
"grad_norm": 0.6720935702323914, |
|
"learning_rate": 3.683662750983147e-05, |
|
"loss": 0.6039, |
|
"step": 11470 |
|
}, |
|
{ |
|
"epoch": 0.17736550309591556, |
|
"grad_norm": 0.6098059415817261, |
|
"learning_rate": 3.676258809063518e-05, |
|
"loss": 0.608, |
|
"step": 11501 |
|
}, |
|
{ |
|
"epoch": 0.17784357722824956, |
|
"grad_norm": 0.6108371615409851, |
|
"learning_rate": 3.6688415932231004e-05, |
|
"loss": 0.6147, |
|
"step": 11532 |
|
}, |
|
{ |
|
"epoch": 0.17832165136058356, |
|
"grad_norm": 0.8087718486785889, |
|
"learning_rate": 3.661411187164166e-05, |
|
"loss": 0.6091, |
|
"step": 11563 |
|
}, |
|
{ |
|
"epoch": 0.17879972549291756, |
|
"grad_norm": 0.719902753829956, |
|
"learning_rate": 3.65396767473784e-05, |
|
"loss": 0.6107, |
|
"step": 11594 |
|
}, |
|
{ |
|
"epoch": 0.17927779962525156, |
|
"grad_norm": 0.5758282542228699, |
|
"learning_rate": 3.6465111399431465e-05, |
|
"loss": 0.6236, |
|
"step": 11625 |
|
}, |
|
{ |
|
"epoch": 0.17975587375758556, |
|
"grad_norm": 0.5602071285247803, |
|
"learning_rate": 3.6390416669260674e-05, |
|
"loss": 0.611, |
|
"step": 11656 |
|
}, |
|
{ |
|
"epoch": 0.1802339478899196, |
|
"grad_norm": 0.6070683002471924, |
|
"learning_rate": 3.63155933997859e-05, |
|
"loss": 0.609, |
|
"step": 11687 |
|
}, |
|
{ |
|
"epoch": 0.1807120220222536, |
|
"grad_norm": 0.64506596326828, |
|
"learning_rate": 3.624064243537758e-05, |
|
"loss": 0.605, |
|
"step": 11718 |
|
}, |
|
{ |
|
"epoch": 0.1811900961545876, |
|
"grad_norm": 0.6906920075416565, |
|
"learning_rate": 3.616556462184716e-05, |
|
"loss": 0.6168, |
|
"step": 11749 |
|
}, |
|
{ |
|
"epoch": 0.1816681702869216, |
|
"grad_norm": 0.6724758148193359, |
|
"learning_rate": 3.609036080643755e-05, |
|
"loss": 0.6143, |
|
"step": 11780 |
|
}, |
|
{ |
|
"epoch": 0.1821462444192556, |
|
"grad_norm": 0.6033251881599426, |
|
"learning_rate": 3.60150318378136e-05, |
|
"loss": 0.6161, |
|
"step": 11811 |
|
}, |
|
{ |
|
"epoch": 0.1826243185515896, |
|
"grad_norm": 0.6713966727256775, |
|
"learning_rate": 3.5939578566052465e-05, |
|
"loss": 0.619, |
|
"step": 11842 |
|
}, |
|
{ |
|
"epoch": 0.1831023926839236, |
|
"grad_norm": 0.6794861555099487, |
|
"learning_rate": 3.586400184263408e-05, |
|
"loss": 0.6116, |
|
"step": 11873 |
|
}, |
|
{ |
|
"epoch": 0.18358046681625761, |
|
"grad_norm": 0.5780847668647766, |
|
"learning_rate": 3.578830252043148e-05, |
|
"loss": 0.6159, |
|
"step": 11904 |
|
}, |
|
{ |
|
"epoch": 0.18405854094859161, |
|
"grad_norm": 0.6031593680381775, |
|
"learning_rate": 3.571248145370125e-05, |
|
"loss": 0.6135, |
|
"step": 11935 |
|
}, |
|
{ |
|
"epoch": 0.18453661508092561, |
|
"grad_norm": 0.6339271664619446, |
|
"learning_rate": 3.5636539498073794e-05, |
|
"loss": 0.6113, |
|
"step": 11966 |
|
}, |
|
{ |
|
"epoch": 0.18501468921325961, |
|
"grad_norm": 0.6663223505020142, |
|
"learning_rate": 3.556047751054378e-05, |
|
"loss": 0.6184, |
|
"step": 11997 |
|
}, |
|
{ |
|
"epoch": 0.18549276334559361, |
|
"grad_norm": 0.9799377918243408, |
|
"learning_rate": 3.548429634946039e-05, |
|
"loss": 0.6103, |
|
"step": 12028 |
|
}, |
|
{ |
|
"epoch": 0.18597083747792761, |
|
"grad_norm": 0.7260358929634094, |
|
"learning_rate": 3.540799687451768e-05, |
|
"loss": 0.6245, |
|
"step": 12059 |
|
}, |
|
{ |
|
"epoch": 0.18644891161026164, |
|
"grad_norm": 0.7097154259681702, |
|
"learning_rate": 3.533157994674485e-05, |
|
"loss": 0.6194, |
|
"step": 12090 |
|
}, |
|
{ |
|
"epoch": 0.18692698574259564, |
|
"grad_norm": 0.5897482633590698, |
|
"learning_rate": 3.5255046428496546e-05, |
|
"loss": 0.6171, |
|
"step": 12121 |
|
}, |
|
{ |
|
"epoch": 0.18740505987492964, |
|
"grad_norm": 0.6374841332435608, |
|
"learning_rate": 3.517839718344311e-05, |
|
"loss": 0.6141, |
|
"step": 12152 |
|
}, |
|
{ |
|
"epoch": 0.18788313400726364, |
|
"grad_norm": 0.6854335069656372, |
|
"learning_rate": 3.510163307656086e-05, |
|
"loss": 0.6077, |
|
"step": 12183 |
|
}, |
|
{ |
|
"epoch": 0.18836120813959764, |
|
"grad_norm": 0.6838460564613342, |
|
"learning_rate": 3.5024754974122324e-05, |
|
"loss": 0.6129, |
|
"step": 12214 |
|
}, |
|
{ |
|
"epoch": 0.18883928227193164, |
|
"grad_norm": 0.6604870557785034, |
|
"learning_rate": 3.494776374368643e-05, |
|
"loss": 0.6193, |
|
"step": 12245 |
|
}, |
|
{ |
|
"epoch": 0.18931735640426567, |
|
"grad_norm": 0.5817708373069763, |
|
"learning_rate": 3.4870660254088724e-05, |
|
"loss": 0.6086, |
|
"step": 12276 |
|
}, |
|
{ |
|
"epoch": 0.18979543053659967, |
|
"grad_norm": 0.5379335284233093, |
|
"learning_rate": 3.479344537543164e-05, |
|
"loss": 0.6088, |
|
"step": 12307 |
|
}, |
|
{ |
|
"epoch": 0.19027350466893367, |
|
"grad_norm": 0.6126767992973328, |
|
"learning_rate": 3.4716119979074565e-05, |
|
"loss": 0.6131, |
|
"step": 12338 |
|
}, |
|
{ |
|
"epoch": 0.19075157880126767, |
|
"grad_norm": 0.609724223613739, |
|
"learning_rate": 3.463868493762412e-05, |
|
"loss": 0.6106, |
|
"step": 12369 |
|
}, |
|
{ |
|
"epoch": 0.19122965293360167, |
|
"grad_norm": 0.6327598094940186, |
|
"learning_rate": 3.456114112492418e-05, |
|
"loss": 0.6134, |
|
"step": 12400 |
|
}, |
|
{ |
|
"epoch": 0.19170772706593567, |
|
"grad_norm": 0.5619271993637085, |
|
"learning_rate": 3.4483489416046164e-05, |
|
"loss": 0.6161, |
|
"step": 12431 |
|
}, |
|
{ |
|
"epoch": 0.19218580119826967, |
|
"grad_norm": 0.6915563344955444, |
|
"learning_rate": 3.440573068727905e-05, |
|
"loss": 0.6046, |
|
"step": 12462 |
|
}, |
|
{ |
|
"epoch": 0.1926638753306037, |
|
"grad_norm": 0.7094136476516724, |
|
"learning_rate": 3.4327865816119495e-05, |
|
"loss": 0.6087, |
|
"step": 12493 |
|
}, |
|
{ |
|
"epoch": 0.1931419494629377, |
|
"grad_norm": 0.627680778503418, |
|
"learning_rate": 3.4249895681262025e-05, |
|
"loss": 0.604, |
|
"step": 12524 |
|
}, |
|
{ |
|
"epoch": 0.1936200235952717, |
|
"grad_norm": 0.6224892735481262, |
|
"learning_rate": 3.417182116258899e-05, |
|
"loss": 0.6047, |
|
"step": 12555 |
|
}, |
|
{ |
|
"epoch": 0.1940980977276057, |
|
"grad_norm": 0.5795034766197205, |
|
"learning_rate": 3.409364314116074e-05, |
|
"loss": 0.6111, |
|
"step": 12586 |
|
}, |
|
{ |
|
"epoch": 0.1945761718599397, |
|
"grad_norm": 0.7537661194801331, |
|
"learning_rate": 3.401536249920559e-05, |
|
"loss": 0.6079, |
|
"step": 12617 |
|
}, |
|
{ |
|
"epoch": 0.1950542459922737, |
|
"grad_norm": 0.838455319404602, |
|
"learning_rate": 3.393698012010998e-05, |
|
"loss": 0.6036, |
|
"step": 12648 |
|
}, |
|
{ |
|
"epoch": 0.19553232012460772, |
|
"grad_norm": 0.650592029094696, |
|
"learning_rate": 3.385849688840839e-05, |
|
"loss": 0.6088, |
|
"step": 12679 |
|
}, |
|
{ |
|
"epoch": 0.19601039425694172, |
|
"grad_norm": 0.7277738451957703, |
|
"learning_rate": 3.3779913689773414e-05, |
|
"loss": 0.6073, |
|
"step": 12710 |
|
}, |
|
{ |
|
"epoch": 0.19648846838927572, |
|
"grad_norm": 0.6338685154914856, |
|
"learning_rate": 3.370123141100578e-05, |
|
"loss": 0.6085, |
|
"step": 12741 |
|
}, |
|
{ |
|
"epoch": 0.19696654252160972, |
|
"grad_norm": 0.5530194044113159, |
|
"learning_rate": 3.3622450940024305e-05, |
|
"loss": 0.6063, |
|
"step": 12772 |
|
}, |
|
{ |
|
"epoch": 0.19744461665394372, |
|
"grad_norm": 0.634303867816925, |
|
"learning_rate": 3.35435731658559e-05, |
|
"loss": 0.6036, |
|
"step": 12803 |
|
}, |
|
{ |
|
"epoch": 0.19792269078627772, |
|
"grad_norm": 0.6769723892211914, |
|
"learning_rate": 3.346459897862552e-05, |
|
"loss": 0.6094, |
|
"step": 12834 |
|
}, |
|
{ |
|
"epoch": 0.19840076491861172, |
|
"grad_norm": 0.5914043188095093, |
|
"learning_rate": 3.338552926954613e-05, |
|
"loss": 0.6093, |
|
"step": 12865 |
|
}, |
|
{ |
|
"epoch": 0.19887883905094575, |
|
"grad_norm": 0.6945207118988037, |
|
"learning_rate": 3.330636493090868e-05, |
|
"loss": 0.609, |
|
"step": 12896 |
|
}, |
|
{ |
|
"epoch": 0.19935691318327975, |
|
"grad_norm": 0.7246667742729187, |
|
"learning_rate": 3.322710685607193e-05, |
|
"loss": 0.6245, |
|
"step": 12927 |
|
}, |
|
{ |
|
"epoch": 0.19983498731561375, |
|
"grad_norm": 0.5429117679595947, |
|
"learning_rate": 3.314775593945251e-05, |
|
"loss": 0.6112, |
|
"step": 12958 |
|
}, |
|
{ |
|
"epoch": 0.20031306144794775, |
|
"grad_norm": 0.5964083671569824, |
|
"learning_rate": 3.3068313076514714e-05, |
|
"loss": 0.6037, |
|
"step": 12989 |
|
}, |
|
{ |
|
"epoch": 0.20079113558028175, |
|
"grad_norm": 0.6770813465118408, |
|
"learning_rate": 3.298877916376047e-05, |
|
"loss": 0.6046, |
|
"step": 13020 |
|
}, |
|
{ |
|
"epoch": 0.20126920971261575, |
|
"grad_norm": 0.5819186568260193, |
|
"learning_rate": 3.290915509871915e-05, |
|
"loss": 0.6102, |
|
"step": 13051 |
|
}, |
|
{ |
|
"epoch": 0.20174728384494978, |
|
"grad_norm": 0.6465575098991394, |
|
"learning_rate": 3.282944177993753e-05, |
|
"loss": 0.6082, |
|
"step": 13082 |
|
}, |
|
{ |
|
"epoch": 0.20222535797728378, |
|
"grad_norm": 0.607782244682312, |
|
"learning_rate": 3.274964010696957e-05, |
|
"loss": 0.6067, |
|
"step": 13113 |
|
}, |
|
{ |
|
"epoch": 0.20270343210961778, |
|
"grad_norm": 0.6200498938560486, |
|
"learning_rate": 3.266975098036629e-05, |
|
"loss": 0.6096, |
|
"step": 13144 |
|
}, |
|
{ |
|
"epoch": 0.20318150624195178, |
|
"grad_norm": 0.6002140045166016, |
|
"learning_rate": 3.258977530166562e-05, |
|
"loss": 0.6029, |
|
"step": 13175 |
|
}, |
|
{ |
|
"epoch": 0.20365958037428578, |
|
"grad_norm": 0.6123709082603455, |
|
"learning_rate": 3.250971397338227e-05, |
|
"loss": 0.6067, |
|
"step": 13206 |
|
}, |
|
{ |
|
"epoch": 0.20413765450661978, |
|
"grad_norm": 0.5950725078582764, |
|
"learning_rate": 3.2429567898997404e-05, |
|
"loss": 0.6123, |
|
"step": 13237 |
|
}, |
|
{ |
|
"epoch": 0.20461572863895378, |
|
"grad_norm": 0.5370542407035828, |
|
"learning_rate": 3.234933798294859e-05, |
|
"loss": 0.6037, |
|
"step": 13268 |
|
}, |
|
{ |
|
"epoch": 0.2050938027712878, |
|
"grad_norm": 0.6526908874511719, |
|
"learning_rate": 3.2269025130619535e-05, |
|
"loss": 0.6093, |
|
"step": 13299 |
|
}, |
|
{ |
|
"epoch": 0.2055718769036218, |
|
"grad_norm": 0.6925700306892395, |
|
"learning_rate": 3.218863024832985e-05, |
|
"loss": 0.6077, |
|
"step": 13330 |
|
}, |
|
{ |
|
"epoch": 0.2060499510359558, |
|
"grad_norm": 0.6531749367713928, |
|
"learning_rate": 3.2108154243324864e-05, |
|
"loss": 0.6126, |
|
"step": 13361 |
|
}, |
|
{ |
|
"epoch": 0.2065280251682898, |
|
"grad_norm": 0.7173174023628235, |
|
"learning_rate": 3.2027598023765345e-05, |
|
"loss": 0.5991, |
|
"step": 13392 |
|
}, |
|
{ |
|
"epoch": 0.2070060993006238, |
|
"grad_norm": 0.7028422355651855, |
|
"learning_rate": 3.194696249871729e-05, |
|
"loss": 0.6037, |
|
"step": 13423 |
|
}, |
|
{ |
|
"epoch": 0.2074841734329578, |
|
"grad_norm": 0.5837274193763733, |
|
"learning_rate": 3.186624857814164e-05, |
|
"loss": 0.6135, |
|
"step": 13454 |
|
}, |
|
{ |
|
"epoch": 0.20796224756529183, |
|
"grad_norm": 0.6421070098876953, |
|
"learning_rate": 3.178545717288401e-05, |
|
"loss": 0.6095, |
|
"step": 13485 |
|
}, |
|
{ |
|
"epoch": 0.20844032169762583, |
|
"grad_norm": 0.6141335964202881, |
|
"learning_rate": 3.170458919466444e-05, |
|
"loss": 0.6161, |
|
"step": 13516 |
|
}, |
|
{ |
|
"epoch": 0.20891839582995983, |
|
"grad_norm": 0.6881201267242432, |
|
"learning_rate": 3.1623645556067063e-05, |
|
"loss": 0.6053, |
|
"step": 13547 |
|
}, |
|
{ |
|
"epoch": 0.20939646996229383, |
|
"grad_norm": 0.655423104763031, |
|
"learning_rate": 3.154262717052985e-05, |
|
"loss": 0.6037, |
|
"step": 13578 |
|
}, |
|
{ |
|
"epoch": 0.20987454409462783, |
|
"grad_norm": 0.5702130198478699, |
|
"learning_rate": 3.146153495233426e-05, |
|
"loss": 0.603, |
|
"step": 13609 |
|
}, |
|
{ |
|
"epoch": 0.21035261822696183, |
|
"grad_norm": 0.6374569535255432, |
|
"learning_rate": 3.1380369816594944e-05, |
|
"loss": 0.6048, |
|
"step": 13640 |
|
}, |
|
{ |
|
"epoch": 0.21083069235929583, |
|
"grad_norm": 0.6103787422180176, |
|
"learning_rate": 3.129913267924946e-05, |
|
"loss": 0.608, |
|
"step": 13671 |
|
}, |
|
{ |
|
"epoch": 0.21130876649162986, |
|
"grad_norm": 0.6299352049827576, |
|
"learning_rate": 3.121782445704782e-05, |
|
"loss": 0.5993, |
|
"step": 13702 |
|
}, |
|
{ |
|
"epoch": 0.21178684062396386, |
|
"grad_norm": 0.6262964010238647, |
|
"learning_rate": 3.11364460675423e-05, |
|
"loss": 0.6085, |
|
"step": 13733 |
|
}, |
|
{ |
|
"epoch": 0.21226491475629786, |
|
"grad_norm": 0.5756493210792542, |
|
"learning_rate": 3.1054998429076934e-05, |
|
"loss": 0.6051, |
|
"step": 13764 |
|
}, |
|
{ |
|
"epoch": 0.21274298888863186, |
|
"grad_norm": 0.5393940806388855, |
|
"learning_rate": 3.097348246077728e-05, |
|
"loss": 0.6072, |
|
"step": 13795 |
|
}, |
|
{ |
|
"epoch": 0.21322106302096586, |
|
"grad_norm": 0.5831958651542664, |
|
"learning_rate": 3.0891899082539924e-05, |
|
"loss": 0.6015, |
|
"step": 13826 |
|
}, |
|
{ |
|
"epoch": 0.21369913715329986, |
|
"grad_norm": 0.6049664616584778, |
|
"learning_rate": 3.0810249215022233e-05, |
|
"loss": 0.6136, |
|
"step": 13857 |
|
}, |
|
{ |
|
"epoch": 0.21417721128563388, |
|
"grad_norm": 0.6627909541130066, |
|
"learning_rate": 3.0728533779631865e-05, |
|
"loss": 0.6027, |
|
"step": 13888 |
|
}, |
|
{ |
|
"epoch": 0.21465528541796788, |
|
"grad_norm": 0.568748950958252, |
|
"learning_rate": 3.064675369851637e-05, |
|
"loss": 0.601, |
|
"step": 13919 |
|
}, |
|
{ |
|
"epoch": 0.21513335955030188, |
|
"grad_norm": 0.6373331546783447, |
|
"learning_rate": 3.056490989455289e-05, |
|
"loss": 0.606, |
|
"step": 13950 |
|
}, |
|
{ |
|
"epoch": 0.21561143368263588, |
|
"grad_norm": 0.5549852252006531, |
|
"learning_rate": 3.0483003291337596e-05, |
|
"loss": 0.5931, |
|
"step": 13981 |
|
}, |
|
{ |
|
"epoch": 0.21608950781496988, |
|
"grad_norm": 0.5983837246894836, |
|
"learning_rate": 3.040103481317539e-05, |
|
"loss": 0.6023, |
|
"step": 14012 |
|
}, |
|
{ |
|
"epoch": 0.21656758194730388, |
|
"grad_norm": 0.6063233017921448, |
|
"learning_rate": 3.03190053850694e-05, |
|
"loss": 0.5974, |
|
"step": 14043 |
|
}, |
|
{ |
|
"epoch": 0.2170456560796379, |
|
"grad_norm": 3.3559274673461914, |
|
"learning_rate": 3.0236915932710573e-05, |
|
"loss": 0.6057, |
|
"step": 14074 |
|
}, |
|
{ |
|
"epoch": 0.2175237302119719, |
|
"grad_norm": 0.5984488725662231, |
|
"learning_rate": 3.0154767382467232e-05, |
|
"loss": 0.5953, |
|
"step": 14105 |
|
}, |
|
{ |
|
"epoch": 0.2180018043443059, |
|
"grad_norm": 0.5625097751617432, |
|
"learning_rate": 3.0072560661374582e-05, |
|
"loss": 0.6029, |
|
"step": 14136 |
|
}, |
|
{ |
|
"epoch": 0.2184798784766399, |
|
"grad_norm": 0.6621974110603333, |
|
"learning_rate": 2.999029669712431e-05, |
|
"loss": 0.6042, |
|
"step": 14167 |
|
}, |
|
{ |
|
"epoch": 0.2189579526089739, |
|
"grad_norm": 0.7209954857826233, |
|
"learning_rate": 2.990797641805408e-05, |
|
"loss": 0.6069, |
|
"step": 14198 |
|
}, |
|
{ |
|
"epoch": 0.2194360267413079, |
|
"grad_norm": 0.4810869097709656, |
|
"learning_rate": 2.982560075313704e-05, |
|
"loss": 0.6131, |
|
"step": 14229 |
|
}, |
|
{ |
|
"epoch": 0.2199141008736419, |
|
"grad_norm": 0.6061887145042419, |
|
"learning_rate": 2.9743170631971368e-05, |
|
"loss": 0.6012, |
|
"step": 14260 |
|
}, |
|
{ |
|
"epoch": 0.22039217500597594, |
|
"grad_norm": 0.5912594795227051, |
|
"learning_rate": 2.9660686984769792e-05, |
|
"loss": 0.6042, |
|
"step": 14291 |
|
}, |
|
{ |
|
"epoch": 0.22087024913830994, |
|
"grad_norm": 0.5847635269165039, |
|
"learning_rate": 2.9578150742349047e-05, |
|
"loss": 0.5945, |
|
"step": 14322 |
|
}, |
|
{ |
|
"epoch": 0.22134832327064394, |
|
"grad_norm": 0.617660403251648, |
|
"learning_rate": 2.949556283611942e-05, |
|
"loss": 0.6008, |
|
"step": 14353 |
|
}, |
|
{ |
|
"epoch": 0.22182639740297794, |
|
"grad_norm": 0.6549617052078247, |
|
"learning_rate": 2.9412924198074206e-05, |
|
"loss": 0.6055, |
|
"step": 14384 |
|
}, |
|
{ |
|
"epoch": 0.22230447153531194, |
|
"grad_norm": 0.6084825396537781, |
|
"learning_rate": 2.9330235760779208e-05, |
|
"loss": 0.6059, |
|
"step": 14415 |
|
}, |
|
{ |
|
"epoch": 0.22278254566764594, |
|
"grad_norm": 0.619442880153656, |
|
"learning_rate": 2.9247498457362188e-05, |
|
"loss": 0.6003, |
|
"step": 14446 |
|
}, |
|
{ |
|
"epoch": 0.22326061979997996, |
|
"grad_norm": 0.5925353169441223, |
|
"learning_rate": 2.9164713221502373e-05, |
|
"loss": 0.6064, |
|
"step": 14477 |
|
}, |
|
{ |
|
"epoch": 0.22373869393231396, |
|
"grad_norm": 0.6110435724258423, |
|
"learning_rate": 2.9081880987419912e-05, |
|
"loss": 0.5992, |
|
"step": 14508 |
|
}, |
|
{ |
|
"epoch": 0.22421676806464796, |
|
"grad_norm": 0.5053293108940125, |
|
"learning_rate": 2.8999002689865296e-05, |
|
"loss": 0.5938, |
|
"step": 14539 |
|
}, |
|
{ |
|
"epoch": 0.22469484219698196, |
|
"grad_norm": 0.6076425909996033, |
|
"learning_rate": 2.8916079264108852e-05, |
|
"loss": 0.6055, |
|
"step": 14570 |
|
}, |
|
{ |
|
"epoch": 0.22517291632931596, |
|
"grad_norm": 0.6807154417037964, |
|
"learning_rate": 2.883311164593017e-05, |
|
"loss": 0.602, |
|
"step": 14601 |
|
}, |
|
{ |
|
"epoch": 0.22565099046164996, |
|
"grad_norm": 0.6125125885009766, |
|
"learning_rate": 2.875010077160754e-05, |
|
"loss": 0.6044, |
|
"step": 14632 |
|
}, |
|
{ |
|
"epoch": 0.22612906459398396, |
|
"grad_norm": 0.6103615164756775, |
|
"learning_rate": 2.866704757790741e-05, |
|
"loss": 0.5956, |
|
"step": 14663 |
|
}, |
|
{ |
|
"epoch": 0.226607138726318, |
|
"grad_norm": 0.6321137547492981, |
|
"learning_rate": 2.858395300207376e-05, |
|
"loss": 0.6033, |
|
"step": 14694 |
|
}, |
|
{ |
|
"epoch": 0.227085212858652, |
|
"grad_norm": 0.5844994783401489, |
|
"learning_rate": 2.8500817981817607e-05, |
|
"loss": 0.6015, |
|
"step": 14725 |
|
}, |
|
{ |
|
"epoch": 0.227563286990986, |
|
"grad_norm": 0.648857593536377, |
|
"learning_rate": 2.8417643455306336e-05, |
|
"loss": 0.5939, |
|
"step": 14756 |
|
}, |
|
{ |
|
"epoch": 0.22804136112332, |
|
"grad_norm": 0.6076569557189941, |
|
"learning_rate": 2.8334430361153185e-05, |
|
"loss": 0.5999, |
|
"step": 14787 |
|
}, |
|
{ |
|
"epoch": 0.228519435255654, |
|
"grad_norm": 0.5311121344566345, |
|
"learning_rate": 2.8251179638406612e-05, |
|
"loss": 0.5961, |
|
"step": 14818 |
|
}, |
|
{ |
|
"epoch": 0.228997509387988, |
|
"grad_norm": 0.6423148512840271, |
|
"learning_rate": 2.8167892226539704e-05, |
|
"loss": 0.5965, |
|
"step": 14849 |
|
}, |
|
{ |
|
"epoch": 0.22947558352032202, |
|
"grad_norm": 0.6332517266273499, |
|
"learning_rate": 2.8084569065439588e-05, |
|
"loss": 0.5934, |
|
"step": 14880 |
|
}, |
|
{ |
|
"epoch": 0.22995365765265602, |
|
"grad_norm": 0.6193598508834839, |
|
"learning_rate": 2.8001211095396807e-05, |
|
"loss": 0.6075, |
|
"step": 14911 |
|
}, |
|
{ |
|
"epoch": 0.23043173178499002, |
|
"grad_norm": 0.6870059967041016, |
|
"learning_rate": 2.791781925709473e-05, |
|
"loss": 0.6044, |
|
"step": 14942 |
|
}, |
|
{ |
|
"epoch": 0.23090980591732402, |
|
"grad_norm": 0.5324540138244629, |
|
"learning_rate": 2.7834394491598908e-05, |
|
"loss": 0.5908, |
|
"step": 14973 |
|
}, |
|
{ |
|
"epoch": 0.23138788004965802, |
|
"grad_norm": 1.536717414855957, |
|
"learning_rate": 2.7750937740346485e-05, |
|
"loss": 0.5953, |
|
"step": 15004 |
|
}, |
|
{ |
|
"epoch": 0.23186595418199202, |
|
"grad_norm": 0.550554096698761, |
|
"learning_rate": 2.7667449945135564e-05, |
|
"loss": 0.5952, |
|
"step": 15035 |
|
}, |
|
{ |
|
"epoch": 0.23234402831432602, |
|
"grad_norm": 1.174824833869934, |
|
"learning_rate": 2.7583932048114557e-05, |
|
"loss": 0.6065, |
|
"step": 15066 |
|
}, |
|
{ |
|
"epoch": 0.23282210244666005, |
|
"grad_norm": 0.6926533579826355, |
|
"learning_rate": 2.7500384991771587e-05, |
|
"loss": 0.6031, |
|
"step": 15097 |
|
}, |
|
{ |
|
"epoch": 0.23330017657899405, |
|
"grad_norm": 0.6488378047943115, |
|
"learning_rate": 2.7416809718923825e-05, |
|
"loss": 0.5977, |
|
"step": 15128 |
|
}, |
|
{ |
|
"epoch": 0.23377825071132805, |
|
"grad_norm": 0.5631073713302612, |
|
"learning_rate": 2.7333207172706864e-05, |
|
"loss": 0.6024, |
|
"step": 15159 |
|
}, |
|
{ |
|
"epoch": 0.23425632484366204, |
|
"grad_norm": 0.5837413668632507, |
|
"learning_rate": 2.7249578296564088e-05, |
|
"loss": 0.5961, |
|
"step": 15190 |
|
}, |
|
{ |
|
"epoch": 0.23473439897599604, |
|
"grad_norm": 0.5875633955001831, |
|
"learning_rate": 2.7165924034235973e-05, |
|
"loss": 0.6027, |
|
"step": 15221 |
|
}, |
|
{ |
|
"epoch": 0.23521247310833004, |
|
"grad_norm": 0.5024504661560059, |
|
"learning_rate": 2.708224532974953e-05, |
|
"loss": 0.5951, |
|
"step": 15252 |
|
}, |
|
{ |
|
"epoch": 0.23569054724066407, |
|
"grad_norm": 0.5877059698104858, |
|
"learning_rate": 2.6998543127407538e-05, |
|
"loss": 0.5953, |
|
"step": 15283 |
|
}, |
|
{ |
|
"epoch": 0.23616862137299807, |
|
"grad_norm": 0.593683660030365, |
|
"learning_rate": 2.6914818371777988e-05, |
|
"loss": 0.5921, |
|
"step": 15314 |
|
}, |
|
{ |
|
"epoch": 0.23664669550533207, |
|
"grad_norm": 0.6094300150871277, |
|
"learning_rate": 2.6831072007683373e-05, |
|
"loss": 0.6037, |
|
"step": 15345 |
|
}, |
|
{ |
|
"epoch": 0.23712476963766607, |
|
"grad_norm": 0.5903526544570923, |
|
"learning_rate": 2.6747304980190018e-05, |
|
"loss": 0.5937, |
|
"step": 15376 |
|
}, |
|
{ |
|
"epoch": 0.23760284377000007, |
|
"grad_norm": 0.5828642845153809, |
|
"learning_rate": 2.6663518234597453e-05, |
|
"loss": 0.5991, |
|
"step": 15407 |
|
}, |
|
{ |
|
"epoch": 0.23808091790233407, |
|
"grad_norm": 0.728405237197876, |
|
"learning_rate": 2.6579712716427696e-05, |
|
"loss": 0.5882, |
|
"step": 15438 |
|
}, |
|
{ |
|
"epoch": 0.2385589920346681, |
|
"grad_norm": 0.535317599773407, |
|
"learning_rate": 2.6495889371414652e-05, |
|
"loss": 0.6042, |
|
"step": 15469 |
|
}, |
|
{ |
|
"epoch": 0.2390370661670021, |
|
"grad_norm": 0.6083411574363708, |
|
"learning_rate": 2.6412049145493367e-05, |
|
"loss": 0.601, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 0.2395151402993361, |
|
"grad_norm": 0.6077481508255005, |
|
"learning_rate": 2.632819298478939e-05, |
|
"loss": 0.5932, |
|
"step": 15531 |
|
}, |
|
{ |
|
"epoch": 0.2399932144316701, |
|
"grad_norm": 0.7210131883621216, |
|
"learning_rate": 2.6244321835608105e-05, |
|
"loss": 0.602, |
|
"step": 15562 |
|
}, |
|
{ |
|
"epoch": 0.2404712885640041, |
|
"grad_norm": 0.7543363571166992, |
|
"learning_rate": 2.6160436644424024e-05, |
|
"loss": 0.5919, |
|
"step": 15593 |
|
}, |
|
{ |
|
"epoch": 0.2409493626963381, |
|
"grad_norm": 0.6915935277938843, |
|
"learning_rate": 2.6076538357870133e-05, |
|
"loss": 0.5992, |
|
"step": 15624 |
|
}, |
|
{ |
|
"epoch": 0.2414274368286721, |
|
"grad_norm": 0.5868044495582581, |
|
"learning_rate": 2.5992627922727196e-05, |
|
"loss": 0.5909, |
|
"step": 15655 |
|
}, |
|
{ |
|
"epoch": 0.24190551096100613, |
|
"grad_norm": 0.5753960013389587, |
|
"learning_rate": 2.5908706285913066e-05, |
|
"loss": 0.5996, |
|
"step": 15686 |
|
}, |
|
{ |
|
"epoch": 0.24238358509334013, |
|
"grad_norm": 0.602874219417572, |
|
"learning_rate": 2.5824774394472008e-05, |
|
"loss": 0.5925, |
|
"step": 15717 |
|
}, |
|
{ |
|
"epoch": 0.24286165922567413, |
|
"grad_norm": 0.6005914211273193, |
|
"learning_rate": 2.5740833195563996e-05, |
|
"loss": 0.5965, |
|
"step": 15748 |
|
}, |
|
{ |
|
"epoch": 0.24333973335800813, |
|
"grad_norm": 0.6699410676956177, |
|
"learning_rate": 2.5656883636454067e-05, |
|
"loss": 0.5926, |
|
"step": 15779 |
|
}, |
|
{ |
|
"epoch": 0.24381780749034213, |
|
"grad_norm": 0.6079136729240417, |
|
"learning_rate": 2.557292666450159e-05, |
|
"loss": 0.5895, |
|
"step": 15810 |
|
}, |
|
{ |
|
"epoch": 0.24429588162267613, |
|
"grad_norm": 0.5411943197250366, |
|
"learning_rate": 2.5488963227149566e-05, |
|
"loss": 0.5982, |
|
"step": 15841 |
|
}, |
|
{ |
|
"epoch": 0.24477395575501015, |
|
"grad_norm": 1.2917388677597046, |
|
"learning_rate": 2.5404994271913983e-05, |
|
"loss": 0.6009, |
|
"step": 15872 |
|
}, |
|
{ |
|
"epoch": 0.24525202988734415, |
|
"grad_norm": 0.6021159887313843, |
|
"learning_rate": 2.5321020746373085e-05, |
|
"loss": 0.5942, |
|
"step": 15903 |
|
}, |
|
{ |
|
"epoch": 0.24573010401967815, |
|
"grad_norm": 0.6222237944602966, |
|
"learning_rate": 2.52370435981567e-05, |
|
"loss": 0.5879, |
|
"step": 15934 |
|
}, |
|
{ |
|
"epoch": 0.24620817815201215, |
|
"grad_norm": 0.609616756439209, |
|
"learning_rate": 2.5153063774935533e-05, |
|
"loss": 0.5977, |
|
"step": 15965 |
|
}, |
|
{ |
|
"epoch": 0.24668625228434615, |
|
"grad_norm": 0.5242776274681091, |
|
"learning_rate": 2.506908222441045e-05, |
|
"loss": 0.5942, |
|
"step": 15996 |
|
}, |
|
{ |
|
"epoch": 0.24716432641668015, |
|
"grad_norm": 0.5265721082687378, |
|
"learning_rate": 2.498509989430187e-05, |
|
"loss": 0.5886, |
|
"step": 16027 |
|
}, |
|
{ |
|
"epoch": 0.24764240054901415, |
|
"grad_norm": 0.6138966679573059, |
|
"learning_rate": 2.4901117732338958e-05, |
|
"loss": 0.5978, |
|
"step": 16058 |
|
}, |
|
{ |
|
"epoch": 0.24812047468134818, |
|
"grad_norm": 0.6608225107192993, |
|
"learning_rate": 2.481713668624899e-05, |
|
"loss": 0.59, |
|
"step": 16089 |
|
}, |
|
{ |
|
"epoch": 0.24859854881368218, |
|
"grad_norm": 0.6166261434555054, |
|
"learning_rate": 2.4733157703746663e-05, |
|
"loss": 0.5953, |
|
"step": 16120 |
|
}, |
|
{ |
|
"epoch": 0.24907662294601618, |
|
"grad_norm": 0.5305414795875549, |
|
"learning_rate": 2.4649181732523392e-05, |
|
"loss": 0.5977, |
|
"step": 16151 |
|
}, |
|
{ |
|
"epoch": 0.24955469707835018, |
|
"grad_norm": 0.5814329981803894, |
|
"learning_rate": 2.4565209720236582e-05, |
|
"loss": 0.5871, |
|
"step": 16182 |
|
}, |
|
{ |
|
"epoch": 0.2500327712106842, |
|
"grad_norm": 0.7118150591850281, |
|
"learning_rate": 2.4481242614498975e-05, |
|
"loss": 0.6003, |
|
"step": 16213 |
|
}, |
|
{ |
|
"epoch": 0.2505108453430182, |
|
"grad_norm": 0.4954952597618103, |
|
"learning_rate": 2.439728136286796e-05, |
|
"loss": 0.5929, |
|
"step": 16244 |
|
}, |
|
{ |
|
"epoch": 0.2509889194753522, |
|
"grad_norm": 0.5879791975021362, |
|
"learning_rate": 2.4313326912834852e-05, |
|
"loss": 0.5851, |
|
"step": 16275 |
|
}, |
|
{ |
|
"epoch": 0.2514669936076862, |
|
"grad_norm": 0.5314292311668396, |
|
"learning_rate": 2.4229380211814206e-05, |
|
"loss": 0.593, |
|
"step": 16306 |
|
}, |
|
{ |
|
"epoch": 0.2519450677400202, |
|
"grad_norm": 0.6319591403007507, |
|
"learning_rate": 2.4145442207133124e-05, |
|
"loss": 0.5925, |
|
"step": 16337 |
|
}, |
|
{ |
|
"epoch": 0.2524231418723542, |
|
"grad_norm": 0.5172461867332458, |
|
"learning_rate": 2.406151384602059e-05, |
|
"loss": 0.5959, |
|
"step": 16368 |
|
}, |
|
{ |
|
"epoch": 0.25290121600468823, |
|
"grad_norm": 0.5364922285079956, |
|
"learning_rate": 2.3977596075596747e-05, |
|
"loss": 0.6051, |
|
"step": 16399 |
|
}, |
|
{ |
|
"epoch": 0.2533792901370222, |
|
"grad_norm": 0.5659622550010681, |
|
"learning_rate": 2.3893689842862223e-05, |
|
"loss": 0.5937, |
|
"step": 16430 |
|
}, |
|
{ |
|
"epoch": 0.25385736426935623, |
|
"grad_norm": 0.7728786468505859, |
|
"learning_rate": 2.3809796094687475e-05, |
|
"loss": 0.5978, |
|
"step": 16461 |
|
}, |
|
{ |
|
"epoch": 0.2543354384016902, |
|
"grad_norm": 0.5832898616790771, |
|
"learning_rate": 2.372591577780202e-05, |
|
"loss": 0.5965, |
|
"step": 16492 |
|
}, |
|
{ |
|
"epoch": 0.25481351253402423, |
|
"grad_norm": 0.7000778913497925, |
|
"learning_rate": 2.3642049838783838e-05, |
|
"loss": 0.5914, |
|
"step": 16523 |
|
}, |
|
{ |
|
"epoch": 0.2552915866663582, |
|
"grad_norm": 0.6954141855239868, |
|
"learning_rate": 2.3558199224048666e-05, |
|
"loss": 0.5936, |
|
"step": 16554 |
|
}, |
|
{ |
|
"epoch": 0.25576966079869223, |
|
"grad_norm": 0.5859754681587219, |
|
"learning_rate": 2.347436487983929e-05, |
|
"loss": 0.5913, |
|
"step": 16585 |
|
}, |
|
{ |
|
"epoch": 0.25624773493102626, |
|
"grad_norm": 0.6719058156013489, |
|
"learning_rate": 2.3390547752214888e-05, |
|
"loss": 0.5896, |
|
"step": 16616 |
|
}, |
|
{ |
|
"epoch": 0.25672580906336023, |
|
"grad_norm": 0.5692052841186523, |
|
"learning_rate": 2.330674878704035e-05, |
|
"loss": 0.5912, |
|
"step": 16647 |
|
}, |
|
{ |
|
"epoch": 0.25720388319569426, |
|
"grad_norm": 0.5615118145942688, |
|
"learning_rate": 2.322296892997561e-05, |
|
"loss": 0.5913, |
|
"step": 16678 |
|
}, |
|
{ |
|
"epoch": 0.25768195732802823, |
|
"grad_norm": 0.6449048519134521, |
|
"learning_rate": 2.313920912646497e-05, |
|
"loss": 0.5933, |
|
"step": 16709 |
|
}, |
|
{ |
|
"epoch": 0.25816003146036226, |
|
"grad_norm": 0.5827602744102478, |
|
"learning_rate": 2.305547032172643e-05, |
|
"loss": 0.5912, |
|
"step": 16740 |
|
}, |
|
{ |
|
"epoch": 0.2586381055926963, |
|
"grad_norm": 0.6197795271873474, |
|
"learning_rate": 2.2971753460741014e-05, |
|
"loss": 0.5911, |
|
"step": 16771 |
|
}, |
|
{ |
|
"epoch": 0.25911617972503026, |
|
"grad_norm": 0.6223908066749573, |
|
"learning_rate": 2.288805948824212e-05, |
|
"loss": 0.5971, |
|
"step": 16802 |
|
}, |
|
{ |
|
"epoch": 0.2595942538573643, |
|
"grad_norm": 0.5632069706916809, |
|
"learning_rate": 2.2804389348704858e-05, |
|
"loss": 0.5942, |
|
"step": 16833 |
|
}, |
|
{ |
|
"epoch": 0.26007232798969826, |
|
"grad_norm": 0.5121845006942749, |
|
"learning_rate": 2.2720743986335374e-05, |
|
"loss": 0.5891, |
|
"step": 16864 |
|
}, |
|
{ |
|
"epoch": 0.2605504021220323, |
|
"grad_norm": 0.5367355942726135, |
|
"learning_rate": 2.2637124345060233e-05, |
|
"loss": 0.5904, |
|
"step": 16895 |
|
}, |
|
{ |
|
"epoch": 0.26102847625436626, |
|
"grad_norm": 0.5924873948097229, |
|
"learning_rate": 2.2553531368515695e-05, |
|
"loss": 0.5883, |
|
"step": 16926 |
|
}, |
|
{ |
|
"epoch": 0.2615065503867003, |
|
"grad_norm": 0.5673496127128601, |
|
"learning_rate": 2.2469966000037144e-05, |
|
"loss": 0.5938, |
|
"step": 16957 |
|
}, |
|
{ |
|
"epoch": 0.2619846245190343, |
|
"grad_norm": 0.5983547568321228, |
|
"learning_rate": 2.2386429182648417e-05, |
|
"loss": 0.5857, |
|
"step": 16988 |
|
}, |
|
{ |
|
"epoch": 0.2624626986513683, |
|
"grad_norm": 0.6841906905174255, |
|
"learning_rate": 2.230292185905114e-05, |
|
"loss": 0.6016, |
|
"step": 17019 |
|
}, |
|
{ |
|
"epoch": 0.2629407727837023, |
|
"grad_norm": 0.5953052639961243, |
|
"learning_rate": 2.2219444971614116e-05, |
|
"loss": 0.6035, |
|
"step": 17050 |
|
}, |
|
{ |
|
"epoch": 0.2634188469160363, |
|
"grad_norm": 0.6069731116294861, |
|
"learning_rate": 2.2135999462362655e-05, |
|
"loss": 0.5854, |
|
"step": 17081 |
|
}, |
|
{ |
|
"epoch": 0.2638969210483703, |
|
"grad_norm": 0.5507410168647766, |
|
"learning_rate": 2.2052586272968003e-05, |
|
"loss": 0.5892, |
|
"step": 17112 |
|
}, |
|
{ |
|
"epoch": 0.2643749951807043, |
|
"grad_norm": 0.5835263133049011, |
|
"learning_rate": 2.196920634473666e-05, |
|
"loss": 0.596, |
|
"step": 17143 |
|
}, |
|
{ |
|
"epoch": 0.2648530693130383, |
|
"grad_norm": 0.5266990065574646, |
|
"learning_rate": 2.1885860618599787e-05, |
|
"loss": 0.584, |
|
"step": 17174 |
|
}, |
|
{ |
|
"epoch": 0.26533114344537234, |
|
"grad_norm": 0.5418040156364441, |
|
"learning_rate": 2.1802550035102577e-05, |
|
"loss": 0.5954, |
|
"step": 17205 |
|
}, |
|
{ |
|
"epoch": 0.2658092175777063, |
|
"grad_norm": 0.5487755537033081, |
|
"learning_rate": 2.171927553439363e-05, |
|
"loss": 0.5948, |
|
"step": 17236 |
|
}, |
|
{ |
|
"epoch": 0.26628729171004034, |
|
"grad_norm": 0.6113549470901489, |
|
"learning_rate": 2.1636038056214376e-05, |
|
"loss": 0.5889, |
|
"step": 17267 |
|
}, |
|
{ |
|
"epoch": 0.2667653658423743, |
|
"grad_norm": 0.7934780716896057, |
|
"learning_rate": 2.155283853988844e-05, |
|
"loss": 0.5891, |
|
"step": 17298 |
|
}, |
|
{ |
|
"epoch": 0.26724343997470834, |
|
"grad_norm": 0.5100197792053223, |
|
"learning_rate": 2.146967792431106e-05, |
|
"loss": 0.5926, |
|
"step": 17329 |
|
}, |
|
{ |
|
"epoch": 0.2677215141070423, |
|
"grad_norm": 0.5467435121536255, |
|
"learning_rate": 2.138655714793849e-05, |
|
"loss": 0.5923, |
|
"step": 17360 |
|
}, |
|
{ |
|
"epoch": 0.26819958823937634, |
|
"grad_norm": 0.557858407497406, |
|
"learning_rate": 2.1303477148777367e-05, |
|
"loss": 0.5983, |
|
"step": 17391 |
|
}, |
|
{ |
|
"epoch": 0.26867766237171037, |
|
"grad_norm": 0.5128514170646667, |
|
"learning_rate": 2.122043886437421e-05, |
|
"loss": 0.592, |
|
"step": 17422 |
|
}, |
|
{ |
|
"epoch": 0.26915573650404434, |
|
"grad_norm": 0.5660766363143921, |
|
"learning_rate": 2.1137443231804765e-05, |
|
"loss": 0.5865, |
|
"step": 17453 |
|
}, |
|
{ |
|
"epoch": 0.26963381063637837, |
|
"grad_norm": 0.6714511513710022, |
|
"learning_rate": 2.105449118766347e-05, |
|
"loss": 0.5902, |
|
"step": 17484 |
|
}, |
|
{ |
|
"epoch": 0.27011188476871234, |
|
"grad_norm": 0.6227546334266663, |
|
"learning_rate": 2.097158366805287e-05, |
|
"loss": 0.5884, |
|
"step": 17515 |
|
}, |
|
{ |
|
"epoch": 0.27058995890104637, |
|
"grad_norm": 0.5964284539222717, |
|
"learning_rate": 2.0888721608573047e-05, |
|
"loss": 0.5815, |
|
"step": 17546 |
|
}, |
|
{ |
|
"epoch": 0.2710680330333804, |
|
"grad_norm": 0.6127567887306213, |
|
"learning_rate": 2.0805905944311087e-05, |
|
"loss": 0.5927, |
|
"step": 17577 |
|
}, |
|
{ |
|
"epoch": 0.27154610716571437, |
|
"grad_norm": 0.5167319178581238, |
|
"learning_rate": 2.0723137609830497e-05, |
|
"loss": 0.5934, |
|
"step": 17608 |
|
}, |
|
{ |
|
"epoch": 0.2720241812980484, |
|
"grad_norm": 0.6052663326263428, |
|
"learning_rate": 2.0640417539160686e-05, |
|
"loss": 0.5927, |
|
"step": 17639 |
|
}, |
|
{ |
|
"epoch": 0.27250225543038237, |
|
"grad_norm": 0.6475414633750916, |
|
"learning_rate": 2.0557746665786427e-05, |
|
"loss": 0.5927, |
|
"step": 17670 |
|
}, |
|
{ |
|
"epoch": 0.2729803295627164, |
|
"grad_norm": 0.5984682440757751, |
|
"learning_rate": 2.0475125922637256e-05, |
|
"loss": 0.5902, |
|
"step": 17701 |
|
}, |
|
{ |
|
"epoch": 0.27345840369505037, |
|
"grad_norm": 0.5002421140670776, |
|
"learning_rate": 2.0392556242077047e-05, |
|
"loss": 0.6001, |
|
"step": 17732 |
|
}, |
|
{ |
|
"epoch": 0.2739364778273844, |
|
"grad_norm": 0.6157301664352417, |
|
"learning_rate": 2.031003855589343e-05, |
|
"loss": 0.5844, |
|
"step": 17763 |
|
}, |
|
{ |
|
"epoch": 0.2744145519597184, |
|
"grad_norm": 0.6168811321258545, |
|
"learning_rate": 2.022757379528727e-05, |
|
"loss": 0.5906, |
|
"step": 17794 |
|
}, |
|
{ |
|
"epoch": 0.2748926260920524, |
|
"grad_norm": 0.5338504910469055, |
|
"learning_rate": 2.0145162890862184e-05, |
|
"loss": 0.5824, |
|
"step": 17825 |
|
}, |
|
{ |
|
"epoch": 0.2753707002243864, |
|
"grad_norm": 0.5488714575767517, |
|
"learning_rate": 2.0062806772614022e-05, |
|
"loss": 0.5879, |
|
"step": 17856 |
|
}, |
|
{ |
|
"epoch": 0.2758487743567204, |
|
"grad_norm": 0.5172461271286011, |
|
"learning_rate": 1.9980506369920392e-05, |
|
"loss": 0.5877, |
|
"step": 17887 |
|
}, |
|
{ |
|
"epoch": 0.2763268484890544, |
|
"grad_norm": 0.5921024084091187, |
|
"learning_rate": 1.989826261153015e-05, |
|
"loss": 0.5922, |
|
"step": 17918 |
|
}, |
|
{ |
|
"epoch": 0.2768049226213884, |
|
"grad_norm": 0.5407642126083374, |
|
"learning_rate": 1.9816076425552923e-05, |
|
"loss": 0.5897, |
|
"step": 17949 |
|
}, |
|
{ |
|
"epoch": 0.2772829967537224, |
|
"grad_norm": 0.536432147026062, |
|
"learning_rate": 1.9733948739448676e-05, |
|
"loss": 0.5915, |
|
"step": 17980 |
|
}, |
|
{ |
|
"epoch": 0.27776107088605645, |
|
"grad_norm": 0.6100837588310242, |
|
"learning_rate": 1.9651880480017155e-05, |
|
"loss": 0.5899, |
|
"step": 18011 |
|
}, |
|
{ |
|
"epoch": 0.2782391450183904, |
|
"grad_norm": 0.5521765351295471, |
|
"learning_rate": 1.9569872573387516e-05, |
|
"loss": 0.5875, |
|
"step": 18042 |
|
}, |
|
{ |
|
"epoch": 0.27871721915072445, |
|
"grad_norm": 0.5669123530387878, |
|
"learning_rate": 1.9487925945007854e-05, |
|
"loss": 0.5915, |
|
"step": 18073 |
|
}, |
|
{ |
|
"epoch": 0.2791952932830584, |
|
"grad_norm": 0.5691494345664978, |
|
"learning_rate": 1.9406041519634726e-05, |
|
"loss": 0.5813, |
|
"step": 18104 |
|
}, |
|
{ |
|
"epoch": 0.27967336741539245, |
|
"grad_norm": 0.6357378959655762, |
|
"learning_rate": 1.932422022132275e-05, |
|
"loss": 0.5832, |
|
"step": 18135 |
|
}, |
|
{ |
|
"epoch": 0.2801514415477265, |
|
"grad_norm": 0.6269022822380066, |
|
"learning_rate": 1.924246297341414e-05, |
|
"loss": 0.5878, |
|
"step": 18166 |
|
}, |
|
{ |
|
"epoch": 0.28062951568006045, |
|
"grad_norm": 0.5049974322319031, |
|
"learning_rate": 1.9160770698528338e-05, |
|
"loss": 0.5791, |
|
"step": 18197 |
|
}, |
|
{ |
|
"epoch": 0.2811075898123945, |
|
"grad_norm": 0.5020114779472351, |
|
"learning_rate": 1.907914431855156e-05, |
|
"loss": 0.5804, |
|
"step": 18228 |
|
}, |
|
{ |
|
"epoch": 0.28158566394472845, |
|
"grad_norm": 0.6124894022941589, |
|
"learning_rate": 1.8997584754626412e-05, |
|
"loss": 0.582, |
|
"step": 18259 |
|
}, |
|
{ |
|
"epoch": 0.2820637380770625, |
|
"grad_norm": 0.532447099685669, |
|
"learning_rate": 1.8916092927141486e-05, |
|
"loss": 0.5836, |
|
"step": 18290 |
|
}, |
|
{ |
|
"epoch": 0.28254181220939645, |
|
"grad_norm": 0.6032750010490417, |
|
"learning_rate": 1.883466975572098e-05, |
|
"loss": 0.5814, |
|
"step": 18321 |
|
}, |
|
{ |
|
"epoch": 0.2830198863417305, |
|
"grad_norm": 0.5806031227111816, |
|
"learning_rate": 1.8753316159214312e-05, |
|
"loss": 0.5784, |
|
"step": 18352 |
|
}, |
|
{ |
|
"epoch": 0.2834979604740645, |
|
"grad_norm": 0.6188846826553345, |
|
"learning_rate": 1.8672033055685766e-05, |
|
"loss": 0.5795, |
|
"step": 18383 |
|
}, |
|
{ |
|
"epoch": 0.2839760346063985, |
|
"grad_norm": 0.5964253544807434, |
|
"learning_rate": 1.8590821362404116e-05, |
|
"loss": 0.5859, |
|
"step": 18414 |
|
}, |
|
{ |
|
"epoch": 0.2844541087387325, |
|
"grad_norm": 0.5597705245018005, |
|
"learning_rate": 1.8509681995832294e-05, |
|
"loss": 0.5877, |
|
"step": 18445 |
|
}, |
|
{ |
|
"epoch": 0.2849321828710665, |
|
"grad_norm": 0.5870900750160217, |
|
"learning_rate": 1.8428615871617004e-05, |
|
"loss": 0.5879, |
|
"step": 18476 |
|
}, |
|
{ |
|
"epoch": 0.2854102570034005, |
|
"grad_norm": 0.6172513961791992, |
|
"learning_rate": 1.8347623904578448e-05, |
|
"loss": 0.5825, |
|
"step": 18507 |
|
}, |
|
{ |
|
"epoch": 0.2858883311357345, |
|
"grad_norm": 0.6141090989112854, |
|
"learning_rate": 1.8266707008699975e-05, |
|
"loss": 0.5794, |
|
"step": 18538 |
|
}, |
|
{ |
|
"epoch": 0.2863664052680685, |
|
"grad_norm": 0.5404875874519348, |
|
"learning_rate": 1.818586609711774e-05, |
|
"loss": 0.5779, |
|
"step": 18569 |
|
}, |
|
{ |
|
"epoch": 0.28684447940040253, |
|
"grad_norm": 0.5638667941093445, |
|
"learning_rate": 1.8105102082110462e-05, |
|
"loss": 0.5907, |
|
"step": 18600 |
|
}, |
|
{ |
|
"epoch": 0.2873225535327365, |
|
"grad_norm": 0.6166080236434937, |
|
"learning_rate": 1.8024415875089058e-05, |
|
"loss": 0.5787, |
|
"step": 18631 |
|
}, |
|
{ |
|
"epoch": 0.28780062766507053, |
|
"grad_norm": 0.6575906872749329, |
|
"learning_rate": 1.7943808386586407e-05, |
|
"loss": 0.5868, |
|
"step": 18662 |
|
}, |
|
{ |
|
"epoch": 0.2882787017974045, |
|
"grad_norm": 0.6117292046546936, |
|
"learning_rate": 1.7863280526247073e-05, |
|
"loss": 0.586, |
|
"step": 18693 |
|
}, |
|
{ |
|
"epoch": 0.28875677592973853, |
|
"grad_norm": 0.5406068563461304, |
|
"learning_rate": 1.7782833202817003e-05, |
|
"loss": 0.5828, |
|
"step": 18724 |
|
}, |
|
{ |
|
"epoch": 0.2892348500620725, |
|
"grad_norm": 0.5712701678276062, |
|
"learning_rate": 1.7702467324133327e-05, |
|
"loss": 0.585, |
|
"step": 18755 |
|
}, |
|
{ |
|
"epoch": 0.28971292419440653, |
|
"grad_norm": 0.602117121219635, |
|
"learning_rate": 1.7622183797114042e-05, |
|
"loss": 0.5862, |
|
"step": 18786 |
|
}, |
|
{ |
|
"epoch": 0.29019099832674056, |
|
"grad_norm": 0.5379143953323364, |
|
"learning_rate": 1.7541983527747838e-05, |
|
"loss": 0.5868, |
|
"step": 18817 |
|
}, |
|
{ |
|
"epoch": 0.29066907245907453, |
|
"grad_norm": 0.5759631991386414, |
|
"learning_rate": 1.746186742108387e-05, |
|
"loss": 0.586, |
|
"step": 18848 |
|
}, |
|
{ |
|
"epoch": 0.29114714659140856, |
|
"grad_norm": 0.543383002281189, |
|
"learning_rate": 1.73818363812215e-05, |
|
"loss": 0.5849, |
|
"step": 18879 |
|
}, |
|
{ |
|
"epoch": 0.29162522072374253, |
|
"grad_norm": 0.6425350308418274, |
|
"learning_rate": 1.7301891311300153e-05, |
|
"loss": 0.5888, |
|
"step": 18910 |
|
}, |
|
{ |
|
"epoch": 0.29210329485607656, |
|
"grad_norm": 0.5259391069412231, |
|
"learning_rate": 1.7222033113489055e-05, |
|
"loss": 0.5846, |
|
"step": 18941 |
|
}, |
|
{ |
|
"epoch": 0.2925813689884106, |
|
"grad_norm": 0.5230016708374023, |
|
"learning_rate": 1.7142262688977127e-05, |
|
"loss": 0.5836, |
|
"step": 18972 |
|
}, |
|
{ |
|
"epoch": 0.29305944312074456, |
|
"grad_norm": 0.5167984962463379, |
|
"learning_rate": 1.7062580937962764e-05, |
|
"loss": 0.5839, |
|
"step": 19003 |
|
}, |
|
{ |
|
"epoch": 0.2935375172530786, |
|
"grad_norm": 0.6248798370361328, |
|
"learning_rate": 1.698298875964369e-05, |
|
"loss": 0.5826, |
|
"step": 19034 |
|
}, |
|
{ |
|
"epoch": 0.29401559138541256, |
|
"grad_norm": 0.5567784309387207, |
|
"learning_rate": 1.690348705220684e-05, |
|
"loss": 0.5798, |
|
"step": 19065 |
|
}, |
|
{ |
|
"epoch": 0.2944936655177466, |
|
"grad_norm": 0.6178439855575562, |
|
"learning_rate": 1.6824076712818156e-05, |
|
"loss": 0.5872, |
|
"step": 19096 |
|
}, |
|
{ |
|
"epoch": 0.29497173965008056, |
|
"grad_norm": 0.5599183440208435, |
|
"learning_rate": 1.6744758637612533e-05, |
|
"loss": 0.5912, |
|
"step": 19127 |
|
}, |
|
{ |
|
"epoch": 0.2954498137824146, |
|
"grad_norm": 0.5310354828834534, |
|
"learning_rate": 1.6665533721683664e-05, |
|
"loss": 0.5825, |
|
"step": 19158 |
|
}, |
|
{ |
|
"epoch": 0.2959278879147486, |
|
"grad_norm": 0.477606862783432, |
|
"learning_rate": 1.6586402859073974e-05, |
|
"loss": 0.5834, |
|
"step": 19189 |
|
}, |
|
{ |
|
"epoch": 0.2964059620470826, |
|
"grad_norm": 0.6715511679649353, |
|
"learning_rate": 1.6507366942764463e-05, |
|
"loss": 0.587, |
|
"step": 19220 |
|
}, |
|
{ |
|
"epoch": 0.2968840361794166, |
|
"grad_norm": 0.6267414689064026, |
|
"learning_rate": 1.6428426864664732e-05, |
|
"loss": 0.5748, |
|
"step": 19251 |
|
}, |
|
{ |
|
"epoch": 0.2973621103117506, |
|
"grad_norm": 0.5867800712585449, |
|
"learning_rate": 1.6349583515602816e-05, |
|
"loss": 0.5847, |
|
"step": 19282 |
|
}, |
|
{ |
|
"epoch": 0.2978401844440846, |
|
"grad_norm": 0.6102165579795837, |
|
"learning_rate": 1.6270837785315208e-05, |
|
"loss": 0.585, |
|
"step": 19313 |
|
}, |
|
{ |
|
"epoch": 0.2983182585764186, |
|
"grad_norm": 0.5824636816978455, |
|
"learning_rate": 1.619219056243676e-05, |
|
"loss": 0.5791, |
|
"step": 19344 |
|
}, |
|
{ |
|
"epoch": 0.2987963327087526, |
|
"grad_norm": 0.558809757232666, |
|
"learning_rate": 1.6113642734490698e-05, |
|
"loss": 0.5746, |
|
"step": 19375 |
|
}, |
|
{ |
|
"epoch": 0.29927440684108664, |
|
"grad_norm": 0.5723096132278442, |
|
"learning_rate": 1.6035195187878577e-05, |
|
"loss": 0.5808, |
|
"step": 19406 |
|
}, |
|
{ |
|
"epoch": 0.2997524809734206, |
|
"grad_norm": 0.5783522725105286, |
|
"learning_rate": 1.5956848807870305e-05, |
|
"loss": 0.5909, |
|
"step": 19437 |
|
}, |
|
{ |
|
"epoch": 0.30023055510575464, |
|
"grad_norm": 0.605096697807312, |
|
"learning_rate": 1.587860447859413e-05, |
|
"loss": 0.5827, |
|
"step": 19468 |
|
}, |
|
{ |
|
"epoch": 0.3007086292380886, |
|
"grad_norm": 0.5810924768447876, |
|
"learning_rate": 1.5800463083026686e-05, |
|
"loss": 0.5874, |
|
"step": 19499 |
|
}, |
|
{ |
|
"epoch": 0.30118670337042264, |
|
"grad_norm": 0.5270748734474182, |
|
"learning_rate": 1.572242550298298e-05, |
|
"loss": 0.5724, |
|
"step": 19530 |
|
}, |
|
{ |
|
"epoch": 0.30166477750275666, |
|
"grad_norm": 0.6422134041786194, |
|
"learning_rate": 1.56444926191065e-05, |
|
"loss": 0.5879, |
|
"step": 19561 |
|
}, |
|
{ |
|
"epoch": 0.30214285163509064, |
|
"grad_norm": 0.5272889733314514, |
|
"learning_rate": 1.5566665310859257e-05, |
|
"loss": 0.5772, |
|
"step": 19592 |
|
}, |
|
{ |
|
"epoch": 0.30262092576742466, |
|
"grad_norm": 0.7464088201522827, |
|
"learning_rate": 1.5488944456511846e-05, |
|
"loss": 0.5874, |
|
"step": 19623 |
|
}, |
|
{ |
|
"epoch": 0.30309899989975864, |
|
"grad_norm": 0.5078160166740417, |
|
"learning_rate": 1.5411330933133546e-05, |
|
"loss": 0.5781, |
|
"step": 19654 |
|
}, |
|
{ |
|
"epoch": 0.30357707403209266, |
|
"grad_norm": 0.56827712059021, |
|
"learning_rate": 1.533382561658241e-05, |
|
"loss": 0.579, |
|
"step": 19685 |
|
}, |
|
{ |
|
"epoch": 0.30405514816442664, |
|
"grad_norm": 0.5845567584037781, |
|
"learning_rate": 1.525642938149541e-05, |
|
"loss": 0.5774, |
|
"step": 19716 |
|
}, |
|
{ |
|
"epoch": 0.30453322229676066, |
|
"grad_norm": 0.5950722694396973, |
|
"learning_rate": 1.5179143101278536e-05, |
|
"loss": 0.576, |
|
"step": 19747 |
|
}, |
|
{ |
|
"epoch": 0.3050112964290947, |
|
"grad_norm": 0.5643905997276306, |
|
"learning_rate": 1.5101967648096955e-05, |
|
"loss": 0.5803, |
|
"step": 19778 |
|
}, |
|
{ |
|
"epoch": 0.30548937056142866, |
|
"grad_norm": 0.6215445399284363, |
|
"learning_rate": 1.5024903892865172e-05, |
|
"loss": 0.579, |
|
"step": 19809 |
|
}, |
|
{ |
|
"epoch": 0.3059674446937627, |
|
"grad_norm": 0.5208318829536438, |
|
"learning_rate": 1.4947952705237184e-05, |
|
"loss": 0.5777, |
|
"step": 19840 |
|
}, |
|
{ |
|
"epoch": 0.30644551882609666, |
|
"grad_norm": 0.6431962251663208, |
|
"learning_rate": 1.4871114953596682e-05, |
|
"loss": 0.5829, |
|
"step": 19871 |
|
}, |
|
{ |
|
"epoch": 0.3069235929584307, |
|
"grad_norm": 0.4828816056251526, |
|
"learning_rate": 1.4794391505047256e-05, |
|
"loss": 0.5742, |
|
"step": 19902 |
|
}, |
|
{ |
|
"epoch": 0.30740166709076466, |
|
"grad_norm": 0.6670913696289062, |
|
"learning_rate": 1.4717783225402596e-05, |
|
"loss": 0.5793, |
|
"step": 19933 |
|
}, |
|
{ |
|
"epoch": 0.3078797412230987, |
|
"grad_norm": 0.5313072204589844, |
|
"learning_rate": 1.4641290979176735e-05, |
|
"loss": 0.5847, |
|
"step": 19964 |
|
}, |
|
{ |
|
"epoch": 0.3083578153554327, |
|
"grad_norm": 0.5336267948150635, |
|
"learning_rate": 1.4564915629574246e-05, |
|
"loss": 0.5844, |
|
"step": 19995 |
|
}, |
|
{ |
|
"epoch": 0.3088358894877667, |
|
"grad_norm": 0.6394053101539612, |
|
"learning_rate": 1.4488658038480601e-05, |
|
"loss": 0.5807, |
|
"step": 20026 |
|
}, |
|
{ |
|
"epoch": 0.3093139636201007, |
|
"grad_norm": 0.6073569059371948, |
|
"learning_rate": 1.4412519066452323e-05, |
|
"loss": 0.5844, |
|
"step": 20057 |
|
}, |
|
{ |
|
"epoch": 0.3097920377524347, |
|
"grad_norm": 0.6105561852455139, |
|
"learning_rate": 1.4336499572707373e-05, |
|
"loss": 0.5829, |
|
"step": 20088 |
|
}, |
|
{ |
|
"epoch": 0.3102701118847687, |
|
"grad_norm": 0.6145188212394714, |
|
"learning_rate": 1.4260600415115433e-05, |
|
"loss": 0.5847, |
|
"step": 20119 |
|
}, |
|
{ |
|
"epoch": 0.3107481860171027, |
|
"grad_norm": 0.5367770195007324, |
|
"learning_rate": 1.4184822450188137e-05, |
|
"loss": 0.5873, |
|
"step": 20150 |
|
}, |
|
{ |
|
"epoch": 0.3112262601494367, |
|
"grad_norm": 0.6103881001472473, |
|
"learning_rate": 1.410916653306954e-05, |
|
"loss": 0.5772, |
|
"step": 20181 |
|
}, |
|
{ |
|
"epoch": 0.31170433428177075, |
|
"grad_norm": 0.5279451012611389, |
|
"learning_rate": 1.403363351752639e-05, |
|
"loss": 0.5724, |
|
"step": 20212 |
|
}, |
|
{ |
|
"epoch": 0.3121824084141047, |
|
"grad_norm": 0.5088768005371094, |
|
"learning_rate": 1.3958224255938485e-05, |
|
"loss": 0.5799, |
|
"step": 20243 |
|
}, |
|
{ |
|
"epoch": 0.31266048254643874, |
|
"grad_norm": 0.6134737730026245, |
|
"learning_rate": 1.388293959928911e-05, |
|
"loss": 0.5876, |
|
"step": 20274 |
|
}, |
|
{ |
|
"epoch": 0.3131385566787727, |
|
"grad_norm": 0.5696072578430176, |
|
"learning_rate": 1.3807780397155379e-05, |
|
"loss": 0.5768, |
|
"step": 20305 |
|
}, |
|
{ |
|
"epoch": 0.31361663081110674, |
|
"grad_norm": 0.6008872389793396, |
|
"learning_rate": 1.3732747497698655e-05, |
|
"loss": 0.5863, |
|
"step": 20336 |
|
}, |
|
{ |
|
"epoch": 0.3140947049434408, |
|
"grad_norm": 0.5617655515670776, |
|
"learning_rate": 1.3657841747655038e-05, |
|
"loss": 0.577, |
|
"step": 20367 |
|
}, |
|
{ |
|
"epoch": 0.31457277907577474, |
|
"grad_norm": 0.6002852320671082, |
|
"learning_rate": 1.3583063992325706e-05, |
|
"loss": 0.5812, |
|
"step": 20398 |
|
}, |
|
{ |
|
"epoch": 0.31505085320810877, |
|
"grad_norm": 0.5778743028640747, |
|
"learning_rate": 1.3508415075567496e-05, |
|
"loss": 0.5813, |
|
"step": 20429 |
|
}, |
|
{ |
|
"epoch": 0.31552892734044274, |
|
"grad_norm": 0.5733532309532166, |
|
"learning_rate": 1.343389583978327e-05, |
|
"loss": 0.589, |
|
"step": 20460 |
|
}, |
|
{ |
|
"epoch": 0.31600700147277677, |
|
"grad_norm": 0.5163986682891846, |
|
"learning_rate": 1.3359507125912468e-05, |
|
"loss": 0.575, |
|
"step": 20491 |
|
}, |
|
{ |
|
"epoch": 0.31648507560511074, |
|
"grad_norm": 0.5018510818481445, |
|
"learning_rate": 1.3285249773421627e-05, |
|
"loss": 0.5791, |
|
"step": 20522 |
|
}, |
|
{ |
|
"epoch": 0.31696314973744477, |
|
"grad_norm": 0.53333979845047, |
|
"learning_rate": 1.3211124620294884e-05, |
|
"loss": 0.5869, |
|
"step": 20553 |
|
}, |
|
{ |
|
"epoch": 0.3174412238697788, |
|
"grad_norm": 0.6099141240119934, |
|
"learning_rate": 1.313713250302451e-05, |
|
"loss": 0.5796, |
|
"step": 20584 |
|
}, |
|
{ |
|
"epoch": 0.31791929800211277, |
|
"grad_norm": 0.6042903065681458, |
|
"learning_rate": 1.3063274256601479e-05, |
|
"loss": 0.5694, |
|
"step": 20615 |
|
}, |
|
{ |
|
"epoch": 0.3183973721344468, |
|
"grad_norm": 0.5727267265319824, |
|
"learning_rate": 1.2989550714506086e-05, |
|
"loss": 0.5802, |
|
"step": 20646 |
|
}, |
|
{ |
|
"epoch": 0.31887544626678077, |
|
"grad_norm": 0.5630910396575928, |
|
"learning_rate": 1.291596270869846e-05, |
|
"loss": 0.5629, |
|
"step": 20677 |
|
}, |
|
{ |
|
"epoch": 0.3193535203991148, |
|
"grad_norm": 0.5013255476951599, |
|
"learning_rate": 1.284251106960927e-05, |
|
"loss": 0.5829, |
|
"step": 20708 |
|
}, |
|
{ |
|
"epoch": 0.31983159453144877, |
|
"grad_norm": 0.5741969347000122, |
|
"learning_rate": 1.2769196626130263e-05, |
|
"loss": 0.5744, |
|
"step": 20739 |
|
}, |
|
{ |
|
"epoch": 0.3203096686637828, |
|
"grad_norm": 0.552740216255188, |
|
"learning_rate": 1.2696020205604969e-05, |
|
"loss": 0.5808, |
|
"step": 20770 |
|
}, |
|
{ |
|
"epoch": 0.3207877427961168, |
|
"grad_norm": 0.6418048739433289, |
|
"learning_rate": 1.2622982633819359e-05, |
|
"loss": 0.5871, |
|
"step": 20801 |
|
}, |
|
{ |
|
"epoch": 0.3212658169284508, |
|
"grad_norm": 0.6518685817718506, |
|
"learning_rate": 1.2550084734992484e-05, |
|
"loss": 0.572, |
|
"step": 20832 |
|
}, |
|
{ |
|
"epoch": 0.3217438910607848, |
|
"grad_norm": 0.535139262676239, |
|
"learning_rate": 1.247732733176724e-05, |
|
"loss": 0.5768, |
|
"step": 20863 |
|
}, |
|
{ |
|
"epoch": 0.3222219651931188, |
|
"grad_norm": 0.579274594783783, |
|
"learning_rate": 1.2404711245201044e-05, |
|
"loss": 0.5803, |
|
"step": 20894 |
|
}, |
|
{ |
|
"epoch": 0.3227000393254528, |
|
"grad_norm": 0.5379701256752014, |
|
"learning_rate": 1.2332237294756535e-05, |
|
"loss": 0.5844, |
|
"step": 20925 |
|
}, |
|
{ |
|
"epoch": 0.3231781134577868, |
|
"grad_norm": 0.48069506883621216, |
|
"learning_rate": 1.225990629829241e-05, |
|
"loss": 0.5728, |
|
"step": 20956 |
|
}, |
|
{ |
|
"epoch": 0.3236561875901208, |
|
"grad_norm": 0.576770007610321, |
|
"learning_rate": 1.2187719072054136e-05, |
|
"loss": 0.5838, |
|
"step": 20987 |
|
}, |
|
{ |
|
"epoch": 0.32413426172245485, |
|
"grad_norm": 0.6682817339897156, |
|
"learning_rate": 1.2115676430664735e-05, |
|
"loss": 0.5746, |
|
"step": 21018 |
|
}, |
|
{ |
|
"epoch": 0.3246123358547888, |
|
"grad_norm": 0.576655387878418, |
|
"learning_rate": 1.2043779187115647e-05, |
|
"loss": 0.5776, |
|
"step": 21049 |
|
}, |
|
{ |
|
"epoch": 0.32509040998712285, |
|
"grad_norm": 0.5452989935874939, |
|
"learning_rate": 1.1972028152757476e-05, |
|
"loss": 0.5862, |
|
"step": 21080 |
|
}, |
|
{ |
|
"epoch": 0.3255684841194568, |
|
"grad_norm": 0.5150167942047119, |
|
"learning_rate": 1.1900424137290889e-05, |
|
"loss": 0.5903, |
|
"step": 21111 |
|
}, |
|
{ |
|
"epoch": 0.32604655825179085, |
|
"grad_norm": 0.5409998297691345, |
|
"learning_rate": 1.1828967948757482e-05, |
|
"loss": 0.5776, |
|
"step": 21142 |
|
}, |
|
{ |
|
"epoch": 0.3265246323841249, |
|
"grad_norm": 0.5723608136177063, |
|
"learning_rate": 1.175766039353062e-05, |
|
"loss": 0.5832, |
|
"step": 21173 |
|
}, |
|
{ |
|
"epoch": 0.32700270651645885, |
|
"grad_norm": 0.5304202437400818, |
|
"learning_rate": 1.1686502276306382e-05, |
|
"loss": 0.5797, |
|
"step": 21204 |
|
}, |
|
{ |
|
"epoch": 0.3274807806487929, |
|
"grad_norm": 0.5880327820777893, |
|
"learning_rate": 1.1615494400094445e-05, |
|
"loss": 0.5807, |
|
"step": 21235 |
|
}, |
|
{ |
|
"epoch": 0.32795885478112685, |
|
"grad_norm": 0.5001874566078186, |
|
"learning_rate": 1.1544637566209029e-05, |
|
"loss": 0.5784, |
|
"step": 21266 |
|
}, |
|
{ |
|
"epoch": 0.3284369289134609, |
|
"grad_norm": 0.6035870909690857, |
|
"learning_rate": 1.1473932574259886e-05, |
|
"loss": 0.5758, |
|
"step": 21297 |
|
}, |
|
{ |
|
"epoch": 0.32891500304579485, |
|
"grad_norm": 0.5601738691329956, |
|
"learning_rate": 1.1403380222143247e-05, |
|
"loss": 0.5805, |
|
"step": 21328 |
|
}, |
|
{ |
|
"epoch": 0.3293930771781289, |
|
"grad_norm": 0.5500284433364868, |
|
"learning_rate": 1.1332981306032808e-05, |
|
"loss": 0.5744, |
|
"step": 21359 |
|
}, |
|
{ |
|
"epoch": 0.3298711513104629, |
|
"grad_norm": 0.515215277671814, |
|
"learning_rate": 1.1262736620370762e-05, |
|
"loss": 0.58, |
|
"step": 21390 |
|
}, |
|
{ |
|
"epoch": 0.3303492254427969, |
|
"grad_norm": 0.6095441579818726, |
|
"learning_rate": 1.1192646957858854e-05, |
|
"loss": 0.5731, |
|
"step": 21421 |
|
}, |
|
{ |
|
"epoch": 0.3308272995751309, |
|
"grad_norm": 0.5884034633636475, |
|
"learning_rate": 1.1122713109449381e-05, |
|
"loss": 0.5769, |
|
"step": 21452 |
|
}, |
|
{ |
|
"epoch": 0.3313053737074649, |
|
"grad_norm": 0.49037814140319824, |
|
"learning_rate": 1.105293586433634e-05, |
|
"loss": 0.5812, |
|
"step": 21483 |
|
}, |
|
{ |
|
"epoch": 0.3317834478397989, |
|
"grad_norm": 0.5256391167640686, |
|
"learning_rate": 1.0983316009946446e-05, |
|
"loss": 0.5807, |
|
"step": 21514 |
|
}, |
|
{ |
|
"epoch": 0.3322615219721329, |
|
"grad_norm": 0.6110324263572693, |
|
"learning_rate": 1.0913854331930282e-05, |
|
"loss": 0.5735, |
|
"step": 21545 |
|
}, |
|
{ |
|
"epoch": 0.3327395961044669, |
|
"grad_norm": 0.5897322297096252, |
|
"learning_rate": 1.0844551614153456e-05, |
|
"loss": 0.5748, |
|
"step": 21576 |
|
}, |
|
{ |
|
"epoch": 0.33321767023680093, |
|
"grad_norm": 0.5892772078514099, |
|
"learning_rate": 1.0775408638687725e-05, |
|
"loss": 0.5733, |
|
"step": 21607 |
|
}, |
|
{ |
|
"epoch": 0.3336957443691349, |
|
"grad_norm": 0.5256847739219666, |
|
"learning_rate": 1.0706426185802165e-05, |
|
"loss": 0.5786, |
|
"step": 21638 |
|
}, |
|
{ |
|
"epoch": 0.33417381850146893, |
|
"grad_norm": 0.5861433744430542, |
|
"learning_rate": 1.0637605033954371e-05, |
|
"loss": 0.5749, |
|
"step": 21669 |
|
}, |
|
{ |
|
"epoch": 0.3346518926338029, |
|
"grad_norm": 0.5262039303779602, |
|
"learning_rate": 1.05689459597817e-05, |
|
"loss": 0.5675, |
|
"step": 21700 |
|
}, |
|
{ |
|
"epoch": 0.33512996676613693, |
|
"grad_norm": 0.5511645674705505, |
|
"learning_rate": 1.050044973809246e-05, |
|
"loss": 0.5802, |
|
"step": 21731 |
|
}, |
|
{ |
|
"epoch": 0.33560804089847096, |
|
"grad_norm": 0.5599253177642822, |
|
"learning_rate": 1.043211714185722e-05, |
|
"loss": 0.5825, |
|
"step": 21762 |
|
}, |
|
{ |
|
"epoch": 0.33608611503080493, |
|
"grad_norm": 0.5281076431274414, |
|
"learning_rate": 1.036394894220003e-05, |
|
"loss": 0.5801, |
|
"step": 21793 |
|
}, |
|
{ |
|
"epoch": 0.33656418916313896, |
|
"grad_norm": 0.5938977599143982, |
|
"learning_rate": 1.0295945908389751e-05, |
|
"loss": 0.5782, |
|
"step": 21824 |
|
}, |
|
{ |
|
"epoch": 0.33704226329547293, |
|
"grad_norm": 0.5920172929763794, |
|
"learning_rate": 1.0228108807831393e-05, |
|
"loss": 0.5703, |
|
"step": 21855 |
|
}, |
|
{ |
|
"epoch": 0.33752033742780696, |
|
"grad_norm": 0.5725109577178955, |
|
"learning_rate": 1.01604384060574e-05, |
|
"loss": 0.5764, |
|
"step": 21886 |
|
}, |
|
{ |
|
"epoch": 0.33799841156014093, |
|
"grad_norm": 0.5738679766654968, |
|
"learning_rate": 1.009293546671907e-05, |
|
"loss": 0.5726, |
|
"step": 21917 |
|
}, |
|
{ |
|
"epoch": 0.33847648569247496, |
|
"grad_norm": 0.5508648753166199, |
|
"learning_rate": 1.002560075157791e-05, |
|
"loss": 0.5698, |
|
"step": 21948 |
|
}, |
|
{ |
|
"epoch": 0.338954559824809, |
|
"grad_norm": 0.5032240748405457, |
|
"learning_rate": 9.958435020496995e-06, |
|
"loss": 0.5765, |
|
"step": 21979 |
|
}, |
|
{ |
|
"epoch": 0.33943263395714296, |
|
"grad_norm": 0.5450903177261353, |
|
"learning_rate": 9.89143903143249e-06, |
|
"loss": 0.5723, |
|
"step": 22010 |
|
}, |
|
{ |
|
"epoch": 0.339910708089477, |
|
"grad_norm": 0.5665742754936218, |
|
"learning_rate": 9.824613540425038e-06, |
|
"loss": 0.5696, |
|
"step": 22041 |
|
}, |
|
{ |
|
"epoch": 0.34038878222181096, |
|
"grad_norm": 0.572994589805603, |
|
"learning_rate": 9.757959301591197e-06, |
|
"loss": 0.5768, |
|
"step": 22072 |
|
}, |
|
{ |
|
"epoch": 0.340866856354145, |
|
"grad_norm": 0.5740769505500793, |
|
"learning_rate": 9.691477067115017e-06, |
|
"loss": 0.5748, |
|
"step": 22103 |
|
}, |
|
{ |
|
"epoch": 0.34134493048647896, |
|
"grad_norm": 0.5839989185333252, |
|
"learning_rate": 9.625167587239467e-06, |
|
"loss": 0.5764, |
|
"step": 22134 |
|
}, |
|
{ |
|
"epoch": 0.341823004618813, |
|
"grad_norm": 5.201451778411865, |
|
"learning_rate": 9.559031610258007e-06, |
|
"loss": 0.5676, |
|
"step": 22165 |
|
}, |
|
{ |
|
"epoch": 0.342301078751147, |
|
"grad_norm": 0.5265706181526184, |
|
"learning_rate": 9.493069882506164e-06, |
|
"loss": 0.5733, |
|
"step": 22196 |
|
}, |
|
{ |
|
"epoch": 0.342779152883481, |
|
"grad_norm": 0.596712052822113, |
|
"learning_rate": 9.427283148353056e-06, |
|
"loss": 0.575, |
|
"step": 22227 |
|
}, |
|
{ |
|
"epoch": 0.343257227015815, |
|
"grad_norm": 0.6111375093460083, |
|
"learning_rate": 9.361672150193052e-06, |
|
"loss": 0.5784, |
|
"step": 22258 |
|
}, |
|
{ |
|
"epoch": 0.343735301148149, |
|
"grad_norm": 0.5548860430717468, |
|
"learning_rate": 9.29623762843734e-06, |
|
"loss": 0.5689, |
|
"step": 22289 |
|
}, |
|
{ |
|
"epoch": 0.344213375280483, |
|
"grad_norm": 0.5465906858444214, |
|
"learning_rate": 9.230980321505594e-06, |
|
"loss": 0.58, |
|
"step": 22320 |
|
}, |
|
{ |
|
"epoch": 0.344691449412817, |
|
"grad_norm": 0.589872419834137, |
|
"learning_rate": 9.165900965817668e-06, |
|
"loss": 0.5717, |
|
"step": 22351 |
|
}, |
|
{ |
|
"epoch": 0.345169523545151, |
|
"grad_norm": 0.581211268901825, |
|
"learning_rate": 9.101000295785245e-06, |
|
"loss": 0.5736, |
|
"step": 22382 |
|
}, |
|
{ |
|
"epoch": 0.34564759767748504, |
|
"grad_norm": 0.6026866436004639, |
|
"learning_rate": 9.036279043803565e-06, |
|
"loss": 0.5763, |
|
"step": 22413 |
|
}, |
|
{ |
|
"epoch": 0.346125671809819, |
|
"grad_norm": 0.6060176491737366, |
|
"learning_rate": 8.971737940243147e-06, |
|
"loss": 0.572, |
|
"step": 22444 |
|
}, |
|
{ |
|
"epoch": 0.34660374594215304, |
|
"grad_norm": 0.5560024976730347, |
|
"learning_rate": 8.907377713441592e-06, |
|
"loss": 0.5705, |
|
"step": 22475 |
|
}, |
|
{ |
|
"epoch": 0.347081820074487, |
|
"grad_norm": 0.6205755472183228, |
|
"learning_rate": 8.843199089695293e-06, |
|
"loss": 0.5685, |
|
"step": 22506 |
|
}, |
|
{ |
|
"epoch": 0.34755989420682104, |
|
"grad_norm": 0.5278075337409973, |
|
"learning_rate": 8.779202793251311e-06, |
|
"loss": 0.5688, |
|
"step": 22537 |
|
}, |
|
{ |
|
"epoch": 0.34803796833915507, |
|
"grad_norm": 0.5662840008735657, |
|
"learning_rate": 8.715389546299149e-06, |
|
"loss": 0.5853, |
|
"step": 22568 |
|
}, |
|
{ |
|
"epoch": 0.34851604247148904, |
|
"grad_norm": 0.5269712209701538, |
|
"learning_rate": 8.651760068962617e-06, |
|
"loss": 0.5802, |
|
"step": 22599 |
|
}, |
|
{ |
|
"epoch": 0.34899411660382307, |
|
"grad_norm": 0.5230329632759094, |
|
"learning_rate": 8.588315079291733e-06, |
|
"loss": 0.568, |
|
"step": 22630 |
|
}, |
|
{ |
|
"epoch": 0.34947219073615704, |
|
"grad_norm": 0.627826988697052, |
|
"learning_rate": 8.52505529325457e-06, |
|
"loss": 0.568, |
|
"step": 22661 |
|
}, |
|
{ |
|
"epoch": 0.34995026486849107, |
|
"grad_norm": 0.6054178476333618, |
|
"learning_rate": 8.461981424729216e-06, |
|
"loss": 0.5756, |
|
"step": 22692 |
|
}, |
|
{ |
|
"epoch": 0.35042833900082504, |
|
"grad_norm": 0.47537288069725037, |
|
"learning_rate": 8.399094185495725e-06, |
|
"loss": 0.5667, |
|
"step": 22723 |
|
}, |
|
{ |
|
"epoch": 0.35090641313315907, |
|
"grad_norm": 0.5262076258659363, |
|
"learning_rate": 8.336394285228017e-06, |
|
"loss": 0.5807, |
|
"step": 22754 |
|
}, |
|
{ |
|
"epoch": 0.3513844872654931, |
|
"grad_norm": 0.6217135190963745, |
|
"learning_rate": 8.273882431485952e-06, |
|
"loss": 0.5706, |
|
"step": 22785 |
|
}, |
|
{ |
|
"epoch": 0.35186256139782707, |
|
"grad_norm": 0.5373092889785767, |
|
"learning_rate": 8.211559329707316e-06, |
|
"loss": 0.5743, |
|
"step": 22816 |
|
}, |
|
{ |
|
"epoch": 0.3523406355301611, |
|
"grad_norm": 0.5938353538513184, |
|
"learning_rate": 8.149425683199823e-06, |
|
"loss": 0.5811, |
|
"step": 22847 |
|
}, |
|
{ |
|
"epoch": 0.35281870966249507, |
|
"grad_norm": 0.654653787612915, |
|
"learning_rate": 8.08748219313325e-06, |
|
"loss": 0.5772, |
|
"step": 22878 |
|
}, |
|
{ |
|
"epoch": 0.3532967837948291, |
|
"grad_norm": 0.589327335357666, |
|
"learning_rate": 8.025729558531453e-06, |
|
"loss": 0.5715, |
|
"step": 22909 |
|
}, |
|
{ |
|
"epoch": 0.35377485792716307, |
|
"grad_norm": 0.6570223569869995, |
|
"learning_rate": 7.964168476264508e-06, |
|
"loss": 0.5807, |
|
"step": 22940 |
|
}, |
|
{ |
|
"epoch": 0.3542529320594971, |
|
"grad_norm": 0.6160106658935547, |
|
"learning_rate": 7.902799641040884e-06, |
|
"loss": 0.5641, |
|
"step": 22971 |
|
}, |
|
{ |
|
"epoch": 0.3547310061918311, |
|
"grad_norm": 0.5942509770393372, |
|
"learning_rate": 7.841623745399523e-06, |
|
"loss": 0.566, |
|
"step": 23002 |
|
}, |
|
{ |
|
"epoch": 0.3552090803241651, |
|
"grad_norm": 0.6464576125144958, |
|
"learning_rate": 7.780641479702114e-06, |
|
"loss": 0.5736, |
|
"step": 23033 |
|
}, |
|
{ |
|
"epoch": 0.3556871544564991, |
|
"grad_norm": 0.5047611594200134, |
|
"learning_rate": 7.719853532125227e-06, |
|
"loss": 0.5745, |
|
"step": 23064 |
|
}, |
|
{ |
|
"epoch": 0.3561652285888331, |
|
"grad_norm": 0.6161003112792969, |
|
"learning_rate": 7.65926058865258e-06, |
|
"loss": 0.5723, |
|
"step": 23095 |
|
}, |
|
{ |
|
"epoch": 0.3566433027211671, |
|
"grad_norm": 0.6291042566299438, |
|
"learning_rate": 7.598863333067313e-06, |
|
"loss": 0.5709, |
|
"step": 23126 |
|
}, |
|
{ |
|
"epoch": 0.35712137685350115, |
|
"grad_norm": 0.4892687499523163, |
|
"learning_rate": 7.538662446944253e-06, |
|
"loss": 0.5624, |
|
"step": 23157 |
|
}, |
|
{ |
|
"epoch": 0.3575994509858351, |
|
"grad_norm": 0.5112323760986328, |
|
"learning_rate": 7.478658609642211e-06, |
|
"loss": 0.5727, |
|
"step": 23188 |
|
}, |
|
{ |
|
"epoch": 0.35807752511816915, |
|
"grad_norm": 0.5709079504013062, |
|
"learning_rate": 7.418852498296327e-06, |
|
"loss": 0.571, |
|
"step": 23219 |
|
}, |
|
{ |
|
"epoch": 0.3585555992505031, |
|
"grad_norm": 0.5238727927207947, |
|
"learning_rate": 7.359244787810457e-06, |
|
"loss": 0.57, |
|
"step": 23250 |
|
}, |
|
{ |
|
"epoch": 0.35903367338283715, |
|
"grad_norm": 0.5535005331039429, |
|
"learning_rate": 7.299836150849493e-06, |
|
"loss": 0.5727, |
|
"step": 23281 |
|
}, |
|
{ |
|
"epoch": 0.3595117475151711, |
|
"grad_norm": 0.5350549817085266, |
|
"learning_rate": 7.240627257831847e-06, |
|
"loss": 0.5719, |
|
"step": 23312 |
|
}, |
|
{ |
|
"epoch": 0.35998982164750515, |
|
"grad_norm": 0.5879747271537781, |
|
"learning_rate": 7.1816187769218195e-06, |
|
"loss": 0.5773, |
|
"step": 23343 |
|
}, |
|
{ |
|
"epoch": 0.3604678957798392, |
|
"grad_norm": 0.5242079496383667, |
|
"learning_rate": 7.1228113740220895e-06, |
|
"loss": 0.5768, |
|
"step": 23374 |
|
}, |
|
{ |
|
"epoch": 0.36094596991217315, |
|
"grad_norm": 0.6171791553497314, |
|
"learning_rate": 7.064205712766226e-06, |
|
"loss": 0.5769, |
|
"step": 23405 |
|
}, |
|
{ |
|
"epoch": 0.3614240440445072, |
|
"grad_norm": 0.5774204730987549, |
|
"learning_rate": 7.005802454511129e-06, |
|
"loss": 0.5712, |
|
"step": 23436 |
|
}, |
|
{ |
|
"epoch": 0.36190211817684115, |
|
"grad_norm": 0.5799962878227234, |
|
"learning_rate": 6.947602258329639e-06, |
|
"loss": 0.5762, |
|
"step": 23467 |
|
}, |
|
{ |
|
"epoch": 0.3623801923091752, |
|
"grad_norm": 0.5970575213432312, |
|
"learning_rate": 6.889605781003078e-06, |
|
"loss": 0.5713, |
|
"step": 23498 |
|
}, |
|
{ |
|
"epoch": 0.36285826644150915, |
|
"grad_norm": 0.5728057026863098, |
|
"learning_rate": 6.831813677013776e-06, |
|
"loss": 0.5691, |
|
"step": 23529 |
|
}, |
|
{ |
|
"epoch": 0.3633363405738432, |
|
"grad_norm": 0.6017605066299438, |
|
"learning_rate": 6.774226598537792e-06, |
|
"loss": 0.5701, |
|
"step": 23560 |
|
}, |
|
{ |
|
"epoch": 0.3638144147061772, |
|
"grad_norm": 0.5233104825019836, |
|
"learning_rate": 6.716845195437482e-06, |
|
"loss": 0.5688, |
|
"step": 23591 |
|
}, |
|
{ |
|
"epoch": 0.3642924888385112, |
|
"grad_norm": 0.5983685851097107, |
|
"learning_rate": 6.659670115254168e-06, |
|
"loss": 0.5679, |
|
"step": 23622 |
|
}, |
|
{ |
|
"epoch": 0.3647705629708452, |
|
"grad_norm": 0.5858353972434998, |
|
"learning_rate": 6.602702003200872e-06, |
|
"loss": 0.5761, |
|
"step": 23653 |
|
}, |
|
{ |
|
"epoch": 0.3652486371031792, |
|
"grad_norm": 0.5972424149513245, |
|
"learning_rate": 6.545941502154992e-06, |
|
"loss": 0.5802, |
|
"step": 23684 |
|
}, |
|
{ |
|
"epoch": 0.3657267112355132, |
|
"grad_norm": 0.6174728870391846, |
|
"learning_rate": 6.489389252651057e-06, |
|
"loss": 0.5829, |
|
"step": 23715 |
|
}, |
|
{ |
|
"epoch": 0.3662047853678472, |
|
"grad_norm": 0.5846561789512634, |
|
"learning_rate": 6.4330458928735325e-06, |
|
"loss": 0.5762, |
|
"step": 23746 |
|
}, |
|
{ |
|
"epoch": 0.3666828595001812, |
|
"grad_norm": 0.5355708599090576, |
|
"learning_rate": 6.376912058649559e-06, |
|
"loss": 0.572, |
|
"step": 23777 |
|
}, |
|
{ |
|
"epoch": 0.36716093363251523, |
|
"grad_norm": 0.5898821353912354, |
|
"learning_rate": 6.320988383441845e-06, |
|
"loss": 0.5747, |
|
"step": 23808 |
|
}, |
|
{ |
|
"epoch": 0.3676390077648492, |
|
"grad_norm": 0.5782723426818848, |
|
"learning_rate": 6.265275498341452e-06, |
|
"loss": 0.5671, |
|
"step": 23839 |
|
}, |
|
{ |
|
"epoch": 0.36811708189718323, |
|
"grad_norm": 0.5315823554992676, |
|
"learning_rate": 6.209774032060714e-06, |
|
"loss": 0.5771, |
|
"step": 23870 |
|
}, |
|
{ |
|
"epoch": 0.3685951560295172, |
|
"grad_norm": 0.5441523194313049, |
|
"learning_rate": 6.1544846109261365e-06, |
|
"loss": 0.5775, |
|
"step": 23901 |
|
}, |
|
{ |
|
"epoch": 0.36907323016185123, |
|
"grad_norm": 0.5592854619026184, |
|
"learning_rate": 6.099407858871342e-06, |
|
"loss": 0.5743, |
|
"step": 23932 |
|
}, |
|
{ |
|
"epoch": 0.36955130429418526, |
|
"grad_norm": 0.5144609212875366, |
|
"learning_rate": 6.044544397429958e-06, |
|
"loss": 0.5796, |
|
"step": 23963 |
|
}, |
|
{ |
|
"epoch": 0.37002937842651923, |
|
"grad_norm": 0.5619729161262512, |
|
"learning_rate": 5.989894845728708e-06, |
|
"loss": 0.5703, |
|
"step": 23994 |
|
}, |
|
{ |
|
"epoch": 0.37050745255885326, |
|
"grad_norm": 0.5606945157051086, |
|
"learning_rate": 5.9354598204803605e-06, |
|
"loss": 0.5799, |
|
"step": 24025 |
|
}, |
|
{ |
|
"epoch": 0.37098552669118723, |
|
"grad_norm": 0.6192461848258972, |
|
"learning_rate": 5.881239935976762e-06, |
|
"loss": 0.5785, |
|
"step": 24056 |
|
}, |
|
{ |
|
"epoch": 0.37146360082352126, |
|
"grad_norm": 0.5790452361106873, |
|
"learning_rate": 5.827235804081954e-06, |
|
"loss": 0.5719, |
|
"step": 24087 |
|
}, |
|
{ |
|
"epoch": 0.37194167495585523, |
|
"grad_norm": 0.5728137493133545, |
|
"learning_rate": 5.773448034225221e-06, |
|
"loss": 0.5703, |
|
"step": 24118 |
|
}, |
|
{ |
|
"epoch": 0.37241974908818926, |
|
"grad_norm": 0.5850538015365601, |
|
"learning_rate": 5.719877233394228e-06, |
|
"loss": 0.5635, |
|
"step": 24149 |
|
}, |
|
{ |
|
"epoch": 0.3728978232205233, |
|
"grad_norm": 0.6578365564346313, |
|
"learning_rate": 5.666524006128191e-06, |
|
"loss": 0.5727, |
|
"step": 24180 |
|
}, |
|
{ |
|
"epoch": 0.37337589735285726, |
|
"grad_norm": 0.6143426299095154, |
|
"learning_rate": 5.613388954511015e-06, |
|
"loss": 0.574, |
|
"step": 24211 |
|
}, |
|
{ |
|
"epoch": 0.3738539714851913, |
|
"grad_norm": 0.62923663854599, |
|
"learning_rate": 5.560472678164552e-06, |
|
"loss": 0.5704, |
|
"step": 24242 |
|
}, |
|
{ |
|
"epoch": 0.37433204561752526, |
|
"grad_norm": 0.5755168199539185, |
|
"learning_rate": 5.507775774241775e-06, |
|
"loss": 0.5688, |
|
"step": 24273 |
|
}, |
|
{ |
|
"epoch": 0.3748101197498593, |
|
"grad_norm": 0.5668964385986328, |
|
"learning_rate": 5.4552988374200945e-06, |
|
"loss": 0.5692, |
|
"step": 24304 |
|
}, |
|
{ |
|
"epoch": 0.37528819388219326, |
|
"grad_norm": 0.5009447336196899, |
|
"learning_rate": 5.403042459894597e-06, |
|
"loss": 0.5702, |
|
"step": 24335 |
|
}, |
|
{ |
|
"epoch": 0.3757662680145273, |
|
"grad_norm": 0.5665227174758911, |
|
"learning_rate": 5.3510072313714135e-06, |
|
"loss": 0.5711, |
|
"step": 24366 |
|
}, |
|
{ |
|
"epoch": 0.3762443421468613, |
|
"grad_norm": 0.5384367108345032, |
|
"learning_rate": 5.2991937390610205e-06, |
|
"loss": 0.5683, |
|
"step": 24397 |
|
}, |
|
{ |
|
"epoch": 0.3767224162791953, |
|
"grad_norm": 0.5419508814811707, |
|
"learning_rate": 5.247602567671625e-06, |
|
"loss": 0.571, |
|
"step": 24428 |
|
}, |
|
{ |
|
"epoch": 0.3772004904115293, |
|
"grad_norm": 0.5364875793457031, |
|
"learning_rate": 5.196234299402603e-06, |
|
"loss": 0.563, |
|
"step": 24459 |
|
}, |
|
{ |
|
"epoch": 0.3776785645438633, |
|
"grad_norm": 0.5826189517974854, |
|
"learning_rate": 5.145089513937865e-06, |
|
"loss": 0.5691, |
|
"step": 24490 |
|
}, |
|
{ |
|
"epoch": 0.3781566386761973, |
|
"grad_norm": 0.5769446492195129, |
|
"learning_rate": 5.094168788439369e-06, |
|
"loss": 0.5762, |
|
"step": 24521 |
|
}, |
|
{ |
|
"epoch": 0.37863471280853134, |
|
"grad_norm": 0.5343894958496094, |
|
"learning_rate": 5.043472697540594e-06, |
|
"loss": 0.571, |
|
"step": 24552 |
|
}, |
|
{ |
|
"epoch": 0.3791127869408653, |
|
"grad_norm": 0.5326864719390869, |
|
"learning_rate": 4.993001813340012e-06, |
|
"loss": 0.5733, |
|
"step": 24583 |
|
}, |
|
{ |
|
"epoch": 0.37959086107319934, |
|
"grad_norm": 0.5287531614303589, |
|
"learning_rate": 4.942756705394702e-06, |
|
"loss": 0.5747, |
|
"step": 24614 |
|
}, |
|
{ |
|
"epoch": 0.3800689352055333, |
|
"grad_norm": 0.5341933965682983, |
|
"learning_rate": 4.892737940713884e-06, |
|
"loss": 0.5663, |
|
"step": 24645 |
|
}, |
|
{ |
|
"epoch": 0.38054700933786734, |
|
"grad_norm": 0.5358434915542603, |
|
"learning_rate": 4.842946083752511e-06, |
|
"loss": 0.5684, |
|
"step": 24676 |
|
}, |
|
{ |
|
"epoch": 0.3810250834702013, |
|
"grad_norm": 0.5361096262931824, |
|
"learning_rate": 4.79338169640493e-06, |
|
"loss": 0.5678, |
|
"step": 24707 |
|
}, |
|
{ |
|
"epoch": 0.38150315760253534, |
|
"grad_norm": 0.5188061594963074, |
|
"learning_rate": 4.74404533799851e-06, |
|
"loss": 0.5719, |
|
"step": 24738 |
|
}, |
|
{ |
|
"epoch": 0.38198123173486936, |
|
"grad_norm": 0.5487860441207886, |
|
"learning_rate": 4.694937565287344e-06, |
|
"loss": 0.5773, |
|
"step": 24769 |
|
}, |
|
{ |
|
"epoch": 0.38245930586720334, |
|
"grad_norm": 0.5515732169151306, |
|
"learning_rate": 4.646058932445985e-06, |
|
"loss": 0.5686, |
|
"step": 24800 |
|
}, |
|
{ |
|
"epoch": 0.38293737999953736, |
|
"grad_norm": 0.5656226277351379, |
|
"learning_rate": 4.597409991063148e-06, |
|
"loss": 0.5672, |
|
"step": 24831 |
|
}, |
|
{ |
|
"epoch": 0.38341545413187134, |
|
"grad_norm": 0.49390023946762085, |
|
"learning_rate": 4.5489912901355375e-06, |
|
"loss": 0.5714, |
|
"step": 24862 |
|
}, |
|
{ |
|
"epoch": 0.38389352826420536, |
|
"grad_norm": 0.5774050354957581, |
|
"learning_rate": 4.500803376061608e-06, |
|
"loss": 0.5685, |
|
"step": 24893 |
|
}, |
|
{ |
|
"epoch": 0.38437160239653934, |
|
"grad_norm": 0.5529614090919495, |
|
"learning_rate": 4.45284679263541e-06, |
|
"loss": 0.5751, |
|
"step": 24924 |
|
}, |
|
{ |
|
"epoch": 0.38484967652887336, |
|
"grad_norm": 0.5055797696113586, |
|
"learning_rate": 4.4051220810404775e-06, |
|
"loss": 0.5712, |
|
"step": 24955 |
|
}, |
|
{ |
|
"epoch": 0.3853277506612074, |
|
"grad_norm": 0.5665472149848938, |
|
"learning_rate": 4.3576297798437025e-06, |
|
"loss": 0.5707, |
|
"step": 24986 |
|
}, |
|
{ |
|
"epoch": 0.38580582479354136, |
|
"grad_norm": 0.5053175091743469, |
|
"learning_rate": 4.3103704249892436e-06, |
|
"loss": 0.5698, |
|
"step": 25017 |
|
}, |
|
{ |
|
"epoch": 0.3862838989258754, |
|
"grad_norm": 0.5849627256393433, |
|
"learning_rate": 4.263344549792487e-06, |
|
"loss": 0.5662, |
|
"step": 25048 |
|
}, |
|
{ |
|
"epoch": 0.38676197305820936, |
|
"grad_norm": 0.6158141493797302, |
|
"learning_rate": 4.216552684934056e-06, |
|
"loss": 0.5741, |
|
"step": 25079 |
|
}, |
|
{ |
|
"epoch": 0.3872400471905434, |
|
"grad_norm": 0.5662816166877747, |
|
"learning_rate": 4.169995358453777e-06, |
|
"loss": 0.5695, |
|
"step": 25110 |
|
}, |
|
{ |
|
"epoch": 0.38771812132287736, |
|
"grad_norm": 0.5143694281578064, |
|
"learning_rate": 4.123673095744757e-06, |
|
"loss": 0.5724, |
|
"step": 25141 |
|
}, |
|
{ |
|
"epoch": 0.3881961954552114, |
|
"grad_norm": 0.5500748157501221, |
|
"learning_rate": 4.077586419547435e-06, |
|
"loss": 0.5706, |
|
"step": 25172 |
|
}, |
|
{ |
|
"epoch": 0.3886742695875454, |
|
"grad_norm": 0.5526003837585449, |
|
"learning_rate": 4.03173584994368e-06, |
|
"loss": 0.5723, |
|
"step": 25203 |
|
}, |
|
{ |
|
"epoch": 0.3891523437198794, |
|
"grad_norm": 0.6642509698867798, |
|
"learning_rate": 3.986121904350948e-06, |
|
"loss": 0.5595, |
|
"step": 25234 |
|
}, |
|
{ |
|
"epoch": 0.3896304178522134, |
|
"grad_norm": 0.593861997127533, |
|
"learning_rate": 3.940745097516407e-06, |
|
"loss": 0.5705, |
|
"step": 25265 |
|
}, |
|
{ |
|
"epoch": 0.3901084919845474, |
|
"grad_norm": 0.5373812913894653, |
|
"learning_rate": 3.89560594151116e-06, |
|
"loss": 0.5708, |
|
"step": 25296 |
|
}, |
|
{ |
|
"epoch": 0.3905865661168814, |
|
"grad_norm": 0.5997393131256104, |
|
"learning_rate": 3.850704945724456e-06, |
|
"loss": 0.573, |
|
"step": 25327 |
|
}, |
|
{ |
|
"epoch": 0.39106464024921544, |
|
"grad_norm": 0.5451370477676392, |
|
"learning_rate": 3.8060426168579077e-06, |
|
"loss": 0.5606, |
|
"step": 25358 |
|
}, |
|
{ |
|
"epoch": 0.3915427143815494, |
|
"grad_norm": 0.6029524207115173, |
|
"learning_rate": 3.7616194589198407e-06, |
|
"loss": 0.5654, |
|
"step": 25389 |
|
}, |
|
{ |
|
"epoch": 0.39202078851388344, |
|
"grad_norm": 0.6756661534309387, |
|
"learning_rate": 3.7174359732195574e-06, |
|
"loss": 0.5634, |
|
"step": 25420 |
|
}, |
|
{ |
|
"epoch": 0.3924988626462174, |
|
"grad_norm": 0.5419114232063293, |
|
"learning_rate": 3.673492658361677e-06, |
|
"loss": 0.5686, |
|
"step": 25451 |
|
}, |
|
{ |
|
"epoch": 0.39297693677855144, |
|
"grad_norm": 0.6168972849845886, |
|
"learning_rate": 3.6297900102405467e-06, |
|
"loss": 0.5788, |
|
"step": 25482 |
|
}, |
|
{ |
|
"epoch": 0.3934550109108854, |
|
"grad_norm": 0.5589380860328674, |
|
"learning_rate": 3.586328522034607e-06, |
|
"loss": 0.5766, |
|
"step": 25513 |
|
}, |
|
{ |
|
"epoch": 0.39393308504321944, |
|
"grad_norm": 0.5675833225250244, |
|
"learning_rate": 3.543108684200838e-06, |
|
"loss": 0.5673, |
|
"step": 25544 |
|
}, |
|
{ |
|
"epoch": 0.39441115917555347, |
|
"grad_norm": 0.5859025120735168, |
|
"learning_rate": 3.5001309844692464e-06, |
|
"loss": 0.5742, |
|
"step": 25575 |
|
}, |
|
{ |
|
"epoch": 0.39488923330788744, |
|
"grad_norm": 0.6126372814178467, |
|
"learning_rate": 3.4573959078373215e-06, |
|
"loss": 0.5684, |
|
"step": 25606 |
|
}, |
|
{ |
|
"epoch": 0.39536730744022147, |
|
"grad_norm": 0.5508156418800354, |
|
"learning_rate": 3.4149039365646063e-06, |
|
"loss": 0.5665, |
|
"step": 25637 |
|
}, |
|
{ |
|
"epoch": 0.39584538157255544, |
|
"grad_norm": 0.5331482887268066, |
|
"learning_rate": 3.3726555501672143e-06, |
|
"loss": 0.5613, |
|
"step": 25668 |
|
}, |
|
{ |
|
"epoch": 0.39632345570488947, |
|
"grad_norm": 0.5536491870880127, |
|
"learning_rate": 3.33065122541244e-06, |
|
"loss": 0.5685, |
|
"step": 25699 |
|
}, |
|
{ |
|
"epoch": 0.39680152983722344, |
|
"grad_norm": 0.5023953318595886, |
|
"learning_rate": 3.288891436313385e-06, |
|
"loss": 0.568, |
|
"step": 25730 |
|
}, |
|
{ |
|
"epoch": 0.39727960396955747, |
|
"grad_norm": 0.5827000737190247, |
|
"learning_rate": 3.2473766541235963e-06, |
|
"loss": 0.5649, |
|
"step": 25761 |
|
}, |
|
{ |
|
"epoch": 0.3977576781018915, |
|
"grad_norm": 0.6178136467933655, |
|
"learning_rate": 3.2061073473317466e-06, |
|
"loss": 0.5742, |
|
"step": 25792 |
|
}, |
|
{ |
|
"epoch": 0.39823575223422547, |
|
"grad_norm": 0.49690869450569153, |
|
"learning_rate": 3.1650839816563444e-06, |
|
"loss": 0.5655, |
|
"step": 25823 |
|
}, |
|
{ |
|
"epoch": 0.3987138263665595, |
|
"grad_norm": 0.5496050119400024, |
|
"learning_rate": 3.1243070200405093e-06, |
|
"loss": 0.5686, |
|
"step": 25854 |
|
}, |
|
{ |
|
"epoch": 0.39919190049889347, |
|
"grad_norm": 0.55158531665802, |
|
"learning_rate": 3.0837769226467e-06, |
|
"loss": 0.571, |
|
"step": 25885 |
|
}, |
|
{ |
|
"epoch": 0.3996699746312275, |
|
"grad_norm": 0.5945053696632385, |
|
"learning_rate": 3.0434941468515666e-06, |
|
"loss": 0.5732, |
|
"step": 25916 |
|
}, |
|
{ |
|
"epoch": 0.4001480487635615, |
|
"grad_norm": 0.5174633860588074, |
|
"learning_rate": 3.003459147240753e-06, |
|
"loss": 0.5704, |
|
"step": 25947 |
|
}, |
|
{ |
|
"epoch": 0.4006261228958955, |
|
"grad_norm": 0.588633120059967, |
|
"learning_rate": 2.9636723756037875e-06, |
|
"loss": 0.5677, |
|
"step": 25978 |
|
}, |
|
{ |
|
"epoch": 0.4011041970282295, |
|
"grad_norm": 0.5764910578727722, |
|
"learning_rate": 2.9241342809289833e-06, |
|
"loss": 0.5718, |
|
"step": 26009 |
|
}, |
|
{ |
|
"epoch": 0.4015822711605635, |
|
"grad_norm": 0.5911231637001038, |
|
"learning_rate": 2.8848453093983594e-06, |
|
"loss": 0.576, |
|
"step": 26040 |
|
}, |
|
{ |
|
"epoch": 0.4020603452928975, |
|
"grad_norm": 0.5014284253120422, |
|
"learning_rate": 2.8458059043826257e-06, |
|
"loss": 0.5699, |
|
"step": 26071 |
|
}, |
|
{ |
|
"epoch": 0.4025384194252315, |
|
"grad_norm": 0.5628160834312439, |
|
"learning_rate": 2.807016506436172e-06, |
|
"loss": 0.5712, |
|
"step": 26102 |
|
}, |
|
{ |
|
"epoch": 0.4030164935575655, |
|
"grad_norm": 0.536497950553894, |
|
"learning_rate": 2.7684775532920566e-06, |
|
"loss": 0.565, |
|
"step": 26133 |
|
}, |
|
{ |
|
"epoch": 0.40349456768989955, |
|
"grad_norm": 0.5986963510513306, |
|
"learning_rate": 2.7301894798571425e-06, |
|
"loss": 0.5741, |
|
"step": 26164 |
|
}, |
|
{ |
|
"epoch": 0.4039726418222335, |
|
"grad_norm": 0.5085678100585938, |
|
"learning_rate": 2.6921527182071386e-06, |
|
"loss": 0.5645, |
|
"step": 26195 |
|
}, |
|
{ |
|
"epoch": 0.40445071595456755, |
|
"grad_norm": 0.5823872685432434, |
|
"learning_rate": 2.654367697581725e-06, |
|
"loss": 0.5578, |
|
"step": 26226 |
|
}, |
|
{ |
|
"epoch": 0.4049287900869015, |
|
"grad_norm": 0.5357898473739624, |
|
"learning_rate": 2.6168348443797175e-06, |
|
"loss": 0.5715, |
|
"step": 26257 |
|
}, |
|
{ |
|
"epoch": 0.40540686421923555, |
|
"grad_norm": 0.6874336004257202, |
|
"learning_rate": 2.5795545821542757e-06, |
|
"loss": 0.5723, |
|
"step": 26288 |
|
}, |
|
{ |
|
"epoch": 0.4058849383515695, |
|
"grad_norm": 0.5069230198860168, |
|
"learning_rate": 2.54252733160808e-06, |
|
"loss": 0.5739, |
|
"step": 26319 |
|
}, |
|
{ |
|
"epoch": 0.40636301248390355, |
|
"grad_norm": 0.5667560696601868, |
|
"learning_rate": 2.5057535105886294e-06, |
|
"loss": 0.5638, |
|
"step": 26350 |
|
}, |
|
{ |
|
"epoch": 0.4068410866162376, |
|
"grad_norm": 0.5513550639152527, |
|
"learning_rate": 2.4692335340834953e-06, |
|
"loss": 0.5706, |
|
"step": 26381 |
|
}, |
|
{ |
|
"epoch": 0.40731916074857155, |
|
"grad_norm": 0.583328366279602, |
|
"learning_rate": 2.432967814215639e-06, |
|
"loss": 0.5635, |
|
"step": 26412 |
|
}, |
|
{ |
|
"epoch": 0.4077972348809056, |
|
"grad_norm": 0.5765253305435181, |
|
"learning_rate": 2.396956760238794e-06, |
|
"loss": 0.5715, |
|
"step": 26443 |
|
}, |
|
{ |
|
"epoch": 0.40827530901323955, |
|
"grad_norm": 0.5591960549354553, |
|
"learning_rate": 2.361200778532796e-06, |
|
"loss": 0.5637, |
|
"step": 26474 |
|
}, |
|
{ |
|
"epoch": 0.4087533831455736, |
|
"grad_norm": 0.5683372616767883, |
|
"learning_rate": 2.325700272599049e-06, |
|
"loss": 0.5667, |
|
"step": 26505 |
|
}, |
|
{ |
|
"epoch": 0.40923145727790755, |
|
"grad_norm": 0.7753663063049316, |
|
"learning_rate": 2.2904556430559415e-06, |
|
"loss": 0.5684, |
|
"step": 26536 |
|
}, |
|
{ |
|
"epoch": 0.4097095314102416, |
|
"grad_norm": 0.5882099866867065, |
|
"learning_rate": 2.2554672876343106e-06, |
|
"loss": 0.5644, |
|
"step": 26567 |
|
}, |
|
{ |
|
"epoch": 0.4101876055425756, |
|
"grad_norm": 0.5205914378166199, |
|
"learning_rate": 2.220735601173002e-06, |
|
"loss": 0.5722, |
|
"step": 26598 |
|
}, |
|
{ |
|
"epoch": 0.4106656796749096, |
|
"grad_norm": 0.5754354000091553, |
|
"learning_rate": 2.186260975614382e-06, |
|
"loss": 0.5811, |
|
"step": 26629 |
|
}, |
|
{ |
|
"epoch": 0.4111437538072436, |
|
"grad_norm": 0.5526401996612549, |
|
"learning_rate": 2.1520437999999034e-06, |
|
"loss": 0.5656, |
|
"step": 26660 |
|
}, |
|
{ |
|
"epoch": 0.4116218279395776, |
|
"grad_norm": 0.5454549789428711, |
|
"learning_rate": 2.1180844604657526e-06, |
|
"loss": 0.5753, |
|
"step": 26691 |
|
}, |
|
{ |
|
"epoch": 0.4120999020719116, |
|
"grad_norm": 0.6748654246330261, |
|
"learning_rate": 2.084383340238455e-06, |
|
"loss": 0.5736, |
|
"step": 26722 |
|
}, |
|
{ |
|
"epoch": 0.41257797620424563, |
|
"grad_norm": 0.506722092628479, |
|
"learning_rate": 2.0509408196305704e-06, |
|
"loss": 0.5766, |
|
"step": 26753 |
|
}, |
|
{ |
|
"epoch": 0.4130560503365796, |
|
"grad_norm": 0.6061763763427734, |
|
"learning_rate": 2.017757276036403e-06, |
|
"loss": 0.5707, |
|
"step": 26784 |
|
}, |
|
{ |
|
"epoch": 0.41353412446891363, |
|
"grad_norm": 0.5422362685203552, |
|
"learning_rate": 1.984833083927726e-06, |
|
"loss": 0.5741, |
|
"step": 26815 |
|
}, |
|
{ |
|
"epoch": 0.4140121986012476, |
|
"grad_norm": 0.5236985683441162, |
|
"learning_rate": 1.952168614849581e-06, |
|
"loss": 0.5618, |
|
"step": 26846 |
|
}, |
|
{ |
|
"epoch": 0.41449027273358163, |
|
"grad_norm": 0.48521238565444946, |
|
"learning_rate": 1.919764237416058e-06, |
|
"loss": 0.5642, |
|
"step": 26877 |
|
}, |
|
{ |
|
"epoch": 0.4149683468659156, |
|
"grad_norm": 0.4921460449695587, |
|
"learning_rate": 1.8876203173061463e-06, |
|
"loss": 0.5643, |
|
"step": 26908 |
|
}, |
|
{ |
|
"epoch": 0.41544642099824963, |
|
"grad_norm": 0.5896371006965637, |
|
"learning_rate": 1.8557372172596206e-06, |
|
"loss": 0.5711, |
|
"step": 26939 |
|
}, |
|
{ |
|
"epoch": 0.41592449513058366, |
|
"grad_norm": 0.5045610666275024, |
|
"learning_rate": 1.8241152970729341e-06, |
|
"loss": 0.5803, |
|
"step": 26970 |
|
}, |
|
{ |
|
"epoch": 0.41640256926291763, |
|
"grad_norm": 0.6058014035224915, |
|
"learning_rate": 1.7927549135951572e-06, |
|
"loss": 0.5676, |
|
"step": 27001 |
|
}, |
|
{ |
|
"epoch": 0.41688064339525166, |
|
"grad_norm": 0.4907824397087097, |
|
"learning_rate": 1.7616564207239477e-06, |
|
"loss": 0.5643, |
|
"step": 27032 |
|
}, |
|
{ |
|
"epoch": 0.41735871752758563, |
|
"grad_norm": 0.5123028755187988, |
|
"learning_rate": 1.730820169401584e-06, |
|
"loss": 0.5696, |
|
"step": 27063 |
|
}, |
|
{ |
|
"epoch": 0.41783679165991966, |
|
"grad_norm": 0.646056592464447, |
|
"learning_rate": 1.7002465076109558e-06, |
|
"loss": 0.5692, |
|
"step": 27094 |
|
}, |
|
{ |
|
"epoch": 0.41831486579225363, |
|
"grad_norm": 0.5923252701759338, |
|
"learning_rate": 1.6699357803716898e-06, |
|
"loss": 0.5614, |
|
"step": 27125 |
|
}, |
|
{ |
|
"epoch": 0.41879293992458766, |
|
"grad_norm": 0.6700661778450012, |
|
"learning_rate": 1.6398883297362305e-06, |
|
"loss": 0.5685, |
|
"step": 27156 |
|
}, |
|
{ |
|
"epoch": 0.4192710140569217, |
|
"grad_norm": 0.6059920787811279, |
|
"learning_rate": 1.6101044947859606e-06, |
|
"loss": 0.5664, |
|
"step": 27187 |
|
}, |
|
{ |
|
"epoch": 0.41974908818925566, |
|
"grad_norm": 0.5154827833175659, |
|
"learning_rate": 1.5805846116274114e-06, |
|
"loss": 0.569, |
|
"step": 27218 |
|
}, |
|
{ |
|
"epoch": 0.4202271623215897, |
|
"grad_norm": 0.48046451807022095, |
|
"learning_rate": 1.5513290133884611e-06, |
|
"loss": 0.5744, |
|
"step": 27249 |
|
}, |
|
{ |
|
"epoch": 0.42070523645392366, |
|
"grad_norm": 0.5736571550369263, |
|
"learning_rate": 1.5223380302145512e-06, |
|
"loss": 0.5661, |
|
"step": 27280 |
|
}, |
|
{ |
|
"epoch": 0.4211833105862577, |
|
"grad_norm": 0.4849691092967987, |
|
"learning_rate": 1.4936119892649925e-06, |
|
"loss": 0.5602, |
|
"step": 27311 |
|
}, |
|
{ |
|
"epoch": 0.42166138471859166, |
|
"grad_norm": 0.45344623923301697, |
|
"learning_rate": 1.4651512147092482e-06, |
|
"loss": 0.5731, |
|
"step": 27342 |
|
}, |
|
{ |
|
"epoch": 0.4221394588509257, |
|
"grad_norm": 0.5611448287963867, |
|
"learning_rate": 1.4369560277232908e-06, |
|
"loss": 0.5758, |
|
"step": 27373 |
|
}, |
|
{ |
|
"epoch": 0.4226175329832597, |
|
"grad_norm": 0.5461937189102173, |
|
"learning_rate": 1.409026746485978e-06, |
|
"loss": 0.5768, |
|
"step": 27404 |
|
}, |
|
{ |
|
"epoch": 0.4230956071155937, |
|
"grad_norm": 0.6484282612800598, |
|
"learning_rate": 1.3813636861754464e-06, |
|
"loss": 0.5611, |
|
"step": 27435 |
|
}, |
|
{ |
|
"epoch": 0.4235736812479277, |
|
"grad_norm": 0.5170944929122925, |
|
"learning_rate": 1.3539671589655773e-06, |
|
"loss": 0.5661, |
|
"step": 27466 |
|
} |
|
  ],
  "logging_steps": 31,
  "max_steps": 30517,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 3052,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.909218670290351e+19,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}