|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.1882686776623717, |
|
"eval_steps": 500, |
|
"global_step": 12208, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0004780741323340042, |
|
"grad_norm": 3.1866345405578613, |
|
"learning_rate": 1.0157273918741808e-06, |
|
"loss": 1.194, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.0009561482646680084, |
|
"grad_norm": 1.7708723545074463, |
|
"learning_rate": 2.0314547837483616e-06, |
|
"loss": 1.1124, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.0014342223970020126, |
|
"grad_norm": 1.6835601329803467, |
|
"learning_rate": 3.0471821756225426e-06, |
|
"loss": 1.0585, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.0019122965293360168, |
|
"grad_norm": 1.642537236213684, |
|
"learning_rate": 4.062909567496723e-06, |
|
"loss": 1.0213, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.002390370661670021, |
|
"grad_norm": 1.8476150035858154, |
|
"learning_rate": 5.078636959370905e-06, |
|
"loss": 1.0024, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.0028684447940040252, |
|
"grad_norm": 1.96336829662323, |
|
"learning_rate": 6.094364351245085e-06, |
|
"loss": 0.9938, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.0033465189263380294, |
|
"grad_norm": 1.8158528804779053, |
|
"learning_rate": 7.110091743119267e-06, |
|
"loss": 0.969, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.0038245930586720336, |
|
"grad_norm": 1.9796783924102783, |
|
"learning_rate": 8.125819134993446e-06, |
|
"loss": 0.9573, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.004302667191006038, |
|
"grad_norm": 1.8192514181137085, |
|
"learning_rate": 9.141546526867629e-06, |
|
"loss": 0.9498, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.004780741323340042, |
|
"grad_norm": 2.1396358013153076, |
|
"learning_rate": 1.015727391874181e-05, |
|
"loss": 0.9294, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.005258815455674046, |
|
"grad_norm": 2.0807156562805176, |
|
"learning_rate": 1.117300131061599e-05, |
|
"loss": 0.9186, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.0057368895880080505, |
|
"grad_norm": 2.002887010574341, |
|
"learning_rate": 1.218872870249017e-05, |
|
"loss": 0.9149, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.006214963720342055, |
|
"grad_norm": 2.240675687789917, |
|
"learning_rate": 1.3204456094364351e-05, |
|
"loss": 0.9139, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.006693037852676059, |
|
"grad_norm": 1.9016591310501099, |
|
"learning_rate": 1.4220183486238533e-05, |
|
"loss": 0.8946, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.007171111985010063, |
|
"grad_norm": 1.6729310750961304, |
|
"learning_rate": 1.5235910878112714e-05, |
|
"loss": 0.8959, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.007649186117344067, |
|
"grad_norm": 2.2607779502868652, |
|
"learning_rate": 1.6251638269986893e-05, |
|
"loss": 0.8847, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.00812726024967807, |
|
"grad_norm": 1.699507236480713, |
|
"learning_rate": 1.7267365661861077e-05, |
|
"loss": 0.8773, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.008605334382012076, |
|
"grad_norm": 1.93502938747406, |
|
"learning_rate": 1.8283093053735257e-05, |
|
"loss": 0.8773, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.009083408514346079, |
|
"grad_norm": 2.1234281063079834, |
|
"learning_rate": 1.9298820445609438e-05, |
|
"loss": 0.8669, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.009561482646680084, |
|
"grad_norm": 1.7465944290161133, |
|
"learning_rate": 2.031454783748362e-05, |
|
"loss": 0.8536, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.010039556779014087, |
|
"grad_norm": 1.7039932012557983, |
|
"learning_rate": 2.13302752293578e-05, |
|
"loss": 0.8624, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.010517630911348093, |
|
"grad_norm": 1.551837682723999, |
|
"learning_rate": 2.234600262123198e-05, |
|
"loss": 0.8555, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.010995705043682096, |
|
"grad_norm": 1.7622497081756592, |
|
"learning_rate": 2.336173001310616e-05, |
|
"loss": 0.8475, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.011473779176016101, |
|
"grad_norm": 2.000936269760132, |
|
"learning_rate": 2.437745740498034e-05, |
|
"loss": 0.852, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.011951853308350104, |
|
"grad_norm": 1.6770379543304443, |
|
"learning_rate": 2.5393184796854525e-05, |
|
"loss": 0.8455, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.01242992744068411, |
|
"grad_norm": 1.746506929397583, |
|
"learning_rate": 2.6408912188728702e-05, |
|
"loss": 0.8406, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.012908001573018113, |
|
"grad_norm": 1.507759690284729, |
|
"learning_rate": 2.7424639580602886e-05, |
|
"loss": 0.8556, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.013386075705352118, |
|
"grad_norm": 1.6984492540359497, |
|
"learning_rate": 2.8440366972477066e-05, |
|
"loss": 0.8281, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.013864149837686121, |
|
"grad_norm": 1.5145736932754517, |
|
"learning_rate": 2.9456094364351244e-05, |
|
"loss": 0.8359, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.014342223970020126, |
|
"grad_norm": 1.5700000524520874, |
|
"learning_rate": 3.0471821756225428e-05, |
|
"loss": 0.8305, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.01482029810235413, |
|
"grad_norm": 1.6657596826553345, |
|
"learning_rate": 3.148754914809961e-05, |
|
"loss": 0.8263, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.015298372234688135, |
|
"grad_norm": 1.527628779411316, |
|
"learning_rate": 3.2503276539973785e-05, |
|
"loss": 0.8318, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.015776446367022138, |
|
"grad_norm": 1.8353967666625977, |
|
"learning_rate": 3.351900393184797e-05, |
|
"loss": 0.8192, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.01625452049935614, |
|
"grad_norm": 1.2118042707443237, |
|
"learning_rate": 3.453473132372215e-05, |
|
"loss": 0.819, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.016732594631690145, |
|
"grad_norm": 1.3235970735549927, |
|
"learning_rate": 3.555045871559633e-05, |
|
"loss": 0.8176, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.01721066876402415, |
|
"grad_norm": 1.6729888916015625, |
|
"learning_rate": 3.6566186107470514e-05, |
|
"loss": 0.8202, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.017688742896358155, |
|
"grad_norm": 1.3251298666000366, |
|
"learning_rate": 3.7581913499344695e-05, |
|
"loss": 0.8104, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.018166817028692158, |
|
"grad_norm": 1.5231342315673828, |
|
"learning_rate": 3.8597640891218876e-05, |
|
"loss": 0.8113, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.01864489116102616, |
|
"grad_norm": 1.3263883590698242, |
|
"learning_rate": 3.9613368283093056e-05, |
|
"loss": 0.7981, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.019122965293360168, |
|
"grad_norm": 1.317791223526001, |
|
"learning_rate": 4.062909567496724e-05, |
|
"loss": 0.7962, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.01960103942569417, |
|
"grad_norm": 1.566698670387268, |
|
"learning_rate": 4.164482306684142e-05, |
|
"loss": 0.8072, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 0.020079113558028175, |
|
"grad_norm": 1.2935110330581665, |
|
"learning_rate": 4.26605504587156e-05, |
|
"loss": 0.7818, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 0.020557187690362178, |
|
"grad_norm": 1.336227536201477, |
|
"learning_rate": 4.367627785058978e-05, |
|
"loss": 0.7917, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 0.021035261822696185, |
|
"grad_norm": 1.5656548738479614, |
|
"learning_rate": 4.469200524246396e-05, |
|
"loss": 0.7785, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 0.02151333595503019, |
|
"grad_norm": 1.5208159685134888, |
|
"learning_rate": 4.570773263433814e-05, |
|
"loss": 0.7839, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.02199141008736419, |
|
"grad_norm": 1.4816375970840454, |
|
"learning_rate": 4.672346002621232e-05, |
|
"loss": 0.7789, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 0.022469484219698195, |
|
"grad_norm": 1.402166485786438, |
|
"learning_rate": 4.77391874180865e-05, |
|
"loss": 0.7803, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 0.022947558352032202, |
|
"grad_norm": 1.2986265420913696, |
|
"learning_rate": 4.875491480996068e-05, |
|
"loss": 0.7739, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 0.023425632484366205, |
|
"grad_norm": 1.3171271085739136, |
|
"learning_rate": 4.977064220183487e-05, |
|
"loss": 0.7694, |
|
"step": 1519 |
|
}, |
|
{ |
|
"epoch": 0.02390370661670021, |
|
"grad_norm": 1.4870030879974365, |
|
"learning_rate": 4.9999915451558777e-05, |
|
"loss": 0.7663, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.024381780749034212, |
|
"grad_norm": 1.7419476509094238, |
|
"learning_rate": 4.999955597496219e-05, |
|
"loss": 0.7731, |
|
"step": 1581 |
|
}, |
|
{ |
|
"epoch": 0.02485985488136822, |
|
"grad_norm": 1.109238624572754, |
|
"learning_rate": 4.9998914381774255e-05, |
|
"loss": 0.7704, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 0.025337929013702222, |
|
"grad_norm": 1.3478920459747314, |
|
"learning_rate": 4.999799067923527e-05, |
|
"loss": 0.7723, |
|
"step": 1643 |
|
}, |
|
{ |
|
"epoch": 0.025816003146036225, |
|
"grad_norm": 1.2739650011062622, |
|
"learning_rate": 4.999678487776908e-05, |
|
"loss": 0.7699, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 0.02629407727837023, |
|
"grad_norm": 1.3892935514450073, |
|
"learning_rate": 4.9995296990983006e-05, |
|
"loss": 0.7709, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.026772151410704235, |
|
"grad_norm": 1.2031512260437012, |
|
"learning_rate": 4.999352703566763e-05, |
|
"loss": 0.7557, |
|
"step": 1736 |
|
}, |
|
{ |
|
"epoch": 0.02725022554303824, |
|
"grad_norm": 1.185471773147583, |
|
"learning_rate": 4.999147503179668e-05, |
|
"loss": 0.7645, |
|
"step": 1767 |
|
}, |
|
{ |
|
"epoch": 0.027728299675372242, |
|
"grad_norm": 1.2331740856170654, |
|
"learning_rate": 4.998914100252672e-05, |
|
"loss": 0.7655, |
|
"step": 1798 |
|
}, |
|
{ |
|
"epoch": 0.028206373807706245, |
|
"grad_norm": 1.1817049980163574, |
|
"learning_rate": 4.998652497419696e-05, |
|
"loss": 0.7617, |
|
"step": 1829 |
|
}, |
|
{ |
|
"epoch": 0.028684447940040252, |
|
"grad_norm": 1.2225860357284546, |
|
"learning_rate": 4.9983626976328927e-05, |
|
"loss": 0.7558, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.029162522072374256, |
|
"grad_norm": 1.6452528238296509, |
|
"learning_rate": 4.998044704162613e-05, |
|
"loss": 0.7566, |
|
"step": 1891 |
|
}, |
|
{ |
|
"epoch": 0.02964059620470826, |
|
"grad_norm": 1.3764828443527222, |
|
"learning_rate": 4.9976985205973705e-05, |
|
"loss": 0.7545, |
|
"step": 1922 |
|
}, |
|
{ |
|
"epoch": 0.030118670337042262, |
|
"grad_norm": 1.6581465005874634, |
|
"learning_rate": 4.997324150843799e-05, |
|
"loss": 0.7519, |
|
"step": 1953 |
|
}, |
|
{ |
|
"epoch": 0.03059674446937627, |
|
"grad_norm": 1.1065036058425903, |
|
"learning_rate": 4.99692159912661e-05, |
|
"loss": 0.7579, |
|
"step": 1984 |
|
}, |
|
{ |
|
"epoch": 0.031074818601710272, |
|
"grad_norm": 2.6465444564819336, |
|
"learning_rate": 4.996490869988546e-05, |
|
"loss": 0.7538, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 0.031552892734044276, |
|
"grad_norm": 1.3028968572616577, |
|
"learning_rate": 4.996031968290326e-05, |
|
"loss": 0.7522, |
|
"step": 2046 |
|
}, |
|
{ |
|
"epoch": 0.03203096686637828, |
|
"grad_norm": 1.0450382232666016, |
|
"learning_rate": 4.995544899210594e-05, |
|
"loss": 0.742, |
|
"step": 2077 |
|
}, |
|
{ |
|
"epoch": 0.03250904099871228, |
|
"grad_norm": 0.9411994814872742, |
|
"learning_rate": 4.9950296682458583e-05, |
|
"loss": 0.7401, |
|
"step": 2108 |
|
}, |
|
{ |
|
"epoch": 0.03298711513104629, |
|
"grad_norm": 1.0501608848571777, |
|
"learning_rate": 4.994486281210429e-05, |
|
"loss": 0.7455, |
|
"step": 2139 |
|
}, |
|
{ |
|
"epoch": 0.03346518926338029, |
|
"grad_norm": 1.266400694847107, |
|
"learning_rate": 4.9939147442363566e-05, |
|
"loss": 0.7391, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.033943263395714296, |
|
"grad_norm": 1.2862213850021362, |
|
"learning_rate": 4.9933150637733574e-05, |
|
"loss": 0.7397, |
|
"step": 2201 |
|
}, |
|
{ |
|
"epoch": 0.0344213375280483, |
|
"grad_norm": 1.2715409994125366, |
|
"learning_rate": 4.992687246588743e-05, |
|
"loss": 0.7467, |
|
"step": 2232 |
|
}, |
|
{ |
|
"epoch": 0.0348994116603823, |
|
"grad_norm": 1.3290003538131714, |
|
"learning_rate": 4.992031299767347e-05, |
|
"loss": 0.7432, |
|
"step": 2263 |
|
}, |
|
{ |
|
"epoch": 0.03537748579271631, |
|
"grad_norm": 1.0561761856079102, |
|
"learning_rate": 4.9913472307114386e-05, |
|
"loss": 0.7336, |
|
"step": 2294 |
|
}, |
|
{ |
|
"epoch": 0.035855559925050316, |
|
"grad_norm": 1.3272422552108765, |
|
"learning_rate": 4.9906350471406446e-05, |
|
"loss": 0.7251, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.036333634057384316, |
|
"grad_norm": 1.0749491453170776, |
|
"learning_rate": 4.989894757091861e-05, |
|
"loss": 0.7205, |
|
"step": 2356 |
|
}, |
|
{ |
|
"epoch": 0.03681170818971832, |
|
"grad_norm": 1.1581461429595947, |
|
"learning_rate": 4.989126368919158e-05, |
|
"loss": 0.7311, |
|
"step": 2387 |
|
}, |
|
{ |
|
"epoch": 0.03728978232205232, |
|
"grad_norm": 1.0796961784362793, |
|
"learning_rate": 4.988329891293693e-05, |
|
"loss": 0.7259, |
|
"step": 2418 |
|
}, |
|
{ |
|
"epoch": 0.03776785645438633, |
|
"grad_norm": 1.1916818618774414, |
|
"learning_rate": 4.987505333203608e-05, |
|
"loss": 0.7258, |
|
"step": 2449 |
|
}, |
|
{ |
|
"epoch": 0.038245930586720336, |
|
"grad_norm": 0.9515872001647949, |
|
"learning_rate": 4.9866527039539276e-05, |
|
"loss": 0.7273, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.038724004719054336, |
|
"grad_norm": 1.1217246055603027, |
|
"learning_rate": 4.9857720131664594e-05, |
|
"loss": 0.7269, |
|
"step": 2511 |
|
}, |
|
{ |
|
"epoch": 0.03920207885138834, |
|
"grad_norm": 0.9570199847221375, |
|
"learning_rate": 4.9848632707796773e-05, |
|
"loss": 0.7289, |
|
"step": 2542 |
|
}, |
|
{ |
|
"epoch": 0.03968015298372235, |
|
"grad_norm": 1.2980682849884033, |
|
"learning_rate": 4.9839264870486155e-05, |
|
"loss": 0.7382, |
|
"step": 2573 |
|
}, |
|
{ |
|
"epoch": 0.04015822711605635, |
|
"grad_norm": 2.1257143020629883, |
|
"learning_rate": 4.9829616725447526e-05, |
|
"loss": 0.8112, |
|
"step": 2604 |
|
}, |
|
{ |
|
"epoch": 0.04063630124839036, |
|
"grad_norm": 1.5091110467910767, |
|
"learning_rate": 4.981968838155888e-05, |
|
"loss": 0.7451, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 0.041114375380724356, |
|
"grad_norm": 2.4548749923706055, |
|
"learning_rate": 4.980947995086024e-05, |
|
"loss": 0.7358, |
|
"step": 2666 |
|
}, |
|
{ |
|
"epoch": 0.04159244951305836, |
|
"grad_norm": 1.176115870475769, |
|
"learning_rate": 4.979899154855234e-05, |
|
"loss": 0.7368, |
|
"step": 2697 |
|
}, |
|
{ |
|
"epoch": 0.04207052364539237, |
|
"grad_norm": 1.063672661781311, |
|
"learning_rate": 4.9788223292995386e-05, |
|
"loss": 0.7465, |
|
"step": 2728 |
|
}, |
|
{ |
|
"epoch": 0.04254859777772637, |
|
"grad_norm": 1.3062779903411865, |
|
"learning_rate": 4.977717530570768e-05, |
|
"loss": 0.7413, |
|
"step": 2759 |
|
}, |
|
{ |
|
"epoch": 0.04302667191006038, |
|
"grad_norm": 1.5550280809402466, |
|
"learning_rate": 4.976584771136425e-05, |
|
"loss": 0.724, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.043504746042394384, |
|
"grad_norm": 1.2525728940963745, |
|
"learning_rate": 4.975424063779547e-05, |
|
"loss": 0.7216, |
|
"step": 2821 |
|
}, |
|
{ |
|
"epoch": 0.04398282017472838, |
|
"grad_norm": 1.158134937286377, |
|
"learning_rate": 4.974235421598557e-05, |
|
"loss": 0.728, |
|
"step": 2852 |
|
}, |
|
{ |
|
"epoch": 0.04446089430706239, |
|
"grad_norm": 1.262291669845581, |
|
"learning_rate": 4.973018858007122e-05, |
|
"loss": 0.7191, |
|
"step": 2883 |
|
}, |
|
{ |
|
"epoch": 0.04493896843939639, |
|
"grad_norm": 2.826028347015381, |
|
"learning_rate": 4.9717743867339963e-05, |
|
"loss": 0.7211, |
|
"step": 2914 |
|
}, |
|
{ |
|
"epoch": 0.0454170425717304, |
|
"grad_norm": 1.0346958637237549, |
|
"learning_rate": 4.9705020218228695e-05, |
|
"loss": 0.7268, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 0.045895116704064404, |
|
"grad_norm": 1.4338330030441284, |
|
"learning_rate": 4.969201777632205e-05, |
|
"loss": 0.7154, |
|
"step": 2976 |
|
}, |
|
{ |
|
"epoch": 0.046373190836398404, |
|
"grad_norm": 0.9223676919937134, |
|
"learning_rate": 4.9678736688350846e-05, |
|
"loss": 0.7195, |
|
"step": 3007 |
|
}, |
|
{ |
|
"epoch": 0.04685126496873241, |
|
"grad_norm": 0.9936623573303223, |
|
"learning_rate": 4.966517710419033e-05, |
|
"loss": 0.7194, |
|
"step": 3038 |
|
}, |
|
{ |
|
"epoch": 0.04732933910106642, |
|
"grad_norm": 1.017962098121643, |
|
"learning_rate": 4.965133917685858e-05, |
|
"loss": 0.713, |
|
"step": 3069 |
|
}, |
|
{ |
|
"epoch": 0.04780741323340042, |
|
"grad_norm": 0.9654473662376404, |
|
"learning_rate": 4.9637223062514714e-05, |
|
"loss": 0.7096, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.048285487365734424, |
|
"grad_norm": 0.9372695684432983, |
|
"learning_rate": 4.962282892045718e-05, |
|
"loss": 0.7116, |
|
"step": 3131 |
|
}, |
|
{ |
|
"epoch": 0.048763561498068424, |
|
"grad_norm": 0.9450846910476685, |
|
"learning_rate": 4.9608156913121904e-05, |
|
"loss": 0.7129, |
|
"step": 3162 |
|
}, |
|
{ |
|
"epoch": 0.04924163563040243, |
|
"grad_norm": 1.0803054571151733, |
|
"learning_rate": 4.959320720608049e-05, |
|
"loss": 0.706, |
|
"step": 3193 |
|
}, |
|
{ |
|
"epoch": 0.04971970976273644, |
|
"grad_norm": 1.2218348979949951, |
|
"learning_rate": 4.9577979968038354e-05, |
|
"loss": 0.7076, |
|
"step": 3224 |
|
}, |
|
{ |
|
"epoch": 0.05019778389507044, |
|
"grad_norm": 1.0429767370224, |
|
"learning_rate": 4.956247537083282e-05, |
|
"loss": 0.7089, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 0.050675858027404444, |
|
"grad_norm": 0.9912049770355225, |
|
"learning_rate": 4.9546693589431145e-05, |
|
"loss": 0.7016, |
|
"step": 3286 |
|
}, |
|
{ |
|
"epoch": 0.051153932159738444, |
|
"grad_norm": 0.9687103033065796, |
|
"learning_rate": 4.9530634801928595e-05, |
|
"loss": 0.7071, |
|
"step": 3317 |
|
}, |
|
{ |
|
"epoch": 0.05163200629207245, |
|
"grad_norm": 0.8178670406341553, |
|
"learning_rate": 4.9514299189546395e-05, |
|
"loss": 0.6991, |
|
"step": 3348 |
|
}, |
|
{ |
|
"epoch": 0.05211008042440646, |
|
"grad_norm": 0.8601382374763489, |
|
"learning_rate": 4.949768693662973e-05, |
|
"loss": 0.6978, |
|
"step": 3379 |
|
}, |
|
{ |
|
"epoch": 0.05258815455674046, |
|
"grad_norm": 1.076370120048523, |
|
"learning_rate": 4.948079823064559e-05, |
|
"loss": 0.7044, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.053066228689074464, |
|
"grad_norm": 3.9457356929779053, |
|
"learning_rate": 4.946363326218074e-05, |
|
"loss": 0.6978, |
|
"step": 3441 |
|
}, |
|
{ |
|
"epoch": 0.05354430282140847, |
|
"grad_norm": 0.8186474442481995, |
|
"learning_rate": 4.9446192224939525e-05, |
|
"loss": 0.6974, |
|
"step": 3472 |
|
}, |
|
{ |
|
"epoch": 0.05402237695374247, |
|
"grad_norm": 0.9643816947937012, |
|
"learning_rate": 4.942847531574167e-05, |
|
"loss": 0.7025, |
|
"step": 3503 |
|
}, |
|
{ |
|
"epoch": 0.05450045108607648, |
|
"grad_norm": 1.024248480796814, |
|
"learning_rate": 4.941048273452008e-05, |
|
"loss": 0.7006, |
|
"step": 3534 |
|
}, |
|
{ |
|
"epoch": 0.05497852521841048, |
|
"grad_norm": 0.7718949317932129, |
|
"learning_rate": 4.9392214684318605e-05, |
|
"loss": 0.7024, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 0.055456599350744484, |
|
"grad_norm": 1.1313899755477905, |
|
"learning_rate": 4.93736713712897e-05, |
|
"loss": 0.701, |
|
"step": 3596 |
|
}, |
|
{ |
|
"epoch": 0.05593467348307849, |
|
"grad_norm": 1.0118827819824219, |
|
"learning_rate": 4.9354853004692124e-05, |
|
"loss": 0.7036, |
|
"step": 3627 |
|
}, |
|
{ |
|
"epoch": 0.05641274761541249, |
|
"grad_norm": 0.9465724229812622, |
|
"learning_rate": 4.93357597968886e-05, |
|
"loss": 0.6869, |
|
"step": 3658 |
|
}, |
|
{ |
|
"epoch": 0.0568908217477465, |
|
"grad_norm": 1.0233882665634155, |
|
"learning_rate": 4.931639196334338e-05, |
|
"loss": 0.6944, |
|
"step": 3689 |
|
}, |
|
{ |
|
"epoch": 0.057368895880080505, |
|
"grad_norm": 1.008609652519226, |
|
"learning_rate": 4.9296749722619826e-05, |
|
"loss": 0.6916, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.057846970012414504, |
|
"grad_norm": 1.0083181858062744, |
|
"learning_rate": 4.9276833296377966e-05, |
|
"loss": 0.6886, |
|
"step": 3751 |
|
}, |
|
{ |
|
"epoch": 0.05832504414474851, |
|
"grad_norm": 0.9374220967292786, |
|
"learning_rate": 4.925664290937196e-05, |
|
"loss": 0.6976, |
|
"step": 3782 |
|
}, |
|
{ |
|
"epoch": 0.05880311827708251, |
|
"grad_norm": 0.9066904783248901, |
|
"learning_rate": 4.9236178789447576e-05, |
|
"loss": 0.6911, |
|
"step": 3813 |
|
}, |
|
{ |
|
"epoch": 0.05928119240941652, |
|
"grad_norm": 0.9702699780464172, |
|
"learning_rate": 4.921544116753962e-05, |
|
"loss": 0.6959, |
|
"step": 3844 |
|
}, |
|
{ |
|
"epoch": 0.059759266541750525, |
|
"grad_norm": 0.8405037522315979, |
|
"learning_rate": 4.919443027766935e-05, |
|
"loss": 0.6896, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 0.060237340674084525, |
|
"grad_norm": 0.9460383653640747, |
|
"learning_rate": 4.91731463569418e-05, |
|
"loss": 0.6871, |
|
"step": 3906 |
|
}, |
|
{ |
|
"epoch": 0.06071541480641853, |
|
"grad_norm": 0.9355078339576721, |
|
"learning_rate": 4.915158964554312e-05, |
|
"loss": 0.6843, |
|
"step": 3937 |
|
}, |
|
{ |
|
"epoch": 0.06119348893875254, |
|
"grad_norm": 0.7211058735847473, |
|
"learning_rate": 4.912976038673786e-05, |
|
"loss": 0.6861, |
|
"step": 3968 |
|
}, |
|
{ |
|
"epoch": 0.06167156307108654, |
|
"grad_norm": 0.8674766421318054, |
|
"learning_rate": 4.9107658826866254e-05, |
|
"loss": 0.6939, |
|
"step": 3999 |
|
}, |
|
{ |
|
"epoch": 0.062149637203420545, |
|
"grad_norm": 0.8166181445121765, |
|
"learning_rate": 4.908528521534139e-05, |
|
"loss": 0.692, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.06262771133575455, |
|
"grad_norm": 0.841305136680603, |
|
"learning_rate": 4.906263980464644e-05, |
|
"loss": 0.6855, |
|
"step": 4061 |
|
}, |
|
{ |
|
"epoch": 0.06310578546808855, |
|
"grad_norm": 0.8942857384681702, |
|
"learning_rate": 4.903972285033178e-05, |
|
"loss": 0.6946, |
|
"step": 4092 |
|
}, |
|
{ |
|
"epoch": 0.06358385960042255, |
|
"grad_norm": 0.8595120310783386, |
|
"learning_rate": 4.901653461101213e-05, |
|
"loss": 0.6825, |
|
"step": 4123 |
|
}, |
|
{ |
|
"epoch": 0.06406193373275657, |
|
"grad_norm": 0.8155812621116638, |
|
"learning_rate": 4.8993075348363626e-05, |
|
"loss": 0.6821, |
|
"step": 4154 |
|
}, |
|
{ |
|
"epoch": 0.06454000786509057, |
|
"grad_norm": 0.8901113271713257, |
|
"learning_rate": 4.896934532712084e-05, |
|
"loss": 0.6898, |
|
"step": 4185 |
|
}, |
|
{ |
|
"epoch": 0.06501808199742456, |
|
"grad_norm": 0.9637976288795471, |
|
"learning_rate": 4.8945344815073846e-05, |
|
"loss": 0.6829, |
|
"step": 4216 |
|
}, |
|
{ |
|
"epoch": 0.06549615612975858, |
|
"grad_norm": 0.7048139572143555, |
|
"learning_rate": 4.892107408306516e-05, |
|
"loss": 0.6834, |
|
"step": 4247 |
|
}, |
|
{ |
|
"epoch": 0.06597423026209258, |
|
"grad_norm": 0.8655612468719482, |
|
"learning_rate": 4.889653340498669e-05, |
|
"loss": 0.6778, |
|
"step": 4278 |
|
}, |
|
{ |
|
"epoch": 0.06645230439442658, |
|
"grad_norm": 0.8204261064529419, |
|
"learning_rate": 4.8871723057776664e-05, |
|
"loss": 0.6672, |
|
"step": 4309 |
|
}, |
|
{ |
|
"epoch": 0.06693037852676058, |
|
"grad_norm": 0.9259466528892517, |
|
"learning_rate": 4.8846643321416476e-05, |
|
"loss": 0.6778, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.06740845265909459, |
|
"grad_norm": 0.9826278686523438, |
|
"learning_rate": 4.882129447892753e-05, |
|
"loss": 0.6882, |
|
"step": 4371 |
|
}, |
|
{ |
|
"epoch": 0.06788652679142859, |
|
"grad_norm": 0.7376586198806763, |
|
"learning_rate": 4.8795676816368076e-05, |
|
"loss": 0.6802, |
|
"step": 4402 |
|
}, |
|
{ |
|
"epoch": 0.06836460092376259, |
|
"grad_norm": 0.6778755784034729, |
|
"learning_rate": 4.876979062282995e-05, |
|
"loss": 0.6814, |
|
"step": 4433 |
|
}, |
|
{ |
|
"epoch": 0.0688426750560966, |
|
"grad_norm": 0.8117042183876038, |
|
"learning_rate": 4.8743636190435325e-05, |
|
"loss": 0.6759, |
|
"step": 4464 |
|
}, |
|
{ |
|
"epoch": 0.0693207491884306, |
|
"grad_norm": 0.9493227005004883, |
|
"learning_rate": 4.871721381433344e-05, |
|
"loss": 0.6744, |
|
"step": 4495 |
|
}, |
|
{ |
|
"epoch": 0.0697988233207646, |
|
"grad_norm": 1.3356900215148926, |
|
"learning_rate": 4.869052379269719e-05, |
|
"loss": 0.6763, |
|
"step": 4526 |
|
}, |
|
{ |
|
"epoch": 0.07027689745309862, |
|
"grad_norm": 1.1542543172836304, |
|
"learning_rate": 4.866356642671985e-05, |
|
"loss": 0.6788, |
|
"step": 4557 |
|
}, |
|
{ |
|
"epoch": 0.07075497158543262, |
|
"grad_norm": 0.785176157951355, |
|
"learning_rate": 4.8636342020611634e-05, |
|
"loss": 0.6853, |
|
"step": 4588 |
|
}, |
|
{ |
|
"epoch": 0.07123304571776662, |
|
"grad_norm": 0.8102776408195496, |
|
"learning_rate": 4.860885088159626e-05, |
|
"loss": 0.6794, |
|
"step": 4619 |
|
}, |
|
{ |
|
"epoch": 0.07171111985010063, |
|
"grad_norm": 0.9685975313186646, |
|
"learning_rate": 4.858109331990751e-05, |
|
"loss": 0.6823, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.07218919398243463, |
|
"grad_norm": 0.7929072976112366, |
|
"learning_rate": 4.855306964878567e-05, |
|
"loss": 0.6781, |
|
"step": 4681 |
|
}, |
|
{ |
|
"epoch": 0.07266726811476863, |
|
"grad_norm": 0.8171564340591431, |
|
"learning_rate": 4.8524780184474084e-05, |
|
"loss": 0.6685, |
|
"step": 4712 |
|
}, |
|
{ |
|
"epoch": 0.07314534224710265, |
|
"grad_norm": 0.9218119382858276, |
|
"learning_rate": 4.8496225246215496e-05, |
|
"loss": 0.6796, |
|
"step": 4743 |
|
}, |
|
{ |
|
"epoch": 0.07362341637943665, |
|
"grad_norm": 3.1681158542633057, |
|
"learning_rate": 4.8467405156248505e-05, |
|
"loss": 0.676, |
|
"step": 4774 |
|
}, |
|
{ |
|
"epoch": 0.07410149051177065, |
|
"grad_norm": 0.9336240887641907, |
|
"learning_rate": 4.843832023980392e-05, |
|
"loss": 0.6734, |
|
"step": 4805 |
|
}, |
|
{ |
|
"epoch": 0.07457956464410465, |
|
"grad_norm": 0.8264138102531433, |
|
"learning_rate": 4.840897082510106e-05, |
|
"loss": 0.6905, |
|
"step": 4836 |
|
}, |
|
{ |
|
"epoch": 0.07505763877643866, |
|
"grad_norm": 0.819663405418396, |
|
"learning_rate": 4.8379357243344084e-05, |
|
"loss": 0.6814, |
|
"step": 4867 |
|
}, |
|
{ |
|
"epoch": 0.07553571290877266, |
|
"grad_norm": 3.124502420425415, |
|
"learning_rate": 4.8349479828718236e-05, |
|
"loss": 0.6704, |
|
"step": 4898 |
|
}, |
|
{ |
|
"epoch": 0.07601378704110666, |
|
"grad_norm": 0.7769860625267029, |
|
"learning_rate": 4.8319338918386075e-05, |
|
"loss": 0.6728, |
|
"step": 4929 |
|
}, |
|
{ |
|
"epoch": 0.07649186117344067, |
|
"grad_norm": 0.8232171535491943, |
|
"learning_rate": 4.828893485248369e-05, |
|
"loss": 0.6798, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 0.07696993530577467, |
|
"grad_norm": 0.8771420121192932, |
|
"learning_rate": 4.825826797411682e-05, |
|
"loss": 0.6722, |
|
"step": 4991 |
|
}, |
|
{ |
|
"epoch": 0.07744800943810867, |
|
"grad_norm": 0.8321033716201782, |
|
"learning_rate": 4.822733862935702e-05, |
|
"loss": 0.6724, |
|
"step": 5022 |
|
}, |
|
{ |
|
"epoch": 0.07792608357044269, |
|
"grad_norm": 0.9346029162406921, |
|
"learning_rate": 4.819614716723775e-05, |
|
"loss": 0.6764, |
|
"step": 5053 |
|
}, |
|
{ |
|
"epoch": 0.07840415770277669, |
|
"grad_norm": 0.9193580150604248, |
|
"learning_rate": 4.8164693939750425e-05, |
|
"loss": 0.6789, |
|
"step": 5084 |
|
}, |
|
{ |
|
"epoch": 0.07888223183511069, |
|
"grad_norm": 1.0850661993026733, |
|
"learning_rate": 4.813297930184042e-05, |
|
"loss": 0.6685, |
|
"step": 5115 |
|
}, |
|
{ |
|
"epoch": 0.0793603059674447, |
|
"grad_norm": 0.6851856708526611, |
|
"learning_rate": 4.810100361140314e-05, |
|
"loss": 0.6621, |
|
"step": 5146 |
|
}, |
|
{ |
|
"epoch": 0.0798383800997787, |
|
"grad_norm": 0.7950114607810974, |
|
"learning_rate": 4.8068767229279885e-05, |
|
"loss": 0.6642, |
|
"step": 5177 |
|
}, |
|
{ |
|
"epoch": 0.0803164542321127, |
|
"grad_norm": 0.9632556438446045, |
|
"learning_rate": 4.8036270519253854e-05, |
|
"loss": 0.6628, |
|
"step": 5208 |
|
}, |
|
{ |
|
"epoch": 0.0807945283644467, |
|
"grad_norm": 0.7066652178764343, |
|
"learning_rate": 4.8003513848046e-05, |
|
"loss": 0.6642, |
|
"step": 5239 |
|
}, |
|
{ |
|
"epoch": 0.08127260249678071, |
|
"grad_norm": 0.7749651670455933, |
|
"learning_rate": 4.79704975853109e-05, |
|
"loss": 0.6633, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 0.08175067662911471, |
|
"grad_norm": 1.702022671699524, |
|
"learning_rate": 4.793722210363262e-05, |
|
"loss": 0.6745, |
|
"step": 5301 |
|
}, |
|
{ |
|
"epoch": 0.08222875076144871, |
|
"grad_norm": 0.8134759068489075, |
|
"learning_rate": 4.7903687778520414e-05, |
|
"loss": 0.659, |
|
"step": 5332 |
|
}, |
|
{ |
|
"epoch": 0.08270682489378273, |
|
"grad_norm": 0.8805097341537476, |
|
"learning_rate": 4.7869894988404593e-05, |
|
"loss": 0.6779, |
|
"step": 5363 |
|
}, |
|
{ |
|
"epoch": 0.08318489902611673, |
|
"grad_norm": 0.9312698841094971, |
|
"learning_rate": 4.783584411463221e-05, |
|
"loss": 0.6634, |
|
"step": 5394 |
|
}, |
|
{ |
|
"epoch": 0.08366297315845073, |
|
"grad_norm": 0.7013948559761047, |
|
"learning_rate": 4.780153554146274e-05, |
|
"loss": 0.6589, |
|
"step": 5425 |
|
}, |
|
{ |
|
"epoch": 0.08414104729078474, |
|
"grad_norm": 0.8027428388595581, |
|
"learning_rate": 4.7766969656063766e-05, |
|
"loss": 0.6603, |
|
"step": 5456 |
|
}, |
|
{ |
|
"epoch": 0.08461912142311874, |
|
"grad_norm": 1.0179306268692017, |
|
"learning_rate": 4.773214684850662e-05, |
|
"loss": 0.6595, |
|
"step": 5487 |
|
}, |
|
{ |
|
"epoch": 0.08509719555545274, |
|
"grad_norm": 0.8512592911720276, |
|
"learning_rate": 4.769706751176193e-05, |
|
"loss": 0.6659, |
|
"step": 5518 |
|
}, |
|
{ |
|
"epoch": 0.08557526968778675, |
|
"grad_norm": 0.8476304411888123, |
|
"learning_rate": 4.7661732041695264e-05, |
|
"loss": 0.6658, |
|
"step": 5549 |
|
}, |
|
{ |
|
"epoch": 0.08605334382012075, |
|
"grad_norm": 0.771584153175354, |
|
"learning_rate": 4.762614083706258e-05, |
|
"loss": 0.6622, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 0.08653141795245475, |
|
"grad_norm": 0.8719794154167175, |
|
"learning_rate": 4.759029429950581e-05, |
|
"loss": 0.661, |
|
"step": 5611 |
|
}, |
|
{ |
|
"epoch": 0.08700949208478877, |
|
"grad_norm": 0.9247443675994873, |
|
"learning_rate": 4.7554192833548235e-05, |
|
"loss": 0.6475, |
|
"step": 5642 |
|
}, |
|
{ |
|
"epoch": 0.08748756621712277, |
|
"grad_norm": 0.9540857672691345, |
|
"learning_rate": 4.751783684659e-05, |
|
"loss": 0.662, |
|
"step": 5673 |
|
}, |
|
{ |
|
"epoch": 0.08796564034945677, |
|
"grad_norm": 0.7939008474349976, |
|
"learning_rate": 4.748122674890348e-05, |
|
"loss": 0.6514, |
|
"step": 5704 |
|
}, |
|
{ |
|
"epoch": 0.08844371448179077, |
|
"grad_norm": 0.613531768321991, |
|
"learning_rate": 4.7444362953628654e-05, |
|
"loss": 0.6513, |
|
"step": 5735 |
|
}, |
|
{ |
|
"epoch": 0.08892178861412478, |
|
"grad_norm": 0.8084924817085266, |
|
"learning_rate": 4.7407245876768424e-05, |
|
"loss": 0.6465, |
|
"step": 5766 |
|
}, |
|
{ |
|
"epoch": 0.08939986274645878, |
|
"grad_norm": 0.7709007263183594, |
|
"learning_rate": 4.736987593718397e-05, |
|
"loss": 0.6618, |
|
"step": 5797 |
|
}, |
|
{ |
|
"epoch": 0.08987793687879278, |
|
"grad_norm": 0.6461811065673828, |
|
"learning_rate": 4.733225355658999e-05, |
|
"loss": 0.6516, |
|
"step": 5828 |
|
}, |
|
{ |
|
"epoch": 0.0903560110111268, |
|
"grad_norm": 0.6879326105117798, |
|
"learning_rate": 4.7294379159549926e-05, |
|
"loss": 0.655, |
|
"step": 5859 |
|
}, |
|
{ |
|
"epoch": 0.0908340851434608, |
|
"grad_norm": 0.7594075798988342, |
|
"learning_rate": 4.725625317347119e-05, |
|
"loss": 0.655, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 0.0913121592757948, |
|
"grad_norm": 0.774758517742157, |
|
"learning_rate": 4.7217876028600374e-05, |
|
"loss": 0.6697, |
|
"step": 5921 |
|
}, |
|
{ |
|
"epoch": 0.09179023340812881, |
|
"grad_norm": 0.9164844155311584, |
|
"learning_rate": 4.717924815801832e-05, |
|
"loss": 0.6483, |
|
"step": 5952 |
|
}, |
|
{ |
|
"epoch": 0.09226830754046281, |
|
"grad_norm": 0.792631208896637, |
|
"learning_rate": 4.714036999763532e-05, |
|
"loss": 0.6644, |
|
"step": 5983 |
|
}, |
|
{ |
|
"epoch": 0.09274638167279681, |
|
"grad_norm": 0.7735174298286438, |
|
"learning_rate": 4.7101241986186116e-05, |
|
"loss": 0.6545, |
|
"step": 6014 |
|
}, |
|
{ |
|
"epoch": 0.09322445580513082, |
|
"grad_norm": 0.725825309753418, |
|
"learning_rate": 4.7061864565225e-05, |
|
"loss": 0.6579, |
|
"step": 6045 |
|
}, |
|
{ |
|
"epoch": 0.09370252993746482, |
|
"grad_norm": 0.7191294431686401, |
|
"learning_rate": 4.702223817912081e-05, |
|
"loss": 0.6509, |
|
"step": 6076 |
|
}, |
|
{ |
|
"epoch": 0.09418060406979882, |
|
"grad_norm": 0.8470766544342041, |
|
"learning_rate": 4.698236327505195e-05, |
|
"loss": 0.6515, |
|
"step": 6107 |
|
}, |
|
{ |
|
"epoch": 0.09465867820213283, |
|
"grad_norm": 0.7250267863273621, |
|
"learning_rate": 4.694224030300127e-05, |
|
"loss": 0.6484, |
|
"step": 6138 |
|
}, |
|
{ |
|
"epoch": 0.09513675233446683, |
|
"grad_norm": 0.7370029091835022, |
|
"learning_rate": 4.690186971575107e-05, |
|
"loss": 0.6639, |
|
"step": 6169 |
|
}, |
|
{ |
|
"epoch": 0.09561482646680083, |
|
"grad_norm": 0.7768808007240295, |
|
"learning_rate": 4.6861251968877916e-05, |
|
"loss": 0.657, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 0.09609290059913483, |
|
"grad_norm": 0.770039439201355, |
|
"learning_rate": 4.68203875207476e-05, |
|
"loss": 0.6588, |
|
"step": 6231 |
|
}, |
|
{ |
|
"epoch": 0.09657097473146885, |
|
"grad_norm": 0.637853741645813, |
|
"learning_rate": 4.677927683250983e-05, |
|
"loss": 0.6438, |
|
"step": 6262 |
|
}, |
|
{ |
|
"epoch": 0.09704904886380285, |
|
"grad_norm": 0.6896365284919739, |
|
"learning_rate": 4.6737920368093156e-05, |
|
"loss": 0.6493, |
|
"step": 6293 |
|
}, |
|
{ |
|
"epoch": 0.09752712299613685, |
|
"grad_norm": 0.721318781375885, |
|
"learning_rate": 4.669631859419965e-05, |
|
"loss": 0.6427, |
|
"step": 6324 |
|
}, |
|
{ |
|
"epoch": 0.09800519712847086, |
|
"grad_norm": 0.7492154836654663, |
|
"learning_rate": 4.6654471980299676e-05, |
|
"loss": 0.6474, |
|
"step": 6355 |
|
}, |
|
{ |
|
"epoch": 0.09848327126080486, |
|
"grad_norm": 0.8512872457504272, |
|
"learning_rate": 4.661238099862658e-05, |
|
"loss": 0.6423, |
|
"step": 6386 |
|
}, |
|
{ |
|
"epoch": 0.09896134539313886, |
|
"grad_norm": 0.6349690556526184, |
|
"learning_rate": 4.657004612417138e-05, |
|
"loss": 0.6499, |
|
"step": 6417 |
|
}, |
|
{ |
|
"epoch": 0.09943941952547287, |
|
"grad_norm": 0.8040255904197693, |
|
"learning_rate": 4.6527467834677374e-05, |
|
"loss": 0.6495, |
|
"step": 6448 |
|
}, |
|
{ |
|
"epoch": 0.09991749365780687, |
|
"grad_norm": 0.6428426504135132, |
|
"learning_rate": 4.648464661063478e-05, |
|
"loss": 0.6565, |
|
"step": 6479 |
|
}, |
|
{ |
|
"epoch": 0.10039556779014087, |
|
"grad_norm": 0.8141711950302124, |
|
"learning_rate": 4.6441582935275264e-05, |
|
"loss": 0.6592, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 0.10087364192247489, |
|
"grad_norm": 0.8665099740028381, |
|
"learning_rate": 4.6398277294566586e-05, |
|
"loss": 0.6497, |
|
"step": 6541 |
|
}, |
|
{ |
|
"epoch": 0.10135171605480889, |
|
"grad_norm": 0.709786057472229, |
|
"learning_rate": 4.6354730177207e-05, |
|
"loss": 0.6546, |
|
"step": 6572 |
|
}, |
|
{ |
|
"epoch": 0.10182979018714289, |
|
"grad_norm": 0.8345597982406616, |
|
"learning_rate": 4.6310942074619787e-05, |
|
"loss": 0.6547, |
|
"step": 6603 |
|
}, |
|
{ |
|
"epoch": 0.10230786431947689, |
|
"grad_norm": 0.746769905090332, |
|
"learning_rate": 4.626691348094777e-05, |
|
"loss": 0.6526, |
|
"step": 6634 |
|
}, |
|
{ |
|
"epoch": 0.1027859384518109, |
|
"grad_norm": 0.7869583368301392, |
|
"learning_rate": 4.622264489304762e-05, |
|
"loss": 0.6458, |
|
"step": 6665 |
|
}, |
|
{ |
|
"epoch": 0.1032640125841449, |
|
"grad_norm": 0.5438331365585327, |
|
"learning_rate": 4.617813681048434e-05, |
|
"loss": 0.6468, |
|
"step": 6696 |
|
}, |
|
{ |
|
"epoch": 0.1037420867164789, |
|
"grad_norm": 0.7230411171913147, |
|
"learning_rate": 4.61333897355256e-05, |
|
"loss": 0.6435, |
|
"step": 6727 |
|
}, |
|
{ |
|
"epoch": 0.10422016084881291, |
|
"grad_norm": 0.9861361384391785, |
|
"learning_rate": 4.608840417313604e-05, |
|
"loss": 0.6488, |
|
"step": 6758 |
|
}, |
|
{ |
|
"epoch": 0.10469823498114691, |
|
"grad_norm": 0.7873183488845825, |
|
"learning_rate": 4.6043180630971646e-05, |
|
"loss": 0.6485, |
|
"step": 6789 |
|
}, |
|
{ |
|
"epoch": 0.10517630911348091, |
|
"grad_norm": 0.7746345400810242, |
|
"learning_rate": 4.599771961937391e-05, |
|
"loss": 0.6384, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 0.10565438324581493, |
|
"grad_norm": 0.8218130469322205, |
|
"learning_rate": 4.5952021651364204e-05, |
|
"loss": 0.6434, |
|
"step": 6851 |
|
}, |
|
{ |
|
"epoch": 0.10613245737814893, |
|
"grad_norm": 0.7047727108001709, |
|
"learning_rate": 4.590608724263786e-05, |
|
"loss": 0.6562, |
|
"step": 6882 |
|
}, |
|
{ |
|
"epoch": 0.10661053151048293, |
|
"grad_norm": 0.9560827612876892, |
|
"learning_rate": 4.585991691155845e-05, |
|
"loss": 0.6422, |
|
"step": 6913 |
|
}, |
|
{ |
|
"epoch": 0.10708860564281694, |
|
"grad_norm": 1.1834834814071655, |
|
"learning_rate": 4.581351117915188e-05, |
|
"loss": 0.6525, |
|
"step": 6944 |
|
}, |
|
{ |
|
"epoch": 0.10756667977515094, |
|
"grad_norm": 0.8007238507270813, |
|
"learning_rate": 4.5766870569100534e-05, |
|
"loss": 0.6575, |
|
"step": 6975 |
|
}, |
|
{ |
|
"epoch": 0.10804475390748494, |
|
"grad_norm": 0.6816396713256836, |
|
"learning_rate": 4.571999560773736e-05, |
|
"loss": 0.6453, |
|
"step": 7006 |
|
}, |
|
{ |
|
"epoch": 0.10852282803981896, |
|
"grad_norm": 0.7240257859230042, |
|
"learning_rate": 4.5672886824039915e-05, |
|
"loss": 0.6542, |
|
"step": 7037 |
|
}, |
|
{ |
|
"epoch": 0.10900090217215296, |
|
"grad_norm": 0.6335741877555847, |
|
"learning_rate": 4.5625544749624435e-05, |
|
"loss": 0.6449, |
|
"step": 7068 |
|
}, |
|
{ |
|
"epoch": 0.10947897630448696, |
|
"grad_norm": 0.6071763038635254, |
|
"learning_rate": 4.5577969918739794e-05, |
|
"loss": 0.6572, |
|
"step": 7099 |
|
}, |
|
{ |
|
"epoch": 0.10995705043682096, |
|
"grad_norm": 0.8242653608322144, |
|
"learning_rate": 4.5530162868261486e-05, |
|
"loss": 0.6483, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 0.11043512456915497, |
|
"grad_norm": 0.7106221318244934, |
|
"learning_rate": 4.548212413768558e-05, |
|
"loss": 0.6376, |
|
"step": 7161 |
|
}, |
|
{ |
|
"epoch": 0.11091319870148897, |
|
"grad_norm": 0.6587203145027161, |
|
"learning_rate": 4.543385426912261e-05, |
|
"loss": 0.6426, |
|
"step": 7192 |
|
}, |
|
{ |
|
"epoch": 0.11139127283382297, |
|
"grad_norm": 0.8035815954208374, |
|
"learning_rate": 4.53853538072915e-05, |
|
"loss": 0.6428, |
|
"step": 7223 |
|
}, |
|
{ |
|
"epoch": 0.11186934696615698, |
|
"grad_norm": 0.6661431193351746, |
|
"learning_rate": 4.533662329951336e-05, |
|
"loss": 0.6431, |
|
"step": 7254 |
|
}, |
|
{ |
|
"epoch": 0.11234742109849098, |
|
"grad_norm": 0.5891786813735962, |
|
"learning_rate": 4.528766329570536e-05, |
|
"loss": 0.6356, |
|
"step": 7285 |
|
}, |
|
{ |
|
"epoch": 0.11282549523082498, |
|
"grad_norm": 0.7475966811180115, |
|
"learning_rate": 4.523847434837447e-05, |
|
"loss": 0.6348, |
|
"step": 7316 |
|
}, |
|
{ |
|
"epoch": 0.113303569363159, |
|
"grad_norm": 0.7461502552032471, |
|
"learning_rate": 4.518905701261128e-05, |
|
"loss": 0.639, |
|
"step": 7347 |
|
}, |
|
{ |
|
"epoch": 0.113781643495493, |
|
"grad_norm": 0.7351141571998596, |
|
"learning_rate": 4.5139411846083715e-05, |
|
"loss": 0.6473, |
|
"step": 7378 |
|
}, |
|
{ |
|
"epoch": 0.114259717627827, |
|
"grad_norm": 0.7713771462440491, |
|
"learning_rate": 4.508953940903073e-05, |
|
"loss": 0.6433, |
|
"step": 7409 |
|
}, |
|
{ |
|
"epoch": 0.11473779176016101, |
|
"grad_norm": 0.639986515045166, |
|
"learning_rate": 4.5039440264255994e-05, |
|
"loss": 0.6411, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 0.11521586589249501, |
|
"grad_norm": 0.8399244546890259, |
|
"learning_rate": 4.498911497712155e-05, |
|
"loss": 0.6421, |
|
"step": 7471 |
|
}, |
|
{ |
|
"epoch": 0.11569394002482901, |
|
"grad_norm": 0.7681542634963989, |
|
"learning_rate": 4.493856411554142e-05, |
|
"loss": 0.6371, |
|
"step": 7502 |
|
}, |
|
{ |
|
"epoch": 0.11617201415716301, |
|
"grad_norm": 0.6907945275306702, |
|
"learning_rate": 4.4887788249975206e-05, |
|
"loss": 0.6446, |
|
"step": 7533 |
|
}, |
|
{ |
|
"epoch": 0.11665008828949702, |
|
"grad_norm": 0.7653492093086243, |
|
"learning_rate": 4.4836787953421656e-05, |
|
"loss": 0.6407, |
|
"step": 7564 |
|
}, |
|
{ |
|
"epoch": 0.11712816242183102, |
|
"grad_norm": 0.6535690426826477, |
|
"learning_rate": 4.478556380141218e-05, |
|
"loss": 0.6388, |
|
"step": 7595 |
|
}, |
|
{ |
|
"epoch": 0.11760623655416502, |
|
"grad_norm": 2.6184701919555664, |
|
"learning_rate": 4.4734116372004375e-05, |
|
"loss": 0.6382, |
|
"step": 7626 |
|
}, |
|
{ |
|
"epoch": 0.11808431068649904, |
|
"grad_norm": 0.7846981287002563, |
|
"learning_rate": 4.4682446245775477e-05, |
|
"loss": 0.6364, |
|
"step": 7657 |
|
}, |
|
{ |
|
"epoch": 0.11856238481883304, |
|
"grad_norm": 0.8151688575744629, |
|
"learning_rate": 4.463055400581586e-05, |
|
"loss": 0.6427, |
|
"step": 7688 |
|
}, |
|
{ |
|
"epoch": 0.11904045895116704, |
|
"grad_norm": 0.7663447856903076, |
|
"learning_rate": 4.4578440237722374e-05, |
|
"loss": 0.643, |
|
"step": 7719 |
|
}, |
|
{ |
|
"epoch": 0.11951853308350105, |
|
"grad_norm": 0.7225657105445862, |
|
"learning_rate": 4.452610552959183e-05, |
|
"loss": 0.6395, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 0.11999660721583505, |
|
"grad_norm": 0.7033439874649048, |
|
"learning_rate": 4.447355047201428e-05, |
|
"loss": 0.6354, |
|
"step": 7781 |
|
}, |
|
{ |
|
"epoch": 0.12047468134816905, |
|
"grad_norm": 0.7693649530410767, |
|
"learning_rate": 4.4420775658066414e-05, |
|
"loss": 0.6436, |
|
"step": 7812 |
|
}, |
|
{ |
|
"epoch": 0.12095275548050306, |
|
"grad_norm": 0.7597894668579102, |
|
"learning_rate": 4.436778168330484e-05, |
|
"loss": 0.6411, |
|
"step": 7843 |
|
}, |
|
{ |
|
"epoch": 0.12143082961283706, |
|
"grad_norm": 0.7899183630943298, |
|
"learning_rate": 4.4314569145759353e-05, |
|
"loss": 0.6381, |
|
"step": 7874 |
|
}, |
|
{ |
|
"epoch": 0.12190890374517106, |
|
"grad_norm": 0.7656906247138977, |
|
"learning_rate": 4.42611386459262e-05, |
|
"loss": 0.6386, |
|
"step": 7905 |
|
}, |
|
{ |
|
"epoch": 0.12238697787750508, |
|
"grad_norm": 0.8353867530822754, |
|
"learning_rate": 4.420749078676133e-05, |
|
"loss": 0.6386, |
|
"step": 7936 |
|
}, |
|
{ |
|
"epoch": 0.12286505200983908, |
|
"grad_norm": 0.6712770462036133, |
|
"learning_rate": 4.4153626173673516e-05, |
|
"loss": 0.6438, |
|
"step": 7967 |
|
}, |
|
{ |
|
"epoch": 0.12334312614217308, |
|
"grad_norm": 0.8008986115455627, |
|
"learning_rate": 4.409954541451762e-05, |
|
"loss": 0.6384, |
|
"step": 7998 |
|
}, |
|
{ |
|
"epoch": 0.12382120027450708, |
|
"grad_norm": 0.6632928252220154, |
|
"learning_rate": 4.404524911958764e-05, |
|
"loss": 0.629, |
|
"step": 8029 |
|
}, |
|
{ |
|
"epoch": 0.12429927440684109, |
|
"grad_norm": 0.6943359971046448, |
|
"learning_rate": 4.399073790160989e-05, |
|
"loss": 0.6456, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 0.12477734853917509, |
|
"grad_norm": 0.6177495718002319, |
|
"learning_rate": 4.393601237573607e-05, |
|
"loss": 0.6426, |
|
"step": 8091 |
|
}, |
|
{ |
|
"epoch": 0.1252554226715091, |
|
"grad_norm": 0.6089790463447571, |
|
"learning_rate": 4.388107315953628e-05, |
|
"loss": 0.6396, |
|
"step": 8122 |
|
}, |
|
{ |
|
"epoch": 0.1257334968038431, |
|
"grad_norm": 0.6960969567298889, |
|
"learning_rate": 4.382592087299212e-05, |
|
"loss": 0.6293, |
|
"step": 8153 |
|
}, |
|
{ |
|
"epoch": 0.1262115709361771, |
|
"grad_norm": 0.6519069075584412, |
|
"learning_rate": 4.377055613848964e-05, |
|
"loss": 0.6284, |
|
"step": 8184 |
|
}, |
|
{ |
|
"epoch": 0.1266896450685111, |
|
"grad_norm": 0.6861445903778076, |
|
"learning_rate": 4.3714979580812355e-05, |
|
"loss": 0.6386, |
|
"step": 8215 |
|
}, |
|
{ |
|
"epoch": 0.1271677192008451, |
|
"grad_norm": 0.7522263526916504, |
|
"learning_rate": 4.365919182713416e-05, |
|
"loss": 0.6424, |
|
"step": 8246 |
|
}, |
|
{ |
|
"epoch": 0.1276457933331791, |
|
"grad_norm": 0.6868363618850708, |
|
"learning_rate": 4.360319350701226e-05, |
|
"loss": 0.6367, |
|
"step": 8277 |
|
}, |
|
{ |
|
"epoch": 0.12812386746551313, |
|
"grad_norm": 0.5939040780067444, |
|
"learning_rate": 4.3546985252380115e-05, |
|
"loss": 0.6406, |
|
"step": 8308 |
|
}, |
|
{ |
|
"epoch": 0.12860194159784713, |
|
"grad_norm": 0.7341668605804443, |
|
"learning_rate": 4.349056769754021e-05, |
|
"loss": 0.6346, |
|
"step": 8339 |
|
}, |
|
{ |
|
"epoch": 0.12908001573018113, |
|
"grad_norm": 0.7468376755714417, |
|
"learning_rate": 4.3433941479156994e-05, |
|
"loss": 0.6398, |
|
"step": 8370 |
|
}, |
|
{ |
|
"epoch": 0.12955808986251513, |
|
"grad_norm": 0.6253494024276733, |
|
"learning_rate": 4.3377107236249647e-05, |
|
"loss": 0.6375, |
|
"step": 8401 |
|
}, |
|
{ |
|
"epoch": 0.13003616399484913, |
|
"grad_norm": 0.6635178923606873, |
|
"learning_rate": 4.332006561018488e-05, |
|
"loss": 0.6299, |
|
"step": 8432 |
|
}, |
|
{ |
|
"epoch": 0.13051423812718313, |
|
"grad_norm": 0.584648609161377, |
|
"learning_rate": 4.3262817244669683e-05, |
|
"loss": 0.6277, |
|
"step": 8463 |
|
}, |
|
{ |
|
"epoch": 0.13099231225951716, |
|
"grad_norm": 0.5907359719276428, |
|
"learning_rate": 4.3205362785744083e-05, |
|
"loss": 0.6328, |
|
"step": 8494 |
|
}, |
|
{ |
|
"epoch": 0.13147038639185116, |
|
"grad_norm": 0.7864153385162354, |
|
"learning_rate": 4.314770288177384e-05, |
|
"loss": 0.6365, |
|
"step": 8525 |
|
}, |
|
{ |
|
"epoch": 0.13194846052418516, |
|
"grad_norm": 0.6572133302688599, |
|
"learning_rate": 4.308983818344313e-05, |
|
"loss": 0.6312, |
|
"step": 8556 |
|
}, |
|
{ |
|
"epoch": 0.13242653465651916, |
|
"grad_norm": 0.7108786702156067, |
|
"learning_rate": 4.3031769343747206e-05, |
|
"loss": 0.6355, |
|
"step": 8587 |
|
}, |
|
{ |
|
"epoch": 0.13290460878885316, |
|
"grad_norm": 0.6737267374992371, |
|
"learning_rate": 4.297349701798505e-05, |
|
"loss": 0.6256, |
|
"step": 8618 |
|
}, |
|
{ |
|
"epoch": 0.13338268292118716, |
|
"grad_norm": 0.6710911989212036, |
|
"learning_rate": 4.2915021863751916e-05, |
|
"loss": 0.632, |
|
"step": 8649 |
|
}, |
|
{ |
|
"epoch": 0.13386075705352116, |
|
"grad_norm": 0.6295626759529114, |
|
"learning_rate": 4.285634454093198e-05, |
|
"loss": 0.6372, |
|
"step": 8680 |
|
}, |
|
{ |
|
"epoch": 0.13433883118585518, |
|
"grad_norm": 0.6405526399612427, |
|
"learning_rate": 4.279746571169086e-05, |
|
"loss": 0.6364, |
|
"step": 8711 |
|
}, |
|
{ |
|
"epoch": 0.13481690531818918, |
|
"grad_norm": 0.6847979426383972, |
|
"learning_rate": 4.2738386040468136e-05, |
|
"loss": 0.6246, |
|
"step": 8742 |
|
}, |
|
{ |
|
"epoch": 0.13529497945052318, |
|
"grad_norm": 0.7338352799415588, |
|
"learning_rate": 4.2679106193969866e-05, |
|
"loss": 0.6373, |
|
"step": 8773 |
|
}, |
|
{ |
|
"epoch": 0.13577305358285718, |
|
"grad_norm": 0.6530189514160156, |
|
"learning_rate": 4.261962684116106e-05, |
|
"loss": 0.6217, |
|
"step": 8804 |
|
}, |
|
{ |
|
"epoch": 0.13625112771519118, |
|
"grad_norm": 0.6450487375259399, |
|
"learning_rate": 4.2559948653258145e-05, |
|
"loss": 0.6365, |
|
"step": 8835 |
|
}, |
|
{ |
|
"epoch": 0.13672920184752518, |
|
"grad_norm": 0.706732988357544, |
|
"learning_rate": 4.250007230372134e-05, |
|
"loss": 0.6294, |
|
"step": 8866 |
|
}, |
|
{ |
|
"epoch": 0.1372072759798592, |
|
"grad_norm": 0.687240481376648, |
|
"learning_rate": 4.2439998468247126e-05, |
|
"loss": 0.6332, |
|
"step": 8897 |
|
}, |
|
{ |
|
"epoch": 0.1376853501121932, |
|
"grad_norm": 0.6686804890632629, |
|
"learning_rate": 4.2379727824760566e-05, |
|
"loss": 0.6334, |
|
"step": 8928 |
|
}, |
|
{ |
|
"epoch": 0.1381634242445272, |
|
"grad_norm": 0.8371458649635315, |
|
"learning_rate": 4.231926105340768e-05, |
|
"loss": 0.6249, |
|
"step": 8959 |
|
}, |
|
{ |
|
"epoch": 0.1386414983768612, |
|
"grad_norm": 0.7204101085662842, |
|
"learning_rate": 4.225859883654776e-05, |
|
"loss": 0.6309, |
|
"step": 8990 |
|
}, |
|
{ |
|
"epoch": 0.1391195725091952, |
|
"grad_norm": 0.8539360165596008, |
|
"learning_rate": 4.219774185874569e-05, |
|
"loss": 0.6326, |
|
"step": 9021 |
|
}, |
|
{ |
|
"epoch": 0.1395976466415292, |
|
"grad_norm": 0.7264754176139832, |
|
"learning_rate": 4.213669080676418e-05, |
|
"loss": 0.6342, |
|
"step": 9052 |
|
}, |
|
{ |
|
"epoch": 0.14007572077386324, |
|
"grad_norm": 0.6681591868400574, |
|
"learning_rate": 4.2075446369556056e-05, |
|
"loss": 0.6295, |
|
"step": 9083 |
|
}, |
|
{ |
|
"epoch": 0.14055379490619724, |
|
"grad_norm": 0.6293045282363892, |
|
"learning_rate": 4.201400923825648e-05, |
|
"loss": 0.6304, |
|
"step": 9114 |
|
}, |
|
{ |
|
"epoch": 0.14103186903853124, |
|
"grad_norm": 0.6432914137840271, |
|
"learning_rate": 4.195238010617511e-05, |
|
"loss": 0.6215, |
|
"step": 9145 |
|
}, |
|
{ |
|
"epoch": 0.14150994317086524, |
|
"grad_norm": 0.7524629235267639, |
|
"learning_rate": 4.1890559668788344e-05, |
|
"loss": 0.625, |
|
"step": 9176 |
|
}, |
|
{ |
|
"epoch": 0.14198801730319924, |
|
"grad_norm": 0.7128170728683472, |
|
"learning_rate": 4.1828548623731405e-05, |
|
"loss": 0.6342, |
|
"step": 9207 |
|
}, |
|
{ |
|
"epoch": 0.14246609143553324, |
|
"grad_norm": 0.651841938495636, |
|
"learning_rate": 4.1766347670790506e-05, |
|
"loss": 0.6303, |
|
"step": 9238 |
|
}, |
|
{ |
|
"epoch": 0.14294416556786724, |
|
"grad_norm": 0.6290706396102905, |
|
"learning_rate": 4.170395751189495e-05, |
|
"loss": 0.6297, |
|
"step": 9269 |
|
}, |
|
{ |
|
"epoch": 0.14342223970020127, |
|
"grad_norm": 0.6372231841087341, |
|
"learning_rate": 4.164137885110921e-05, |
|
"loss": 0.6244, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 0.14390031383253526, |
|
"grad_norm": 0.8424429297447205, |
|
"learning_rate": 4.157861239462495e-05, |
|
"loss": 0.6363, |
|
"step": 9331 |
|
}, |
|
{ |
|
"epoch": 0.14437838796486926, |
|
"grad_norm": 0.7121688723564148, |
|
"learning_rate": 4.1515658850753114e-05, |
|
"loss": 0.63, |
|
"step": 9362 |
|
}, |
|
{ |
|
"epoch": 0.14485646209720326, |
|
"grad_norm": 0.7196950316429138, |
|
"learning_rate": 4.145251892991588e-05, |
|
"loss": 0.6254, |
|
"step": 9393 |
|
}, |
|
{ |
|
"epoch": 0.14533453622953726, |
|
"grad_norm": 0.6595038175582886, |
|
"learning_rate": 4.138919334463868e-05, |
|
"loss": 0.6207, |
|
"step": 9424 |
|
}, |
|
{ |
|
"epoch": 0.14581261036187126, |
|
"grad_norm": 0.6118171811103821, |
|
"learning_rate": 4.1325682809542124e-05, |
|
"loss": 0.6233, |
|
"step": 9455 |
|
}, |
|
{ |
|
"epoch": 0.1462906844942053, |
|
"grad_norm": 0.7232321500778198, |
|
"learning_rate": 4.126198804133398e-05, |
|
"loss": 0.6328, |
|
"step": 9486 |
|
}, |
|
{ |
|
"epoch": 0.1467687586265393, |
|
"grad_norm": 0.691681981086731, |
|
"learning_rate": 4.1198109758801055e-05, |
|
"loss": 0.634, |
|
"step": 9517 |
|
}, |
|
{ |
|
"epoch": 0.1472468327588733, |
|
"grad_norm": 0.6887443661689758, |
|
"learning_rate": 4.113404868280107e-05, |
|
"loss": 0.6275, |
|
"step": 9548 |
|
}, |
|
{ |
|
"epoch": 0.1477249068912073, |
|
"grad_norm": 0.6932326555252075, |
|
"learning_rate": 4.106980553625457e-05, |
|
"loss": 0.6233, |
|
"step": 9579 |
|
}, |
|
{ |
|
"epoch": 0.1482029810235413, |
|
"grad_norm": 0.7189227342605591, |
|
"learning_rate": 4.100538104413674e-05, |
|
"loss": 0.6229, |
|
"step": 9610 |
|
}, |
|
{ |
|
"epoch": 0.1486810551558753, |
|
"grad_norm": 0.6898639798164368, |
|
"learning_rate": 4.09407759334692e-05, |
|
"loss": 0.6251, |
|
"step": 9641 |
|
}, |
|
{ |
|
"epoch": 0.1491591292882093, |
|
"grad_norm": 0.6311922669410706, |
|
"learning_rate": 4.087599093331186e-05, |
|
"loss": 0.6249, |
|
"step": 9672 |
|
}, |
|
{ |
|
"epoch": 0.14963720342054332, |
|
"grad_norm": 0.5781577825546265, |
|
"learning_rate": 4.081102677475462e-05, |
|
"loss": 0.6337, |
|
"step": 9703 |
|
}, |
|
{ |
|
"epoch": 0.15011527755287732, |
|
"grad_norm": 0.6525286436080933, |
|
"learning_rate": 4.0745884190909194e-05, |
|
"loss": 0.6347, |
|
"step": 9734 |
|
}, |
|
{ |
|
"epoch": 0.15059335168521132, |
|
"grad_norm": 0.6761188507080078, |
|
"learning_rate": 4.0680563916900796e-05, |
|
"loss": 0.6267, |
|
"step": 9765 |
|
}, |
|
{ |
|
"epoch": 0.15107142581754532, |
|
"grad_norm": 0.674532413482666, |
|
"learning_rate": 4.0615066689859815e-05, |
|
"loss": 0.6303, |
|
"step": 9796 |
|
}, |
|
{ |
|
"epoch": 0.15154949994987932, |
|
"grad_norm": 0.5923493504524231, |
|
"learning_rate": 4.0549393248913584e-05, |
|
"loss": 0.6241, |
|
"step": 9827 |
|
}, |
|
{ |
|
"epoch": 0.15202757408221332, |
|
"grad_norm": 0.6309007406234741, |
|
"learning_rate": 4.048354433517794e-05, |
|
"loss": 0.6231, |
|
"step": 9858 |
|
}, |
|
{ |
|
"epoch": 0.15250564821454735, |
|
"grad_norm": 0.7521491050720215, |
|
"learning_rate": 4.0417520691748916e-05, |
|
"loss": 0.6214, |
|
"step": 9889 |
|
}, |
|
{ |
|
"epoch": 0.15298372234688135, |
|
"grad_norm": 0.8196119666099548, |
|
"learning_rate": 4.035132306369438e-05, |
|
"loss": 0.6254, |
|
"step": 9920 |
|
}, |
|
{ |
|
"epoch": 0.15346179647921535, |
|
"grad_norm": 0.6895152926445007, |
|
"learning_rate": 4.028495219804555e-05, |
|
"loss": 0.6227, |
|
"step": 9951 |
|
}, |
|
{ |
|
"epoch": 0.15393987061154935, |
|
"grad_norm": 0.6823534369468689, |
|
"learning_rate": 4.021840884378864e-05, |
|
"loss": 0.6281, |
|
"step": 9982 |
|
}, |
|
{ |
|
"epoch": 0.15441794474388335, |
|
"grad_norm": 0.6531606912612915, |
|
"learning_rate": 4.015169375185633e-05, |
|
"loss": 0.6259, |
|
"step": 10013 |
|
}, |
|
{ |
|
"epoch": 0.15489601887621735, |
|
"grad_norm": 0.5898056626319885, |
|
"learning_rate": 4.0084807675119396e-05, |
|
"loss": 0.6184, |
|
"step": 10044 |
|
}, |
|
{ |
|
"epoch": 0.15537409300855134, |
|
"grad_norm": 0.6368978023529053, |
|
"learning_rate": 4.0017751368378106e-05, |
|
"loss": 0.6196, |
|
"step": 10075 |
|
}, |
|
{ |
|
"epoch": 0.15585216714088537, |
|
"grad_norm": 0.8086898922920227, |
|
"learning_rate": 3.995052558835377e-05, |
|
"loss": 0.6226, |
|
"step": 10106 |
|
}, |
|
{ |
|
"epoch": 0.15633024127321937, |
|
"grad_norm": 0.6620404124259949, |
|
"learning_rate": 3.988313109368017e-05, |
|
"loss": 0.6146, |
|
"step": 10137 |
|
}, |
|
{ |
|
"epoch": 0.15680831540555337, |
|
"grad_norm": 0.6264041662216187, |
|
"learning_rate": 3.981556864489504e-05, |
|
"loss": 0.6254, |
|
"step": 10168 |
|
}, |
|
{ |
|
"epoch": 0.15728638953788737, |
|
"grad_norm": 0.6619133353233337, |
|
"learning_rate": 3.974783900443142e-05, |
|
"loss": 0.6233, |
|
"step": 10199 |
|
}, |
|
{ |
|
"epoch": 0.15776446367022137, |
|
"grad_norm": 0.6223868131637573, |
|
"learning_rate": 3.9679942936609095e-05, |
|
"loss": 0.6232, |
|
"step": 10230 |
|
}, |
|
{ |
|
"epoch": 0.15824253780255537, |
|
"grad_norm": 0.7436427474021912, |
|
"learning_rate": 3.961188120762596e-05, |
|
"loss": 0.6194, |
|
"step": 10261 |
|
}, |
|
{ |
|
"epoch": 0.1587206119348894, |
|
"grad_norm": 0.7166286110877991, |
|
"learning_rate": 3.954365458554938e-05, |
|
"loss": 0.6267, |
|
"step": 10292 |
|
}, |
|
{ |
|
"epoch": 0.1591986860672234, |
|
"grad_norm": 0.6377813220024109, |
|
"learning_rate": 3.947526384030751e-05, |
|
"loss": 0.6145, |
|
"step": 10323 |
|
}, |
|
{ |
|
"epoch": 0.1596767601995574, |
|
"grad_norm": 1.5197011232376099, |
|
"learning_rate": 3.9406709743680624e-05, |
|
"loss": 0.6261, |
|
"step": 10354 |
|
}, |
|
{ |
|
"epoch": 0.1601548343318914, |
|
"grad_norm": 0.6208730340003967, |
|
"learning_rate": 3.9337993069292366e-05, |
|
"loss": 0.6201, |
|
"step": 10385 |
|
}, |
|
{ |
|
"epoch": 0.1606329084642254, |
|
"grad_norm": 0.6887170076370239, |
|
"learning_rate": 3.926911459260109e-05, |
|
"loss": 0.6235, |
|
"step": 10416 |
|
}, |
|
{ |
|
"epoch": 0.1611109825965594, |
|
"grad_norm": 0.6381047368049622, |
|
"learning_rate": 3.920007509089102e-05, |
|
"loss": 0.6189, |
|
"step": 10447 |
|
}, |
|
{ |
|
"epoch": 0.1615890567288934, |
|
"grad_norm": 0.6249068379402161, |
|
"learning_rate": 3.913087534326357e-05, |
|
"loss": 0.6222, |
|
"step": 10478 |
|
}, |
|
{ |
|
"epoch": 0.16206713086122743, |
|
"grad_norm": 0.659757137298584, |
|
"learning_rate": 3.9061516130628475e-05, |
|
"loss": 0.6345, |
|
"step": 10509 |
|
}, |
|
{ |
|
"epoch": 0.16254520499356143, |
|
"grad_norm": 0.6268470883369446, |
|
"learning_rate": 3.8991998235695025e-05, |
|
"loss": 0.6197, |
|
"step": 10540 |
|
}, |
|
{ |
|
"epoch": 0.16302327912589543, |
|
"grad_norm": 0.6520307660102844, |
|
"learning_rate": 3.8922322442963224e-05, |
|
"loss": 0.6122, |
|
"step": 10571 |
|
}, |
|
{ |
|
"epoch": 0.16350135325822943, |
|
"grad_norm": 0.5919711589813232, |
|
"learning_rate": 3.885248953871491e-05, |
|
"loss": 0.6237, |
|
"step": 10602 |
|
}, |
|
{ |
|
"epoch": 0.16397942739056343, |
|
"grad_norm": 0.6761168837547302, |
|
"learning_rate": 3.8782500311004915e-05, |
|
"loss": 0.615, |
|
"step": 10633 |
|
}, |
|
{ |
|
"epoch": 0.16445750152289743, |
|
"grad_norm": 0.7123913168907166, |
|
"learning_rate": 3.871235554965218e-05, |
|
"loss": 0.6178, |
|
"step": 10664 |
|
}, |
|
{ |
|
"epoch": 0.16493557565523145, |
|
"grad_norm": 0.5414535403251648, |
|
"learning_rate": 3.864205604623078e-05, |
|
"loss": 0.6319, |
|
"step": 10695 |
|
}, |
|
{ |
|
"epoch": 0.16541364978756545, |
|
"grad_norm": 0.741256594657898, |
|
"learning_rate": 3.857160259406107e-05, |
|
"loss": 0.6102, |
|
"step": 10726 |
|
}, |
|
{ |
|
"epoch": 0.16589172391989945, |
|
"grad_norm": 0.6938264966011047, |
|
"learning_rate": 3.8500995988200674e-05, |
|
"loss": 0.6127, |
|
"step": 10757 |
|
}, |
|
{ |
|
"epoch": 0.16636979805223345, |
|
"grad_norm": 0.7152143716812134, |
|
"learning_rate": 3.843023702543556e-05, |
|
"loss": 0.6119, |
|
"step": 10788 |
|
}, |
|
{ |
|
"epoch": 0.16684787218456745, |
|
"grad_norm": 0.6516321897506714, |
|
"learning_rate": 3.8359326504270984e-05, |
|
"loss": 0.6224, |
|
"step": 10819 |
|
}, |
|
{ |
|
"epoch": 0.16732594631690145, |
|
"grad_norm": 0.6300469040870667, |
|
"learning_rate": 3.828826522492255e-05, |
|
"loss": 0.6229, |
|
"step": 10850 |
|
}, |
|
{ |
|
"epoch": 0.16780402044923548, |
|
"grad_norm": 0.645636260509491, |
|
"learning_rate": 3.821705398930713e-05, |
|
"loss": 0.6284, |
|
"step": 10881 |
|
}, |
|
{ |
|
"epoch": 0.16828209458156948, |
|
"grad_norm": 0.5895135402679443, |
|
"learning_rate": 3.814569360103385e-05, |
|
"loss": 0.6142, |
|
"step": 10912 |
|
}, |
|
{ |
|
"epoch": 0.16876016871390348, |
|
"grad_norm": 0.6440445184707642, |
|
"learning_rate": 3.807418486539499e-05, |
|
"loss": 0.6158, |
|
"step": 10943 |
|
}, |
|
{ |
|
"epoch": 0.16923824284623748, |
|
"grad_norm": 0.6739510893821716, |
|
"learning_rate": 3.80025285893569e-05, |
|
"loss": 0.6149, |
|
"step": 10974 |
|
}, |
|
{ |
|
"epoch": 0.16971631697857148, |
|
"grad_norm": 0.627185046672821, |
|
"learning_rate": 3.793072558155093e-05, |
|
"loss": 0.614, |
|
"step": 11005 |
|
}, |
|
{ |
|
"epoch": 0.17019439111090548, |
|
"grad_norm": 0.710350513458252, |
|
"learning_rate": 3.785877665226426e-05, |
|
"loss": 0.6214, |
|
"step": 11036 |
|
}, |
|
{ |
|
"epoch": 0.17067246524323948, |
|
"grad_norm": 0.6784375309944153, |
|
"learning_rate": 3.778668261343079e-05, |
|
"loss": 0.6123, |
|
"step": 11067 |
|
}, |
|
{ |
|
"epoch": 0.1711505393755735, |
|
"grad_norm": 0.6646184325218201, |
|
"learning_rate": 3.771444427862192e-05, |
|
"loss": 0.6178, |
|
"step": 11098 |
|
}, |
|
{ |
|
"epoch": 0.1716286135079075, |
|
"grad_norm": 0.7227056622505188, |
|
"learning_rate": 3.7642062463037465e-05, |
|
"loss": 0.6158, |
|
"step": 11129 |
|
}, |
|
{ |
|
"epoch": 0.1721066876402415, |
|
"grad_norm": 0.5937723517417908, |
|
"learning_rate": 3.7569537983496373e-05, |
|
"loss": 0.6194, |
|
"step": 11160 |
|
}, |
|
{ |
|
"epoch": 0.1725847617725755, |
|
"grad_norm": 0.6352291703224182, |
|
"learning_rate": 3.749687165842753e-05, |
|
"loss": 0.6286, |
|
"step": 11191 |
|
}, |
|
{ |
|
"epoch": 0.1730628359049095, |
|
"grad_norm": 0.5864464044570923, |
|
"learning_rate": 3.7424064307860536e-05, |
|
"loss": 0.6186, |
|
"step": 11222 |
|
}, |
|
{ |
|
"epoch": 0.1735409100372435, |
|
"grad_norm": 0.5402609705924988, |
|
"learning_rate": 3.735111675341645e-05, |
|
"loss": 0.6072, |
|
"step": 11253 |
|
}, |
|
{ |
|
"epoch": 0.17401898416957753, |
|
"grad_norm": 0.6429933905601501, |
|
"learning_rate": 3.7278029818298524e-05, |
|
"loss": 0.608, |
|
"step": 11284 |
|
}, |
|
{ |
|
"epoch": 0.17449705830191153, |
|
"grad_norm": 0.5420451164245605, |
|
"learning_rate": 3.720480432728287e-05, |
|
"loss": 0.6174, |
|
"step": 11315 |
|
}, |
|
{ |
|
"epoch": 0.17497513243424553, |
|
"grad_norm": 0.5691730380058289, |
|
"learning_rate": 3.71314411067092e-05, |
|
"loss": 0.6138, |
|
"step": 11346 |
|
}, |
|
{ |
|
"epoch": 0.17545320656657953, |
|
"grad_norm": 0.6198572516441345, |
|
"learning_rate": 3.70579409844715e-05, |
|
"loss": 0.6166, |
|
"step": 11377 |
|
}, |
|
{ |
|
"epoch": 0.17593128069891353, |
|
"grad_norm": 0.6781229972839355, |
|
"learning_rate": 3.698430479000865e-05, |
|
"loss": 0.6083, |
|
"step": 11408 |
|
}, |
|
{ |
|
"epoch": 0.17640935483124753, |
|
"grad_norm": 0.6572268605232239, |
|
"learning_rate": 3.691053335429509e-05, |
|
"loss": 0.6179, |
|
"step": 11439 |
|
}, |
|
{ |
|
"epoch": 0.17688742896358153, |
|
"grad_norm": 0.6720935702323914, |
|
"learning_rate": 3.683662750983147e-05, |
|
"loss": 0.6039, |
|
"step": 11470 |
|
}, |
|
{ |
|
"epoch": 0.17736550309591556, |
|
"grad_norm": 0.6098059415817261, |
|
"learning_rate": 3.676258809063518e-05, |
|
"loss": 0.608, |
|
"step": 11501 |
|
}, |
|
{ |
|
"epoch": 0.17784357722824956, |
|
"grad_norm": 0.6108371615409851, |
|
"learning_rate": 3.6688415932231004e-05, |
|
"loss": 0.6147, |
|
"step": 11532 |
|
}, |
|
{ |
|
"epoch": 0.17832165136058356, |
|
"grad_norm": 0.8087718486785889, |
|
"learning_rate": 3.661411187164166e-05, |
|
"loss": 0.6091, |
|
"step": 11563 |
|
}, |
|
{ |
|
"epoch": 0.17879972549291756, |
|
"grad_norm": 0.719902753829956, |
|
"learning_rate": 3.65396767473784e-05, |
|
"loss": 0.6107, |
|
"step": 11594 |
|
}, |
|
{ |
|
"epoch": 0.17927779962525156, |
|
"grad_norm": 0.5758282542228699, |
|
"learning_rate": 3.6465111399431465e-05, |
|
"loss": 0.6236, |
|
"step": 11625 |
|
}, |
|
{ |
|
"epoch": 0.17975587375758556, |
|
"grad_norm": 0.5602071285247803, |
|
"learning_rate": 3.6390416669260674e-05, |
|
"loss": 0.611, |
|
"step": 11656 |
|
}, |
|
{ |
|
"epoch": 0.1802339478899196, |
|
"grad_norm": 0.6070683002471924, |
|
"learning_rate": 3.63155933997859e-05, |
|
"loss": 0.609, |
|
"step": 11687 |
|
}, |
|
{ |
|
"epoch": 0.1807120220222536, |
|
"grad_norm": 0.64506596326828, |
|
"learning_rate": 3.624064243537758e-05, |
|
"loss": 0.605, |
|
"step": 11718 |
|
}, |
|
{ |
|
"epoch": 0.1811900961545876, |
|
"grad_norm": 0.6906920075416565, |
|
"learning_rate": 3.616556462184716e-05, |
|
"loss": 0.6168, |
|
"step": 11749 |
|
}, |
|
{ |
|
"epoch": 0.1816681702869216, |
|
"grad_norm": 0.6724758148193359, |
|
"learning_rate": 3.609036080643755e-05, |
|
"loss": 0.6143, |
|
"step": 11780 |
|
}, |
|
{ |
|
"epoch": 0.1821462444192556, |
|
"grad_norm": 0.6033251881599426, |
|
"learning_rate": 3.60150318378136e-05, |
|
"loss": 0.6161, |
|
"step": 11811 |
|
}, |
|
{ |
|
"epoch": 0.1826243185515896, |
|
"grad_norm": 0.6713966727256775, |
|
"learning_rate": 3.5939578566052465e-05, |
|
"loss": 0.619, |
|
"step": 11842 |
|
}, |
|
{ |
|
"epoch": 0.1831023926839236, |
|
"grad_norm": 0.6794861555099487, |
|
"learning_rate": 3.586400184263408e-05, |
|
"loss": 0.6116, |
|
"step": 11873 |
|
}, |
|
{ |
|
"epoch": 0.18358046681625761, |
|
"grad_norm": 0.5780847668647766, |
|
"learning_rate": 3.578830252043148e-05, |
|
"loss": 0.6159, |
|
"step": 11904 |
|
}, |
|
{ |
|
"epoch": 0.18405854094859161, |
|
"grad_norm": 0.6031593680381775, |
|
"learning_rate": 3.571248145370125e-05, |
|
"loss": 0.6135, |
|
"step": 11935 |
|
}, |
|
{ |
|
"epoch": 0.18453661508092561, |
|
"grad_norm": 0.6339271664619446, |
|
"learning_rate": 3.5636539498073794e-05, |
|
"loss": 0.6113, |
|
"step": 11966 |
|
}, |
|
{ |
|
"epoch": 0.18501468921325961, |
|
"grad_norm": 0.6663223505020142, |
|
"learning_rate": 3.556047751054378e-05, |
|
"loss": 0.6184, |
|
"step": 11997 |
|
}, |
|
{ |
|
"epoch": 0.18549276334559361, |
|
"grad_norm": 0.9799377918243408, |
|
"learning_rate": 3.548429634946039e-05, |
|
"loss": 0.6103, |
|
"step": 12028 |
|
}, |
|
{ |
|
"epoch": 0.18597083747792761, |
|
"grad_norm": 0.7260358929634094, |
|
"learning_rate": 3.540799687451768e-05, |
|
"loss": 0.6245, |
|
"step": 12059 |
|
}, |
|
{ |
|
"epoch": 0.18644891161026164, |
|
"grad_norm": 0.7097154259681702, |
|
"learning_rate": 3.533157994674485e-05, |
|
"loss": 0.6194, |
|
"step": 12090 |
|
}, |
|
{ |
|
"epoch": 0.18692698574259564, |
|
"grad_norm": 0.5897482633590698, |
|
"learning_rate": 3.5255046428496546e-05, |
|
"loss": 0.6171, |
|
"step": 12121 |
|
}, |
|
{ |
|
"epoch": 0.18740505987492964, |
|
"grad_norm": 0.6374841332435608, |
|
"learning_rate": 3.517839718344311e-05, |
|
"loss": 0.6141, |
|
"step": 12152 |
|
}, |
|
{ |
|
"epoch": 0.18788313400726364, |
|
"grad_norm": 0.6854335069656372, |
|
"learning_rate": 3.510163307656086e-05, |
|
"loss": 0.6077, |
|
"step": 12183 |
|
} |
|
], |
|
"logging_steps": 31, |
|
"max_steps": 30517, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 3052, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 8.48541631240156e+18, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|