|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.09413433883118585, |
|
"eval_steps": 500, |
|
"global_step": 6104, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0004780741323340042, |
|
"grad_norm": 3.1866345405578613, |
|
"learning_rate": 1.0157273918741808e-06, |
|
"loss": 1.194, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.0009561482646680084, |
|
"grad_norm": 1.7708723545074463, |
|
"learning_rate": 2.0314547837483616e-06, |
|
"loss": 1.1124, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.0014342223970020126, |
|
"grad_norm": 1.6835601329803467, |
|
"learning_rate": 3.0471821756225426e-06, |
|
"loss": 1.0585, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.0019122965293360168, |
|
"grad_norm": 1.642537236213684, |
|
"learning_rate": 4.062909567496723e-06, |
|
"loss": 1.0213, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.002390370661670021, |
|
"grad_norm": 1.8476150035858154, |
|
"learning_rate": 5.078636959370905e-06, |
|
"loss": 1.0024, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.0028684447940040252, |
|
"grad_norm": 1.96336829662323, |
|
"learning_rate": 6.094364351245085e-06, |
|
"loss": 0.9938, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.0033465189263380294, |
|
"grad_norm": 1.8158528804779053, |
|
"learning_rate": 7.110091743119267e-06, |
|
"loss": 0.969, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.0038245930586720336, |
|
"grad_norm": 1.9796783924102783, |
|
"learning_rate": 8.125819134993446e-06, |
|
"loss": 0.9573, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.004302667191006038, |
|
"grad_norm": 1.8192514181137085, |
|
"learning_rate": 9.141546526867629e-06, |
|
"loss": 0.9498, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.004780741323340042, |
|
"grad_norm": 2.1396358013153076, |
|
"learning_rate": 1.015727391874181e-05, |
|
"loss": 0.9294, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.005258815455674046, |
|
"grad_norm": 2.0807156562805176, |
|
"learning_rate": 1.117300131061599e-05, |
|
"loss": 0.9186, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.0057368895880080505, |
|
"grad_norm": 2.002887010574341, |
|
"learning_rate": 1.218872870249017e-05, |
|
"loss": 0.9149, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.006214963720342055, |
|
"grad_norm": 2.240675687789917, |
|
"learning_rate": 1.3204456094364351e-05, |
|
"loss": 0.9139, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.006693037852676059, |
|
"grad_norm": 1.9016591310501099, |
|
"learning_rate": 1.4220183486238533e-05, |
|
"loss": 0.8946, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.007171111985010063, |
|
"grad_norm": 1.6729310750961304, |
|
"learning_rate": 1.5235910878112714e-05, |
|
"loss": 0.8959, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.007649186117344067, |
|
"grad_norm": 2.2607779502868652, |
|
"learning_rate": 1.6251638269986893e-05, |
|
"loss": 0.8847, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.00812726024967807, |
|
"grad_norm": 1.699507236480713, |
|
"learning_rate": 1.7267365661861077e-05, |
|
"loss": 0.8773, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.008605334382012076, |
|
"grad_norm": 1.93502938747406, |
|
"learning_rate": 1.8283093053735257e-05, |
|
"loss": 0.8773, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.009083408514346079, |
|
"grad_norm": 2.1234281063079834, |
|
"learning_rate": 1.9298820445609438e-05, |
|
"loss": 0.8669, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.009561482646680084, |
|
"grad_norm": 1.7465944290161133, |
|
"learning_rate": 2.031454783748362e-05, |
|
"loss": 0.8536, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.010039556779014087, |
|
"grad_norm": 1.7039932012557983, |
|
"learning_rate": 2.13302752293578e-05, |
|
"loss": 0.8624, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.010517630911348093, |
|
"grad_norm": 1.551837682723999, |
|
"learning_rate": 2.234600262123198e-05, |
|
"loss": 0.8555, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.010995705043682096, |
|
"grad_norm": 1.7622497081756592, |
|
"learning_rate": 2.336173001310616e-05, |
|
"loss": 0.8475, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.011473779176016101, |
|
"grad_norm": 2.000936269760132, |
|
"learning_rate": 2.437745740498034e-05, |
|
"loss": 0.852, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.011951853308350104, |
|
"grad_norm": 1.6770379543304443, |
|
"learning_rate": 2.5393184796854525e-05, |
|
"loss": 0.8455, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.01242992744068411, |
|
"grad_norm": 1.746506929397583, |
|
"learning_rate": 2.6408912188728702e-05, |
|
"loss": 0.8406, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.012908001573018113, |
|
"grad_norm": 1.507759690284729, |
|
"learning_rate": 2.7424639580602886e-05, |
|
"loss": 0.8556, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.013386075705352118, |
|
"grad_norm": 1.6984492540359497, |
|
"learning_rate": 2.8440366972477066e-05, |
|
"loss": 0.8281, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.013864149837686121, |
|
"grad_norm": 1.5145736932754517, |
|
"learning_rate": 2.9456094364351244e-05, |
|
"loss": 0.8359, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.014342223970020126, |
|
"grad_norm": 1.5700000524520874, |
|
"learning_rate": 3.0471821756225428e-05, |
|
"loss": 0.8305, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.01482029810235413, |
|
"grad_norm": 1.6657596826553345, |
|
"learning_rate": 3.148754914809961e-05, |
|
"loss": 0.8263, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.015298372234688135, |
|
"grad_norm": 1.527628779411316, |
|
"learning_rate": 3.2503276539973785e-05, |
|
"loss": 0.8318, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.015776446367022138, |
|
"grad_norm": 1.8353967666625977, |
|
"learning_rate": 3.351900393184797e-05, |
|
"loss": 0.8192, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.01625452049935614, |
|
"grad_norm": 1.2118042707443237, |
|
"learning_rate": 3.453473132372215e-05, |
|
"loss": 0.819, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.016732594631690145, |
|
"grad_norm": 1.3235970735549927, |
|
"learning_rate": 3.555045871559633e-05, |
|
"loss": 0.8176, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.01721066876402415, |
|
"grad_norm": 1.6729888916015625, |
|
"learning_rate": 3.6566186107470514e-05, |
|
"loss": 0.8202, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.017688742896358155, |
|
"grad_norm": 1.3251298666000366, |
|
"learning_rate": 3.7581913499344695e-05, |
|
"loss": 0.8104, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.018166817028692158, |
|
"grad_norm": 1.5231342315673828, |
|
"learning_rate": 3.8597640891218876e-05, |
|
"loss": 0.8113, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.01864489116102616, |
|
"grad_norm": 1.3263883590698242, |
|
"learning_rate": 3.9613368283093056e-05, |
|
"loss": 0.7981, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.019122965293360168, |
|
"grad_norm": 1.317791223526001, |
|
"learning_rate": 4.062909567496724e-05, |
|
"loss": 0.7962, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.01960103942569417, |
|
"grad_norm": 1.566698670387268, |
|
"learning_rate": 4.164482306684142e-05, |
|
"loss": 0.8072, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 0.020079113558028175, |
|
"grad_norm": 1.2935110330581665, |
|
"learning_rate": 4.26605504587156e-05, |
|
"loss": 0.7818, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 0.020557187690362178, |
|
"grad_norm": 1.336227536201477, |
|
"learning_rate": 4.367627785058978e-05, |
|
"loss": 0.7917, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 0.021035261822696185, |
|
"grad_norm": 1.5656548738479614, |
|
"learning_rate": 4.469200524246396e-05, |
|
"loss": 0.7785, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 0.02151333595503019, |
|
"grad_norm": 1.5208159685134888, |
|
"learning_rate": 4.570773263433814e-05, |
|
"loss": 0.7839, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.02199141008736419, |
|
"grad_norm": 1.4816375970840454, |
|
"learning_rate": 4.672346002621232e-05, |
|
"loss": 0.7789, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 0.022469484219698195, |
|
"grad_norm": 1.402166485786438, |
|
"learning_rate": 4.77391874180865e-05, |
|
"loss": 0.7803, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 0.022947558352032202, |
|
"grad_norm": 1.2986265420913696, |
|
"learning_rate": 4.875491480996068e-05, |
|
"loss": 0.7739, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 0.023425632484366205, |
|
"grad_norm": 1.3171271085739136, |
|
"learning_rate": 4.977064220183487e-05, |
|
"loss": 0.7694, |
|
"step": 1519 |
|
}, |
|
{ |
|
"epoch": 0.02390370661670021, |
|
"grad_norm": 1.4870030879974365, |
|
"learning_rate": 4.9999915451558777e-05, |
|
"loss": 0.7663, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.024381780749034212, |
|
"grad_norm": 1.7419476509094238, |
|
"learning_rate": 4.999955597496219e-05, |
|
"loss": 0.7731, |
|
"step": 1581 |
|
}, |
|
{ |
|
"epoch": 0.02485985488136822, |
|
"grad_norm": 1.109238624572754, |
|
"learning_rate": 4.9998914381774255e-05, |
|
"loss": 0.7704, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 0.025337929013702222, |
|
"grad_norm": 1.3478920459747314, |
|
"learning_rate": 4.999799067923527e-05, |
|
"loss": 0.7723, |
|
"step": 1643 |
|
}, |
|
{ |
|
"epoch": 0.025816003146036225, |
|
"grad_norm": 1.2739650011062622, |
|
"learning_rate": 4.999678487776908e-05, |
|
"loss": 0.7699, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 0.02629407727837023, |
|
"grad_norm": 1.3892935514450073, |
|
"learning_rate": 4.9995296990983006e-05, |
|
"loss": 0.7709, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.026772151410704235, |
|
"grad_norm": 1.2031512260437012, |
|
"learning_rate": 4.999352703566763e-05, |
|
"loss": 0.7557, |
|
"step": 1736 |
|
}, |
|
{ |
|
"epoch": 0.02725022554303824, |
|
"grad_norm": 1.185471773147583, |
|
"learning_rate": 4.999147503179668e-05, |
|
"loss": 0.7645, |
|
"step": 1767 |
|
}, |
|
{ |
|
"epoch": 0.027728299675372242, |
|
"grad_norm": 1.2331740856170654, |
|
"learning_rate": 4.998914100252672e-05, |
|
"loss": 0.7655, |
|
"step": 1798 |
|
}, |
|
{ |
|
"epoch": 0.028206373807706245, |
|
"grad_norm": 1.1817049980163574, |
|
"learning_rate": 4.998652497419696e-05, |
|
"loss": 0.7617, |
|
"step": 1829 |
|
}, |
|
{ |
|
"epoch": 0.028684447940040252, |
|
"grad_norm": 1.2225860357284546, |
|
"learning_rate": 4.9983626976328927e-05, |
|
"loss": 0.7558, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.029162522072374256, |
|
"grad_norm": 1.6452528238296509, |
|
"learning_rate": 4.998044704162613e-05, |
|
"loss": 0.7566, |
|
"step": 1891 |
|
}, |
|
{ |
|
"epoch": 0.02964059620470826, |
|
"grad_norm": 1.3764828443527222, |
|
"learning_rate": 4.9976985205973705e-05, |
|
"loss": 0.7545, |
|
"step": 1922 |
|
}, |
|
{ |
|
"epoch": 0.030118670337042262, |
|
"grad_norm": 1.6581465005874634, |
|
"learning_rate": 4.997324150843799e-05, |
|
"loss": 0.7519, |
|
"step": 1953 |
|
}, |
|
{ |
|
"epoch": 0.03059674446937627, |
|
"grad_norm": 1.1065036058425903, |
|
"learning_rate": 4.99692159912661e-05, |
|
"loss": 0.7579, |
|
"step": 1984 |
|
}, |
|
{ |
|
"epoch": 0.031074818601710272, |
|
"grad_norm": 2.6465444564819336, |
|
"learning_rate": 4.996490869988546e-05, |
|
"loss": 0.7538, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 0.031552892734044276, |
|
"grad_norm": 1.3028968572616577, |
|
"learning_rate": 4.996031968290326e-05, |
|
"loss": 0.7522, |
|
"step": 2046 |
|
}, |
|
{ |
|
"epoch": 0.03203096686637828, |
|
"grad_norm": 1.0450382232666016, |
|
"learning_rate": 4.995544899210594e-05, |
|
"loss": 0.742, |
|
"step": 2077 |
|
}, |
|
{ |
|
"epoch": 0.03250904099871228, |
|
"grad_norm": 0.9411994814872742, |
|
"learning_rate": 4.9950296682458583e-05, |
|
"loss": 0.7401, |
|
"step": 2108 |
|
}, |
|
{ |
|
"epoch": 0.03298711513104629, |
|
"grad_norm": 1.0501608848571777, |
|
"learning_rate": 4.994486281210429e-05, |
|
"loss": 0.7455, |
|
"step": 2139 |
|
}, |
|
{ |
|
"epoch": 0.03346518926338029, |
|
"grad_norm": 1.266400694847107, |
|
"learning_rate": 4.9939147442363566e-05, |
|
"loss": 0.7391, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.033943263395714296, |
|
"grad_norm": 1.2862213850021362, |
|
"learning_rate": 4.9933150637733574e-05, |
|
"loss": 0.7397, |
|
"step": 2201 |
|
}, |
|
{ |
|
"epoch": 0.0344213375280483, |
|
"grad_norm": 1.2715409994125366, |
|
"learning_rate": 4.992687246588743e-05, |
|
"loss": 0.7467, |
|
"step": 2232 |
|
}, |
|
{ |
|
"epoch": 0.0348994116603823, |
|
"grad_norm": 1.3290003538131714, |
|
"learning_rate": 4.992031299767347e-05, |
|
"loss": 0.7432, |
|
"step": 2263 |
|
}, |
|
{ |
|
"epoch": 0.03537748579271631, |
|
"grad_norm": 1.0561761856079102, |
|
"learning_rate": 4.9913472307114386e-05, |
|
"loss": 0.7336, |
|
"step": 2294 |
|
}, |
|
{ |
|
"epoch": 0.035855559925050316, |
|
"grad_norm": 1.3272422552108765, |
|
"learning_rate": 4.9906350471406446e-05, |
|
"loss": 0.7251, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.036333634057384316, |
|
"grad_norm": 1.0749491453170776, |
|
"learning_rate": 4.989894757091861e-05, |
|
"loss": 0.7205, |
|
"step": 2356 |
|
}, |
|
{ |
|
"epoch": 0.03681170818971832, |
|
"grad_norm": 1.1581461429595947, |
|
"learning_rate": 4.989126368919158e-05, |
|
"loss": 0.7311, |
|
"step": 2387 |
|
}, |
|
{ |
|
"epoch": 0.03728978232205232, |
|
"grad_norm": 1.0796961784362793, |
|
"learning_rate": 4.988329891293693e-05, |
|
"loss": 0.7259, |
|
"step": 2418 |
|
}, |
|
{ |
|
"epoch": 0.03776785645438633, |
|
"grad_norm": 1.1916818618774414, |
|
"learning_rate": 4.987505333203608e-05, |
|
"loss": 0.7258, |
|
"step": 2449 |
|
}, |
|
{ |
|
"epoch": 0.038245930586720336, |
|
"grad_norm": 0.9515872001647949, |
|
"learning_rate": 4.9866527039539276e-05, |
|
"loss": 0.7273, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.038724004719054336, |
|
"grad_norm": 1.1217246055603027, |
|
"learning_rate": 4.9857720131664594e-05, |
|
"loss": 0.7269, |
|
"step": 2511 |
|
}, |
|
{ |
|
"epoch": 0.03920207885138834, |
|
"grad_norm": 0.9570199847221375, |
|
"learning_rate": 4.9848632707796773e-05, |
|
"loss": 0.7289, |
|
"step": 2542 |
|
}, |
|
{ |
|
"epoch": 0.03968015298372235, |
|
"grad_norm": 1.2980682849884033, |
|
"learning_rate": 4.9839264870486155e-05, |
|
"loss": 0.7382, |
|
"step": 2573 |
|
}, |
|
{ |
|
"epoch": 0.04015822711605635, |
|
"grad_norm": 2.1257143020629883, |
|
"learning_rate": 4.9829616725447526e-05, |
|
"loss": 0.8112, |
|
"step": 2604 |
|
}, |
|
{ |
|
"epoch": 0.04063630124839036, |
|
"grad_norm": 1.5091110467910767, |
|
"learning_rate": 4.981968838155888e-05, |
|
"loss": 0.7451, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 0.041114375380724356, |
|
"grad_norm": 2.4548749923706055, |
|
"learning_rate": 4.980947995086024e-05, |
|
"loss": 0.7358, |
|
"step": 2666 |
|
}, |
|
{ |
|
"epoch": 0.04159244951305836, |
|
"grad_norm": 1.176115870475769, |
|
"learning_rate": 4.979899154855234e-05, |
|
"loss": 0.7368, |
|
"step": 2697 |
|
}, |
|
{ |
|
"epoch": 0.04207052364539237, |
|
"grad_norm": 1.063672661781311, |
|
"learning_rate": 4.9788223292995386e-05, |
|
"loss": 0.7465, |
|
"step": 2728 |
|
}, |
|
{ |
|
"epoch": 0.04254859777772637, |
|
"grad_norm": 1.3062779903411865, |
|
"learning_rate": 4.977717530570768e-05, |
|
"loss": 0.7413, |
|
"step": 2759 |
|
}, |
|
{ |
|
"epoch": 0.04302667191006038, |
|
"grad_norm": 1.5550280809402466, |
|
"learning_rate": 4.976584771136425e-05, |
|
"loss": 0.724, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.043504746042394384, |
|
"grad_norm": 1.2525728940963745, |
|
"learning_rate": 4.975424063779547e-05, |
|
"loss": 0.7216, |
|
"step": 2821 |
|
}, |
|
{ |
|
"epoch": 0.04398282017472838, |
|
"grad_norm": 1.158134937286377, |
|
"learning_rate": 4.974235421598557e-05, |
|
"loss": 0.728, |
|
"step": 2852 |
|
}, |
|
{ |
|
"epoch": 0.04446089430706239, |
|
"grad_norm": 1.262291669845581, |
|
"learning_rate": 4.973018858007122e-05, |
|
"loss": 0.7191, |
|
"step": 2883 |
|
}, |
|
{ |
|
"epoch": 0.04493896843939639, |
|
"grad_norm": 2.826028347015381, |
|
"learning_rate": 4.9717743867339963e-05, |
|
"loss": 0.7211, |
|
"step": 2914 |
|
}, |
|
{ |
|
"epoch": 0.0454170425717304, |
|
"grad_norm": 1.0346958637237549, |
|
"learning_rate": 4.9705020218228695e-05, |
|
"loss": 0.7268, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 0.045895116704064404, |
|
"grad_norm": 1.4338330030441284, |
|
"learning_rate": 4.969201777632205e-05, |
|
"loss": 0.7154, |
|
"step": 2976 |
|
}, |
|
{ |
|
"epoch": 0.046373190836398404, |
|
"grad_norm": 0.9223676919937134, |
|
"learning_rate": 4.9678736688350846e-05, |
|
"loss": 0.7195, |
|
"step": 3007 |
|
}, |
|
{ |
|
"epoch": 0.04685126496873241, |
|
"grad_norm": 0.9936623573303223, |
|
"learning_rate": 4.966517710419033e-05, |
|
"loss": 0.7194, |
|
"step": 3038 |
|
}, |
|
{ |
|
"epoch": 0.04732933910106642, |
|
"grad_norm": 1.017962098121643, |
|
"learning_rate": 4.965133917685858e-05, |
|
"loss": 0.713, |
|
"step": 3069 |
|
}, |
|
{ |
|
"epoch": 0.04780741323340042, |
|
"grad_norm": 0.9654473662376404, |
|
"learning_rate": 4.9637223062514714e-05, |
|
"loss": 0.7096, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.048285487365734424, |
|
"grad_norm": 0.9372695684432983, |
|
"learning_rate": 4.962282892045718e-05, |
|
"loss": 0.7116, |
|
"step": 3131 |
|
}, |
|
{ |
|
"epoch": 0.048763561498068424, |
|
"grad_norm": 0.9450846910476685, |
|
"learning_rate": 4.9608156913121904e-05, |
|
"loss": 0.7129, |
|
"step": 3162 |
|
}, |
|
{ |
|
"epoch": 0.04924163563040243, |
|
"grad_norm": 1.0803054571151733, |
|
"learning_rate": 4.959320720608049e-05, |
|
"loss": 0.706, |
|
"step": 3193 |
|
}, |
|
{ |
|
"epoch": 0.04971970976273644, |
|
"grad_norm": 1.2218348979949951, |
|
"learning_rate": 4.9577979968038354e-05, |
|
"loss": 0.7076, |
|
"step": 3224 |
|
}, |
|
{ |
|
"epoch": 0.05019778389507044, |
|
"grad_norm": 1.0429767370224, |
|
"learning_rate": 4.956247537083282e-05, |
|
"loss": 0.7089, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 0.050675858027404444, |
|
"grad_norm": 0.9912049770355225, |
|
"learning_rate": 4.9546693589431145e-05, |
|
"loss": 0.7016, |
|
"step": 3286 |
|
}, |
|
{ |
|
"epoch": 0.051153932159738444, |
|
"grad_norm": 0.9687103033065796, |
|
"learning_rate": 4.9530634801928595e-05, |
|
"loss": 0.7071, |
|
"step": 3317 |
|
}, |
|
{ |
|
"epoch": 0.05163200629207245, |
|
"grad_norm": 0.8178670406341553, |
|
"learning_rate": 4.9514299189546395e-05, |
|
"loss": 0.6991, |
|
"step": 3348 |
|
}, |
|
{ |
|
"epoch": 0.05211008042440646, |
|
"grad_norm": 0.8601382374763489, |
|
"learning_rate": 4.949768693662973e-05, |
|
"loss": 0.6978, |
|
"step": 3379 |
|
}, |
|
{ |
|
"epoch": 0.05258815455674046, |
|
"grad_norm": 1.076370120048523, |
|
"learning_rate": 4.948079823064559e-05, |
|
"loss": 0.7044, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.053066228689074464, |
|
"grad_norm": 3.9457356929779053, |
|
"learning_rate": 4.946363326218074e-05, |
|
"loss": 0.6978, |
|
"step": 3441 |
|
}, |
|
{ |
|
"epoch": 0.05354430282140847, |
|
"grad_norm": 0.8186474442481995, |
|
"learning_rate": 4.9446192224939525e-05, |
|
"loss": 0.6974, |
|
"step": 3472 |
|
}, |
|
{ |
|
"epoch": 0.05402237695374247, |
|
"grad_norm": 0.9643816947937012, |
|
"learning_rate": 4.942847531574167e-05, |
|
"loss": 0.7025, |
|
"step": 3503 |
|
}, |
|
{ |
|
"epoch": 0.05450045108607648, |
|
"grad_norm": 1.024248480796814, |
|
"learning_rate": 4.941048273452008e-05, |
|
"loss": 0.7006, |
|
"step": 3534 |
|
}, |
|
{ |
|
"epoch": 0.05497852521841048, |
|
"grad_norm": 0.7718949317932129, |
|
"learning_rate": 4.9392214684318605e-05, |
|
"loss": 0.7024, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 0.055456599350744484, |
|
"grad_norm": 1.1313899755477905, |
|
"learning_rate": 4.93736713712897e-05, |
|
"loss": 0.701, |
|
"step": 3596 |
|
}, |
|
{ |
|
"epoch": 0.05593467348307849, |
|
"grad_norm": 1.0118827819824219, |
|
"learning_rate": 4.9354853004692124e-05, |
|
"loss": 0.7036, |
|
"step": 3627 |
|
}, |
|
{ |
|
"epoch": 0.05641274761541249, |
|
"grad_norm": 0.9465724229812622, |
|
"learning_rate": 4.93357597968886e-05, |
|
"loss": 0.6869, |
|
"step": 3658 |
|
}, |
|
{ |
|
"epoch": 0.0568908217477465, |
|
"grad_norm": 1.0233882665634155, |
|
"learning_rate": 4.931639196334338e-05, |
|
"loss": 0.6944, |
|
"step": 3689 |
|
}, |
|
{ |
|
"epoch": 0.057368895880080505, |
|
"grad_norm": 1.008609652519226, |
|
"learning_rate": 4.9296749722619826e-05, |
|
"loss": 0.6916, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.057846970012414504, |
|
"grad_norm": 1.0083181858062744, |
|
"learning_rate": 4.9276833296377966e-05, |
|
"loss": 0.6886, |
|
"step": 3751 |
|
}, |
|
{ |
|
"epoch": 0.05832504414474851, |
|
"grad_norm": 0.9374220967292786, |
|
"learning_rate": 4.925664290937196e-05, |
|
"loss": 0.6976, |
|
"step": 3782 |
|
}, |
|
{ |
|
"epoch": 0.05880311827708251, |
|
"grad_norm": 0.9066904783248901, |
|
"learning_rate": 4.9236178789447576e-05, |
|
"loss": 0.6911, |
|
"step": 3813 |
|
}, |
|
{ |
|
"epoch": 0.05928119240941652, |
|
"grad_norm": 0.9702699780464172, |
|
"learning_rate": 4.921544116753962e-05, |
|
"loss": 0.6959, |
|
"step": 3844 |
|
}, |
|
{ |
|
"epoch": 0.059759266541750525, |
|
"grad_norm": 0.8405037522315979, |
|
"learning_rate": 4.919443027766935e-05, |
|
"loss": 0.6896, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 0.060237340674084525, |
|
"grad_norm": 0.9460383653640747, |
|
"learning_rate": 4.91731463569418e-05, |
|
"loss": 0.6871, |
|
"step": 3906 |
|
}, |
|
{ |
|
"epoch": 0.06071541480641853, |
|
"grad_norm": 0.9355078339576721, |
|
"learning_rate": 4.915158964554312e-05, |
|
"loss": 0.6843, |
|
"step": 3937 |
|
}, |
|
{ |
|
"epoch": 0.06119348893875254, |
|
"grad_norm": 0.7211058735847473, |
|
"learning_rate": 4.912976038673786e-05, |
|
"loss": 0.6861, |
|
"step": 3968 |
|
}, |
|
{ |
|
"epoch": 0.06167156307108654, |
|
"grad_norm": 0.8674766421318054, |
|
"learning_rate": 4.9107658826866254e-05, |
|
"loss": 0.6939, |
|
"step": 3999 |
|
}, |
|
{ |
|
"epoch": 0.062149637203420545, |
|
"grad_norm": 0.8166181445121765, |
|
"learning_rate": 4.908528521534139e-05, |
|
"loss": 0.692, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.06262771133575455, |
|
"grad_norm": 0.841305136680603, |
|
"learning_rate": 4.906263980464644e-05, |
|
"loss": 0.6855, |
|
"step": 4061 |
|
}, |
|
{ |
|
"epoch": 0.06310578546808855, |
|
"grad_norm": 0.8942857384681702, |
|
"learning_rate": 4.903972285033178e-05, |
|
"loss": 0.6946, |
|
"step": 4092 |
|
}, |
|
{ |
|
"epoch": 0.06358385960042255, |
|
"grad_norm": 0.8595120310783386, |
|
"learning_rate": 4.901653461101213e-05, |
|
"loss": 0.6825, |
|
"step": 4123 |
|
}, |
|
{ |
|
"epoch": 0.06406193373275657, |
|
"grad_norm": 0.8155812621116638, |
|
"learning_rate": 4.8993075348363626e-05, |
|
"loss": 0.6821, |
|
"step": 4154 |
|
}, |
|
{ |
|
"epoch": 0.06454000786509057, |
|
"grad_norm": 0.8901113271713257, |
|
"learning_rate": 4.896934532712084e-05, |
|
"loss": 0.6898, |
|
"step": 4185 |
|
}, |
|
{ |
|
"epoch": 0.06501808199742456, |
|
"grad_norm": 0.9637976288795471, |
|
"learning_rate": 4.8945344815073846e-05, |
|
"loss": 0.6829, |
|
"step": 4216 |
|
}, |
|
{ |
|
"epoch": 0.06549615612975858, |
|
"grad_norm": 0.7048139572143555, |
|
"learning_rate": 4.892107408306516e-05, |
|
"loss": 0.6834, |
|
"step": 4247 |
|
}, |
|
{ |
|
"epoch": 0.06597423026209258, |
|
"grad_norm": 0.8655612468719482, |
|
"learning_rate": 4.889653340498669e-05, |
|
"loss": 0.6778, |
|
"step": 4278 |
|
}, |
|
{ |
|
"epoch": 0.06645230439442658, |
|
"grad_norm": 0.8204261064529419, |
|
"learning_rate": 4.8871723057776664e-05, |
|
"loss": 0.6672, |
|
"step": 4309 |
|
}, |
|
{ |
|
"epoch": 0.06693037852676058, |
|
"grad_norm": 0.9259466528892517, |
|
"learning_rate": 4.8846643321416476e-05, |
|
"loss": 0.6778, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.06740845265909459, |
|
"grad_norm": 0.9826278686523438, |
|
"learning_rate": 4.882129447892753e-05, |
|
"loss": 0.6882, |
|
"step": 4371 |
|
}, |
|
{ |
|
"epoch": 0.06788652679142859, |
|
"grad_norm": 0.7376586198806763, |
|
"learning_rate": 4.8795676816368076e-05, |
|
"loss": 0.6802, |
|
"step": 4402 |
|
}, |
|
{ |
|
"epoch": 0.06836460092376259, |
|
"grad_norm": 0.6778755784034729, |
|
"learning_rate": 4.876979062282995e-05, |
|
"loss": 0.6814, |
|
"step": 4433 |
|
}, |
|
{ |
|
"epoch": 0.0688426750560966, |
|
"grad_norm": 0.8117042183876038, |
|
"learning_rate": 4.8743636190435325e-05, |
|
"loss": 0.6759, |
|
"step": 4464 |
|
}, |
|
{ |
|
"epoch": 0.0693207491884306, |
|
"grad_norm": 0.9493227005004883, |
|
"learning_rate": 4.871721381433344e-05, |
|
"loss": 0.6744, |
|
"step": 4495 |
|
}, |
|
{ |
|
"epoch": 0.0697988233207646, |
|
"grad_norm": 1.3356900215148926, |
|
"learning_rate": 4.869052379269719e-05, |
|
"loss": 0.6763, |
|
"step": 4526 |
|
}, |
|
{ |
|
"epoch": 0.07027689745309862, |
|
"grad_norm": 1.1542543172836304, |
|
"learning_rate": 4.866356642671985e-05, |
|
"loss": 0.6788, |
|
"step": 4557 |
|
}, |
|
{ |
|
"epoch": 0.07075497158543262, |
|
"grad_norm": 0.785176157951355, |
|
"learning_rate": 4.8636342020611634e-05, |
|
"loss": 0.6853, |
|
"step": 4588 |
|
}, |
|
{ |
|
"epoch": 0.07123304571776662, |
|
"grad_norm": 0.8102776408195496, |
|
"learning_rate": 4.860885088159626e-05, |
|
"loss": 0.6794, |
|
"step": 4619 |
|
}, |
|
{ |
|
"epoch": 0.07171111985010063, |
|
"grad_norm": 0.9685975313186646, |
|
"learning_rate": 4.858109331990751e-05, |
|
"loss": 0.6823, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.07218919398243463, |
|
"grad_norm": 0.7929072976112366, |
|
"learning_rate": 4.855306964878567e-05, |
|
"loss": 0.6781, |
|
"step": 4681 |
|
}, |
|
{ |
|
"epoch": 0.07266726811476863, |
|
"grad_norm": 0.8171564340591431, |
|
"learning_rate": 4.8524780184474084e-05, |
|
"loss": 0.6685, |
|
"step": 4712 |
|
}, |
|
{ |
|
"epoch": 0.07314534224710265, |
|
"grad_norm": 0.9218119382858276, |
|
"learning_rate": 4.8496225246215496e-05, |
|
"loss": 0.6796, |
|
"step": 4743 |
|
}, |
|
{ |
|
"epoch": 0.07362341637943665, |
|
"grad_norm": 3.1681158542633057, |
|
"learning_rate": 4.8467405156248505e-05, |
|
"loss": 0.676, |
|
"step": 4774 |
|
}, |
|
{ |
|
"epoch": 0.07410149051177065, |
|
"grad_norm": 0.9336240887641907, |
|
"learning_rate": 4.843832023980392e-05, |
|
"loss": 0.6734, |
|
"step": 4805 |
|
}, |
|
{ |
|
"epoch": 0.07457956464410465, |
|
"grad_norm": 0.8264138102531433, |
|
"learning_rate": 4.840897082510106e-05, |
|
"loss": 0.6905, |
|
"step": 4836 |
|
}, |
|
{ |
|
"epoch": 0.07505763877643866, |
|
"grad_norm": 0.819663405418396, |
|
"learning_rate": 4.8379357243344084e-05, |
|
"loss": 0.6814, |
|
"step": 4867 |
|
}, |
|
{ |
|
"epoch": 0.07553571290877266, |
|
"grad_norm": 3.124502420425415, |
|
"learning_rate": 4.8349479828718236e-05, |
|
"loss": 0.6704, |
|
"step": 4898 |
|
}, |
|
{ |
|
"epoch": 0.07601378704110666, |
|
"grad_norm": 0.7769860625267029, |
|
"learning_rate": 4.8319338918386075e-05, |
|
"loss": 0.6728, |
|
"step": 4929 |
|
}, |
|
{ |
|
"epoch": 0.07649186117344067, |
|
"grad_norm": 0.8232171535491943, |
|
"learning_rate": 4.828893485248369e-05, |
|
"loss": 0.6798, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 0.07696993530577467, |
|
"grad_norm": 0.8771420121192932, |
|
"learning_rate": 4.825826797411682e-05, |
|
"loss": 0.6722, |
|
"step": 4991 |
|
}, |
|
{ |
|
"epoch": 0.07744800943810867, |
|
"grad_norm": 0.8321033716201782, |
|
"learning_rate": 4.822733862935702e-05, |
|
"loss": 0.6724, |
|
"step": 5022 |
|
}, |
|
{ |
|
"epoch": 0.07792608357044269, |
|
"grad_norm": 0.9346029162406921, |
|
"learning_rate": 4.819614716723775e-05, |
|
"loss": 0.6764, |
|
"step": 5053 |
|
}, |
|
{ |
|
"epoch": 0.07840415770277669, |
|
"grad_norm": 0.9193580150604248, |
|
"learning_rate": 4.8164693939750425e-05, |
|
"loss": 0.6789, |
|
"step": 5084 |
|
}, |
|
{ |
|
"epoch": 0.07888223183511069, |
|
"grad_norm": 1.0850661993026733, |
|
"learning_rate": 4.813297930184042e-05, |
|
"loss": 0.6685, |
|
"step": 5115 |
|
}, |
|
{ |
|
"epoch": 0.0793603059674447, |
|
"grad_norm": 0.6851856708526611, |
|
"learning_rate": 4.810100361140314e-05, |
|
"loss": 0.6621, |
|
"step": 5146 |
|
}, |
|
{ |
|
"epoch": 0.0798383800997787, |
|
"grad_norm": 0.7950114607810974, |
|
"learning_rate": 4.8068767229279885e-05, |
|
"loss": 0.6642, |
|
"step": 5177 |
|
}, |
|
{ |
|
"epoch": 0.0803164542321127, |
|
"grad_norm": 0.9632556438446045, |
|
"learning_rate": 4.8036270519253854e-05, |
|
"loss": 0.6628, |
|
"step": 5208 |
|
}, |
|
{ |
|
"epoch": 0.0807945283644467, |
|
"grad_norm": 0.7066652178764343, |
|
"learning_rate": 4.8003513848046e-05, |
|
"loss": 0.6642, |
|
"step": 5239 |
|
}, |
|
{ |
|
"epoch": 0.08127260249678071, |
|
"grad_norm": 0.7749651670455933, |
|
"learning_rate": 4.79704975853109e-05, |
|
"loss": 0.6633, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 0.08175067662911471, |
|
"grad_norm": 1.702022671699524, |
|
"learning_rate": 4.793722210363262e-05, |
|
"loss": 0.6745, |
|
"step": 5301 |
|
}, |
|
{ |
|
"epoch": 0.08222875076144871, |
|
"grad_norm": 0.8134759068489075, |
|
"learning_rate": 4.7903687778520414e-05, |
|
"loss": 0.659, |
|
"step": 5332 |
|
}, |
|
{ |
|
"epoch": 0.08270682489378273, |
|
"grad_norm": 0.8805097341537476, |
|
"learning_rate": 4.7869894988404593e-05, |
|
"loss": 0.6779, |
|
"step": 5363 |
|
}, |
|
{ |
|
"epoch": 0.08318489902611673, |
|
"grad_norm": 0.9312698841094971, |
|
"learning_rate": 4.783584411463221e-05, |
|
"loss": 0.6634, |
|
"step": 5394 |
|
}, |
|
{ |
|
"epoch": 0.08366297315845073, |
|
"grad_norm": 0.7013948559761047, |
|
"learning_rate": 4.780153554146274e-05, |
|
"loss": 0.6589, |
|
"step": 5425 |
|
}, |
|
{ |
|
"epoch": 0.08414104729078474, |
|
"grad_norm": 0.8027428388595581, |
|
"learning_rate": 4.7766969656063766e-05, |
|
"loss": 0.6603, |
|
"step": 5456 |
|
}, |
|
{ |
|
"epoch": 0.08461912142311874, |
|
"grad_norm": 1.0179306268692017, |
|
"learning_rate": 4.773214684850662e-05, |
|
"loss": 0.6595, |
|
"step": 5487 |
|
}, |
|
{ |
|
"epoch": 0.08509719555545274, |
|
"grad_norm": 0.8512592911720276, |
|
"learning_rate": 4.769706751176193e-05, |
|
"loss": 0.6659, |
|
"step": 5518 |
|
}, |
|
{ |
|
"epoch": 0.08557526968778675, |
|
"grad_norm": 0.8476304411888123, |
|
"learning_rate": 4.7661732041695264e-05, |
|
"loss": 0.6658, |
|
"step": 5549 |
|
}, |
|
{ |
|
"epoch": 0.08605334382012075, |
|
"grad_norm": 0.771584153175354, |
|
"learning_rate": 4.762614083706258e-05, |
|
"loss": 0.6622, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 0.08653141795245475, |
|
"grad_norm": 0.8719794154167175, |
|
"learning_rate": 4.759029429950581e-05, |
|
"loss": 0.661, |
|
"step": 5611 |
|
}, |
|
{ |
|
"epoch": 0.08700949208478877, |
|
"grad_norm": 0.9247443675994873, |
|
"learning_rate": 4.7554192833548235e-05, |
|
"loss": 0.6475, |
|
"step": 5642 |
|
}, |
|
{ |
|
"epoch": 0.08748756621712277, |
|
"grad_norm": 0.9540857672691345, |
|
"learning_rate": 4.751783684659e-05, |
|
"loss": 0.662, |
|
"step": 5673 |
|
}, |
|
{ |
|
"epoch": 0.08796564034945677, |
|
"grad_norm": 0.7939008474349976, |
|
"learning_rate": 4.748122674890348e-05, |
|
"loss": 0.6514, |
|
"step": 5704 |
|
}, |
|
{ |
|
"epoch": 0.08844371448179077, |
|
"grad_norm": 0.613531768321991, |
|
"learning_rate": 4.7444362953628654e-05, |
|
"loss": 0.6513, |
|
"step": 5735 |
|
}, |
|
{ |
|
"epoch": 0.08892178861412478, |
|
"grad_norm": 0.8084924817085266, |
|
"learning_rate": 4.7407245876768424e-05, |
|
"loss": 0.6465, |
|
"step": 5766 |
|
}, |
|
{ |
|
"epoch": 0.08939986274645878, |
|
"grad_norm": 0.7709007263183594, |
|
"learning_rate": 4.736987593718397e-05, |
|
"loss": 0.6618, |
|
"step": 5797 |
|
}, |
|
{ |
|
"epoch": 0.08987793687879278, |
|
"grad_norm": 0.6461811065673828, |
|
"learning_rate": 4.733225355658999e-05, |
|
"loss": 0.6516, |
|
"step": 5828 |
|
}, |
|
{ |
|
"epoch": 0.0903560110111268, |
|
"grad_norm": 0.6879326105117798, |
|
"learning_rate": 4.7294379159549926e-05, |
|
"loss": 0.655, |
|
"step": 5859 |
|
}, |
|
{ |
|
"epoch": 0.0908340851434608, |
|
"grad_norm": 0.7594075798988342, |
|
"learning_rate": 4.725625317347119e-05, |
|
"loss": 0.655, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 0.0913121592757948, |
|
"grad_norm": 0.774758517742157, |
|
"learning_rate": 4.7217876028600374e-05, |
|
"loss": 0.6697, |
|
"step": 5921 |
|
}, |
|
{ |
|
"epoch": 0.09179023340812881, |
|
"grad_norm": 0.9164844155311584, |
|
"learning_rate": 4.717924815801832e-05, |
|
"loss": 0.6483, |
|
"step": 5952 |
|
}, |
|
{ |
|
"epoch": 0.09226830754046281, |
|
"grad_norm": 0.792631208896637, |
|
"learning_rate": 4.714036999763532e-05, |
|
"loss": 0.6644, |
|
"step": 5983 |
|
}, |
|
{ |
|
"epoch": 0.09274638167279681, |
|
"grad_norm": 0.7735174298286438, |
|
"learning_rate": 4.7101241986186116e-05, |
|
"loss": 0.6545, |
|
"step": 6014 |
|
}, |
|
{ |
|
"epoch": 0.09322445580513082, |
|
"grad_norm": 0.725825309753418, |
|
"learning_rate": 4.7061864565225e-05, |
|
"loss": 0.6579, |
|
"step": 6045 |
|
}, |
|
{ |
|
"epoch": 0.09370252993746482, |
|
"grad_norm": 0.7191294431686401, |
|
"learning_rate": 4.702223817912081e-05, |
|
"loss": 0.6509, |
|
"step": 6076 |
|
} |
|
], |
|
"logging_steps": 31, |
|
"max_steps": 30517, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 3052, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 4.24270815620078e+18, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |