|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 3.0, |
|
"eval_steps": 500, |
|
"global_step": 5268, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0056947608200455585, |
|
"grad_norm": 12.728320121765137, |
|
"learning_rate": 4.990508731966591e-05, |
|
"loss": 1.816, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.011389521640091117, |
|
"grad_norm": 7.951778888702393, |
|
"learning_rate": 4.981017463933182e-05, |
|
"loss": 0.9591, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.017084282460136675, |
|
"grad_norm": 4.346314907073975, |
|
"learning_rate": 4.971526195899772e-05, |
|
"loss": 0.5211, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.022779043280182234, |
|
"grad_norm": 2.759699821472168, |
|
"learning_rate": 4.962034927866363e-05, |
|
"loss": 0.5977, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.02847380410022779, |
|
"grad_norm": 4.309758186340332, |
|
"learning_rate": 4.9525436598329536e-05, |
|
"loss": 0.5117, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.03416856492027335, |
|
"grad_norm": 3.065225601196289, |
|
"learning_rate": 4.9430523917995447e-05, |
|
"loss": 0.4366, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.03986332574031891, |
|
"grad_norm": 4.769909381866455, |
|
"learning_rate": 4.933561123766136e-05, |
|
"loss": 0.4092, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.04555808656036447, |
|
"grad_norm": 1.6601901054382324, |
|
"learning_rate": 4.924069855732726e-05, |
|
"loss": 0.3484, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.05125284738041002, |
|
"grad_norm": 4.936558723449707, |
|
"learning_rate": 4.9145785876993165e-05, |
|
"loss": 0.4445, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.05694760820045558, |
|
"grad_norm": 1.2995065450668335, |
|
"learning_rate": 4.9050873196659076e-05, |
|
"loss": 0.3302, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.06264236902050115, |
|
"grad_norm": 4.602411270141602, |
|
"learning_rate": 4.895596051632499e-05, |
|
"loss": 0.3969, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.0683371298405467, |
|
"grad_norm": 4.256335258483887, |
|
"learning_rate": 4.886104783599089e-05, |
|
"loss": 0.3385, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.07403189066059225, |
|
"grad_norm": 4.157656669616699, |
|
"learning_rate": 4.87661351556568e-05, |
|
"loss": 0.2872, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.07972665148063782, |
|
"grad_norm": 3.6777021884918213, |
|
"learning_rate": 4.8671222475322705e-05, |
|
"loss": 0.2591, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.08542141230068337, |
|
"grad_norm": 2.272169589996338, |
|
"learning_rate": 4.857630979498861e-05, |
|
"loss": 0.275, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.09111617312072894, |
|
"grad_norm": 0.8441994786262512, |
|
"learning_rate": 4.848139711465452e-05, |
|
"loss": 0.2029, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.09681093394077449, |
|
"grad_norm": 3.8441519737243652, |
|
"learning_rate": 4.838648443432043e-05, |
|
"loss": 0.3218, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.10250569476082004, |
|
"grad_norm": 1.3639609813690186, |
|
"learning_rate": 4.8291571753986335e-05, |
|
"loss": 0.2317, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.1082004555808656, |
|
"grad_norm": 2.0190043449401855, |
|
"learning_rate": 4.8196659073652246e-05, |
|
"loss": 0.227, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.11389521640091116, |
|
"grad_norm": 2.7274367809295654, |
|
"learning_rate": 4.810174639331815e-05, |
|
"loss": 0.1802, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.11958997722095673, |
|
"grad_norm": 4.253875732421875, |
|
"learning_rate": 4.8006833712984054e-05, |
|
"loss": 0.1921, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.1252847380410023, |
|
"grad_norm": 2.3438820838928223, |
|
"learning_rate": 4.7911921032649964e-05, |
|
"loss": 0.235, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.13097949886104784, |
|
"grad_norm": 2.578488349914551, |
|
"learning_rate": 4.7817008352315875e-05, |
|
"loss": 0.2131, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.1366742596810934, |
|
"grad_norm": 1.772674560546875, |
|
"learning_rate": 4.772209567198178e-05, |
|
"loss": 0.2041, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.14236902050113895, |
|
"grad_norm": 1.3497803211212158, |
|
"learning_rate": 4.762718299164768e-05, |
|
"loss": 0.2185, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.1480637813211845, |
|
"grad_norm": 2.353133201599121, |
|
"learning_rate": 4.7532270311313594e-05, |
|
"loss": 0.2591, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.15375854214123008, |
|
"grad_norm": 1.3427927494049072, |
|
"learning_rate": 4.74373576309795e-05, |
|
"loss": 0.2021, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.15945330296127563, |
|
"grad_norm": 1.5266870260238647, |
|
"learning_rate": 4.734244495064541e-05, |
|
"loss": 0.2301, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.16514806378132119, |
|
"grad_norm": 1.570088505744934, |
|
"learning_rate": 4.724753227031132e-05, |
|
"loss": 0.2059, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.17084282460136674, |
|
"grad_norm": 1.912508487701416, |
|
"learning_rate": 4.715261958997722e-05, |
|
"loss": 0.2137, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.1765375854214123, |
|
"grad_norm": 2.4157466888427734, |
|
"learning_rate": 4.705770690964313e-05, |
|
"loss": 0.218, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.18223234624145787, |
|
"grad_norm": 4.4945831298828125, |
|
"learning_rate": 4.696279422930904e-05, |
|
"loss": 0.3602, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.18792710706150342, |
|
"grad_norm": 1.9692164659500122, |
|
"learning_rate": 4.686788154897495e-05, |
|
"loss": 0.2795, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.19362186788154898, |
|
"grad_norm": 3.558931350708008, |
|
"learning_rate": 4.677296886864085e-05, |
|
"loss": 0.1841, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.19931662870159453, |
|
"grad_norm": 1.7558096647262573, |
|
"learning_rate": 4.6678056188306763e-05, |
|
"loss": 0.2018, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.20501138952164008, |
|
"grad_norm": 1.1252487897872925, |
|
"learning_rate": 4.658314350797267e-05, |
|
"loss": 0.178, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.21070615034168566, |
|
"grad_norm": 0.8949424028396606, |
|
"learning_rate": 4.648823082763857e-05, |
|
"loss": 0.1891, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.2164009111617312, |
|
"grad_norm": 1.9878759384155273, |
|
"learning_rate": 4.639331814730448e-05, |
|
"loss": 0.1868, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.22209567198177677, |
|
"grad_norm": 2.801661491394043, |
|
"learning_rate": 4.629840546697039e-05, |
|
"loss": 0.2122, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.22779043280182232, |
|
"grad_norm": 1.7705647945404053, |
|
"learning_rate": 4.62034927866363e-05, |
|
"loss": 0.2144, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.23348519362186787, |
|
"grad_norm": 3.3794710636138916, |
|
"learning_rate": 4.61085801063022e-05, |
|
"loss": 0.1505, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.23917995444191345, |
|
"grad_norm": 2.587388277053833, |
|
"learning_rate": 4.601366742596811e-05, |
|
"loss": 0.1637, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.244874715261959, |
|
"grad_norm": 1.953997254371643, |
|
"learning_rate": 4.5918754745634016e-05, |
|
"loss": 0.1736, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.2505694760820046, |
|
"grad_norm": 1.6005550622940063, |
|
"learning_rate": 4.5823842065299926e-05, |
|
"loss": 0.1899, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.25626423690205014, |
|
"grad_norm": 2.8049798011779785, |
|
"learning_rate": 4.572892938496584e-05, |
|
"loss": 0.2553, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.2619589977220957, |
|
"grad_norm": 2.1441080570220947, |
|
"learning_rate": 4.563401670463174e-05, |
|
"loss": 0.1774, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.26765375854214124, |
|
"grad_norm": 1.1879520416259766, |
|
"learning_rate": 4.5539104024297645e-05, |
|
"loss": 0.1974, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.2733485193621868, |
|
"grad_norm": 1.3628582954406738, |
|
"learning_rate": 4.5444191343963556e-05, |
|
"loss": 0.154, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.27904328018223234, |
|
"grad_norm": 1.7644487619400024, |
|
"learning_rate": 4.5349278663629466e-05, |
|
"loss": 0.171, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.2847380410022779, |
|
"grad_norm": 1.8280380964279175, |
|
"learning_rate": 4.525436598329537e-05, |
|
"loss": 0.1542, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.29043280182232345, |
|
"grad_norm": 1.6000663042068481, |
|
"learning_rate": 4.515945330296128e-05, |
|
"loss": 0.1963, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.296127562642369, |
|
"grad_norm": 1.8194619417190552, |
|
"learning_rate": 4.5064540622627185e-05, |
|
"loss": 0.1298, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.30182232346241455, |
|
"grad_norm": 1.9171711206436157, |
|
"learning_rate": 4.496962794229309e-05, |
|
"loss": 0.1936, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.30751708428246016, |
|
"grad_norm": 1.4599436521530151, |
|
"learning_rate": 4.4874715261959e-05, |
|
"loss": 0.1442, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.3132118451025057, |
|
"grad_norm": 0.9252722263336182, |
|
"learning_rate": 4.477980258162491e-05, |
|
"loss": 0.2002, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.31890660592255127, |
|
"grad_norm": 1.1192878484725952, |
|
"learning_rate": 4.4684889901290815e-05, |
|
"loss": 0.1121, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.3246013667425968, |
|
"grad_norm": 1.3718199729919434, |
|
"learning_rate": 4.4589977220956725e-05, |
|
"loss": 0.1876, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.33029612756264237, |
|
"grad_norm": 1.7225291728973389, |
|
"learning_rate": 4.449506454062263e-05, |
|
"loss": 0.1476, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.3359908883826879, |
|
"grad_norm": 1.7924920320510864, |
|
"learning_rate": 4.440015186028853e-05, |
|
"loss": 0.1476, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.3416856492027335, |
|
"grad_norm": 2.5110108852386475, |
|
"learning_rate": 4.4305239179954444e-05, |
|
"loss": 0.1659, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.34738041002277903, |
|
"grad_norm": 1.2690166234970093, |
|
"learning_rate": 4.4210326499620355e-05, |
|
"loss": 0.1385, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.3530751708428246, |
|
"grad_norm": 1.8079414367675781, |
|
"learning_rate": 4.411541381928626e-05, |
|
"loss": 0.1213, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.35876993166287013, |
|
"grad_norm": 2.604128360748291, |
|
"learning_rate": 4.402050113895216e-05, |
|
"loss": 0.115, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.36446469248291574, |
|
"grad_norm": 1.001185655593872, |
|
"learning_rate": 4.3925588458618073e-05, |
|
"loss": 0.1608, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.3701594533029613, |
|
"grad_norm": 1.0883430242538452, |
|
"learning_rate": 4.383067577828398e-05, |
|
"loss": 0.1177, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.37585421412300685, |
|
"grad_norm": 2.1729352474212646, |
|
"learning_rate": 4.373576309794989e-05, |
|
"loss": 0.1514, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.3815489749430524, |
|
"grad_norm": 5.217422962188721, |
|
"learning_rate": 4.36408504176158e-05, |
|
"loss": 0.1534, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.38724373576309795, |
|
"grad_norm": 4.588407039642334, |
|
"learning_rate": 4.35459377372817e-05, |
|
"loss": 0.1499, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.3929384965831435, |
|
"grad_norm": 1.5867393016815186, |
|
"learning_rate": 4.345102505694761e-05, |
|
"loss": 0.192, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.39863325740318906, |
|
"grad_norm": 0.7483230829238892, |
|
"learning_rate": 4.335611237661352e-05, |
|
"loss": 0.1254, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.4043280182232346, |
|
"grad_norm": 1.8365514278411865, |
|
"learning_rate": 4.326119969627943e-05, |
|
"loss": 0.1735, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.41002277904328016, |
|
"grad_norm": 4.194180488586426, |
|
"learning_rate": 4.316628701594533e-05, |
|
"loss": 0.2097, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.4157175398633257, |
|
"grad_norm": 3.2393548488616943, |
|
"learning_rate": 4.307137433561124e-05, |
|
"loss": 0.116, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.4214123006833713, |
|
"grad_norm": 1.270488977432251, |
|
"learning_rate": 4.297646165527715e-05, |
|
"loss": 0.19, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.4271070615034169, |
|
"grad_norm": 1.7453019618988037, |
|
"learning_rate": 4.288154897494305e-05, |
|
"loss": 0.1397, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.4328018223234624, |
|
"grad_norm": 1.9998120069503784, |
|
"learning_rate": 4.278663629460896e-05, |
|
"loss": 0.1014, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.438496583143508, |
|
"grad_norm": 1.3646454811096191, |
|
"learning_rate": 4.269172361427487e-05, |
|
"loss": 0.1727, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.44419134396355353, |
|
"grad_norm": 0.5129444003105164, |
|
"learning_rate": 4.2596810933940777e-05, |
|
"loss": 0.1545, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.4498861047835991, |
|
"grad_norm": 4.377679347991943, |
|
"learning_rate": 4.250189825360668e-05, |
|
"loss": 0.1725, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.45558086560364464, |
|
"grad_norm": 1.2630000114440918, |
|
"learning_rate": 4.240698557327259e-05, |
|
"loss": 0.1531, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.4612756264236902, |
|
"grad_norm": 2.136129856109619, |
|
"learning_rate": 4.2312072892938495e-05, |
|
"loss": 0.1341, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.46697038724373574, |
|
"grad_norm": 1.4710100889205933, |
|
"learning_rate": 4.2217160212604406e-05, |
|
"loss": 0.1716, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.47266514806378135, |
|
"grad_norm": 1.6353185176849365, |
|
"learning_rate": 4.212224753227032e-05, |
|
"loss": 0.143, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.4783599088838269, |
|
"grad_norm": 9.000621795654297, |
|
"learning_rate": 4.202733485193622e-05, |
|
"loss": 0.1185, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.48405466970387245, |
|
"grad_norm": 1.606629729270935, |
|
"learning_rate": 4.1932422171602125e-05, |
|
"loss": 0.1555, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.489749430523918, |
|
"grad_norm": 1.9729490280151367, |
|
"learning_rate": 4.1837509491268035e-05, |
|
"loss": 0.1648, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.49544419134396356, |
|
"grad_norm": 9.148164749145508, |
|
"learning_rate": 4.174259681093394e-05, |
|
"loss": 0.1562, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.5011389521640092, |
|
"grad_norm": 2.9721126556396484, |
|
"learning_rate": 4.164768413059985e-05, |
|
"loss": 0.1489, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.5068337129840547, |
|
"grad_norm": 1.9185500144958496, |
|
"learning_rate": 4.155277145026576e-05, |
|
"loss": 0.1663, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.5125284738041003, |
|
"grad_norm": 1.8967223167419434, |
|
"learning_rate": 4.1457858769931665e-05, |
|
"loss": 0.138, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.5182232346241458, |
|
"grad_norm": 1.2305383682250977, |
|
"learning_rate": 4.136294608959757e-05, |
|
"loss": 0.1595, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.5239179954441914, |
|
"grad_norm": 1.6164170503616333, |
|
"learning_rate": 4.126803340926348e-05, |
|
"loss": 0.147, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.5296127562642369, |
|
"grad_norm": 3.8432424068450928, |
|
"learning_rate": 4.117312072892939e-05, |
|
"loss": 0.2108, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.5353075170842825, |
|
"grad_norm": 3.2506699562072754, |
|
"learning_rate": 4.1078208048595294e-05, |
|
"loss": 0.175, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.541002277904328, |
|
"grad_norm": 3.8444082736968994, |
|
"learning_rate": 4.0983295368261205e-05, |
|
"loss": 0.1942, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.5466970387243736, |
|
"grad_norm": 1.7612577676773071, |
|
"learning_rate": 4.088838268792711e-05, |
|
"loss": 0.1325, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.5523917995444191, |
|
"grad_norm": 1.2005575895309448, |
|
"learning_rate": 4.079347000759301e-05, |
|
"loss": 0.1129, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.5580865603644647, |
|
"grad_norm": 0.9540462493896484, |
|
"learning_rate": 4.0698557327258924e-05, |
|
"loss": 0.1258, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.5637813211845103, |
|
"grad_norm": 2.20613431930542, |
|
"learning_rate": 4.0603644646924834e-05, |
|
"loss": 0.179, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.5694760820045558, |
|
"grad_norm": 1.600826382637024, |
|
"learning_rate": 4.050873196659074e-05, |
|
"loss": 0.1809, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.5751708428246014, |
|
"grad_norm": 1.0167893171310425, |
|
"learning_rate": 4.041381928625664e-05, |
|
"loss": 0.0826, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.5808656036446469, |
|
"grad_norm": 1.0656858682632446, |
|
"learning_rate": 4.031890660592255e-05, |
|
"loss": 0.1323, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.5865603644646925, |
|
"grad_norm": 1.8568804264068604, |
|
"learning_rate": 4.022399392558846e-05, |
|
"loss": 0.1184, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.592255125284738, |
|
"grad_norm": 1.2088297605514526, |
|
"learning_rate": 4.012908124525437e-05, |
|
"loss": 0.115, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.5979498861047836, |
|
"grad_norm": 4.174439907073975, |
|
"learning_rate": 4.003416856492028e-05, |
|
"loss": 0.1541, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.6036446469248291, |
|
"grad_norm": 1.0429656505584717, |
|
"learning_rate": 3.993925588458618e-05, |
|
"loss": 0.1218, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.6093394077448747, |
|
"grad_norm": 4.0013651847839355, |
|
"learning_rate": 3.9844343204252087e-05, |
|
"loss": 0.1466, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.6150341685649203, |
|
"grad_norm": 1.9183622598648071, |
|
"learning_rate": 3.9749430523918e-05, |
|
"loss": 0.14, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.6207289293849658, |
|
"grad_norm": 2.64052152633667, |
|
"learning_rate": 3.965451784358391e-05, |
|
"loss": 0.1499, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.6264236902050114, |
|
"grad_norm": 2.6967151165008545, |
|
"learning_rate": 3.955960516324981e-05, |
|
"loss": 0.106, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.6321184510250569, |
|
"grad_norm": 1.5822768211364746, |
|
"learning_rate": 3.946469248291572e-05, |
|
"loss": 0.0858, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.6378132118451025, |
|
"grad_norm": 1.4362579584121704, |
|
"learning_rate": 3.936977980258163e-05, |
|
"loss": 0.1675, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.643507972665148, |
|
"grad_norm": 4.198774337768555, |
|
"learning_rate": 3.927486712224753e-05, |
|
"loss": 0.1887, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.6492027334851936, |
|
"grad_norm": 1.0296118259429932, |
|
"learning_rate": 3.917995444191344e-05, |
|
"loss": 0.174, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.6548974943052391, |
|
"grad_norm": 2.4037227630615234, |
|
"learning_rate": 3.908504176157935e-05, |
|
"loss": 0.1053, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.6605922551252847, |
|
"grad_norm": 1.8589047193527222, |
|
"learning_rate": 3.8990129081245256e-05, |
|
"loss": 0.1674, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.6662870159453302, |
|
"grad_norm": 0.8272297978401184, |
|
"learning_rate": 3.889521640091117e-05, |
|
"loss": 0.1646, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.6719817767653758, |
|
"grad_norm": 2.23038911819458, |
|
"learning_rate": 3.880030372057707e-05, |
|
"loss": 0.1499, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.6776765375854215, |
|
"grad_norm": 2.3637850284576416, |
|
"learning_rate": 3.8705391040242975e-05, |
|
"loss": 0.1577, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.683371298405467, |
|
"grad_norm": 3.252579927444458, |
|
"learning_rate": 3.8610478359908886e-05, |
|
"loss": 0.1223, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.6890660592255126, |
|
"grad_norm": 1.8192085027694702, |
|
"learning_rate": 3.8515565679574796e-05, |
|
"loss": 0.1594, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.6947608200455581, |
|
"grad_norm": 1.6105213165283203, |
|
"learning_rate": 3.84206529992407e-05, |
|
"loss": 0.1347, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.7004555808656037, |
|
"grad_norm": 1.618357539176941, |
|
"learning_rate": 3.8325740318906604e-05, |
|
"loss": 0.1309, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.7061503416856492, |
|
"grad_norm": 1.0436160564422607, |
|
"learning_rate": 3.8230827638572515e-05, |
|
"loss": 0.1221, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.7118451025056948, |
|
"grad_norm": 1.0755667686462402, |
|
"learning_rate": 3.813591495823842e-05, |
|
"loss": 0.138, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.7175398633257403, |
|
"grad_norm": 3.4023258686065674, |
|
"learning_rate": 3.804100227790433e-05, |
|
"loss": 0.2051, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.7232346241457859, |
|
"grad_norm": 1.2122399806976318, |
|
"learning_rate": 3.794608959757024e-05, |
|
"loss": 0.1616, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.7289293849658315, |
|
"grad_norm": 3.2242696285247803, |
|
"learning_rate": 3.7851176917236145e-05, |
|
"loss": 0.1678, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.734624145785877, |
|
"grad_norm": 0.8978271484375, |
|
"learning_rate": 3.775626423690205e-05, |
|
"loss": 0.1504, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.7403189066059226, |
|
"grad_norm": 2.9367473125457764, |
|
"learning_rate": 3.766135155656796e-05, |
|
"loss": 0.1859, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.7460136674259681, |
|
"grad_norm": 1.9458057880401611, |
|
"learning_rate": 3.756643887623387e-05, |
|
"loss": 0.1306, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.7517084282460137, |
|
"grad_norm": 1.1347579956054688, |
|
"learning_rate": 3.7471526195899774e-05, |
|
"loss": 0.1102, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.7574031890660592, |
|
"grad_norm": 1.1623060703277588, |
|
"learning_rate": 3.7376613515565685e-05, |
|
"loss": 0.126, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.7630979498861048, |
|
"grad_norm": 1.9671999216079712, |
|
"learning_rate": 3.728170083523159e-05, |
|
"loss": 0.1033, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.7687927107061503, |
|
"grad_norm": 0.981979489326477, |
|
"learning_rate": 3.718678815489749e-05, |
|
"loss": 0.1642, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.7744874715261959, |
|
"grad_norm": 4.021563529968262, |
|
"learning_rate": 3.7091875474563403e-05, |
|
"loss": 0.14, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.7801822323462415, |
|
"grad_norm": 0.7121520638465881, |
|
"learning_rate": 3.6996962794229314e-05, |
|
"loss": 0.1542, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.785876993166287, |
|
"grad_norm": 2.763511896133423, |
|
"learning_rate": 3.690205011389522e-05, |
|
"loss": 0.1408, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.7915717539863326, |
|
"grad_norm": 2.2274112701416016, |
|
"learning_rate": 3.680713743356112e-05, |
|
"loss": 0.131, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.7972665148063781, |
|
"grad_norm": 0.9881575703620911, |
|
"learning_rate": 3.671222475322703e-05, |
|
"loss": 0.1189, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.8029612756264237, |
|
"grad_norm": 1.166239619255066, |
|
"learning_rate": 3.661731207289294e-05, |
|
"loss": 0.1033, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.8086560364464692, |
|
"grad_norm": 0.5504360795021057, |
|
"learning_rate": 3.652239939255885e-05, |
|
"loss": 0.1182, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.8143507972665148, |
|
"grad_norm": 2.770014762878418, |
|
"learning_rate": 3.642748671222476e-05, |
|
"loss": 0.1144, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.8200455580865603, |
|
"grad_norm": 3.9895546436309814, |
|
"learning_rate": 3.633257403189066e-05, |
|
"loss": 0.1217, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.8257403189066059, |
|
"grad_norm": 3.492967128753662, |
|
"learning_rate": 3.6237661351556566e-05, |
|
"loss": 0.1508, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.8314350797266514, |
|
"grad_norm": 1.7888444662094116, |
|
"learning_rate": 3.614274867122248e-05, |
|
"loss": 0.1201, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.837129840546697, |
|
"grad_norm": 1.955121397972107, |
|
"learning_rate": 3.604783599088839e-05, |
|
"loss": 0.1618, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.8428246013667426, |
|
"grad_norm": 1.1534236669540405, |
|
"learning_rate": 3.595292331055429e-05, |
|
"loss": 0.1441, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.8485193621867881, |
|
"grad_norm": 1.145704746246338, |
|
"learning_rate": 3.58580106302202e-05, |
|
"loss": 0.1603, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.8542141230068337, |
|
"grad_norm": 1.3455891609191895, |
|
"learning_rate": 3.5763097949886106e-05, |
|
"loss": 0.1348, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.8599088838268792, |
|
"grad_norm": 1.2209105491638184, |
|
"learning_rate": 3.566818526955201e-05, |
|
"loss": 0.1008, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.8656036446469249, |
|
"grad_norm": 1.7053004503250122, |
|
"learning_rate": 3.557327258921792e-05, |
|
"loss": 0.0978, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.8712984054669703, |
|
"grad_norm": 2.105926036834717, |
|
"learning_rate": 3.547835990888383e-05, |
|
"loss": 0.1482, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.876993166287016, |
|
"grad_norm": 2.1003365516662598, |
|
"learning_rate": 3.5383447228549736e-05, |
|
"loss": 0.1308, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.8826879271070615, |
|
"grad_norm": 1.7915514707565308, |
|
"learning_rate": 3.528853454821565e-05, |
|
"loss": 0.1432, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.8883826879271071, |
|
"grad_norm": 3.394533395767212, |
|
"learning_rate": 3.519362186788155e-05, |
|
"loss": 0.1982, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.8940774487471527, |
|
"grad_norm": 3.049562931060791, |
|
"learning_rate": 3.5098709187547455e-05, |
|
"loss": 0.1317, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.8997722095671982, |
|
"grad_norm": 0.992368757724762, |
|
"learning_rate": 3.5003796507213365e-05, |
|
"loss": 0.1017, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.9054669703872438, |
|
"grad_norm": 1.088188886642456, |
|
"learning_rate": 3.4908883826879276e-05, |
|
"loss": 0.0877, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.9111617312072893, |
|
"grad_norm": 0.7528336048126221, |
|
"learning_rate": 3.481397114654518e-05, |
|
"loss": 0.1053, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.9168564920273349, |
|
"grad_norm": 5.400979042053223, |
|
"learning_rate": 3.4719058466211084e-05, |
|
"loss": 0.1763, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.9225512528473804, |
|
"grad_norm": 0.7936645150184631, |
|
"learning_rate": 3.4624145785876995e-05, |
|
"loss": 0.1084, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.928246013667426, |
|
"grad_norm": 1.9876333475112915, |
|
"learning_rate": 3.45292331055429e-05, |
|
"loss": 0.1491, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.9339407744874715, |
|
"grad_norm": 1.7372020483016968, |
|
"learning_rate": 3.443432042520881e-05, |
|
"loss": 0.0863, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.9396355353075171, |
|
"grad_norm": 0.9634379148483276, |
|
"learning_rate": 3.433940774487472e-05, |
|
"loss": 0.0897, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.9453302961275627, |
|
"grad_norm": 0.9360265135765076, |
|
"learning_rate": 3.4244495064540624e-05, |
|
"loss": 0.1238, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.9510250569476082, |
|
"grad_norm": 1.3839788436889648, |
|
"learning_rate": 3.414958238420653e-05, |
|
"loss": 0.1259, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.9567198177676538, |
|
"grad_norm": 2.2220139503479004, |
|
"learning_rate": 3.405466970387244e-05, |
|
"loss": 0.1176, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.9624145785876993, |
|
"grad_norm": 1.8980664014816284, |
|
"learning_rate": 3.395975702353835e-05, |
|
"loss": 0.1412, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.9681093394077449, |
|
"grad_norm": 7.803280830383301, |
|
"learning_rate": 3.3864844343204254e-05, |
|
"loss": 0.1334, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.9738041002277904, |
|
"grad_norm": 0.679198682308197, |
|
"learning_rate": 3.3769931662870164e-05, |
|
"loss": 0.0845, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.979498861047836, |
|
"grad_norm": 2.330993175506592, |
|
"learning_rate": 3.367501898253607e-05, |
|
"loss": 0.1032, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.9851936218678815, |
|
"grad_norm": 2.838163375854492, |
|
"learning_rate": 3.358010630220197e-05, |
|
"loss": 0.1171, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.9908883826879271, |
|
"grad_norm": 1.7064838409423828, |
|
"learning_rate": 3.348519362186788e-05, |
|
"loss": 0.0948, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.9965831435079726, |
|
"grad_norm": 4.830978870391846, |
|
"learning_rate": 3.3390280941533794e-05, |
|
"loss": 0.1238, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_accuracy": 0.968639214616853, |
|
"eval_f1": 0.7834769305097028, |
|
"eval_loss": 0.1241590604186058, |
|
"eval_precision": 0.7507716049382716, |
|
"eval_recall": 0.8191614749957905, |
|
"eval_runtime": 25.3499, |
|
"eval_samples_per_second": 128.206, |
|
"eval_steps_per_second": 16.055, |
|
"step": 1756 |
|
}, |
|
{ |
|
"epoch": 1.0022779043280183, |
|
"grad_norm": 0.97957444190979, |
|
"learning_rate": 3.32953682611997e-05, |
|
"loss": 0.1341, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 1.0079726651480638, |
|
"grad_norm": 1.6682242155075073, |
|
"learning_rate": 3.32004555808656e-05, |
|
"loss": 0.0814, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 1.0136674259681093, |
|
"grad_norm": 1.047098159790039, |
|
"learning_rate": 3.310554290053151e-05, |
|
"loss": 0.1028, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.0193621867881548, |
|
"grad_norm": 2.5938913822174072, |
|
"learning_rate": 3.3010630220197417e-05, |
|
"loss": 0.1297, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 1.0250569476082005, |
|
"grad_norm": 2.022613525390625, |
|
"learning_rate": 3.291571753986333e-05, |
|
"loss": 0.0786, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.030751708428246, |
|
"grad_norm": 1.2228267192840576, |
|
"learning_rate": 3.282080485952924e-05, |
|
"loss": 0.1056, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 1.0364464692482915, |
|
"grad_norm": 0.591299295425415, |
|
"learning_rate": 3.272589217919514e-05, |
|
"loss": 0.0958, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 1.042141230068337, |
|
"grad_norm": 1.737338900566101, |
|
"learning_rate": 3.2630979498861046e-05, |
|
"loss": 0.0887, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 1.0478359908883828, |
|
"grad_norm": 1.6468360424041748, |
|
"learning_rate": 3.253606681852696e-05, |
|
"loss": 0.0749, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 1.0535307517084282, |
|
"grad_norm": 0.9322206377983093, |
|
"learning_rate": 3.244115413819286e-05, |
|
"loss": 0.0978, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.0592255125284737, |
|
"grad_norm": 0.8681425452232361, |
|
"learning_rate": 3.234624145785877e-05, |
|
"loss": 0.0736, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 1.0649202733485195, |
|
"grad_norm": 1.229645013809204, |
|
"learning_rate": 3.225132877752468e-05, |
|
"loss": 0.0875, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 1.070615034168565, |
|
"grad_norm": 0.8657091856002808, |
|
"learning_rate": 3.2156416097190586e-05, |
|
"loss": 0.0984, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 1.0763097949886105, |
|
"grad_norm": 1.5904412269592285, |
|
"learning_rate": 3.206150341685649e-05, |
|
"loss": 0.1398, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 1.082004555808656, |
|
"grad_norm": 1.811844825744629, |
|
"learning_rate": 3.19665907365224e-05, |
|
"loss": 0.0844, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.0876993166287017, |
|
"grad_norm": 3.4504454135894775, |
|
"learning_rate": 3.187167805618831e-05, |
|
"loss": 0.1053, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 1.0933940774487472, |
|
"grad_norm": 2.874379873275757, |
|
"learning_rate": 3.1776765375854216e-05, |
|
"loss": 0.0868, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 1.0990888382687927, |
|
"grad_norm": 1.921471357345581, |
|
"learning_rate": 3.1681852695520126e-05, |
|
"loss": 0.0906, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 1.1047835990888384, |
|
"grad_norm": 1.5753288269042969, |
|
"learning_rate": 3.158694001518603e-05, |
|
"loss": 0.0923, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 1.1104783599088839, |
|
"grad_norm": 2.416849136352539, |
|
"learning_rate": 3.1492027334851934e-05, |
|
"loss": 0.0872, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.1161731207289294, |
|
"grad_norm": 1.3285380601882935, |
|
"learning_rate": 3.1397114654517845e-05, |
|
"loss": 0.1078, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 1.1218678815489749, |
|
"grad_norm": 2.1748809814453125, |
|
"learning_rate": 3.1302201974183756e-05, |
|
"loss": 0.0991, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 1.1275626423690206, |
|
"grad_norm": 1.5692195892333984, |
|
"learning_rate": 3.120728929384966e-05, |
|
"loss": 0.0852, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 1.133257403189066, |
|
"grad_norm": 1.2222596406936646, |
|
"learning_rate": 3.1112376613515564e-05, |
|
"loss": 0.1153, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 1.1389521640091116, |
|
"grad_norm": 2.329113245010376, |
|
"learning_rate": 3.1017463933181475e-05, |
|
"loss": 0.1013, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.144646924829157, |
|
"grad_norm": 0.9628048539161682, |
|
"learning_rate": 3.092255125284738e-05, |
|
"loss": 0.0983, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 1.1503416856492028, |
|
"grad_norm": 0.4654466211795807, |
|
"learning_rate": 3.082763857251329e-05, |
|
"loss": 0.0957, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 1.1560364464692483, |
|
"grad_norm": 0.722862184047699, |
|
"learning_rate": 3.07327258921792e-05, |
|
"loss": 0.0974, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 1.1617312072892938, |
|
"grad_norm": 4.287933349609375, |
|
"learning_rate": 3.0637813211845104e-05, |
|
"loss": 0.152, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 1.1674259681093395, |
|
"grad_norm": 1.48385488986969, |
|
"learning_rate": 3.054290053151101e-05, |
|
"loss": 0.0812, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.173120728929385, |
|
"grad_norm": 0.6806052327156067, |
|
"learning_rate": 3.044798785117692e-05, |
|
"loss": 0.082, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 1.1788154897494305, |
|
"grad_norm": 1.8856145143508911, |
|
"learning_rate": 3.0353075170842826e-05, |
|
"loss": 0.0913, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 1.184510250569476, |
|
"grad_norm": 1.6212036609649658, |
|
"learning_rate": 3.0258162490508733e-05, |
|
"loss": 0.0893, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 1.1902050113895217, |
|
"grad_norm": 2.615706205368042, |
|
"learning_rate": 3.0163249810174644e-05, |
|
"loss": 0.0886, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 1.1958997722095672, |
|
"grad_norm": 1.3023104667663574, |
|
"learning_rate": 3.0068337129840545e-05, |
|
"loss": 0.0953, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.2015945330296127, |
|
"grad_norm": 1.685548186302185, |
|
"learning_rate": 2.9973424449506455e-05, |
|
"loss": 0.147, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 1.2072892938496582, |
|
"grad_norm": 1.6766068935394287, |
|
"learning_rate": 2.9878511769172363e-05, |
|
"loss": 0.0698, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 1.212984054669704, |
|
"grad_norm": 1.5873198509216309, |
|
"learning_rate": 2.978359908883827e-05, |
|
"loss": 0.0766, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 1.2186788154897494, |
|
"grad_norm": 3.058396339416504, |
|
"learning_rate": 2.968868640850418e-05, |
|
"loss": 0.1245, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 1.224373576309795, |
|
"grad_norm": 0.7530670762062073, |
|
"learning_rate": 2.959377372817008e-05, |
|
"loss": 0.1088, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 1.2300683371298406, |
|
"grad_norm": 0.7259340882301331, |
|
"learning_rate": 2.949886104783599e-05, |
|
"loss": 0.0962, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 1.2357630979498861, |
|
"grad_norm": 0.7594104409217834, |
|
"learning_rate": 2.94039483675019e-05, |
|
"loss": 0.098, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 1.2414578587699316, |
|
"grad_norm": 0.6500957012176514, |
|
"learning_rate": 2.9309035687167807e-05, |
|
"loss": 0.0781, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 1.2471526195899771, |
|
"grad_norm": 1.0029600858688354, |
|
"learning_rate": 2.9214123006833714e-05, |
|
"loss": 0.0986, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 1.2528473804100229, |
|
"grad_norm": 2.1104345321655273, |
|
"learning_rate": 2.9119210326499625e-05, |
|
"loss": 0.111, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 1.2585421412300684, |
|
"grad_norm": 0.5959046483039856, |
|
"learning_rate": 2.9024297646165526e-05, |
|
"loss": 0.0705, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 1.2642369020501139, |
|
"grad_norm": 0.35329675674438477, |
|
"learning_rate": 2.8929384965831436e-05, |
|
"loss": 0.0849, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 1.2699316628701594, |
|
"grad_norm": 1.3002897500991821, |
|
"learning_rate": 2.8834472285497344e-05, |
|
"loss": 0.0823, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 1.275626423690205, |
|
"grad_norm": 1.5088927745819092, |
|
"learning_rate": 2.873955960516325e-05, |
|
"loss": 0.0931, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 1.2813211845102506, |
|
"grad_norm": 0.8356180191040039, |
|
"learning_rate": 2.8644646924829162e-05, |
|
"loss": 0.0908, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 1.287015945330296, |
|
"grad_norm": 1.3802344799041748, |
|
"learning_rate": 2.8549734244495063e-05, |
|
"loss": 0.1129, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 1.2927107061503418, |
|
"grad_norm": 3.4083027839660645, |
|
"learning_rate": 2.8454821564160973e-05, |
|
"loss": 0.0976, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 1.2984054669703873, |
|
"grad_norm": 2.6577627658843994, |
|
"learning_rate": 2.835990888382688e-05, |
|
"loss": 0.1106, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 1.3041002277904328, |
|
"grad_norm": 1.930678129196167, |
|
"learning_rate": 2.8264996203492788e-05, |
|
"loss": 0.1213, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 1.3097949886104785, |
|
"grad_norm": 0.6453108787536621, |
|
"learning_rate": 2.8170083523158695e-05, |
|
"loss": 0.0705, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 1.315489749430524, |
|
"grad_norm": 1.228203535079956, |
|
"learning_rate": 2.8075170842824606e-05, |
|
"loss": 0.1067, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 1.3211845102505695, |
|
"grad_norm": 2.278114080429077, |
|
"learning_rate": 2.7980258162490507e-05, |
|
"loss": 0.096, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 1.326879271070615, |
|
"grad_norm": 0.5799562335014343, |
|
"learning_rate": 2.7885345482156417e-05, |
|
"loss": 0.0707, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 1.3325740318906605, |
|
"grad_norm": 3.1779398918151855, |
|
"learning_rate": 2.7790432801822325e-05, |
|
"loss": 0.0929, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 1.3382687927107062, |
|
"grad_norm": 2.468745470046997, |
|
"learning_rate": 2.7695520121488232e-05, |
|
"loss": 0.0967, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 1.3439635535307517, |
|
"grad_norm": 1.107359528541565, |
|
"learning_rate": 2.7600607441154143e-05, |
|
"loss": 0.0798, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 1.3496583143507972, |
|
"grad_norm": 1.7985248565673828, |
|
"learning_rate": 2.7505694760820043e-05, |
|
"loss": 0.0876, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 1.355353075170843, |
|
"grad_norm": 1.1344681978225708, |
|
"learning_rate": 2.7410782080485954e-05, |
|
"loss": 0.0963, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 1.3610478359908884, |
|
"grad_norm": 1.5203508138656616, |
|
"learning_rate": 2.731586940015186e-05, |
|
"loss": 0.0715, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 1.366742596810934, |
|
"grad_norm": 2.8507308959960938, |
|
"learning_rate": 2.722095671981777e-05, |
|
"loss": 0.1341, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 1.3724373576309796, |
|
"grad_norm": 2.638719081878662, |
|
"learning_rate": 2.7126044039483676e-05, |
|
"loss": 0.1249, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 1.3781321184510251, |
|
"grad_norm": 0.7994898557662964, |
|
"learning_rate": 2.7031131359149587e-05, |
|
"loss": 0.1074, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 1.3838268792710706, |
|
"grad_norm": 1.2879323959350586, |
|
"learning_rate": 2.6936218678815488e-05, |
|
"loss": 0.0682, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 1.3895216400911161, |
|
"grad_norm": 0.7568653225898743, |
|
"learning_rate": 2.68413059984814e-05, |
|
"loss": 0.0929, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 1.3952164009111616, |
|
"grad_norm": 1.0090585947036743, |
|
"learning_rate": 2.6746393318147306e-05, |
|
"loss": 0.0945, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 1.4009111617312073, |
|
"grad_norm": 2.003541946411133, |
|
"learning_rate": 2.6651480637813213e-05, |
|
"loss": 0.0874, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 1.4066059225512528, |
|
"grad_norm": 2.603950023651123, |
|
"learning_rate": 2.6556567957479124e-05, |
|
"loss": 0.0616, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 1.4123006833712983, |
|
"grad_norm": 1.5461757183074951, |
|
"learning_rate": 2.6461655277145024e-05, |
|
"loss": 0.0893, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 1.417995444191344, |
|
"grad_norm": 1.2687543630599976, |
|
"learning_rate": 2.6366742596810935e-05, |
|
"loss": 0.0989, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 1.4236902050113895, |
|
"grad_norm": 1.3670613765716553, |
|
"learning_rate": 2.6271829916476843e-05, |
|
"loss": 0.1227, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.429384965831435, |
|
"grad_norm": 3.420806884765625, |
|
"learning_rate": 2.617691723614275e-05, |
|
"loss": 0.1326, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 1.4350797266514808, |
|
"grad_norm": 2.4182112216949463, |
|
"learning_rate": 2.608200455580866e-05, |
|
"loss": 0.1245, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 1.4407744874715263, |
|
"grad_norm": 0.6075257658958435, |
|
"learning_rate": 2.5987091875474568e-05, |
|
"loss": 0.0473, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 1.4464692482915718, |
|
"grad_norm": 2.5870461463928223, |
|
"learning_rate": 2.589217919514047e-05, |
|
"loss": 0.1004, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 1.4521640091116172, |
|
"grad_norm": 1.4278345108032227, |
|
"learning_rate": 2.579726651480638e-05, |
|
"loss": 0.1374, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 1.4578587699316627, |
|
"grad_norm": 2.7487120628356934, |
|
"learning_rate": 2.5702353834472287e-05, |
|
"loss": 0.0825, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 1.4635535307517085, |
|
"grad_norm": 0.9678652286529541, |
|
"learning_rate": 2.5607441154138194e-05, |
|
"loss": 0.0854, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 1.469248291571754, |
|
"grad_norm": 2.752776861190796, |
|
"learning_rate": 2.5512528473804105e-05, |
|
"loss": 0.0891, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 1.4749430523917995, |
|
"grad_norm": 2.2584118843078613, |
|
"learning_rate": 2.5417615793470005e-05, |
|
"loss": 0.0905, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 1.4806378132118452, |
|
"grad_norm": 0.9626738429069519, |
|
"learning_rate": 2.5322703113135916e-05, |
|
"loss": 0.0904, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 1.4863325740318907, |
|
"grad_norm": 2.8413443565368652, |
|
"learning_rate": 2.5227790432801824e-05, |
|
"loss": 0.0863, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 1.4920273348519362, |
|
"grad_norm": 2.619218349456787, |
|
"learning_rate": 2.513287775246773e-05, |
|
"loss": 0.1038, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 1.497722095671982, |
|
"grad_norm": 0.9081758260726929, |
|
"learning_rate": 2.503796507213364e-05, |
|
"loss": 0.08, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 1.5034168564920274, |
|
"grad_norm": 0.5638437867164612, |
|
"learning_rate": 2.4943052391799546e-05, |
|
"loss": 0.067, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 1.5091116173120729, |
|
"grad_norm": 0.5109384655952454, |
|
"learning_rate": 2.4848139711465453e-05, |
|
"loss": 0.09, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 1.5148063781321186, |
|
"grad_norm": 1.9207391738891602, |
|
"learning_rate": 2.475322703113136e-05, |
|
"loss": 0.1144, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 1.5205011389521639, |
|
"grad_norm": 8.998539924621582, |
|
"learning_rate": 2.4658314350797268e-05, |
|
"loss": 0.0964, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 1.5261958997722096, |
|
"grad_norm": 0.7903996109962463, |
|
"learning_rate": 2.4563401670463175e-05, |
|
"loss": 0.0715, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 1.531890660592255, |
|
"grad_norm": 0.3726605772972107, |
|
"learning_rate": 2.4468488990129082e-05, |
|
"loss": 0.1521, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 1.5375854214123006, |
|
"grad_norm": 0.42937004566192627, |
|
"learning_rate": 2.437357630979499e-05, |
|
"loss": 0.0708, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 1.5432801822323463, |
|
"grad_norm": 1.1452207565307617, |
|
"learning_rate": 2.4278663629460897e-05, |
|
"loss": 0.1002, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 1.5489749430523918, |
|
"grad_norm": 1.046943187713623, |
|
"learning_rate": 2.4183750949126804e-05, |
|
"loss": 0.1268, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 1.5546697038724373, |
|
"grad_norm": 3.5698766708374023, |
|
"learning_rate": 2.4088838268792712e-05, |
|
"loss": 0.0923, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 1.560364464692483, |
|
"grad_norm": 2.4647140502929688, |
|
"learning_rate": 2.399392558845862e-05, |
|
"loss": 0.0645, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 1.5660592255125285, |
|
"grad_norm": 2.386082887649536, |
|
"learning_rate": 2.3899012908124527e-05, |
|
"loss": 0.1273, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 1.571753986332574, |
|
"grad_norm": 0.756199061870575, |
|
"learning_rate": 2.3804100227790434e-05, |
|
"loss": 0.1246, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 1.5774487471526197, |
|
"grad_norm": 2.8555424213409424, |
|
"learning_rate": 2.370918754745634e-05, |
|
"loss": 0.0928, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 1.583143507972665, |
|
"grad_norm": 0.9831840991973877, |
|
"learning_rate": 2.361427486712225e-05, |
|
"loss": 0.0749, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 1.5888382687927107, |
|
"grad_norm": 1.1987966299057007, |
|
"learning_rate": 2.3519362186788156e-05, |
|
"loss": 0.1, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 1.5945330296127562, |
|
"grad_norm": 1.14650559425354, |
|
"learning_rate": 2.3424449506454063e-05, |
|
"loss": 0.0891, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 1.6002277904328017, |
|
"grad_norm": 2.077380657196045, |
|
"learning_rate": 2.332953682611997e-05, |
|
"loss": 0.0838, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 1.6059225512528474, |
|
"grad_norm": 0.967408299446106, |
|
"learning_rate": 2.3234624145785878e-05, |
|
"loss": 0.0725, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 1.611617312072893, |
|
"grad_norm": 0.815949022769928, |
|
"learning_rate": 2.3139711465451785e-05, |
|
"loss": 0.1292, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 1.6173120728929384, |
|
"grad_norm": 3.764101028442383, |
|
"learning_rate": 2.3044798785117693e-05, |
|
"loss": 0.1105, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 1.6230068337129842, |
|
"grad_norm": 1.8564128875732422, |
|
"learning_rate": 2.29498861047836e-05, |
|
"loss": 0.1112, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 1.6287015945330297, |
|
"grad_norm": 0.6224936246871948, |
|
"learning_rate": 2.2854973424449508e-05, |
|
"loss": 0.0828, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 1.6343963553530751, |
|
"grad_norm": 0.6306477785110474, |
|
"learning_rate": 2.2760060744115415e-05, |
|
"loss": 0.0787, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 1.6400911161731209, |
|
"grad_norm": 1.4436335563659668, |
|
"learning_rate": 2.2665148063781322e-05, |
|
"loss": 0.0877, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 1.6457858769931661, |
|
"grad_norm": 2.165911912918091, |
|
"learning_rate": 2.257023538344723e-05, |
|
"loss": 0.0771, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 1.6514806378132119, |
|
"grad_norm": 2.1198298931121826, |
|
"learning_rate": 2.2475322703113137e-05, |
|
"loss": 0.0801, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 1.6571753986332574, |
|
"grad_norm": 0.6920590400695801, |
|
"learning_rate": 2.2380410022779044e-05, |
|
"loss": 0.1039, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 1.6628701594533029, |
|
"grad_norm": 1.152017593383789, |
|
"learning_rate": 2.2285497342444952e-05, |
|
"loss": 0.1118, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 1.6685649202733486, |
|
"grad_norm": 1.1572786569595337, |
|
"learning_rate": 2.219058466211086e-05, |
|
"loss": 0.0853, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 1.674259681093394, |
|
"grad_norm": 1.9907824993133545, |
|
"learning_rate": 2.2095671981776766e-05, |
|
"loss": 0.1256, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 1.6799544419134396, |
|
"grad_norm": 0.39809224009513855, |
|
"learning_rate": 2.2000759301442674e-05, |
|
"loss": 0.0824, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 1.6856492027334853, |
|
"grad_norm": 1.5977418422698975, |
|
"learning_rate": 2.190584662110858e-05, |
|
"loss": 0.0604, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 1.6913439635535308, |
|
"grad_norm": 0.5906954407691956, |
|
"learning_rate": 2.181093394077449e-05, |
|
"loss": 0.0892, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 1.6970387243735763, |
|
"grad_norm": 1.2924199104309082, |
|
"learning_rate": 2.1716021260440396e-05, |
|
"loss": 0.0665, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 1.702733485193622, |
|
"grad_norm": 1.207828402519226, |
|
"learning_rate": 2.1621108580106303e-05, |
|
"loss": 0.1109, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 1.7084282460136673, |
|
"grad_norm": 2.1666877269744873, |
|
"learning_rate": 2.152619589977221e-05, |
|
"loss": 0.0754, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.714123006833713, |
|
"grad_norm": 2.3492608070373535, |
|
"learning_rate": 2.1431283219438118e-05, |
|
"loss": 0.1017, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 1.7198177676537585, |
|
"grad_norm": 1.4836989641189575, |
|
"learning_rate": 2.1336370539104025e-05, |
|
"loss": 0.0811, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 1.725512528473804, |
|
"grad_norm": 2.9044885635375977, |
|
"learning_rate": 2.1241457858769933e-05, |
|
"loss": 0.0541, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 1.7312072892938497, |
|
"grad_norm": 2.6181652545928955, |
|
"learning_rate": 2.114654517843584e-05, |
|
"loss": 0.0839, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 1.7369020501138952, |
|
"grad_norm": 0.5739513039588928, |
|
"learning_rate": 2.1051632498101747e-05, |
|
"loss": 0.0795, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 1.7425968109339407, |
|
"grad_norm": 1.8764702081680298, |
|
"learning_rate": 2.0956719817767655e-05, |
|
"loss": 0.0945, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 1.7482915717539864, |
|
"grad_norm": 1.5999131202697754, |
|
"learning_rate": 2.0861807137433562e-05, |
|
"loss": 0.0646, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 1.753986332574032, |
|
"grad_norm": 1.1158217191696167, |
|
"learning_rate": 2.076689445709947e-05, |
|
"loss": 0.0908, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 1.7596810933940774, |
|
"grad_norm": 0.7986214756965637, |
|
"learning_rate": 2.0671981776765377e-05, |
|
"loss": 0.153, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 1.7653758542141231, |
|
"grad_norm": 2.1894545555114746, |
|
"learning_rate": 2.0577069096431284e-05, |
|
"loss": 0.1073, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 1.7710706150341684, |
|
"grad_norm": 0.9665547013282776, |
|
"learning_rate": 2.048215641609719e-05, |
|
"loss": 0.0888, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 1.7767653758542141, |
|
"grad_norm": 0.6699535846710205, |
|
"learning_rate": 2.03872437357631e-05, |
|
"loss": 0.0872, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 1.7824601366742598, |
|
"grad_norm": 2.1171014308929443, |
|
"learning_rate": 2.0292331055429006e-05, |
|
"loss": 0.115, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 1.7881548974943051, |
|
"grad_norm": 1.2174530029296875, |
|
"learning_rate": 2.0197418375094914e-05, |
|
"loss": 0.0852, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 1.7938496583143508, |
|
"grad_norm": 2.183448553085327, |
|
"learning_rate": 2.010250569476082e-05, |
|
"loss": 0.0955, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 1.7995444191343963, |
|
"grad_norm": 0.9667510390281677, |
|
"learning_rate": 2.000759301442673e-05, |
|
"loss": 0.0902, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 1.8052391799544418, |
|
"grad_norm": 3.9229986667633057, |
|
"learning_rate": 1.9912680334092636e-05, |
|
"loss": 0.0941, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 1.8109339407744875, |
|
"grad_norm": 0.8753280639648438, |
|
"learning_rate": 1.9817767653758543e-05, |
|
"loss": 0.0776, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 1.816628701594533, |
|
"grad_norm": 1.5324796438217163, |
|
"learning_rate": 1.972285497342445e-05, |
|
"loss": 0.1014, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 1.8223234624145785, |
|
"grad_norm": 0.4302336275577545, |
|
"learning_rate": 1.9627942293090358e-05, |
|
"loss": 0.1232, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 1.8280182232346243, |
|
"grad_norm": 1.9868009090423584, |
|
"learning_rate": 1.9533029612756265e-05, |
|
"loss": 0.1117, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 1.8337129840546698, |
|
"grad_norm": 0.7633128762245178, |
|
"learning_rate": 1.9438116932422173e-05, |
|
"loss": 0.0646, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 1.8394077448747153, |
|
"grad_norm": 1.8619776964187622, |
|
"learning_rate": 1.934320425208808e-05, |
|
"loss": 0.1116, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 1.845102505694761, |
|
"grad_norm": 2.578927993774414, |
|
"learning_rate": 1.9248291571753987e-05, |
|
"loss": 0.0889, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 1.8507972665148062, |
|
"grad_norm": 2.5693464279174805, |
|
"learning_rate": 1.9153378891419895e-05, |
|
"loss": 0.0842, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 1.856492027334852, |
|
"grad_norm": 1.0885404348373413, |
|
"learning_rate": 1.9058466211085802e-05, |
|
"loss": 0.0909, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 1.8621867881548975, |
|
"grad_norm": 2.0621140003204346, |
|
"learning_rate": 1.896355353075171e-05, |
|
"loss": 0.0771, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 1.867881548974943, |
|
"grad_norm": 1.275061011314392, |
|
"learning_rate": 1.8868640850417617e-05, |
|
"loss": 0.1135, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 1.8735763097949887, |
|
"grad_norm": 1.1576536893844604, |
|
"learning_rate": 1.8773728170083524e-05, |
|
"loss": 0.0948, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 1.8792710706150342, |
|
"grad_norm": 0.5448009967803955, |
|
"learning_rate": 1.867881548974943e-05, |
|
"loss": 0.0908, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 1.8849658314350797, |
|
"grad_norm": 1.3317946195602417, |
|
"learning_rate": 1.858390280941534e-05, |
|
"loss": 0.1246, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 1.8906605922551254, |
|
"grad_norm": 1.0352773666381836, |
|
"learning_rate": 1.8488990129081246e-05, |
|
"loss": 0.074, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 1.896355353075171, |
|
"grad_norm": 1.7326701879501343, |
|
"learning_rate": 1.8394077448747154e-05, |
|
"loss": 0.0773, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 1.9020501138952164, |
|
"grad_norm": 1.2691752910614014, |
|
"learning_rate": 1.829916476841306e-05, |
|
"loss": 0.1251, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 1.907744874715262, |
|
"grad_norm": 3.039222240447998, |
|
"learning_rate": 1.8204252088078968e-05, |
|
"loss": 0.0819, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 1.9134396355353074, |
|
"grad_norm": 3.1683478355407715, |
|
"learning_rate": 1.8109339407744876e-05, |
|
"loss": 0.0673, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 1.919134396355353, |
|
"grad_norm": 1.2576136589050293, |
|
"learning_rate": 1.8014426727410783e-05, |
|
"loss": 0.0793, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 1.9248291571753986, |
|
"grad_norm": 1.9472663402557373, |
|
"learning_rate": 1.791951404707669e-05, |
|
"loss": 0.0968, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 1.930523917995444, |
|
"grad_norm": 2.7382168769836426, |
|
"learning_rate": 1.7824601366742598e-05, |
|
"loss": 0.0716, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 1.9362186788154898, |
|
"grad_norm": 0.67622971534729, |
|
"learning_rate": 1.7729688686408505e-05, |
|
"loss": 0.0688, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 1.9419134396355353, |
|
"grad_norm": 0.3040902614593506, |
|
"learning_rate": 1.7634776006074412e-05, |
|
"loss": 0.0733, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 1.9476082004555808, |
|
"grad_norm": 1.6363804340362549, |
|
"learning_rate": 1.753986332574032e-05, |
|
"loss": 0.1081, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 1.9533029612756265, |
|
"grad_norm": 0.7806687355041504, |
|
"learning_rate": 1.7444950645406227e-05, |
|
"loss": 0.0959, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 1.958997722095672, |
|
"grad_norm": 0.37240535020828247, |
|
"learning_rate": 1.7350037965072134e-05, |
|
"loss": 0.0776, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 1.9646924829157175, |
|
"grad_norm": 0.8642539978027344, |
|
"learning_rate": 1.7255125284738042e-05, |
|
"loss": 0.0689, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 1.9703872437357632, |
|
"grad_norm": 0.2137574553489685, |
|
"learning_rate": 1.716021260440395e-05, |
|
"loss": 0.0428, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 1.9760820045558085, |
|
"grad_norm": 1.5309349298477173, |
|
"learning_rate": 1.7065299924069857e-05, |
|
"loss": 0.1032, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 1.9817767653758542, |
|
"grad_norm": 0.8801184296607971, |
|
"learning_rate": 1.6970387243735764e-05, |
|
"loss": 0.0796, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 1.9874715261958997, |
|
"grad_norm": 0.5306097865104675, |
|
"learning_rate": 1.687547456340167e-05, |
|
"loss": 0.0421, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 1.9931662870159452, |
|
"grad_norm": 1.5307530164718628, |
|
"learning_rate": 1.678056188306758e-05, |
|
"loss": 0.0844, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 1.998861047835991, |
|
"grad_norm": 0.7120900750160217, |
|
"learning_rate": 1.6685649202733486e-05, |
|
"loss": 0.0752, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_accuracy": 0.9732167205578713, |
|
"eval_f1": 0.8090976270629773, |
|
"eval_loss": 0.10903619229793549, |
|
"eval_precision": 0.7895833333333333, |
|
"eval_recall": 0.8296009429196834, |
|
"eval_runtime": 25.6763, |
|
"eval_samples_per_second": 126.576, |
|
"eval_steps_per_second": 15.851, |
|
"step": 3512 |
|
}, |
|
{ |
|
"epoch": 2.0045558086560367, |
|
"grad_norm": 1.5597575902938843, |
|
"learning_rate": 1.6590736522399393e-05, |
|
"loss": 0.0975, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 2.010250569476082, |
|
"grad_norm": 1.3607456684112549, |
|
"learning_rate": 1.64958238420653e-05, |
|
"loss": 0.0771, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 2.0159453302961277, |
|
"grad_norm": 1.3556320667266846, |
|
"learning_rate": 1.6400911161731208e-05, |
|
"loss": 0.0853, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 2.021640091116173, |
|
"grad_norm": 1.3193871974945068, |
|
"learning_rate": 1.6305998481397115e-05, |
|
"loss": 0.0966, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 2.0273348519362187, |
|
"grad_norm": 4.825347423553467, |
|
"learning_rate": 1.6211085801063023e-05, |
|
"loss": 0.0916, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 2.0330296127562644, |
|
"grad_norm": 0.9464587569236755, |
|
"learning_rate": 1.611617312072893e-05, |
|
"loss": 0.0686, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 2.0387243735763096, |
|
"grad_norm": 1.426204800605774, |
|
"learning_rate": 1.6021260440394838e-05, |
|
"loss": 0.0741, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 2.0444191343963554, |
|
"grad_norm": 0.7591248154640198, |
|
"learning_rate": 1.5926347760060745e-05, |
|
"loss": 0.0636, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 2.050113895216401, |
|
"grad_norm": 0.7133740186691284, |
|
"learning_rate": 1.5831435079726652e-05, |
|
"loss": 0.0371, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 2.0558086560364464, |
|
"grad_norm": 1.1163349151611328, |
|
"learning_rate": 1.573652239939256e-05, |
|
"loss": 0.0799, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 2.061503416856492, |
|
"grad_norm": 2.288480281829834, |
|
"learning_rate": 1.5641609719058467e-05, |
|
"loss": 0.0739, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 2.067198177676538, |
|
"grad_norm": 0.6606568694114685, |
|
"learning_rate": 1.5546697038724374e-05, |
|
"loss": 0.0633, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 2.072892938496583, |
|
"grad_norm": 0.63154137134552, |
|
"learning_rate": 1.545178435839028e-05, |
|
"loss": 0.064, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 2.078587699316629, |
|
"grad_norm": 1.6103167533874512, |
|
"learning_rate": 1.535687167805619e-05, |
|
"loss": 0.0416, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 2.084282460136674, |
|
"grad_norm": 0.6107991337776184, |
|
"learning_rate": 1.5261958997722096e-05, |
|
"loss": 0.0405, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 2.08997722095672, |
|
"grad_norm": 0.8715682029724121, |
|
"learning_rate": 1.5167046317388004e-05, |
|
"loss": 0.0814, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 2.0956719817767655, |
|
"grad_norm": 0.7718030214309692, |
|
"learning_rate": 1.5072133637053911e-05, |
|
"loss": 0.061, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 2.1013667425968108, |
|
"grad_norm": 0.5676106214523315, |
|
"learning_rate": 1.4977220956719817e-05, |
|
"loss": 0.0462, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 2.1070615034168565, |
|
"grad_norm": 0.8370518088340759, |
|
"learning_rate": 1.4882308276385726e-05, |
|
"loss": 0.0686, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 2.112756264236902, |
|
"grad_norm": 1.26499605178833, |
|
"learning_rate": 1.4787395596051635e-05, |
|
"loss": 0.0604, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 2.1184510250569475, |
|
"grad_norm": 2.1694042682647705, |
|
"learning_rate": 1.469248291571754e-05, |
|
"loss": 0.0658, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 2.124145785876993, |
|
"grad_norm": 1.244402527809143, |
|
"learning_rate": 1.4597570235383448e-05, |
|
"loss": 0.0474, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 2.129840546697039, |
|
"grad_norm": 0.6539400219917297, |
|
"learning_rate": 1.4502657555049357e-05, |
|
"loss": 0.06, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 2.135535307517084, |
|
"grad_norm": 1.2413984537124634, |
|
"learning_rate": 1.4407744874715263e-05, |
|
"loss": 0.0497, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 2.14123006833713, |
|
"grad_norm": 0.9623197317123413, |
|
"learning_rate": 1.431283219438117e-05, |
|
"loss": 0.053, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 2.146924829157175, |
|
"grad_norm": 6.023757457733154, |
|
"learning_rate": 1.4217919514047076e-05, |
|
"loss": 0.0844, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 2.152619589977221, |
|
"grad_norm": 0.9312296509742737, |
|
"learning_rate": 1.4123006833712985e-05, |
|
"loss": 0.0863, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 2.1583143507972666, |
|
"grad_norm": 1.2199676036834717, |
|
"learning_rate": 1.4028094153378892e-05, |
|
"loss": 0.0692, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 2.164009111617312, |
|
"grad_norm": 1.3029935359954834, |
|
"learning_rate": 1.3933181473044798e-05, |
|
"loss": 0.078, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 2.1697038724373576, |
|
"grad_norm": 0.484778493642807, |
|
"learning_rate": 1.3838268792710707e-05, |
|
"loss": 0.0595, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 2.1753986332574033, |
|
"grad_norm": 0.8675511479377747, |
|
"learning_rate": 1.3743356112376616e-05, |
|
"loss": 0.0575, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 2.1810933940774486, |
|
"grad_norm": 1.9901808500289917, |
|
"learning_rate": 1.3648443432042522e-05, |
|
"loss": 0.0949, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 2.1867881548974943, |
|
"grad_norm": 2.4126904010772705, |
|
"learning_rate": 1.3553530751708429e-05, |
|
"loss": 0.0813, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 2.19248291571754, |
|
"grad_norm": 1.9077173471450806, |
|
"learning_rate": 1.3458618071374338e-05, |
|
"loss": 0.1219, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 2.1981776765375853, |
|
"grad_norm": 2.687405586242676, |
|
"learning_rate": 1.3363705391040244e-05, |
|
"loss": 0.0884, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 2.203872437357631, |
|
"grad_norm": 1.8448173999786377, |
|
"learning_rate": 1.3268792710706151e-05, |
|
"loss": 0.08, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 2.2095671981776768, |
|
"grad_norm": 0.3753576874732971, |
|
"learning_rate": 1.3173880030372057e-05, |
|
"loss": 0.0749, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 2.215261958997722, |
|
"grad_norm": 2.6273550987243652, |
|
"learning_rate": 1.3078967350037966e-05, |
|
"loss": 0.0774, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 2.2209567198177678, |
|
"grad_norm": 2.107893228530884, |
|
"learning_rate": 1.2984054669703875e-05, |
|
"loss": 0.0708, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 2.226651480637813, |
|
"grad_norm": 0.2233879417181015, |
|
"learning_rate": 1.2889141989369779e-05, |
|
"loss": 0.054, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 2.2323462414578588, |
|
"grad_norm": 1.054631233215332, |
|
"learning_rate": 1.2794229309035688e-05, |
|
"loss": 0.0654, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 2.2380410022779045, |
|
"grad_norm": 1.4175485372543335, |
|
"learning_rate": 1.2699316628701597e-05, |
|
"loss": 0.0553, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 2.2437357630979498, |
|
"grad_norm": 0.12185627222061157, |
|
"learning_rate": 1.2604403948367503e-05, |
|
"loss": 0.0938, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 2.2494305239179955, |
|
"grad_norm": 1.3942961692810059, |
|
"learning_rate": 1.250949126803341e-05, |
|
"loss": 0.0696, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 2.255125284738041, |
|
"grad_norm": 0.9566187262535095, |
|
"learning_rate": 1.2414578587699317e-05, |
|
"loss": 0.0599, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 2.2608200455580865, |
|
"grad_norm": 0.304811030626297, |
|
"learning_rate": 1.2319665907365225e-05, |
|
"loss": 0.0907, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 2.266514806378132, |
|
"grad_norm": 1.0834438800811768, |
|
"learning_rate": 1.2224753227031132e-05, |
|
"loss": 0.0573, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 2.2722095671981775, |
|
"grad_norm": 1.1209036111831665, |
|
"learning_rate": 1.212984054669704e-05, |
|
"loss": 0.0789, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 2.277904328018223, |
|
"grad_norm": 2.8387298583984375, |
|
"learning_rate": 1.2034927866362947e-05, |
|
"loss": 0.0982, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.283599088838269, |
|
"grad_norm": 1.0303692817687988, |
|
"learning_rate": 1.1940015186028854e-05, |
|
"loss": 0.0724, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 2.289293849658314, |
|
"grad_norm": 0.5699959397315979, |
|
"learning_rate": 1.1845102505694761e-05, |
|
"loss": 0.068, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 2.29498861047836, |
|
"grad_norm": 0.30835121870040894, |
|
"learning_rate": 1.1750189825360669e-05, |
|
"loss": 0.0452, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 2.3006833712984056, |
|
"grad_norm": 1.6180905103683472, |
|
"learning_rate": 1.1655277145026576e-05, |
|
"loss": 0.0887, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 2.306378132118451, |
|
"grad_norm": 0.6430849432945251, |
|
"learning_rate": 1.1560364464692483e-05, |
|
"loss": 0.0593, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 2.3120728929384966, |
|
"grad_norm": 1.6492300033569336, |
|
"learning_rate": 1.146545178435839e-05, |
|
"loss": 0.0537, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 2.3177676537585423, |
|
"grad_norm": 3.9676427841186523, |
|
"learning_rate": 1.1370539104024298e-05, |
|
"loss": 0.0745, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 2.3234624145785876, |
|
"grad_norm": 1.95708167552948, |
|
"learning_rate": 1.1275626423690206e-05, |
|
"loss": 0.0638, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 2.3291571753986333, |
|
"grad_norm": 1.8522908687591553, |
|
"learning_rate": 1.1180713743356113e-05, |
|
"loss": 0.0745, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 2.334851936218679, |
|
"grad_norm": 1.5721604824066162, |
|
"learning_rate": 1.108580106302202e-05, |
|
"loss": 0.0624, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 2.3405466970387243, |
|
"grad_norm": 0.46538046002388, |
|
"learning_rate": 1.0990888382687928e-05, |
|
"loss": 0.0669, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 2.34624145785877, |
|
"grad_norm": 1.6386337280273438, |
|
"learning_rate": 1.0895975702353835e-05, |
|
"loss": 0.0804, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 2.3519362186788153, |
|
"grad_norm": 0.9034251570701599, |
|
"learning_rate": 1.0801063022019742e-05, |
|
"loss": 0.0562, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 2.357630979498861, |
|
"grad_norm": 7.529191970825195, |
|
"learning_rate": 1.070615034168565e-05, |
|
"loss": 0.104, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 2.3633257403189067, |
|
"grad_norm": 1.01326584815979, |
|
"learning_rate": 1.0611237661351557e-05, |
|
"loss": 0.073, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 2.369020501138952, |
|
"grad_norm": 1.8131357431411743, |
|
"learning_rate": 1.0516324981017464e-05, |
|
"loss": 0.0612, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 2.3747152619589977, |
|
"grad_norm": 0.7966225147247314, |
|
"learning_rate": 1.0421412300683372e-05, |
|
"loss": 0.0672, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 2.3804100227790435, |
|
"grad_norm": 0.7099034190177917, |
|
"learning_rate": 1.032649962034928e-05, |
|
"loss": 0.053, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 2.3861047835990887, |
|
"grad_norm": 1.4396919012069702, |
|
"learning_rate": 1.0231586940015187e-05, |
|
"loss": 0.0477, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 2.3917995444191344, |
|
"grad_norm": 1.2298674583435059, |
|
"learning_rate": 1.0136674259681094e-05, |
|
"loss": 0.0548, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 2.3974943052391797, |
|
"grad_norm": 0.5935720801353455, |
|
"learning_rate": 1.0041761579347001e-05, |
|
"loss": 0.0592, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 2.4031890660592254, |
|
"grad_norm": 1.7547837495803833, |
|
"learning_rate": 9.946848899012909e-06, |
|
"loss": 0.0619, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 2.408883826879271, |
|
"grad_norm": 0.3924281597137451, |
|
"learning_rate": 9.851936218678816e-06, |
|
"loss": 0.0316, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 2.4145785876993164, |
|
"grad_norm": 1.2887072563171387, |
|
"learning_rate": 9.757023538344723e-06, |
|
"loss": 0.0616, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 2.420273348519362, |
|
"grad_norm": 0.9915182590484619, |
|
"learning_rate": 9.66211085801063e-06, |
|
"loss": 0.0673, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 2.425968109339408, |
|
"grad_norm": 4.4653167724609375, |
|
"learning_rate": 9.567198177676538e-06, |
|
"loss": 0.0862, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 2.431662870159453, |
|
"grad_norm": 1.6697877645492554, |
|
"learning_rate": 9.472285497342445e-06, |
|
"loss": 0.048, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 2.437357630979499, |
|
"grad_norm": 1.1500027179718018, |
|
"learning_rate": 9.377372817008353e-06, |
|
"loss": 0.0733, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 2.4430523917995446, |
|
"grad_norm": 3.383201837539673, |
|
"learning_rate": 9.28246013667426e-06, |
|
"loss": 0.0666, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 2.44874715261959, |
|
"grad_norm": 0.4077356159687042, |
|
"learning_rate": 9.187547456340167e-06, |
|
"loss": 0.0694, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 2.4544419134396356, |
|
"grad_norm": 0.7718432545661926, |
|
"learning_rate": 9.092634776006075e-06, |
|
"loss": 0.0515, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 2.4601366742596813, |
|
"grad_norm": 0.4951815605163574, |
|
"learning_rate": 8.997722095671982e-06, |
|
"loss": 0.0511, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 2.4658314350797266, |
|
"grad_norm": 1.9625097513198853, |
|
"learning_rate": 8.90280941533789e-06, |
|
"loss": 0.0576, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 2.4715261958997723, |
|
"grad_norm": 2.4783849716186523, |
|
"learning_rate": 8.807896735003797e-06, |
|
"loss": 0.0924, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 2.477220956719818, |
|
"grad_norm": 0.669106125831604, |
|
"learning_rate": 8.712984054669704e-06, |
|
"loss": 0.0698, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 2.4829157175398633, |
|
"grad_norm": 1.0594432353973389, |
|
"learning_rate": 8.618071374335612e-06, |
|
"loss": 0.0562, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 2.488610478359909, |
|
"grad_norm": 3.4004504680633545, |
|
"learning_rate": 8.523158694001519e-06, |
|
"loss": 0.102, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 2.4943052391799543, |
|
"grad_norm": 1.7753770351409912, |
|
"learning_rate": 8.428246013667426e-06, |
|
"loss": 0.087, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 3.4696385860443115, |
|
"learning_rate": 8.333333333333334e-06, |
|
"loss": 0.0491, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 2.5056947608200457, |
|
"grad_norm": 0.3402756452560425, |
|
"learning_rate": 8.238420652999241e-06, |
|
"loss": 0.0633, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 2.511389521640091, |
|
"grad_norm": 1.155333399772644, |
|
"learning_rate": 8.143507972665148e-06, |
|
"loss": 0.0814, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 2.5170842824601367, |
|
"grad_norm": 1.0245141983032227, |
|
"learning_rate": 8.048595292331056e-06, |
|
"loss": 0.0796, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 2.522779043280182, |
|
"grad_norm": 1.1780219078063965, |
|
"learning_rate": 7.953682611996963e-06, |
|
"loss": 0.0565, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 2.5284738041002277, |
|
"grad_norm": 1.4757074117660522, |
|
"learning_rate": 7.85876993166287e-06, |
|
"loss": 0.0689, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 2.5341685649202734, |
|
"grad_norm": 0.504363477230072, |
|
"learning_rate": 7.763857251328778e-06, |
|
"loss": 0.0483, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 2.5398633257403187, |
|
"grad_norm": 1.0538045167922974, |
|
"learning_rate": 7.668944570994685e-06, |
|
"loss": 0.0443, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 2.5455580865603644, |
|
"grad_norm": 0.31798219680786133, |
|
"learning_rate": 7.574031890660592e-06, |
|
"loss": 0.0616, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 2.55125284738041, |
|
"grad_norm": 2.8450510501861572, |
|
"learning_rate": 7.4791192103265e-06, |
|
"loss": 0.0416, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 2.5569476082004554, |
|
"grad_norm": 1.6050668954849243, |
|
"learning_rate": 7.384206529992407e-06, |
|
"loss": 0.0623, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 2.562642369020501, |
|
"grad_norm": 1.804625391960144, |
|
"learning_rate": 7.289293849658315e-06, |
|
"loss": 0.0684, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 2.568337129840547, |
|
"grad_norm": 0.7985472083091736, |
|
"learning_rate": 7.194381169324223e-06, |
|
"loss": 0.0778, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 2.574031890660592, |
|
"grad_norm": 0.7117947340011597, |
|
"learning_rate": 7.099468488990129e-06, |
|
"loss": 0.0729, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 2.579726651480638, |
|
"grad_norm": 4.265881061553955, |
|
"learning_rate": 7.004555808656037e-06, |
|
"loss": 0.0676, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 2.5854214123006836, |
|
"grad_norm": 1.5959819555282593, |
|
"learning_rate": 6.909643128321943e-06, |
|
"loss": 0.0794, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 2.591116173120729, |
|
"grad_norm": 0.9075917601585388, |
|
"learning_rate": 6.814730447987852e-06, |
|
"loss": 0.089, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 2.5968109339407746, |
|
"grad_norm": 0.2236226499080658, |
|
"learning_rate": 6.719817767653759e-06, |
|
"loss": 0.0651, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 2.6025056947608203, |
|
"grad_norm": 0.5465313792228699, |
|
"learning_rate": 6.624905087319666e-06, |
|
"loss": 0.0507, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 2.6082004555808656, |
|
"grad_norm": 1.364637017250061, |
|
"learning_rate": 6.529992406985573e-06, |
|
"loss": 0.0732, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 2.6138952164009113, |
|
"grad_norm": 0.6693851947784424, |
|
"learning_rate": 6.435079726651482e-06, |
|
"loss": 0.0568, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 2.619589977220957, |
|
"grad_norm": 0.8072832822799683, |
|
"learning_rate": 6.340167046317388e-06, |
|
"loss": 0.049, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 2.6252847380410023, |
|
"grad_norm": 2.479691505432129, |
|
"learning_rate": 6.245254365983296e-06, |
|
"loss": 0.051, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 2.630979498861048, |
|
"grad_norm": 1.4793975353240967, |
|
"learning_rate": 6.150341685649203e-06, |
|
"loss": 0.0959, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 2.6366742596810933, |
|
"grad_norm": 0.482715368270874, |
|
"learning_rate": 6.05542900531511e-06, |
|
"loss": 0.0661, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 2.642369020501139, |
|
"grad_norm": 1.2736594676971436, |
|
"learning_rate": 5.960516324981018e-06, |
|
"loss": 0.0703, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 2.6480637813211843, |
|
"grad_norm": 2.8748881816864014, |
|
"learning_rate": 5.865603644646925e-06, |
|
"loss": 0.0706, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 2.65375854214123, |
|
"grad_norm": 1.8054091930389404, |
|
"learning_rate": 5.7706909643128325e-06, |
|
"loss": 0.0602, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 2.6594533029612757, |
|
"grad_norm": 1.6916309595108032, |
|
"learning_rate": 5.67577828397874e-06, |
|
"loss": 0.0747, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 2.665148063781321, |
|
"grad_norm": 0.40390637516975403, |
|
"learning_rate": 5.580865603644647e-06, |
|
"loss": 0.0501, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 2.6708428246013667, |
|
"grad_norm": 0.5195274949073792, |
|
"learning_rate": 5.4859529233105546e-06, |
|
"loss": 0.0453, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 2.6765375854214124, |
|
"grad_norm": 1.2867252826690674, |
|
"learning_rate": 5.391040242976462e-06, |
|
"loss": 0.0704, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 2.6822323462414577, |
|
"grad_norm": 0.6296011209487915, |
|
"learning_rate": 5.296127562642369e-06, |
|
"loss": 0.0673, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 2.6879271070615034, |
|
"grad_norm": 2.337752342224121, |
|
"learning_rate": 5.201214882308277e-06, |
|
"loss": 0.079, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 2.693621867881549, |
|
"grad_norm": 0.4544166624546051, |
|
"learning_rate": 5.106302201974184e-06, |
|
"loss": 0.0703, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 2.6993166287015944, |
|
"grad_norm": 0.8933178782463074, |
|
"learning_rate": 5.011389521640091e-06, |
|
"loss": 0.0808, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 2.70501138952164, |
|
"grad_norm": 0.5265564322471619, |
|
"learning_rate": 4.916476841305999e-06, |
|
"loss": 0.0755, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 2.710706150341686, |
|
"grad_norm": 1.0185718536376953, |
|
"learning_rate": 4.821564160971906e-06, |
|
"loss": 0.0748, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 2.716400911161731, |
|
"grad_norm": 0.5462549924850464, |
|
"learning_rate": 4.7266514806378134e-06, |
|
"loss": 0.0675, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 2.722095671981777, |
|
"grad_norm": 0.5356141328811646, |
|
"learning_rate": 4.631738800303721e-06, |
|
"loss": 0.0601, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 2.7277904328018225, |
|
"grad_norm": 0.587726891040802, |
|
"learning_rate": 4.536826119969628e-06, |
|
"loss": 0.0542, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 2.733485193621868, |
|
"grad_norm": 0.5419397950172424, |
|
"learning_rate": 4.4419134396355355e-06, |
|
"loss": 0.0814, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 2.7391799544419135, |
|
"grad_norm": 0.7181763052940369, |
|
"learning_rate": 4.347000759301443e-06, |
|
"loss": 0.0674, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 2.7448747152619593, |
|
"grad_norm": 2.263903856277466, |
|
"learning_rate": 4.25208807896735e-06, |
|
"loss": 0.0572, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 2.7505694760820045, |
|
"grad_norm": 1.2922956943511963, |
|
"learning_rate": 4.157175398633258e-06, |
|
"loss": 0.0745, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 2.7562642369020502, |
|
"grad_norm": 0.6281763315200806, |
|
"learning_rate": 4.062262718299165e-06, |
|
"loss": 0.0686, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 2.7619589977220955, |
|
"grad_norm": 0.612315833568573, |
|
"learning_rate": 3.967350037965072e-06, |
|
"loss": 0.0805, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 2.7676537585421412, |
|
"grad_norm": 0.9715772271156311, |
|
"learning_rate": 3.87243735763098e-06, |
|
"loss": 0.0503, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 2.7733485193621865, |
|
"grad_norm": 2.821282148361206, |
|
"learning_rate": 3.777524677296887e-06, |
|
"loss": 0.0831, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 2.7790432801822322, |
|
"grad_norm": 1.8253675699234009, |
|
"learning_rate": 3.682611996962795e-06, |
|
"loss": 0.054, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 2.784738041002278, |
|
"grad_norm": 1.2902501821517944, |
|
"learning_rate": 3.5876993166287018e-06, |
|
"loss": 0.0664, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 2.7904328018223232, |
|
"grad_norm": 1.3351030349731445, |
|
"learning_rate": 3.492786636294609e-06, |
|
"loss": 0.0365, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 2.796127562642369, |
|
"grad_norm": 1.3298659324645996, |
|
"learning_rate": 3.3978739559605165e-06, |
|
"loss": 0.1199, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 2.8018223234624147, |
|
"grad_norm": 1.367462158203125, |
|
"learning_rate": 3.302961275626424e-06, |
|
"loss": 0.0567, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 2.80751708428246, |
|
"grad_norm": 0.7492626309394836, |
|
"learning_rate": 3.208048595292331e-06, |
|
"loss": 0.07, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 2.8132118451025057, |
|
"grad_norm": 1.3312326669692993, |
|
"learning_rate": 3.1131359149582386e-06, |
|
"loss": 0.0738, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 2.8189066059225514, |
|
"grad_norm": 1.0007520914077759, |
|
"learning_rate": 3.018223234624146e-06, |
|
"loss": 0.0574, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 2.8246013667425967, |
|
"grad_norm": 0.8825734257698059, |
|
"learning_rate": 2.9233105542900533e-06, |
|
"loss": 0.0805, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 2.8302961275626424, |
|
"grad_norm": 2.035719633102417, |
|
"learning_rate": 2.8283978739559607e-06, |
|
"loss": 0.1008, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 2.835990888382688, |
|
"grad_norm": 1.2549408674240112, |
|
"learning_rate": 2.733485193621868e-06, |
|
"loss": 0.093, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 2.8416856492027334, |
|
"grad_norm": 0.5822533965110779, |
|
"learning_rate": 2.6385725132877754e-06, |
|
"loss": 0.0503, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 2.847380410022779, |
|
"grad_norm": 1.7420156002044678, |
|
"learning_rate": 2.5436598329536827e-06, |
|
"loss": 0.0792, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 2.853075170842825, |
|
"grad_norm": 0.5257258415222168, |
|
"learning_rate": 2.44874715261959e-06, |
|
"loss": 0.0494, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 2.85876993166287, |
|
"grad_norm": 1.4526097774505615, |
|
"learning_rate": 2.3538344722854975e-06, |
|
"loss": 0.0743, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 2.864464692482916, |
|
"grad_norm": 1.7381209135055542, |
|
"learning_rate": 2.258921791951405e-06, |
|
"loss": 0.0697, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 2.8701594533029615, |
|
"grad_norm": 0.9051744341850281, |
|
"learning_rate": 2.164009111617312e-06, |
|
"loss": 0.0439, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 2.875854214123007, |
|
"grad_norm": 1.1579861640930176, |
|
"learning_rate": 2.0690964312832195e-06, |
|
"loss": 0.0514, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 2.8815489749430525, |
|
"grad_norm": 0.17134447395801544, |
|
"learning_rate": 1.974183750949127e-06, |
|
"loss": 0.0587, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 2.887243735763098, |
|
"grad_norm": 2.1867082118988037, |
|
"learning_rate": 1.879271070615034e-06, |
|
"loss": 0.0806, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 2.8929384965831435, |
|
"grad_norm": 1.2143350839614868, |
|
"learning_rate": 1.7843583902809414e-06, |
|
"loss": 0.0817, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 2.8986332574031892, |
|
"grad_norm": 1.7585662603378296, |
|
"learning_rate": 1.6894457099468492e-06, |
|
"loss": 0.0561, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 2.9043280182232345, |
|
"grad_norm": 1.874373435974121, |
|
"learning_rate": 1.5945330296127566e-06, |
|
"loss": 0.0553, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 2.91002277904328, |
|
"grad_norm": 5.039700031280518, |
|
"learning_rate": 1.4996203492786637e-06, |
|
"loss": 0.0845, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 2.9157175398633255, |
|
"grad_norm": 1.0349217653274536, |
|
"learning_rate": 1.404707668944571e-06, |
|
"loss": 0.0774, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 2.921412300683371, |
|
"grad_norm": 1.7359356880187988, |
|
"learning_rate": 1.3097949886104784e-06, |
|
"loss": 0.0483, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 2.927107061503417, |
|
"grad_norm": 3.1591551303863525, |
|
"learning_rate": 1.2148823082763858e-06, |
|
"loss": 0.0682, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 2.932801822323462, |
|
"grad_norm": 2.8375813961029053, |
|
"learning_rate": 1.1199696279422931e-06, |
|
"loss": 0.0608, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 2.938496583143508, |
|
"grad_norm": 0.6938211917877197, |
|
"learning_rate": 1.0250569476082005e-06, |
|
"loss": 0.0691, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 2.9441913439635536, |
|
"grad_norm": 0.46077239513397217, |
|
"learning_rate": 9.301442672741079e-07, |
|
"loss": 0.068, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 2.949886104783599, |
|
"grad_norm": 1.4797725677490234, |
|
"learning_rate": 8.352315869400152e-07, |
|
"loss": 0.0947, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 2.9555808656036446, |
|
"grad_norm": 1.3453457355499268, |
|
"learning_rate": 7.403189066059226e-07, |
|
"loss": 0.082, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 2.9612756264236904, |
|
"grad_norm": 0.7412590384483337, |
|
"learning_rate": 6.4540622627183e-07, |
|
"loss": 0.0533, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 2.9669703872437356, |
|
"grad_norm": 2.559391975402832, |
|
"learning_rate": 5.504935459377373e-07, |
|
"loss": 0.0693, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 2.9726651480637813, |
|
"grad_norm": 0.8116249442100525, |
|
"learning_rate": 4.5558086560364467e-07, |
|
"loss": 0.088, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 2.978359908883827, |
|
"grad_norm": 0.675849437713623, |
|
"learning_rate": 3.6066818526955203e-07, |
|
"loss": 0.072, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 2.9840546697038723, |
|
"grad_norm": 1.1731585264205933, |
|
"learning_rate": 2.657555049354594e-07, |
|
"loss": 0.0511, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 2.989749430523918, |
|
"grad_norm": 0.8083361983299255, |
|
"learning_rate": 1.7084282460136675e-07, |
|
"loss": 0.0528, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 2.995444191343964, |
|
"grad_norm": 0.12277799099683762, |
|
"learning_rate": 7.593014426727411e-08, |
|
"loss": 0.0482, |
|
"step": 5260 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 5268, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2751824963545344.0, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |