{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9990426041168023,
  "eval_steps": 500,
  "global_step": 348,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0057443752991862135,
      "grad_norm": 5.363491058349609,
      "learning_rate": 2.8571428571428575e-07,
      "loss": 0.8587,
      "step": 1
    },
    {
      "epoch": 0.011488750598372427,
      "grad_norm": 5.751570701599121,
      "learning_rate": 5.714285714285715e-07,
      "loss": 0.8936,
      "step": 2
    },
    {
      "epoch": 0.01723312589755864,
      "grad_norm": 5.681423187255859,
      "learning_rate": 8.571428571428572e-07,
      "loss": 0.9174,
      "step": 3
    },
    {
      "epoch": 0.022977501196744854,
      "grad_norm": 5.466259956359863,
      "learning_rate": 1.142857142857143e-06,
      "loss": 0.8825,
      "step": 4
    },
    {
      "epoch": 0.028721876495931067,
      "grad_norm": 5.4661688804626465,
      "learning_rate": 1.4285714285714286e-06,
      "loss": 0.8794,
      "step": 5
    },
    {
      "epoch": 0.03446625179511728,
      "grad_norm": 5.406000137329102,
      "learning_rate": 1.7142857142857145e-06,
      "loss": 0.8566,
      "step": 6
    },
    {
      "epoch": 0.040210627094303494,
      "grad_norm": 4.255201816558838,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.8527,
      "step": 7
    },
    {
      "epoch": 0.04595500239348971,
      "grad_norm": 3.8569490909576416,
      "learning_rate": 2.285714285714286e-06,
      "loss": 0.8273,
      "step": 8
    },
    {
      "epoch": 0.05169937769267592,
      "grad_norm": 2.5158278942108154,
      "learning_rate": 2.571428571428571e-06,
      "loss": 0.7763,
      "step": 9
    },
    {
      "epoch": 0.057443752991862135,
      "grad_norm": 2.4135913848876953,
      "learning_rate": 2.8571428571428573e-06,
      "loss": 0.832,
      "step": 10
    },
    {
      "epoch": 0.06318812829104835,
      "grad_norm": 2.236311674118042,
      "learning_rate": 3.142857142857143e-06,
      "loss": 0.8195,
      "step": 11
    },
    {
      "epoch": 0.06893250359023456,
      "grad_norm": 2.0858547687530518,
      "learning_rate": 3.428571428571429e-06,
      "loss": 0.7293,
      "step": 12
    },
    {
      "epoch": 0.07467687888942078,
      "grad_norm": 3.858452320098877,
      "learning_rate": 3.7142857142857146e-06,
      "loss": 0.7993,
      "step": 13
    },
    {
      "epoch": 0.08042125418860699,
      "grad_norm": 3.8865373134613037,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.7821,
      "step": 14
    },
    {
      "epoch": 0.0861656294877932,
      "grad_norm": 3.8490350246429443,
      "learning_rate": 4.2857142857142855e-06,
      "loss": 0.7953,
      "step": 15
    },
    {
      "epoch": 0.09191000478697942,
      "grad_norm": 3.2923967838287354,
      "learning_rate": 4.571428571428572e-06,
      "loss": 0.7904,
      "step": 16
    },
    {
      "epoch": 0.09765438008616563,
      "grad_norm": 2.2245757579803467,
      "learning_rate": 4.857142857142858e-06,
      "loss": 0.7397,
      "step": 17
    },
    {
      "epoch": 0.10339875538535184,
      "grad_norm": 1.9646612405776978,
      "learning_rate": 5.142857142857142e-06,
      "loss": 0.7405,
      "step": 18
    },
    {
      "epoch": 0.10914313068453806,
      "grad_norm": 1.5325367450714111,
      "learning_rate": 5.428571428571429e-06,
      "loss": 0.6697,
      "step": 19
    },
    {
      "epoch": 0.11488750598372427,
      "grad_norm": 1.4567933082580566,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 0.6977,
      "step": 20
    },
    {
      "epoch": 0.12063188128291048,
      "grad_norm": 1.4041451215744019,
      "learning_rate": 6e-06,
      "loss": 0.6796,
      "step": 21
    },
    {
      "epoch": 0.1263762565820967,
      "grad_norm": 1.1699495315551758,
      "learning_rate": 6.285714285714286e-06,
      "loss": 0.674,
      "step": 22
    },
    {
      "epoch": 0.13212063188128292,
      "grad_norm": 1.0591332912445068,
      "learning_rate": 6.571428571428572e-06,
      "loss": 0.6336,
      "step": 23
    },
    {
      "epoch": 0.13786500718046912,
      "grad_norm": 1.2895734310150146,
      "learning_rate": 6.857142857142858e-06,
      "loss": 0.6701,
      "step": 24
    },
    {
      "epoch": 0.14360938247965535,
      "grad_norm": 1.0462803840637207,
      "learning_rate": 7.1428571428571436e-06,
      "loss": 0.6557,
      "step": 25
    },
    {
      "epoch": 0.14935375777884155,
      "grad_norm": 1.1943080425262451,
      "learning_rate": 7.428571428571429e-06,
      "loss": 0.6453,
      "step": 26
    },
    {
      "epoch": 0.15509813307802778,
      "grad_norm": 1.0741592645645142,
      "learning_rate": 7.714285714285716e-06,
      "loss": 0.6813,
      "step": 27
    },
    {
      "epoch": 0.16084250837721398,
      "grad_norm": 0.9222673773765564,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.6415,
      "step": 28
    },
    {
      "epoch": 0.1665868836764002,
      "grad_norm": 0.8281923532485962,
      "learning_rate": 8.285714285714287e-06,
      "loss": 0.6287,
      "step": 29
    },
    {
      "epoch": 0.1723312589755864,
      "grad_norm": 1.0101515054702759,
      "learning_rate": 8.571428571428571e-06,
      "loss": 0.6347,
      "step": 30
    },
    {
      "epoch": 0.17807563427477263,
      "grad_norm": 0.9029678702354431,
      "learning_rate": 8.857142857142858e-06,
      "loss": 0.5939,
      "step": 31
    },
    {
      "epoch": 0.18382000957395883,
      "grad_norm": 0.6645869016647339,
      "learning_rate": 9.142857142857144e-06,
      "loss": 0.5704,
      "step": 32
    },
    {
      "epoch": 0.18956438487314506,
      "grad_norm": 0.7702145576477051,
      "learning_rate": 9.42857142857143e-06,
      "loss": 0.6125,
      "step": 33
    },
    {
      "epoch": 0.19530876017233126,
      "grad_norm": 0.8352411985397339,
      "learning_rate": 9.714285714285715e-06,
      "loss": 0.6332,
      "step": 34
    },
    {
      "epoch": 0.20105313547151749,
      "grad_norm": 0.6543819308280945,
      "learning_rate": 1e-05,
      "loss": 0.6196,
      "step": 35
    },
    {
      "epoch": 0.20679751077070369,
      "grad_norm": 0.6806799173355103,
      "learning_rate": 9.999748146823376e-06,
      "loss": 0.6135,
      "step": 36
    },
    {
      "epoch": 0.2125418860698899,
      "grad_norm": 0.7468794584274292,
      "learning_rate": 9.99899261266551e-06,
      "loss": 0.5972,
      "step": 37
    },
    {
      "epoch": 0.2182862613690761,
      "grad_norm": 0.6796684861183167,
      "learning_rate": 9.997733473639876e-06,
      "loss": 0.6111,
      "step": 38
    },
    {
      "epoch": 0.22403063666826234,
      "grad_norm": 0.6619651913642883,
      "learning_rate": 9.995970856593739e-06,
      "loss": 0.6117,
      "step": 39
    },
    {
      "epoch": 0.22977501196744854,
      "grad_norm": 0.6917636394500732,
      "learning_rate": 9.993704939095376e-06,
      "loss": 0.5887,
      "step": 40
    },
    {
      "epoch": 0.23551938726663477,
      "grad_norm": 0.6591335535049438,
      "learning_rate": 9.9909359494162e-06,
      "loss": 0.631,
      "step": 41
    },
    {
      "epoch": 0.24126376256582097,
      "grad_norm": 0.7333484888076782,
      "learning_rate": 9.987664166507749e-06,
      "loss": 0.5682,
      "step": 42
    },
    {
      "epoch": 0.2470081378650072,
      "grad_norm": 0.7811906933784485,
      "learning_rate": 9.983889919973586e-06,
      "loss": 0.6056,
      "step": 43
    },
    {
      "epoch": 0.2527525131641934,
      "grad_norm": 0.691569447517395,
      "learning_rate": 9.979613590036108e-06,
      "loss": 0.6209,
      "step": 44
    },
    {
      "epoch": 0.2584968884633796,
      "grad_norm": 0.684109091758728,
      "learning_rate": 9.974835607498224e-06,
      "loss": 0.605,
      "step": 45
    },
    {
      "epoch": 0.26424126376256585,
      "grad_norm": 0.7513927817344666,
      "learning_rate": 9.969556453699966e-06,
      "loss": 0.556,
      "step": 46
    },
    {
      "epoch": 0.26998563906175205,
      "grad_norm": 0.7860096096992493,
      "learning_rate": 9.963776660469996e-06,
      "loss": 0.5852,
      "step": 47
    },
    {
      "epoch": 0.27573001436093825,
      "grad_norm": 0.555550754070282,
      "learning_rate": 9.957496810072027e-06,
      "loss": 0.5736,
      "step": 48
    },
    {
      "epoch": 0.28147438966012445,
      "grad_norm": 0.8144235014915466,
      "learning_rate": 9.95071753514617e-06,
      "loss": 0.6078,
      "step": 49
    },
    {
      "epoch": 0.2872187649593107,
      "grad_norm": 0.6273301839828491,
      "learning_rate": 9.943439518645193e-06,
      "loss": 0.5638,
      "step": 50
    },
    {
      "epoch": 0.2929631402584969,
      "grad_norm": 0.5855135321617126,
      "learning_rate": 9.935663493765726e-06,
      "loss": 0.5501,
      "step": 51
    },
    {
      "epoch": 0.2987075155576831,
      "grad_norm": 0.7060447931289673,
      "learning_rate": 9.9273902438744e-06,
      "loss": 0.596,
      "step": 52
    },
    {
      "epoch": 0.3044518908568693,
      "grad_norm": 0.6160895228385925,
      "learning_rate": 9.918620602428916e-06,
      "loss": 0.568,
      "step": 53
    },
    {
      "epoch": 0.31019626615605556,
      "grad_norm": 0.6151447296142578,
      "learning_rate": 9.909355452894098e-06,
      "loss": 0.5581,
      "step": 54
    },
    {
      "epoch": 0.31594064145524176,
      "grad_norm": 0.5906559824943542,
      "learning_rate": 9.899595728652883e-06,
      "loss": 0.5628,
      "step": 55
    },
    {
      "epoch": 0.32168501675442795,
      "grad_norm": 0.6064203977584839,
      "learning_rate": 9.889342412912296e-06,
      "loss": 0.5708,
      "step": 56
    },
    {
      "epoch": 0.32742939205361415,
      "grad_norm": 0.645464301109314,
      "learning_rate": 9.878596538604388e-06,
      "loss": 0.5792,
      "step": 57
    },
    {
      "epoch": 0.3331737673528004,
      "grad_norm": 0.6362553238868713,
      "learning_rate": 9.867359188282193e-06,
      "loss": 0.5733,
      "step": 58
    },
    {
      "epoch": 0.3389181426519866,
      "grad_norm": 0.680167019367218,
      "learning_rate": 9.855631494010661e-06,
      "loss": 0.5473,
      "step": 59
    },
    {
      "epoch": 0.3446625179511728,
      "grad_norm": 0.6046214699745178,
      "learning_rate": 9.843414637252615e-06,
      "loss": 0.5568,
      "step": 60
    },
    {
      "epoch": 0.350406893250359,
      "grad_norm": 0.6066192388534546,
      "learning_rate": 9.830709848749727e-06,
      "loss": 0.5732,
      "step": 61
    },
    {
      "epoch": 0.35615126854954526,
      "grad_norm": 0.5794113278388977,
      "learning_rate": 9.817518408398536e-06,
      "loss": 0.5825,
      "step": 62
    },
    {
      "epoch": 0.36189564384873146,
      "grad_norm": 0.5537358522415161,
      "learning_rate": 9.803841645121505e-06,
      "loss": 0.5664,
      "step": 63
    },
    {
      "epoch": 0.36764001914791766,
      "grad_norm": 0.6195133328437805,
      "learning_rate": 9.78968093673314e-06,
      "loss": 0.55,
      "step": 64
    },
    {
      "epoch": 0.37338439444710386,
      "grad_norm": 0.5777291655540466,
      "learning_rate": 9.775037709801206e-06,
      "loss": 0.572,
      "step": 65
    },
    {
      "epoch": 0.3791287697462901,
      "grad_norm": 0.6024558544158936,
      "learning_rate": 9.759913439502982e-06,
      "loss": 0.5657,
      "step": 66
    },
    {
      "epoch": 0.3848731450454763,
      "grad_norm": 0.5568858981132507,
      "learning_rate": 9.74430964947668e-06,
      "loss": 0.5261,
      "step": 67
    },
    {
      "epoch": 0.3906175203446625,
      "grad_norm": 0.5718939304351807,
      "learning_rate": 9.728227911667934e-06,
      "loss": 0.5494,
      "step": 68
    },
    {
      "epoch": 0.3963618956438487,
      "grad_norm": 0.5540202856063843,
      "learning_rate": 9.711669846171443e-06,
      "loss": 0.5771,
      "step": 69
    },
    {
      "epoch": 0.40210627094303497,
      "grad_norm": 0.5410825610160828,
      "learning_rate": 9.694637121067764e-06,
      "loss": 0.501,
      "step": 70
    },
    {
      "epoch": 0.40785064624222117,
      "grad_norm": 0.6998445987701416,
      "learning_rate": 9.677131452255272e-06,
      "loss": 0.555,
      "step": 71
    },
    {
      "epoch": 0.41359502154140737,
      "grad_norm": 0.5608072876930237,
      "learning_rate": 9.659154603277283e-06,
      "loss": 0.5344,
      "step": 72
    },
    {
      "epoch": 0.41933939684059357,
      "grad_norm": 0.5644481182098389,
      "learning_rate": 9.640708385144403e-06,
      "loss": 0.5547,
      "step": 73
    },
    {
      "epoch": 0.4250837721397798,
      "grad_norm": 0.643834114074707,
      "learning_rate": 9.62179465615209e-06,
      "loss": 0.5578,
      "step": 74
    },
    {
      "epoch": 0.430828147438966,
      "grad_norm": 0.6082519888877869,
      "learning_rate": 9.602415321693434e-06,
      "loss": 0.5687,
      "step": 75
    },
    {
      "epoch": 0.4365725227381522,
      "grad_norm": 0.4994487762451172,
      "learning_rate": 9.582572334067213e-06,
      "loss": 0.5447,
      "step": 76
    },
    {
      "epoch": 0.4423168980373384,
      "grad_norm": 0.52949458360672,
      "learning_rate": 9.562267692281212e-06,
      "loss": 0.5713,
      "step": 77
    },
    {
      "epoch": 0.4480612733365247,
      "grad_norm": 0.5970189571380615,
      "learning_rate": 9.541503441850844e-06,
      "loss": 0.5486,
      "step": 78
    },
    {
      "epoch": 0.4538056486357109,
      "grad_norm": 0.5419505834579468,
      "learning_rate": 9.520281674593084e-06,
      "loss": 0.5577,
      "step": 79
    },
    {
      "epoch": 0.4595500239348971,
      "grad_norm": 0.5340558886528015,
      "learning_rate": 9.498604528415731e-06,
      "loss": 0.5397,
      "step": 80
    },
    {
      "epoch": 0.4652943992340833,
      "grad_norm": 0.5403291583061218,
      "learning_rate": 9.476474187102033e-06,
      "loss": 0.5438,
      "step": 81
    },
    {
      "epoch": 0.47103877453326953,
      "grad_norm": 0.6180256605148315,
      "learning_rate": 9.453892880090696e-06,
      "loss": 0.5624,
      "step": 82
    },
    {
      "epoch": 0.47678314983245573,
      "grad_norm": 0.5336986780166626,
      "learning_rate": 9.430862882251279e-06,
      "loss": 0.5473,
      "step": 83
    },
    {
      "epoch": 0.48252752513164193,
      "grad_norm": 0.6668586134910583,
      "learning_rate": 9.40738651365503e-06,
      "loss": 0.5757,
      "step": 84
    },
    {
      "epoch": 0.48827190043082813,
      "grad_norm": 0.6107608675956726,
      "learning_rate": 9.38346613934115e-06,
      "loss": 0.5335,
      "step": 85
    },
    {
      "epoch": 0.4940162757300144,
      "grad_norm": 0.5242419838905334,
      "learning_rate": 9.359104169078541e-06,
      "loss": 0.5509,
      "step": 86
    },
    {
      "epoch": 0.4997606510292006,
      "grad_norm": 0.6356630921363831,
      "learning_rate": 9.334303057123044e-06,
      "loss": 0.5676,
      "step": 87
    },
    {
      "epoch": 0.5055050263283868,
      "grad_norm": 0.533847987651825,
      "learning_rate": 9.309065301970193e-06,
      "loss": 0.5515,
      "step": 88
    },
    {
      "epoch": 0.511249401627573,
      "grad_norm": 0.5083225965499878,
      "learning_rate": 9.283393446103506e-06,
      "loss": 0.5474,
      "step": 89
    },
    {
      "epoch": 0.5169937769267592,
      "grad_norm": 0.5779216885566711,
      "learning_rate": 9.257290075738365e-06,
      "loss": 0.5574,
      "step": 90
    },
    {
      "epoch": 0.5227381522259454,
      "grad_norm": 0.6124880313873291,
      "learning_rate": 9.23075782056147e-06,
      "loss": 0.535,
      "step": 91
    },
    {
      "epoch": 0.5284825275251317,
      "grad_norm": 0.598517119884491,
      "learning_rate": 9.20379935346592e-06,
      "loss": 0.5409,
      "step": 92
    },
    {
      "epoch": 0.5342269028243178,
      "grad_norm": 0.492055207490921,
      "learning_rate": 9.176417390281944e-06,
      "loss": 0.5293,
      "step": 93
    },
    {
      "epoch": 0.5399712781235041,
      "grad_norm": 0.4873235523700714,
      "learning_rate": 9.148614689503307e-06,
      "loss": 0.5585,
      "step": 94
    },
    {
      "epoch": 0.5457156534226902,
      "grad_norm": 0.5771122574806213,
      "learning_rate": 9.120394052009412e-06,
      "loss": 0.5464,
      "step": 95
    },
    {
      "epoch": 0.5514600287218765,
      "grad_norm": 0.5583028793334961,
      "learning_rate": 9.091758320783139e-06,
      "loss": 0.5603,
      "step": 96
    },
    {
      "epoch": 0.5572044040210627,
      "grad_norm": 0.5799354910850525,
      "learning_rate": 9.062710380624439e-06,
      "loss": 0.5307,
      "step": 97
    },
    {
      "epoch": 0.5629487793202489,
      "grad_norm": 0.5598204731941223,
      "learning_rate": 9.033253157859715e-06,
      "loss": 0.5733,
      "step": 98
    },
    {
      "epoch": 0.5686931546194351,
      "grad_norm": 0.5341041684150696,
      "learning_rate": 9.003389620047012e-06,
      "loss": 0.5288,
      "step": 99
    },
    {
      "epoch": 0.5744375299186214,
      "grad_norm": 0.5493537187576294,
      "learning_rate": 8.973122775677078e-06,
      "loss": 0.5322,
      "step": 100
    },
    {
      "epoch": 0.5801819052178075,
      "grad_norm": 0.7178077101707458,
      "learning_rate": 8.942455673870278e-06,
      "loss": 0.5638,
      "step": 101
    },
    {
      "epoch": 0.5859262805169938,
      "grad_norm": 0.5725718140602112,
      "learning_rate": 8.91139140406941e-06,
      "loss": 0.5603,
      "step": 102
    },
    {
      "epoch": 0.59167065581618,
      "grad_norm": 0.5904880166053772,
      "learning_rate": 8.879933095728485e-06,
      "loss": 0.5454,
      "step": 103
    },
    {
      "epoch": 0.5974150311153662,
      "grad_norm": 0.5282031893730164,
      "learning_rate": 8.848083917997463e-06,
      "loss": 0.5287,
      "step": 104
    },
    {
      "epoch": 0.6031594064145525,
      "grad_norm": 0.5110641121864319,
      "learning_rate": 8.815847079402972e-06,
      "loss": 0.5301,
      "step": 105
    },
    {
      "epoch": 0.6089037817137386,
      "grad_norm": 0.7060251235961914,
      "learning_rate": 8.783225827525098e-06,
      "loss": 0.5529,
      "step": 106
    },
    {
      "epoch": 0.6146481570129249,
      "grad_norm": 0.5038994550704956,
      "learning_rate": 8.750223448670204e-06,
      "loss": 0.5464,
      "step": 107
    },
    {
      "epoch": 0.6203925323121111,
      "grad_norm": 0.6462810039520264,
      "learning_rate": 8.716843267539868e-06,
      "loss": 0.5634,
      "step": 108
    },
    {
      "epoch": 0.6261369076112973,
      "grad_norm": 0.5832483768463135,
      "learning_rate": 8.683088646895955e-06,
      "loss": 0.5102,
      "step": 109
    },
    {
      "epoch": 0.6318812829104835,
      "grad_norm": 0.5752907991409302,
      "learning_rate": 8.648962987221837e-06,
      "loss": 0.5767,
      "step": 110
    },
    {
      "epoch": 0.6376256582096697,
      "grad_norm": 0.714330792427063,
      "learning_rate": 8.614469726379833e-06,
      "loss": 0.5567,
      "step": 111
    },
    {
      "epoch": 0.6433700335088559,
      "grad_norm": 0.5703815817832947,
      "learning_rate": 8.579612339264867e-06,
      "loss": 0.566,
      "step": 112
    },
    {
      "epoch": 0.6491144088080422,
      "grad_norm": 0.7698194980621338,
      "learning_rate": 8.544394337454409e-06,
      "loss": 0.5308,
      "step": 113
    },
    {
      "epoch": 0.6548587841072283,
      "grad_norm": 0.5629884600639343,
      "learning_rate": 8.508819268854713e-06,
      "loss": 0.555,
      "step": 114
    },
    {
      "epoch": 0.6606031594064146,
      "grad_norm": 0.6958817839622498,
      "learning_rate": 8.472890717343391e-06,
      "loss": 0.5623,
      "step": 115
    },
    {
      "epoch": 0.6663475347056008,
      "grad_norm": 0.6036946773529053,
      "learning_rate": 8.436612302408376e-06,
      "loss": 0.526,
      "step": 116
    },
    {
      "epoch": 0.672091910004787,
      "grad_norm": 0.511654794216156,
      "learning_rate": 8.399987678783285e-06,
      "loss": 0.5446,
      "step": 117
    },
    {
      "epoch": 0.6778362853039732,
      "grad_norm": 0.7489587068557739,
      "learning_rate": 8.36302053607924e-06,
      "loss": 0.5671,
      "step": 118
    },
    {
      "epoch": 0.6835806606031594,
      "grad_norm": 0.6197418570518494,
      "learning_rate": 8.325714598413169e-06,
      "loss": 0.5701,
      "step": 119
    },
    {
      "epoch": 0.6893250359023456,
      "grad_norm": 0.6502342820167542,
      "learning_rate": 8.288073624032634e-06,
      "loss": 0.5321,
      "step": 120
    },
    {
      "epoch": 0.6950694112015319,
      "grad_norm": 0.49902498722076416,
      "learning_rate": 8.250101404937223e-06,
      "loss": 0.5542,
      "step": 121
    },
    {
      "epoch": 0.700813786500718,
      "grad_norm": 0.5273813009262085,
      "learning_rate": 8.211801766496537e-06,
      "loss": 0.5311,
      "step": 122
    },
    {
      "epoch": 0.7065581617999043,
      "grad_norm": 0.5683380365371704,
      "learning_rate": 8.17317856706482e-06,
      "loss": 0.523,
      "step": 123
    },
    {
      "epoch": 0.7123025370990905,
      "grad_norm": 0.5648055076599121,
      "learning_rate": 8.13423569759226e-06,
      "loss": 0.5678,
      "step": 124
    },
    {
      "epoch": 0.7180469123982767,
      "grad_norm": 0.5629974007606506,
      "learning_rate": 8.094977081233006e-06,
      "loss": 0.5669,
      "step": 125
    },
    {
      "epoch": 0.7237912876974629,
      "grad_norm": 0.602506160736084,
      "learning_rate": 8.055406672949957e-06,
      "loss": 0.5275,
      "step": 126
    },
    {
      "epoch": 0.7295356629966491,
      "grad_norm": 0.572467029094696,
      "learning_rate": 8.015528459116321e-06,
      "loss": 0.5275,
      "step": 127
    },
    {
      "epoch": 0.7352800382958353,
      "grad_norm": 0.5794216394424438,
      "learning_rate": 7.975346457114034e-06,
      "loss": 0.5396,
      "step": 128
    },
    {
      "epoch": 0.7410244135950216,
      "grad_norm": 0.560357391834259,
      "learning_rate": 7.934864714929036e-06,
      "loss": 0.5483,
      "step": 129
    },
    {
      "epoch": 0.7467687888942077,
      "grad_norm": 0.5050742626190186,
      "learning_rate": 7.894087310743468e-06,
      "loss": 0.5617,
      "step": 130
    },
    {
      "epoch": 0.752513164193394,
      "grad_norm": 0.4990502595901489,
      "learning_rate": 7.853018352524845e-06,
      "loss": 0.5253,
      "step": 131
    },
    {
      "epoch": 0.7582575394925802,
      "grad_norm": 0.4985724985599518,
      "learning_rate": 7.811661977612202e-06,
      "loss": 0.5303,
      "step": 132
    },
    {
      "epoch": 0.7640019147917664,
      "grad_norm": 0.5744158029556274,
      "learning_rate": 7.770022352299294e-06,
      "loss": 0.5235,
      "step": 133
    },
    {
      "epoch": 0.7697462900909526,
      "grad_norm": 0.5937462449073792,
      "learning_rate": 7.728103671414889e-06,
      "loss": 0.5148,
      "step": 134
    },
    {
      "epoch": 0.7754906653901388,
      "grad_norm": 0.5738289952278137,
      "learning_rate": 7.685910157900158e-06,
      "loss": 0.5345,
      "step": 135
    },
    {
      "epoch": 0.781235040689325,
      "grad_norm": 0.5279061794281006,
      "learning_rate": 7.643446062383273e-06,
      "loss": 0.5451,
      "step": 136
    },
    {
      "epoch": 0.7869794159885113,
      "grad_norm": 0.5690545439720154,
      "learning_rate": 7.600715662751166e-06,
      "loss": 0.5083,
      "step": 137
    },
    {
      "epoch": 0.7927237912876974,
      "grad_norm": 0.5676944255828857,
      "learning_rate": 7.557723263718596e-06,
      "loss": 0.5516,
      "step": 138
    },
    {
      "epoch": 0.7984681665868837,
      "grad_norm": 0.6335099339485168,
      "learning_rate": 7.514473196394467e-06,
      "loss": 0.5583,
      "step": 139
    },
    {
      "epoch": 0.8042125418860699,
      "grad_norm": 0.6167879104614258,
      "learning_rate": 7.470969817845518e-06,
      "loss": 0.5675,
      "step": 140
    },
    {
      "epoch": 0.8099569171852561,
      "grad_norm": 0.6238246560096741,
      "learning_rate": 7.427217510657383e-06,
      "loss": 0.5242,
      "step": 141
    },
    {
      "epoch": 0.8157012924844423,
      "grad_norm": 0.6096833944320679,
      "learning_rate": 7.383220682493081e-06,
      "loss": 0.56,
      "step": 142
    },
    {
      "epoch": 0.8214456677836285,
      "grad_norm": 0.6408026814460754,
      "learning_rate": 7.338983765648985e-06,
      "loss": 0.517,
      "step": 143
    },
    {
      "epoch": 0.8271900430828147,
      "grad_norm": 0.6161112785339355,
      "learning_rate": 7.294511216608308e-06,
      "loss": 0.5568,
      "step": 144
    },
    {
      "epoch": 0.832934418382001,
      "grad_norm": 0.6821258664131165,
      "learning_rate": 7.249807515592149e-06,
      "loss": 0.5391,
      "step": 145
    },
    {
      "epoch": 0.8386787936811871,
      "grad_norm": 0.5701781511306763,
      "learning_rate": 7.2048771661081515e-06,
      "loss": 0.5393,
      "step": 146
    },
    {
      "epoch": 0.8444231689803734,
      "grad_norm": 0.573361873626709,
      "learning_rate": 7.159724694496815e-06,
      "loss": 0.5104,
      "step": 147
    },
    {
      "epoch": 0.8501675442795597,
      "grad_norm": 0.6415104269981384,
      "learning_rate": 7.114354649475499e-06,
      "loss": 0.5373,
      "step": 148
    },
    {
      "epoch": 0.8559119195787458,
      "grad_norm": 0.5799002647399902,
      "learning_rate": 7.068771601680191e-06,
      "loss": 0.516,
      "step": 149
    },
    {
      "epoch": 0.861656294877932,
      "grad_norm": 0.6010524034500122,
      "learning_rate": 7.022980143205046e-06,
      "loss": 0.5421,
      "step": 150
    },
    {
      "epoch": 0.8674006701771182,
      "grad_norm": 0.5849425196647644,
      "learning_rate": 6.976984887139775e-06,
      "loss": 0.5576,
      "step": 151
    },
    {
      "epoch": 0.8731450454763044,
      "grad_norm": 0.49850520491600037,
      "learning_rate": 6.930790467104916e-06,
      "loss": 0.4881,
      "step": 152
    },
    {
      "epoch": 0.8788894207754907,
      "grad_norm": 0.6236470937728882,
      "learning_rate": 6.884401536785045e-06,
      "loss": 0.5521,
      "step": 153
    },
    {
      "epoch": 0.8846337960746768,
      "grad_norm": 0.5845939517021179,
      "learning_rate": 6.837822769459942e-06,
      "loss": 0.5143,
      "step": 154
    },
    {
      "epoch": 0.8903781713738631,
      "grad_norm": 0.48170965909957886,
      "learning_rate": 6.791058857533814e-06,
      "loss": 0.5248,
      "step": 155
    },
    {
      "epoch": 0.8961225466730494,
      "grad_norm": 0.5586768984794617,
      "learning_rate": 6.744114512062571e-06,
      "loss": 0.543,
      "step": 156
    },
    {
      "epoch": 0.9018669219722355,
      "grad_norm": 0.5925922393798828,
      "learning_rate": 6.696994462279223e-06,
      "loss": 0.5272,
      "step": 157
    },
    {
      "epoch": 0.9076112972714218,
      "grad_norm": 0.5067867636680603,
      "learning_rate": 6.6497034551174585e-06,
      "loss": 0.5572,
      "step": 158
    },
    {
      "epoch": 0.9133556725706079,
      "grad_norm": 0.5289098024368286,
      "learning_rate": 6.602246254733431e-06,
      "loss": 0.5216,
      "step": 159
    },
    {
      "epoch": 0.9191000478697942,
      "grad_norm": 0.5776088833808899,
      "learning_rate": 6.554627642025807e-06,
      "loss": 0.5278,
      "step": 160
    },
    {
      "epoch": 0.9248444231689804,
      "grad_norm": 0.4834924340248108,
      "learning_rate": 6.506852414154138e-06,
      "loss": 0.5479,
      "step": 161
    },
    {
      "epoch": 0.9305887984681666,
      "grad_norm": 0.5455948114395142,
      "learning_rate": 6.4589253840555856e-06,
      "loss": 0.4813,
      "step": 162
    },
    {
      "epoch": 0.9363331737673528,
      "grad_norm": 0.5379589200019836,
      "learning_rate": 6.41085137996006e-06,
      "loss": 0.5391,
      "step": 163
    },
    {
      "epoch": 0.9420775490665391,
      "grad_norm": 0.5952578783035278,
      "learning_rate": 6.362635244903818e-06,
      "loss": 0.556,
      "step": 164
    },
    {
      "epoch": 0.9478219243657252,
      "grad_norm": 0.5639842748641968,
      "learning_rate": 6.314281836241573e-06,
      "loss": 0.552,
      "step": 165
    },
    {
      "epoch": 0.9535662996649115,
      "grad_norm": 0.5705066323280334,
      "learning_rate": 6.265796025157154e-06,
      "loss": 0.5028,
      "step": 166
    },
    {
      "epoch": 0.9593106749640976,
      "grad_norm": 0.5012354850769043,
      "learning_rate": 6.217182696172776e-06,
      "loss": 0.5229,
      "step": 167
    },
    {
      "epoch": 0.9650550502632839,
      "grad_norm": 0.5164003968238831,
      "learning_rate": 6.168446746656973e-06,
      "loss": 0.5166,
      "step": 168
    },
    {
      "epoch": 0.9707994255624701,
      "grad_norm": 0.5868187546730042,
      "learning_rate": 6.119593086331225e-06,
      "loss": 0.5342,
      "step": 169
    },
    {
      "epoch": 0.9765438008616563,
      "grad_norm": 0.5106255412101746,
      "learning_rate": 6.070626636775349e-06,
      "loss": 0.526,
      "step": 170
    },
    {
      "epoch": 0.9822881761608425,
      "grad_norm": 0.44993191957473755,
      "learning_rate": 6.021552330931693e-06,
      "loss": 0.5599,
      "step": 171
    },
    {
      "epoch": 0.9880325514600288,
      "grad_norm": 0.5432422161102295,
      "learning_rate": 5.972375112608182e-06,
      "loss": 0.5265,
      "step": 172
    },
    {
      "epoch": 0.9937769267592149,
      "grad_norm": 0.5361687541007996,
      "learning_rate": 5.923099935980278e-06,
      "loss": 0.5652,
      "step": 173
    },
    {
      "epoch": 0.9995213020584012,
      "grad_norm": 0.43177682161331177,
      "learning_rate": 5.8737317650918905e-06,
      "loss": 0.5462,
      "step": 174
    },
    {
      "epoch": 1.0052656773575874,
      "grad_norm": 1.580591082572937,
      "learning_rate": 5.824275573355278e-06,
      "loss": 0.9082,
      "step": 175
    },
    {
      "epoch": 1.0110100526567736,
      "grad_norm": 0.5395136475563049,
      "learning_rate": 5.7747363430500395e-06,
      "loss": 0.4953,
      "step": 176
    },
    {
      "epoch": 1.0167544279559597,
      "grad_norm": 0.5305489301681519,
      "learning_rate": 5.725119064821185e-06,
      "loss": 0.5229,
      "step": 177
    },
    {
      "epoch": 1.022498803255146,
      "grad_norm": 0.5417148470878601,
      "learning_rate": 5.675428737176367e-06,
      "loss": 0.461,
      "step": 178
    },
    {
      "epoch": 1.0282431785543322,
      "grad_norm": 0.5382701754570007,
      "learning_rate": 5.625670365982332e-06,
      "loss": 0.4442,
      "step": 179
    },
    {
      "epoch": 1.0339875538535184,
      "grad_norm": 0.5847955346107483,
      "learning_rate": 5.575848963960621e-06,
      "loss": 0.5286,
      "step": 180
    },
    {
      "epoch": 1.0397319291527047,
      "grad_norm": 0.4929944574832916,
      "learning_rate": 5.525969550182577e-06,
      "loss": 0.4789,
      "step": 181
    },
    {
      "epoch": 1.0454763044518909,
      "grad_norm": 0.5635377764701843,
      "learning_rate": 5.4760371495637256e-06,
      "loss": 0.4753,
      "step": 182
    },
    {
      "epoch": 1.051220679751077,
      "grad_norm": 0.4980822801589966,
      "learning_rate": 5.426056792357552e-06,
      "loss": 0.4895,
      "step": 183
    },
    {
      "epoch": 1.0569650550502634,
      "grad_norm": 0.5097235441207886,
      "learning_rate": 5.376033513648743e-06,
      "loss": 0.4794,
      "step": 184
    },
    {
      "epoch": 1.0627094303494495,
      "grad_norm": 0.5179345607757568,
      "learning_rate": 5.325972352845965e-06,
      "loss": 0.4957,
      "step": 185
    },
    {
      "epoch": 1.0684538056486357,
      "grad_norm": 0.4883691072463989,
      "learning_rate": 5.2758783531741655e-06,
      "loss": 0.5047,
      "step": 186
    },
    {
      "epoch": 1.0741981809478218,
      "grad_norm": 0.46270909905433655,
      "learning_rate": 5.225756561166521e-06,
      "loss": 0.5066,
      "step": 187
    },
    {
      "epoch": 1.0799425562470082,
      "grad_norm": 0.5014520883560181,
      "learning_rate": 5.175612026156045e-06,
      "loss": 0.5012,
      "step": 188
    },
    {
      "epoch": 1.0856869315461943,
      "grad_norm": 0.4673246443271637,
      "learning_rate": 5.125449799766916e-06,
      "loss": 0.4638,
      "step": 189
    },
    {
      "epoch": 1.0914313068453805,
      "grad_norm": 0.4951716363430023,
      "learning_rate": 5.075274935405554e-06,
      "loss": 0.4915,
      "step": 190
    },
    {
      "epoch": 1.0971756821445668,
      "grad_norm": 0.4571003317832947,
      "learning_rate": 5.025092487751552e-06,
      "loss": 0.494,
      "step": 191
    },
    {
      "epoch": 1.102920057443753,
      "grad_norm": 0.4891183078289032,
      "learning_rate": 4.974907512248451e-06,
      "loss": 0.4549,
      "step": 192
    },
    {
      "epoch": 1.1086644327429391,
      "grad_norm": 0.4218912422657013,
      "learning_rate": 4.924725064594448e-06,
      "loss": 0.489,
      "step": 193
    },
    {
      "epoch": 1.1144088080421255,
      "grad_norm": 0.45297980308532715,
      "learning_rate": 4.874550200233085e-06,
      "loss": 0.4907,
      "step": 194
    },
    {
      "epoch": 1.1201531833413116,
      "grad_norm": 0.5533784031867981,
      "learning_rate": 4.824387973843957e-06,
      "loss": 0.5129,
      "step": 195
    },
    {
      "epoch": 1.1258975586404978,
      "grad_norm": 0.45622408390045166,
      "learning_rate": 4.7742434388334815e-06,
      "loss": 0.4418,
      "step": 196
    },
    {
      "epoch": 1.1316419339396842,
      "grad_norm": 0.4435925781726837,
      "learning_rate": 4.724121646825838e-06,
      "loss": 0.5022,
      "step": 197
    },
    {
      "epoch": 1.1373863092388703,
      "grad_norm": 0.4371262490749359,
      "learning_rate": 4.674027647154037e-06,
      "loss": 0.4711,
      "step": 198
    },
    {
      "epoch": 1.1431306845380564,
      "grad_norm": 0.473237544298172,
      "learning_rate": 4.623966486351257e-06,
      "loss": 0.4465,
      "step": 199
    },
    {
      "epoch": 1.1488750598372426,
      "grad_norm": 0.4501272439956665,
      "learning_rate": 4.573943207642452e-06,
      "loss": 0.4659,
      "step": 200
    },
    {
      "epoch": 1.154619435136429,
      "grad_norm": 0.5544123649597168,
      "learning_rate": 4.523962850436276e-06,
      "loss": 0.4974,
      "step": 201
    },
    {
      "epoch": 1.160363810435615,
      "grad_norm": 0.519002377986908,
      "learning_rate": 4.474030449817423e-06,
      "loss": 0.5099,
      "step": 202
    },
    {
      "epoch": 1.1661081857348012,
      "grad_norm": 0.48358842730522156,
      "learning_rate": 4.424151036039381e-06,
      "loss": 0.4687,
      "step": 203
    },
    {
      "epoch": 1.1718525610339876,
      "grad_norm": 0.5397114753723145,
      "learning_rate": 4.3743296340176694e-06,
      "loss": 0.505,
      "step": 204
    },
    {
      "epoch": 1.1775969363331737,
      "grad_norm": 0.47014209628105164,
      "learning_rate": 4.3245712628236356e-06,
      "loss": 0.5201,
      "step": 205
    },
    {
      "epoch": 1.18334131163236,
      "grad_norm": 0.4648483395576477,
      "learning_rate": 4.274880935178817e-06,
      "loss": 0.4652,
      "step": 206
    },
    {
      "epoch": 1.1890856869315463,
      "grad_norm": 0.45514115691185,
      "learning_rate": 4.225263656949961e-06,
      "loss": 0.4692,
      "step": 207
    },
    {
      "epoch": 1.1948300622307324,
      "grad_norm": 0.48053470253944397,
      "learning_rate": 4.175724426644724e-06,
      "loss": 0.4842,
      "step": 208
    },
    {
      "epoch": 1.2005744375299185,
      "grad_norm": 0.45836907625198364,
      "learning_rate": 4.12626823490811e-06,
      "loss": 0.4535,
      "step": 209
    },
    {
      "epoch": 1.206318812829105,
      "grad_norm": 0.43734660744667053,
      "learning_rate": 4.076900064019721e-06,
      "loss": 0.5134,
      "step": 210
    },
    {
      "epoch": 1.212063188128291,
      "grad_norm": 0.4292330741882324,
      "learning_rate": 4.02762488739182e-06,
      "loss": 0.4933,
      "step": 211
    },
    {
      "epoch": 1.2178075634274772,
      "grad_norm": 0.48980435729026794,
      "learning_rate": 3.978447669068309e-06,
      "loss": 0.4924,
      "step": 212
    },
    {
      "epoch": 1.2235519387266636,
      "grad_norm": 0.415892630815506,
      "learning_rate": 3.929373363224654e-06,
      "loss": 0.4602,
      "step": 213
    },
    {
      "epoch": 1.2292963140258497,
      "grad_norm": 0.5073506236076355,
      "learning_rate": 3.8804069136687775e-06,
      "loss": 0.4469,
      "step": 214
    },
    {
      "epoch": 1.2350406893250359,
      "grad_norm": 0.4534899592399597,
      "learning_rate": 3.8315532533430285e-06,
      "loss": 0.4937,
      "step": 215
    },
    {
      "epoch": 1.2407850646242222,
      "grad_norm": 0.43552905321121216,
      "learning_rate": 3.7828173038272266e-06,
      "loss": 0.5012,
      "step": 216
    },
    {
      "epoch": 1.2465294399234084,
      "grad_norm": 0.45762044191360474,
      "learning_rate": 3.7342039748428473e-06,
      "loss": 0.448,
      "step": 217
    },
    {
      "epoch": 1.2522738152225945,
      "grad_norm": 0.4884515702724457,
      "learning_rate": 3.685718163758427e-06,
      "loss": 0.4803,
      "step": 218
    },
    {
      "epoch": 1.2580181905217809,
      "grad_norm": 0.414726585149765,
      "learning_rate": 3.6373647550961834e-06,
      "loss": 0.4844,
      "step": 219
    },
    {
      "epoch": 1.263762565820967,
      "grad_norm": 0.44502031803131104,
      "learning_rate": 3.5891486200399413e-06,
      "loss": 0.5254,
      "step": 220
    },
    {
      "epoch": 1.2695069411201532,
      "grad_norm": 0.46549510955810547,
      "learning_rate": 3.5410746159444165e-06,
      "loss": 0.503,
      "step": 221
    },
    {
      "epoch": 1.2752513164193395,
      "grad_norm": 0.480082631111145,
      "learning_rate": 3.4931475858458634e-06,
      "loss": 0.474,
      "step": 222
    },
    {
      "epoch": 1.2809956917185257,
      "grad_norm": 0.5088851451873779,
      "learning_rate": 3.445372357974194e-06,
      "loss": 0.5197,
      "step": 223
    },
    {
      "epoch": 1.2867400670177118,
      "grad_norm": 0.4297751486301422,
      "learning_rate": 3.397753745266571e-06,
      "loss": 0.4698,
      "step": 224
    },
    {
      "epoch": 1.292484442316898,
      "grad_norm": 0.436560720205307,
      "learning_rate": 3.350296544882543e-06,
      "loss": 0.4573,
      "step": 225
    },
    {
      "epoch": 1.2982288176160843,
      "grad_norm": 0.451806902885437,
      "learning_rate": 3.303005537720778e-06,
      "loss": 0.4654,
      "step": 226
    },
    {
      "epoch": 1.3039731929152705,
      "grad_norm": 0.4921393394470215,
      "learning_rate": 3.255885487937431e-06,
      "loss": 0.5075,
      "step": 227
    },
    {
      "epoch": 1.3097175682144566,
      "grad_norm": 0.4880327880382538,
      "learning_rate": 3.2089411424661864e-06,
      "loss": 0.5467,
      "step": 228
    },
    {
      "epoch": 1.3154619435136428,
      "grad_norm": 0.40174514055252075,
      "learning_rate": 3.1621772305400603e-06,
      "loss": 0.4965,
      "step": 229
    },
    {
      "epoch": 1.3212063188128291,
      "grad_norm": 0.41413503885269165,
      "learning_rate": 3.1155984632149565e-06,
      "loss": 0.4826,
      "step": 230
    },
    {
      "epoch": 1.3269506941120153,
      "grad_norm": 0.43642458319664,
      "learning_rate": 3.0692095328950843e-06,
      "loss": 0.4355,
      "step": 231
    },
    {
      "epoch": 1.3326950694112014,
      "grad_norm": 0.4538045823574066,
      "learning_rate": 3.023015112860228e-06,
      "loss": 0.5378,
      "step": 232
    },
    {
      "epoch": 1.3384394447103878,
      "grad_norm": 0.42121201753616333,
      "learning_rate": 2.977019856794955e-06,
      "loss": 0.4811,
      "step": 233
    },
    {
      "epoch": 1.344183820009574,
      "grad_norm": 0.3827037513256073,
      "learning_rate": 2.93122839831981e-06,
      "loss": 0.4956,
      "step": 234
    },
    {
      "epoch": 1.34992819530876,
      "grad_norm": 0.3937554359436035,
      "learning_rate": 2.8856453505245018e-06,
      "loss": 0.4729,
      "step": 235
    },
    {
      "epoch": 1.3556725706079464,
      "grad_norm": 0.4603598415851593,
      "learning_rate": 2.840275305503186e-06,
      "loss": 0.5352,
      "step": 236
    },
    {
      "epoch": 1.3614169459071326,
      "grad_norm": 0.4124448001384735,
      "learning_rate": 2.7951228338918506e-06,
      "loss": 0.4523,
      "step": 237
    },
    {
      "epoch": 1.3671613212063187,
      "grad_norm": 0.41025596857070923,
      "learning_rate": 2.7501924844078538e-06,
      "loss": 0.4824,
      "step": 238
    },
    {
      "epoch": 1.372905696505505,
      "grad_norm": 0.3917722702026367,
      "learning_rate": 2.7054887833916933e-06,
      "loss": 0.5059,
      "step": 239
    },
    {
      "epoch": 1.3786500718046912,
      "grad_norm": 0.4388699531555176,
      "learning_rate": 2.6610162343510183e-06,
      "loss": 0.5039,
      "step": 240
    },
    {
      "epoch": 1.3843944471038774,
      "grad_norm": 0.4307573139667511,
      "learning_rate": 2.616779317506921e-06,
      "loss": 0.4668,
      "step": 241
    },
    {
      "epoch": 1.3901388224030637,
      "grad_norm": 0.40513068437576294,
      "learning_rate": 2.572782489342617e-06,
      "loss": 0.4365,
      "step": 242
    },
    {
      "epoch": 1.39588319770225,
      "grad_norm": 0.43141913414001465,
      "learning_rate": 2.5290301821544826e-06,
      "loss": 0.5055,
      "step": 243
    },
    {
      "epoch": 1.401627573001436,
      "grad_norm": 0.4245367646217346,
      "learning_rate": 2.4855268036055346e-06,
      "loss": 0.5001,
      "step": 244
    },
    {
      "epoch": 1.4073719483006224,
      "grad_norm": 0.41667869687080383,
      "learning_rate": 2.4422767362814045e-06,
      "loss": 0.5056,
      "step": 245
    },
    {
      "epoch": 1.4131163235998085,
      "grad_norm": 0.41470879316329956,
      "learning_rate": 2.3992843372488357e-06,
      "loss": 0.477,
      "step": 246
    },
    {
      "epoch": 1.4188606988989947,
      "grad_norm": 0.43821245431900024,
      "learning_rate": 2.3565539376167295e-06,
      "loss": 0.5074,
      "step": 247
    },
    {
      "epoch": 1.424605074198181,
      "grad_norm": 0.38262587785720825,
      "learning_rate": 2.3140898420998425e-06,
      "loss": 0.4686,
      "step": 248
    },
    {
      "epoch": 1.4303494494973672,
      "grad_norm": 0.3910864591598511,
      "learning_rate": 2.271896328585114e-06,
      "loss": 0.4852,
      "step": 249
    },
    {
      "epoch": 1.4360938247965533,
      "grad_norm": 0.43671905994415283,
      "learning_rate": 2.2299776477007073e-06,
      "loss": 0.4729,
      "step": 250
    },
    {
      "epoch": 1.4418382000957397,
      "grad_norm": 0.3937380611896515,
      "learning_rate": 2.1883380223878004e-06,
      "loss": 0.5039,
      "step": 251
    },
    {
      "epoch": 1.4475825753949259,
      "grad_norm": 0.4383409023284912,
      "learning_rate": 2.1469816474751566e-06,
      "loss": 0.5264,
      "step": 252
    },
    {
      "epoch": 1.453326950694112,
      "grad_norm": 0.38024550676345825,
      "learning_rate": 2.105912689256533e-06,
      "loss": 0.4575,
      "step": 253
    },
    {
      "epoch": 1.4590713259932984,
      "grad_norm": 0.4220210313796997,
      "learning_rate": 2.0651352850709656e-06,
      "loss": 0.4846,
      "step": 254
    },
    {
      "epoch": 1.4648157012924845,
      "grad_norm": 0.4223257005214691,
      "learning_rate": 2.0246535428859652e-06,
      "loss": 0.4968,
      "step": 255
    },
    {
      "epoch": 1.4705600765916707,
      "grad_norm": 0.3835613429546356,
      "learning_rate": 1.984471540883679e-06,
      "loss": 0.4874,
      "step": 256
    },
    {
      "epoch": 1.4763044518908568,
      "grad_norm": 0.43379145860671997,
      "learning_rate": 1.9445933270500444e-06,
      "loss": 0.5144,
      "step": 257
    },
    {
      "epoch": 1.4820488271900432,
      "grad_norm": 0.3767804205417633,
      "learning_rate": 1.905022918766995e-06,
      "loss": 0.4621,
      "step": 258
    },
    {
      "epoch": 1.4877932024892293,
      "grad_norm": 0.4112907648086548,
      "learning_rate": 1.8657643024077431e-06,
      "loss": 0.5212,
      "step": 259
    },
    {
      "epoch": 1.4935375777884154,
      "grad_norm": 0.41141629219055176,
      "learning_rate": 1.8268214329351797e-06,
      "loss": 0.4859,
      "step": 260
    },
    {
      "epoch": 1.4992819530876016,
      "grad_norm": 0.3843127191066742,
      "learning_rate": 1.7881982335034625e-06,
      "loss": 0.4308,
      "step": 261
    },
    {
      "epoch": 1.505026328386788,
      "grad_norm": 0.3895561695098877,
      "learning_rate": 1.7498985950627794e-06,
      "loss": 0.4565,
      "step": 262
    },
    {
      "epoch": 1.510770703685974,
      "grad_norm": 0.4275520443916321,
      "learning_rate": 1.7119263759673677e-06,
      "loss": 0.4704,
      "step": 263
    },
    {
      "epoch": 1.5165150789851602,
      "grad_norm": 0.4638034999370575,
      "learning_rate": 1.6742854015868349e-06,
      "loss": 0.5137,
      "step": 264
    },
    {
      "epoch": 1.5222594542843466,
      "grad_norm": 0.35882094502449036,
      "learning_rate": 1.6369794639207626e-06,
      "loss": 0.4582,
      "step": 265
    },
    {
      "epoch": 1.5280038295835328,
      "grad_norm": 0.4297800362110138,
      "learning_rate": 1.6000123212167158e-06,
      "loss": 0.471,
      "step": 266
    },
    {
      "epoch": 1.533748204882719,
      "grad_norm": 0.37162676453590393,
      "learning_rate": 1.5633876975916261e-06,
      "loss": 0.479,
      "step": 267
    },
    {
      "epoch": 1.5394925801819053,
      "grad_norm": 0.4432937502861023,
      "learning_rate": 1.5271092826566108e-06,
      "loss": 0.4918,
      "step": 268
    },
    {
      "epoch": 1.5452369554810914,
      "grad_norm": 0.4131205976009369,
      "learning_rate": 1.4911807311452874e-06,
      "loss": 0.4862,
      "step": 269
    },
    {
      "epoch": 1.5509813307802776,
      "grad_norm": 0.39494404196739197,
      "learning_rate": 1.4556056625455922e-06,
      "loss": 0.472,
      "step": 270
    },
    {
      "epoch": 1.556725706079464,
      "grad_norm": 0.3842601776123047,
      "learning_rate": 1.4203876607351347e-06,
      "loss": 0.5468,
      "step": 271
    },
    {
      "epoch": 1.56247008137865,
      "grad_norm": 0.39018625020980835,
      "learning_rate": 1.3855302736201686e-06,
      "loss": 0.4695,
      "step": 272
    },
    {
      "epoch": 1.5682144566778362,
      "grad_norm": 0.4116652011871338,
      "learning_rate": 1.3510370127781635e-06,
      "loss": 0.5009,
      "step": 273
    },
    {
      "epoch": 1.5739588319770226,
      "grad_norm": 0.4268287420272827,
      "learning_rate": 1.3169113531040462e-06,
      "loss": 0.509,
      "step": 274
    },
    {
      "epoch": 1.5797032072762087,
      "grad_norm": 0.403522253036499,
      "learning_rate": 1.2831567324601325e-06,
      "loss": 0.4961,
      "step": 275
    },
    {
      "epoch": 1.5854475825753949,
      "grad_norm": 0.37210026383399963,
      "learning_rate": 1.2497765513297976e-06,
      "loss": 0.4923,
      "step": 276
    },
    {
      "epoch": 1.5911919578745812,
      "grad_norm": 0.4218740463256836,
      "learning_rate": 1.2167741724749026e-06,
      "loss": 0.4894,
      "step": 277
    },
    {
      "epoch": 1.5969363331737674,
      "grad_norm": 0.3306220769882202,
      "learning_rate": 1.1841529205970281e-06,
      "loss": 0.4224,
      "step": 278
    },
    {
      "epoch": 1.6026807084729535,
      "grad_norm": 0.4241751730442047,
      "learning_rate": 1.1519160820025382e-06,
      "loss": 0.4937,
      "step": 279
    },
    {
      "epoch": 1.6084250837721399,
      "grad_norm": 0.3803061842918396,
      "learning_rate": 1.1200669042715163e-06,
      "loss": 0.4921,
      "step": 280
    },
    {
      "epoch": 1.614169459071326,
      "grad_norm": 0.40259426832199097,
      "learning_rate": 1.0886085959305915e-06,
      "loss": 0.5068,
      "step": 281
    },
    {
      "epoch": 1.6199138343705122,
      "grad_norm": 0.3979838490486145,
      "learning_rate": 1.057544326129723e-06,
      "loss": 0.4888,
      "step": 282
    },
    {
      "epoch": 1.6256582096696985,
      "grad_norm": 0.39289093017578125,
      "learning_rate": 1.026877224322923e-06,
      "loss": 0.5025,
      "step": 283
    },
    {
      "epoch": 1.6314025849688847,
      "grad_norm": 0.3723068833351135,
      "learning_rate": 9.966103799529891e-07,
      "loss": 0.5262,
      "step": 284
    },
    {
      "epoch": 1.6371469602680708,
      "grad_norm": 0.3888184726238251,
      "learning_rate": 9.66746842140287e-07,
      "loss": 0.4623,
      "step": 285
    },
    {
      "epoch": 1.6428913355672572,
      "grad_norm": 0.38385969400405884,
      "learning_rate": 9.372896193755621e-07,
      "loss": 0.4814,
      "step": 286
    },
    {
      "epoch": 1.6486357108664431,
      "grad_norm": 0.386476069688797,
      "learning_rate": 9.082416792168608e-07,
      "loss": 0.5195,
      "step": 287
    },
    {
      "epoch": 1.6543800861656295,
      "grad_norm": 0.3499607741832733,
      "learning_rate": 8.7960594799059e-07,
      "loss": 0.4491,
      "step": 288
    },
    {
      "epoch": 1.6601244614648158,
      "grad_norm": 0.3445354402065277,
      "learning_rate": 8.513853104966951e-07,
      "loss": 0.5042,
      "step": 289
    },
    {
      "epoch": 1.6658688367640018,
      "grad_norm": 0.41543084383010864,
      "learning_rate": 8.235826097180566e-07,
      "loss": 0.4904,
      "step": 290
    },
    {
      "epoch": 1.6716132120631881,
      "grad_norm": 0.40791216492652893,
      "learning_rate": 7.962006465340821e-07,
      "loss": 0.4554,
      "step": 291
    },
    {
      "epoch": 1.6773575873623745,
      "grad_norm": 0.36069628596305847,
      "learning_rate": 7.692421794385313e-07,
      "loss": 0.473,
      "step": 292
    },
    {
      "epoch": 1.6831019626615604,
      "grad_norm": 0.44029319286346436,
      "learning_rate": 7.427099242616348e-07,
      "loss": 0.5168,
      "step": 293
    },
    {
      "epoch": 1.6888463379607468,
      "grad_norm": 0.4227386713027954,
      "learning_rate": 7.166065538964955e-07,
      "loss": 0.5405,
      "step": 294
    },
    {
      "epoch": 1.694590713259933,
      "grad_norm": 0.37983396649360657,
      "learning_rate": 6.909346980298093e-07,
      "loss": 0.4794,
      "step": 295
    },
    {
      "epoch": 1.700335088559119,
      "grad_norm": 0.3403533101081848,
      "learning_rate": 6.656969428769567e-07,
      "loss": 0.4514,
      "step": 296
    },
    {
      "epoch": 1.7060794638583054,
      "grad_norm": 0.3760150372982025,
      "learning_rate": 6.408958309214597e-07,
      "loss": 0.4801,
      "step": 297
    },
    {
      "epoch": 1.7118238391574916,
      "grad_norm": 0.4047829508781433,
      "learning_rate": 6.165338606588517e-07,
      "loss": 0.5115,
      "step": 298
    },
    {
      "epoch": 1.7175682144566777,
      "grad_norm": 0.38393792510032654,
      "learning_rate": 5.926134863449712e-07,
      "loss": 0.4826,
      "step": 299
    },
    {
      "epoch": 1.723312589755864,
      "grad_norm": 0.3590121865272522,
      "learning_rate": 5.691371177487215e-07,
      "loss": 0.4491,
      "step": 300
    },
    {
      "epoch": 1.7290569650550502,
      "grad_norm": 0.4042126536369324,
      "learning_rate": 5.461071199093048e-07,
      "loss": 0.5254,
      "step": 301
    },
    {
      "epoch": 1.7348013403542364,
      "grad_norm": 0.3463343381881714,
      "learning_rate": 5.235258128979676e-07,
      "loss": 0.4591,
      "step": 302
    },
    {
      "epoch": 1.7405457156534228,
      "grad_norm": 0.37464749813079834,
      "learning_rate": 5.0139547158427e-07,
      "loss": 0.4621,
      "step": 303
    },
    {
      "epoch": 1.746290090952609,
      "grad_norm": 0.386735737323761,
      "learning_rate": 4.797183254069176e-07,
      "loss": 0.499,
      "step": 304
    },
    {
      "epoch": 1.752034466251795,
      "grad_norm": 0.3763267695903778,
      "learning_rate": 4.5849655814915683e-07,
      "loss": 0.5162,
      "step": 305
    },
    {
      "epoch": 1.7577788415509814,
      "grad_norm": 0.37057819962501526,
      "learning_rate": 4.3773230771879004e-07,
      "loss": 0.4925,
      "step": 306
    },
    {
      "epoch": 1.7635232168501676,
      "grad_norm": 0.35524293780326843,
      "learning_rate": 4.1742766593278974e-07,
      "loss": 0.4587,
      "step": 307
    },
    {
      "epoch": 1.7692675921493537,
      "grad_norm": 0.40347564220428467,
      "learning_rate": 3.9758467830656623e-07,
      "loss": 0.5026,
      "step": 308
    },
    {
      "epoch": 1.77501196744854,
      "grad_norm": 0.3765867054462433,
      "learning_rate": 3.782053438479094e-07,
      "loss": 0.4692,
      "step": 309
    },
    {
      "epoch": 1.7807563427477262,
      "grad_norm": 0.35763201117515564,
      "learning_rate": 3.5929161485559694e-07,
      "loss": 0.4576,
      "step": 310
    },
    {
      "epoch": 1.7865007180469124,
      "grad_norm": 0.4578535556793213,
      "learning_rate": 3.4084539672271764e-07,
      "loss": 0.5168,
      "step": 311
    },
    {
      "epoch": 1.7922450933460987,
      "grad_norm": 0.36126625537872314,
      "learning_rate": 3.228685477447291e-07,
      "loss": 0.4769,
      "step": 312
    },
    {
      "epoch": 1.7979894686452849,
      "grad_norm": 0.40714967250823975,
      "learning_rate": 3.0536287893223603e-07,
      "loss": 0.4646,
      "step": 313
    },
    {
      "epoch": 1.803733843944471,
      "grad_norm": 0.35251492261886597,
      "learning_rate": 2.883301538285582e-07,
      "loss": 0.4506,
      "step": 314
    },
    {
      "epoch": 1.8094782192436574,
      "grad_norm": 0.36679378151893616,
      "learning_rate": 2.717720883320685e-07,
      "loss": 0.4858,
      "step": 315
    },
    {
      "epoch": 1.8152225945428435,
      "grad_norm": 0.39732909202575684,
      "learning_rate": 2.556903505233216e-07,
      "loss": 0.5302,
      "step": 316
    },
    {
      "epoch": 1.8209669698420297,
      "grad_norm": 0.37163034081459045,
      "learning_rate": 2.4008656049701875e-07,
      "loss": 0.4868,
      "step": 317
    },
    {
      "epoch": 1.826711345141216,
      "grad_norm": 0.37081223726272583,
      "learning_rate": 2.2496229019879635e-07,
      "loss": 0.4698,
      "step": 318
    },
    {
      "epoch": 1.832455720440402,
      "grad_norm": 0.3732491135597229,
      "learning_rate": 2.1031906326685946e-07,
      "loss": 0.4716,
      "step": 319
    },
    {
      "epoch": 1.8382000957395883,
      "grad_norm": 0.437520831823349,
      "learning_rate": 1.9615835487849677e-07,
      "loss": 0.4804,
      "step": 320
    },
    {
      "epoch": 1.8439444710387747,
      "grad_norm": 0.3673948645591736,
      "learning_rate": 1.824815916014644e-07,
      "loss": 0.5048,
      "step": 321
    },
    {
      "epoch": 1.8496888463379606,
      "grad_norm": 0.3330841064453125,
      "learning_rate": 1.6929015125027314e-07,
      "loss": 0.4454,
      "step": 322
    },
    {
      "epoch": 1.855433221637147,
      "grad_norm": 0.3728371560573578,
      "learning_rate": 1.5658536274738623e-07,
      "loss": 0.5037,
      "step": 323
    },
    {
      "epoch": 1.8611775969363333,
      "grad_norm": 0.36687803268432617,
      "learning_rate": 1.443685059893396e-07,
      "loss": 0.5074,
      "step": 324
    },
    {
      "epoch": 1.8669219722355193,
      "grad_norm": 0.3871447741985321,
      "learning_rate": 1.3264081171780797e-07,
      "loss": 0.4878,
      "step": 325
    },
    {
      "epoch": 1.8726663475347056,
      "grad_norm": 0.34846240282058716,
      "learning_rate": 1.2140346139561277e-07,
      "loss": 0.4694,
      "step": 326
    },
    {
      "epoch": 1.8784107228338918,
      "grad_norm": 0.37107598781585693,
      "learning_rate": 1.1065758708770468e-07,
      "loss": 0.4856,
      "step": 327
    },
    {
      "epoch": 1.884155098133078,
      "grad_norm": 0.38006165623664856,
      "learning_rate": 1.004042713471165e-07,
      "loss": 0.5022,
      "step": 328
    },
    {
      "epoch": 1.8898994734322643,
      "grad_norm": 0.36080440878868103,
      "learning_rate": 9.064454710590253e-08,
      "loss": 0.4619,
      "step": 329
    },
    {
      "epoch": 1.8956438487314504,
      "grad_norm": 0.350732684135437,
      "learning_rate": 8.137939757108526e-08,
      "loss": 0.4584,
      "step": 330
    },
    {
      "epoch": 1.9013882240306366,
      "grad_norm": 0.37510958313941956,
      "learning_rate": 7.260975612560173e-08,
      "loss": 0.4813,
      "step": 331
    },
    {
      "epoch": 1.907132599329823,
      "grad_norm": 0.3882618844509125,
      "learning_rate": 6.433650623427379e-08,
      "loss": 0.4304,
      "step": 332
    },
    {
      "epoch": 1.912876974629009,
      "grad_norm": 0.4088869094848633,
      "learning_rate": 5.6560481354807625e-08,
      "loss": 0.4624,
      "step": 333
    },
    {
      "epoch": 1.9186213499281952,
      "grad_norm": 0.3656178414821625,
      "learning_rate": 4.928246485383148e-08,
      "loss": 0.5049,
      "step": 334
    },
    {
      "epoch": 1.9243657252273816,
      "grad_norm": 0.39486929774284363,
      "learning_rate": 4.250318992797375e-08,
      "loss": 0.5062,
      "step": 335
    },
    {
      "epoch": 1.9301101005265677,
      "grad_norm": 0.39059191942214966,
      "learning_rate": 3.622333953000601e-08,
      "loss": 0.4485,
      "step": 336
    },
    {
      "epoch": 1.9358544758257539,
      "grad_norm": 0.3584195673465729,
      "learning_rate": 3.0443546300035764e-08,
      "loss": 0.4358,
      "step": 337
    },
    {
      "epoch": 1.9415988511249402,
      "grad_norm": 0.3339657187461853,
      "learning_rate": 2.516439250177749e-08,
      "loss": 0.46,
      "step": 338
    },
    {
      "epoch": 1.9473432264241264,
      "grad_norm": 0.36130189895629883,
      "learning_rate": 2.038640996389285e-08,
      "loss": 0.5263,
      "step": 339
    },
    {
      "epoch": 1.9530876017233125,
      "grad_norm": 0.3555588722229004,
      "learning_rate": 1.6110080026414123e-08,
      "loss": 0.4882,
      "step": 340
    },
    {
      "epoch": 1.958831977022499,
      "grad_norm": 0.38061201572418213,
      "learning_rate": 1.2335833492252425e-08,
      "loss": 0.4988,
      "step": 341
    },
    {
      "epoch": 1.964576352321685,
      "grad_norm": 0.3624252676963806,
      "learning_rate": 9.06405058380022e-09,
      "loss": 0.5049,
      "step": 342
    },
    {
      "epoch": 1.9703207276208712,
      "grad_norm": 0.39548906683921814,
      "learning_rate": 6.295060904623618e-09,
      "loss": 0.4578,
      "step": 343
    },
    {
      "epoch": 1.9760651029200575,
      "grad_norm": 0.37694376707077026,
      "learning_rate": 4.02914340626226e-09,
      "loss": 0.4775,
      "step": 344
    },
    {
      "epoch": 1.9818094782192437,
      "grad_norm": 0.3581026494503021,
      "learning_rate": 2.2665263601240328e-09,
      "loss": 0.491,
      "step": 345
    },
    {
      "epoch": 1.9875538535184298,
      "grad_norm": 0.37684017419815063,
      "learning_rate": 1.0073873344895735e-09,
      "loss": 0.5071,
      "step": 346
    },
    {
      "epoch": 1.9932982288176162,
      "grad_norm": 0.3794161379337311,
      "learning_rate": 2.5185317662490547e-10,
      "loss": 0.5096,
      "step": 347
    },
    {
      "epoch": 1.9990426041168023,
      "grad_norm": 0.35005468130111694,
      "learning_rate": 0.0,
      "loss": 0.436,
      "step": 348
    },
    {
      "epoch": 1.9990426041168023,
      "step": 348,
      "total_flos": 358710641426432.0,
      "train_loss": 0.5378552473139488,
      "train_runtime": 32465.3196,
      "train_samples_per_second": 1.029,
      "train_steps_per_second": 0.011
    }
  ],
  "logging_steps": 1,
  "max_steps": 348,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 358710641426432.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}